formatting

Greg Burd 2017-08-17 10:10:38 -04:00
parent 602e3521b4
commit 6ad99fc459
245 changed files with 11567 additions and 11917 deletions
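The changes are mechanical and follow one style throughout: two-space indentation, "public abstract" rather than "abstract public", empty bodies collapsed to {}, calls and signatures that overflow the column limit wrapped to one argument per line, and imports merged into a single sorted block. The conventions match google-java-format defaults, though the commit message does not name the tool, so that attribution is an assumption. A representative before/after, taken from AbstractSessionOperations below:

    // before: body inlined on one line
    public Tracer getZipkinTracer() { return null; }

    // after: body expanded onto its own lines, two-space indent
    public Tracer getZipkinTracer() {
      return null;
    }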

File: DefaultMetadata.java

@@ -5,7 +5,9 @@ import java.util.List;

 public class DefaultMetadata extends Metadata {

-  public DefaultMetadata() { super(null); }
+  public DefaultMetadata() {
+    super(null);
+  }

   private DefaultMetadata(Cluster.Manager cluster) {
     super(cluster);
@@ -18,5 +20,4 @@ public class DefaultMetadata extends Metadata {
   public TupleType newTupleType(List<DataType> types) {
     return new TupleType(types, ProtocolVersion.NEWEST_SUPPORTED, CodecRegistry.DEFAULT_INSTANCE);
   }
-
 }

File: CreateCustomIndex.java

@@ -1,11 +1,11 @@
 package com.datastax.driver.core.schemabuilder;

-import com.google.common.base.Optional;
-
 import static com.datastax.driver.core.schemabuilder.SchemaStatement.STATEMENT_START;
 import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotEmpty;
 import static com.datastax.driver.core.schemabuilder.SchemaStatement.validateNotKeyWord;
+import com.google.common.base.Optional;

 public class CreateCustomIndex extends CreateIndex {

   private String indexName;
@@ -18,7 +18,10 @@ public class CreateCustomIndex extends CreateIndex {
   CreateCustomIndex(String indexName) {
     super(indexName);
     validateNotEmpty(indexName, "Index name");
-    validateNotKeyWord(indexName, String.format("The index name '%s' is not allowed because it is a reserved keyword", indexName));
+    validateNotKeyWord(
+        indexName,
+        String.format(
+            "The index name '%s' is not allowed because it is a reserved keyword", indexName));
     this.indexName = indexName;
   }
@@ -42,8 +45,15 @@ public class CreateCustomIndex extends CreateIndex {
   public CreateIndex.CreateIndexOn onTable(String keyspaceName, String tableName) {
     validateNotEmpty(keyspaceName, "Keyspace name");
     validateNotEmpty(tableName, "Table name");
-    validateNotKeyWord(keyspaceName, String.format("The keyspace name '%s' is not allowed because it is a reserved keyword", keyspaceName));
-    validateNotKeyWord(tableName, String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+    validateNotKeyWord(
+        keyspaceName,
+        String.format(
+            "The keyspace name '%s' is not allowed because it is a reserved keyword",
+            keyspaceName));
+    validateNotKeyWord(
+        tableName,
+        String.format(
+            "The table name '%s' is not allowed because it is a reserved keyword", tableName));
     this.keyspaceName = Optional.fromNullable(keyspaceName);
     this.tableName = tableName;
     return new CreateCustomIndex.CreateIndexOn();
@@ -57,7 +67,10 @@ public class CreateCustomIndex extends CreateIndex {
    */
   public CreateIndex.CreateIndexOn onTable(String tableName) {
     validateNotEmpty(tableName, "Table name");
-    validateNotKeyWord(tableName, String.format("The table name '%s' is not allowed because it is a reserved keyword", tableName));
+    validateNotKeyWord(
+        tableName,
+        String.format(
+            "The table name '%s' is not allowed because it is a reserved keyword", tableName));
     this.tableName = tableName;
     return new CreateCustomIndex.CreateIndexOn();
   }
@@ -71,7 +84,10 @@ public class CreateCustomIndex extends CreateIndex {
    */
   public SchemaStatement andColumn(String columnName) {
     validateNotEmpty(columnName, "Column name");
-    validateNotKeyWord(columnName, String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
+    validateNotKeyWord(
+        columnName,
+        String.format(
+            "The column name '%s' is not allowed because it is a reserved keyword", columnName));
     CreateCustomIndex.this.columnName = columnName;
     return SchemaStatement.fromQueryString(buildInternal());
   }
@@ -84,19 +100,28 @@ public class CreateCustomIndex extends CreateIndex {
    */
   public SchemaStatement andKeysOfColumn(String columnName) {
     validateNotEmpty(columnName, "Column name");
-    validateNotKeyWord(columnName, String.format("The column name '%s' is not allowed because it is a reserved keyword", columnName));
+    validateNotKeyWord(
+        columnName,
+        String.format(
+            "The column name '%s' is not allowed because it is a reserved keyword", columnName));
     CreateCustomIndex.this.columnName = columnName;
     CreateCustomIndex.this.keys = true;
     return SchemaStatement.fromQueryString(buildInternal());
   }
 }

-  String getCustomClassName() { return ""; }
-  String getOptions() { return ""; }
+  String getCustomClassName() {
+    return "";
+  }
+
+  String getOptions() {
+    return "";
+  }

   @Override
   public String buildInternal() {
-    StringBuilder createStatement = new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");
+    StringBuilder createStatement =
+        new StringBuilder(STATEMENT_START).append("CREATE CUSTOM INDEX ");

     if (ifNotExists) {
       createStatement.append("IF NOT EXISTS ");

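CreateCustomIndex leaves getCustomClassName() and getOptions() as package-private hooks that return empty strings, so a concrete index builder overrides them. A minimal sketch, assuming a SASI index; the Cassandra class name is illustrative and not referenced by this commit:

    package com.datastax.driver.core.schemabuilder;

    // Sketch only: the constructor and both hooks are package-private, so a
    // subclass must live in this package. The SASI class name is an assumption.
    public class CreateSasiIndex extends CreateCustomIndex {
      public CreateSasiIndex(String indexName) {
        super(indexName);
      }

      @Override
      String getCustomClassName() {
        return "org.apache.cassandra.index.sasi.SASIIndex";
      }

      @Override
      String getOptions() {
        return "";
      }
    }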
File: CreateTable.java

@@ -17,9 +17,7 @@ package com.datastax.driver.core.schemabuilder;
 import com.datastax.driver.core.CodecRegistry;

-/**
- * A built CREATE TABLE statement.
- */
+/** A built CREATE TABLE statement. */
 public class CreateTable extends Create {

   public CreateTable(String keyspaceName, String tableName) {

File: DefaultHelenusSettings.java

@@ -17,7 +17,6 @@ package net.helenus.config;
 import java.lang.reflect.Method;
 import java.util.function.Function;
-
 import net.helenus.core.DslInstantiator;
 import net.helenus.core.MapperInstantiator;
 import net.helenus.core.reflect.ReflectionDslInstantiator;
@@ -45,5 +44,4 @@ public class DefaultHelenusSettings implements HelenusSettings {
   public MapperInstantiator getMapperInstantiator() {
     return ReflectionMapperInstantiator.INSTANCE;
   }
-
 }

File: GetterMethodDetector.java

@@ -15,13 +15,11 @@
  */
 package net.helenus.config;

-import net.helenus.mapping.annotation.Transient;
-
 import java.lang.reflect.Method;
 import java.util.function.Function;
+import net.helenus.mapping.annotation.Transient;

 public enum GetterMethodDetector implements Function<Method, Boolean> {

   INSTANCE;

   @Override
@@ -41,7 +39,5 @@ public enum GetterMethodDetector implements Function<Method, Boolean> {
     }
-
     return true;
   }
-
 }

File: HelenusSettings.java

@@ -17,7 +17,6 @@ package net.helenus.config;
 import java.lang.reflect.Method;
 import java.util.function.Function;
-
 import net.helenus.core.DslInstantiator;
 import net.helenus.core.MapperInstantiator;
@@ -30,5 +29,4 @@ public interface HelenusSettings {
   DslInstantiator getDslInstantiator();

   MapperInstantiator getMapperInstantiator();
-
 }

File: AbstractSessionOperations.java

@@ -15,48 +15,41 @@
  */
 package net.helenus.core;

-import java.io.PrintStream;
-import java.util.concurrent.CompletableFuture;
-import java.util.concurrent.Executor;
-
 import brave.Tracer;
 import com.codahale.metrics.MetricRegistry;
-import com.datastax.driver.core.schemabuilder.SchemaStatement;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
 import com.datastax.driver.core.*;
 import com.datastax.driver.core.querybuilder.BuiltStatement;
 import com.google.common.util.concurrent.ListenableFuture;
+import java.io.PrintStream;
+import java.util.concurrent.Executor;
 import net.helenus.mapping.value.ColumnValuePreparer;
 import net.helenus.mapping.value.ColumnValueProvider;
 import net.helenus.support.HelenusException;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
-import javax.xml.validation.Schema;

 public abstract class AbstractSessionOperations {

   final Logger logger = LoggerFactory.getLogger(getClass());

-  abstract public Session currentSession();
+  public abstract Session currentSession();

-  abstract public String usingKeyspace();
+  public abstract String usingKeyspace();

-  abstract public boolean isShowCql();
+  public abstract boolean isShowCql();

-  abstract public PrintStream getPrintStream();
+  public abstract PrintStream getPrintStream();

-  abstract public Executor getExecutor();
+  public abstract Executor getExecutor();

-  abstract public SessionRepository getSessionRepository();
+  public abstract SessionRepository getSessionRepository();

-  abstract public ColumnValueProvider getValueProvider();
+  public abstract ColumnValueProvider getValueProvider();

-  abstract public ColumnValuePreparer getValuePreparer();
+  public abstract ColumnValuePreparer getValuePreparer();

-  abstract public ConsistencyLevel getDefaultConsistencyLevel();
+  public abstract ConsistencyLevel getDefaultConsistencyLevel();

   public PreparedStatement prepare(RegularStatement statement) {
     try {
@@ -111,13 +104,16 @@ public abstract class AbstractSessionOperations {
     }
   }

-  public Tracer getZipkinTracer() { return null; }
+  public Tracer getZipkinTracer() {
+    return null;
+  }

-  public MetricRegistry getMetricRegistry() { return null; }
+  public MetricRegistry getMetricRegistry() {
+    return null;
+  }

-  public void cache(String key, Object value) {
-  }
+  public void cache(String key, Object value) {}

   RuntimeException translateException(RuntimeException e) {
     if (e instanceof HelenusException) {
       return e;
@@ -128,5 +124,4 @@ public abstract class AbstractSessionOperations {
   void printCql(String cql) {
     getPrintStream().println(cql);
   }
-
 }

File: AutoDdl.java

@@ -16,5 +16,8 @@
 package net.helenus.core;

 public enum AutoDdl {
-  VALIDATE, UPDATE, CREATE, CREATE_DROP;
+  VALIDATE,
+  UPDATE,
+  CREATE,
+  CREATE_DROP;
 }

File: ConflictingUnitOfWorkException.java

@@ -7,5 +7,4 @@ public class ConflictingUnitOfWorkException extends Exception {
   ConflictingUnitOfWorkException(UnitOfWork uow) {
     this.uow = uow;
   }
-
 }

File: DslInstantiator.java

@@ -15,13 +15,15 @@
  */
 package net.helenus.core;

-import java.util.Optional;
-
 import com.datastax.driver.core.Metadata;
-
+import java.util.Optional;
 import net.helenus.core.reflect.HelenusPropertyNode;

 public interface DslInstantiator {

-  <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata);
+  <E> E instantiate(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
+      Metadata metadata);
 }

File: Filter.java

@@ -15,10 +15,8 @@
  */
 package net.helenus.core;

-import java.util.Objects;
-
 import com.datastax.driver.core.querybuilder.Clause;
-
+import java.util.Objects;
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.mapping.MappingUtil;
 import net.helenus.mapping.value.ColumnValuePreparer;
@@ -95,7 +93,8 @@ public final class Filter<V> {
     Objects.requireNonNull(val, "empty value");

     if (op == Operator.IN) {
-      throw new IllegalArgumentException("invalid usage of the 'in' operator, use Filter.in() static method");
+      throw new IllegalArgumentException(
+          "invalid usage of the 'in' operator, use Filter.in() static method");
     }

     HelenusPropertyNode node = MappingUtil.resolveMappingProperty(getter);
@@ -109,5 +108,4 @@ public final class Filter<V> {
   public String toString() {
     return node.getColumnName() + postulate.toString();
   }
-
 }

File: Getter.java

@@ -18,5 +18,4 @@ package net.helenus.core;
 public interface Getter<V> {

   V get();
-
 }

File: Helenus.java

@@ -15,14 +15,12 @@
  */
 package net.helenus.core;

-import java.util.*;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
-
 import com.datastax.driver.core.Cluster;
 import com.datastax.driver.core.Metadata;
 import com.datastax.driver.core.Session;
-
+import java.util.*;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
 import net.helenus.config.DefaultHelenusSettings;
 import net.helenus.config.HelenusSettings;
 import net.helenus.core.reflect.DslExportable;
@@ -33,14 +31,14 @@ import net.helenus.support.HelenusMappingException;
 public final class Helenus {

   private static volatile HelenusSettings settings = new DefaultHelenusSettings();
-  private static final ConcurrentMap<Class<?>, Object> dslCache = new ConcurrentHashMap<Class<?>, Object>();
-  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity = new ConcurrentHashMap<Class<?>, Metadata>();
+  private static final ConcurrentMap<Class<?>, Object> dslCache =
+      new ConcurrentHashMap<Class<?>, Object>();
+  private static final ConcurrentMap<Class<?>, Metadata> metadataForEntity =
+      new ConcurrentHashMap<Class<?>, Metadata>();
   private static final Set<HelenusSession> sessions = new HashSet<HelenusSession>();
   private static volatile HelenusSession singleton;

-  private Helenus() {
-  }
+  private Helenus() {}

   protected static void setSession(HelenusSession session) {
     sessions.add(session);
@@ -52,7 +50,8 @@ public final class Helenus {
   }

   public static void shutdown() {
-    sessions.forEach((session) -> {
-      session.close();
-      sessions.remove(session);
-    });
+    sessions.forEach(
+        (session) -> {
+          session.close();
+          sessions.remove(session);
+        });
@@ -104,7 +103,10 @@ public final class Helenus {
     return dsl(iface, classLoader, Optional.empty(), metadata);
   }

-  public static <E> E dsl(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent,
-      Metadata metadata) {
+  public static <E> E dsl(
+      Class<E> iface,
+      ClassLoader classLoader,
+      Optional<HelenusPropertyNode> parent,
+      Metadata metadata) {

     Object instance = null;
@@ -123,7 +125,6 @@ public final class Helenus {
       if (c != null) {
         instance = c;
       }
-
     }
   }
@@ -178,10 +179,8 @@ public final class Helenus {
       metadataForEntity.putIfAbsent(iface, metadata);
       return entity(iface, metadata);
     }
-
     throw new HelenusMappingException("unknown dsl object or mapping interface " + ifaceOrDsl);
   }
-
 }

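The four-argument Helenus.dsl overload above now lists one parameter per line. A caller-side sketch, using only the signature visible in this diff (the helper method and its name are hypothetical):

    import com.datastax.driver.core.Metadata;
    import java.util.Optional;
    import net.helenus.core.Helenus;

    class DslExample {
      // Builds a DSL proxy for a mapped interface; Optional.empty() means
      // the type has no parent property node.
      static <E> E dslFor(Class<E> iface, Metadata metadata) {
        return Helenus.dsl(iface, iface.getClassLoader(), Optional.empty(), metadata);
      }
    }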
File: HelenusSession.java

@@ -15,6 +15,12 @@
  */
 package net.helenus.core;

+import brave.Tracer;
+import com.codahale.metrics.MetricRegistry;
+import com.datastax.driver.core.*;
+import com.diffplug.common.base.Errors;
+import com.google.common.cache.Cache;
+import com.google.common.cache.CacheBuilder;
 import java.io.Closeable;
 import java.io.PrintStream;
 import java.util.Map;
@@ -23,14 +29,6 @@ import java.util.Set;
 import java.util.concurrent.Executor;
 import java.util.concurrent.TimeUnit;
 import java.util.function.Function;
-
-import brave.Tracer;
-import com.codahale.metrics.MetricRegistry;
-import com.datastax.driver.core.*;
-import com.google.common.cache.Cache;
-import com.google.common.cache.CacheBuilder;
-import com.diffplug.common.base.Errors;
-
 import net.helenus.core.operation.*;
 import net.helenus.core.reflect.Drafted;
 import net.helenus.core.reflect.HelenusPropertyNode;
@@ -65,14 +63,23 @@ public final class HelenusSession extends AbstractSessionOperations implements C
   private final Cache<String, Object> sessionCache;
   private UnitOfWork currentUnitOfWork;

-  HelenusSession(Session session, String usingKeyspace, CodecRegistry registry, boolean showCql,
-      PrintStream printStream, SessionRepositoryBuilder sessionRepositoryBuilder, Executor executor,
-      boolean dropSchemaOnClose, ConsistencyLevel consistencyLevel, MetricRegistry metricRegistry,
-      Tracer tracer) {
+  HelenusSession(
+      Session session,
+      String usingKeyspace,
+      CodecRegistry registry,
+      boolean showCql,
+      PrintStream printStream,
+      SessionRepositoryBuilder sessionRepositoryBuilder,
+      Executor executor,
+      boolean dropSchemaOnClose,
+      ConsistencyLevel consistencyLevel,
+      MetricRegistry metricRegistry,
+      Tracer tracer) {
     this.session = session;
     this.registry = registry == null ? CodecRegistry.DEFAULT_INSTANCE : registry;
-    this.usingKeyspace = Objects.requireNonNull(usingKeyspace,
-        "keyspace needs to be selected before creating session");
+    this.usingKeyspace =
+        Objects.requireNonNull(
+            usingKeyspace, "keyspace needs to be selected before creating session");
     this.showCql = showCql;
     this.printStream = printStream;
     this.sessionRepository = sessionRepositoryBuilder.build();
@@ -85,12 +92,15 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     this.valueProvider = new RowColumnValueProvider(this.sessionRepository);
     this.valuePreparer = new StatementColumnValuePreparer(this.sessionRepository);
     this.metadata = session.getCluster().getMetadata();
-    this.sessionCache = CacheBuilder.newBuilder().maximumSize(MAX_CACHE_SIZE)
-        .expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS).recordStats().build();
+    this.sessionCache =
+        CacheBuilder.newBuilder()
+            .maximumSize(MAX_CACHE_SIZE)
+            .expireAfterAccess(MAX_CACHE_EXPIRE_SECONDS, TimeUnit.SECONDS)
+            .recordStats()
+            .build();
     this.currentUnitOfWork = null;
   }

   @Override
   public Session currentSession() {
     return session;
@@ -148,16 +158,22 @@ public final class HelenusSession extends AbstractSessionOperations implements C
   }

   @Override
-  public Tracer getZipkinTracer() { return zipkinTracer; }
+  public Tracer getZipkinTracer() {
+    return zipkinTracer;
+  }

   @Override
-  public MetricRegistry getMetricRegistry() { return metricRegistry; }
+  public MetricRegistry getMetricRegistry() {
+    return metricRegistry;
+  }

   public ConsistencyLevel getDefaultConsistencyLevel() {
     return defaultConsistencyLevel;
   }

-  public Metadata getMetadata() { return metadata; }
+  public Metadata getMetadata() {
+    return metadata;
+  }

   public synchronized UnitOfWork begin() {
     if (currentUnitOfWork == null) {
@@ -194,11 +210,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     ColumnValueProvider valueProvider = getValueProvider();
     HelenusEntity entity = Helenus.entity(entityClass);

-    return new SelectOperation<E>(this, entity, (r) -> {
-      Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
-      return (E) Helenus.map(entityClass, map);
-    });
+    return new SelectOperation<E>(
+        this,
+        entity,
+        (r) -> {
+          Map<String, Object> map = new ValueProviderMap(r, valueProvider, entity);
+          return (E) Helenus.map(entityClass, map);
+        });
   }
@@ -221,7 +238,8 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     Objects.requireNonNull(getter1, "field 1 is empty");

     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
-    return new SelectOperation<Tuple1<V1>>(this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
+    return new SelectOperation<Tuple1<V1>>(
+        this, new Mappers.Mapper1<V1>(getValueProvider(), p1), p1);
   }

   public <V1, V2> SelectOperation<Tuple2<V1, V2>> select(Getter<V1> getter1, Getter<V2> getter2) {
@@ -230,12 +248,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);

-    return new SelectOperation<Fun.Tuple2<V1, V2>>(this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2),
-        p1, p2);
+    return new SelectOperation<Fun.Tuple2<V1, V2>>(
+        this, new Mappers.Mapper2<V1, V2>(getValueProvider(), p1, p2), p1, p2);
   }

-  public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(Getter<V1> getter1, Getter<V2> getter2,
-      Getter<V3> getter3) {
+  public <V1, V2, V3> SelectOperation<Fun.Tuple3<V1, V2, V3>> select(
+      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");
@@ -243,12 +261,12 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p1 = MappingUtil.resolveMappingProperty(getter1);
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);

-    return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(this,
-        new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
+    return new SelectOperation<Fun.Tuple3<V1, V2, V3>>(
+        this, new Mappers.Mapper3<V1, V2, V3>(getValueProvider(), p1, p2, p3), p1, p2, p3);
   }

-  public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(Getter<V1> getter1, Getter<V2> getter2,
-      Getter<V3> getter3, Getter<V4> getter4) {
+  public <V1, V2, V3, V4> SelectOperation<Fun.Tuple4<V1, V2, V3, V4>> select(
+      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");
@@ -258,12 +276,21 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p2 = MappingUtil.resolveMappingProperty(getter2);
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);

-    return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(this,
-        new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4), p1, p2, p3, p4);
+    return new SelectOperation<Fun.Tuple4<V1, V2, V3, V4>>(
+        this,
+        new Mappers.Mapper4<V1, V2, V3, V4>(getValueProvider(), p1, p2, p3, p4),
+        p1,
+        p2,
+        p3,
+        p4);
   }

-  public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(Getter<V1> getter1,
-      Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5) {
+  public <V1, V2, V3, V4, V5> SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>> select(
+      Getter<V1> getter1,
+      Getter<V2> getter2,
+      Getter<V3> getter3,
+      Getter<V4> getter4,
+      Getter<V5> getter5) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");
@@ -275,12 +302,23 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p3 = MappingUtil.resolveMappingProperty(getter3);
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
     HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);

-    return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(this,
-        new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5), p1, p2, p3, p4, p5);
+    return new SelectOperation<Fun.Tuple5<V1, V2, V3, V4, V5>>(
+        this,
+        new Mappers.Mapper5<V1, V2, V3, V4, V5>(getValueProvider(), p1, p2, p3, p4, p5),
+        p1,
+        p2,
+        p3,
+        p4,
+        p5);
   }

-  public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(Getter<V1> getter1,
-      Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5, Getter<V6> getter6) {
+  public <V1, V2, V3, V4, V5, V6> SelectOperation<Fun.Tuple6<V1, V2, V3, V4, V5, V6>> select(
+      Getter<V1> getter1,
+      Getter<V2> getter2,
+      Getter<V3> getter3,
+      Getter<V4> getter4,
+      Getter<V5> getter5,
+      Getter<V6> getter6) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");
@@ -294,14 +332,26 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p4 = MappingUtil.resolveMappingProperty(getter4);
     HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
     HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);

-    return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(this,
-        new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6), p1, p2, p3, p4,
-        p5, p6);
+    return new SelectOperation<Tuple6<V1, V2, V3, V4, V5, V6>>(
+        this,
+        new Mappers.Mapper6<V1, V2, V3, V4, V5, V6>(getValueProvider(), p1, p2, p3, p4, p5, p6),
+        p1,
+        p2,
+        p3,
+        p4,
+        p5,
+        p6);
   }

-  public <V1, V2, V3, V4, V5, V6, V7> SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
-      Getter<V1> getter1, Getter<V2> getter2, Getter<V3> getter3, Getter<V4> getter4, Getter<V5> getter5,
-      Getter<V6> getter6, Getter<V7> getter7) {
+  public <V1, V2, V3, V4, V5, V6, V7>
+      SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>> select(
+          Getter<V1> getter1,
+          Getter<V2> getter2,
+          Getter<V3> getter3,
+          Getter<V4> getter4,
+          Getter<V5> getter5,
+          Getter<V6> getter6,
+          Getter<V7> getter7) {
     Objects.requireNonNull(getter1, "field 1 is empty");
     Objects.requireNonNull(getter2, "field 2 is empty");
     Objects.requireNonNull(getter3, "field 3 is empty");
@@ -317,9 +367,17 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     HelenusPropertyNode p5 = MappingUtil.resolveMappingProperty(getter5);
     HelenusPropertyNode p6 = MappingUtil.resolveMappingProperty(getter6);
     HelenusPropertyNode p7 = MappingUtil.resolveMappingProperty(getter7);

-    return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(this,
-        new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(getValueProvider(), p1, p2, p3, p4, p5, p6, p7), p1, p2,
-        p3, p4, p5, p6, p7);
+    return new SelectOperation<Fun.Tuple7<V1, V2, V3, V4, V5, V6, V7>>(
+        this,
+        new Mappers.Mapper7<V1, V2, V3, V4, V5, V6, V7>(
+            getValueProvider(), p1, p2, p3, p4, p5, p6, p7),
+        p1,
+        p2,
+        p3,
+        p4,
+        p5,
+        p6,
+        p7);
   }

   public CountOperation count() {
@@ -344,11 +402,17 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return new UpdateOperation(this, p, v);
   }

-  public <T> InsertOperation<T> insert() { return new InsertOperation<T>(this, true); }
+  public <T> InsertOperation<T> insert() {
+    return new InsertOperation<T>(this, true);
+  }

-  public <T> InsertOperation<T> insert(Object pojo) { return this.<T>insert(pojo, null); }
+  public <T> InsertOperation<T> insert(Object pojo) {
+    return this.<T>insert(pojo, null);
+  }

-  public <T> InsertOperation<T> insert(Drafted draft) { return this.<T>insert(draft.build(), draft.mutated()); }
+  public <T> InsertOperation<T> insert(Drafted draft) {
+    return this.<T>insert(draft.build(), draft.mutated());
+  }

   public <T> InsertOperation<T> insert(Object pojo, Set<String> mutations) {
     Objects.requireNonNull(pojo, "pojo is empty");
@@ -363,9 +427,13 @@ public final class HelenusSession extends AbstractSessionOperations implements C
     return new InsertOperation<T>(this, false);
   }

-  public <T> InsertOperation<T> upsert(Drafted draft) { return this.<T>upsert(draft.build(), draft.mutated()); }
+  public <T> InsertOperation<T> upsert(Drafted draft) {
+    return this.<T>upsert(draft.build(), draft.mutated());
+  }

-  public <T> InsertOperation<T> upsert(Object pojo) { return this.<T>upsert(pojo, null); }
+  public <T> InsertOperation<T> upsert(Object pojo) {
+    return this.<T>upsert(pojo, null);
+  }

   public <T> InsertOperation<T> upsert(Object pojo, Set<String> mutations) {
     Objects.requireNonNull(pojo, "pojo is empty");
@@ -418,23 +486,18 @@ public final class HelenusSession extends AbstractSessionOperations implements C
   private void dropSchema() {
     sessionRepository.entities().forEach(e -> dropEntity(e));
   }

   private void dropEntity(HelenusEntity entity) {
-
     switch (entity.getType()) {
-      case TABLE :
+      case TABLE:
         execute(SchemaUtil.dropTable(entity), true);
         break;
-
-      case UDT :
+      case UDT:
         execute(SchemaUtil.dropUserType(entity), true);
         break;
-
     }
   }
-
 }

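Each select overload above now wraps its Getter parameters one per line, and each additional column widens the result to the next Fun.TupleN. A caller-side sketch against the two-column signature (the getter parameters stand in for accessors obtained from a mapped Helenus DSL instance and are hypothetical; only the select signatures come from this file):

    import java.util.Date;
    import java.util.UUID;
    import net.helenus.core.Getter;
    import net.helenus.core.HelenusSession;
    import net.helenus.core.operation.SelectOperation;
    import net.helenus.support.Fun;

    class TupleSelectExample {
      // Projects two columns; the result rows are Fun.Tuple2<UUID, Date>.
      static SelectOperation<Fun.Tuple2<UUID, Date>> twoColumns(
          HelenusSession session, Getter<UUID> userId, Getter<Date> created) {
        return session.select(userId, created);
      }
    }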
File: HelenusValidator.java

@@ -16,15 +16,12 @@
 package net.helenus.core;

 import java.lang.annotation.Annotation;
-
 import javax.validation.ConstraintValidator;
-
 import net.helenus.mapping.HelenusProperty;
 import net.helenus.support.HelenusException;
 import net.helenus.support.HelenusMappingException;

 public enum HelenusValidator implements PropertyValueValidator {
-
   INSTANCE;

   public void validate(HelenusProperty prop, Object value) {
@@ -38,14 +35,13 @@ public enum HelenusValidator implements PropertyValueValidator {
     try {
       valid = typeless.isValid(value, null);
     } catch (ClassCastException e) {
-      throw new HelenusMappingException("validator was used for wrong type '" + value + "' in " + prop, e);
+      throw new HelenusMappingException(
+          "validator was used for wrong type '" + value + "' in " + prop, e);
     }

     if (!valid) {
       throw new HelenusException("wrong value '" + value + "' for " + prop);
     }
   }
-
 }

File: MapperInstantiator.java

@@ -20,5 +20,4 @@ import java.util.Map;
 public interface MapperInstantiator {

   <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader);
-
 }

File: Mappers.java

@@ -15,10 +15,8 @@
  */
 package net.helenus.core;

-import java.util.function.Function;
-
 import com.datastax.driver.core.Row;
-
+import java.util.function.Function;
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.mapping.HelenusProperty;
 import net.helenus.mapping.value.ColumnValueProvider;
@@ -26,10 +24,9 @@ import net.helenus.support.Fun;

 public final class Mappers {

-  private Mappers() {
-  }
+  private Mappers() {}

-  public final static class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {
+  public static final class Mapper1<A> implements Function<Row, Fun.Tuple1<A>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1;
@@ -45,7 +42,7 @@ public final class Mappers {
     }
   }

-  public final static class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {
+  public static final class Mapper2<A, B> implements Function<Row, Fun.Tuple2<A, B>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1;
@@ -59,18 +56,22 @@ public final class Mappers {
     @Override
     public Fun.Tuple2<A, B> apply(Row row) {
-      return new Fun.Tuple2<A, B>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
+      return new Fun.Tuple2<A, B>(
+          provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2));
     }
   }

-  public final static class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {
+  public static final class Mapper3<A, B, C> implements Function<Row, Fun.Tuple3<A, B, C>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1;
     private final HelenusProperty p2;
     private final HelenusProperty p3;

-    public Mapper3(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
-        HelenusPropertyNode p3) {
+    public Mapper3(
+        ColumnValueProvider provider,
+        HelenusPropertyNode p1,
+        HelenusPropertyNode p2,
+        HelenusPropertyNode p3) {
       this.provider = provider;
       this.p1 = p1.getProperty();
@@ -80,12 +81,14 @@ public final class Mappers {
     @Override
     public Fun.Tuple3<A, B, C> apply(Row row) {
-      return new Fun.Tuple3<A, B, C>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
-          provider.getColumnValue(row, 2, p3));
+      return new Fun.Tuple3<A, B, C>(
+          provider.getColumnValue(row, 0, p1),
+          provider.getColumnValue(row, 1, p2),
+          provider.getColumnValue(row, 2, p3));
     }
   }

-  public final static class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {
+  public static final class Mapper4<A, B, C, D> implements Function<Row, Fun.Tuple4<A, B, C, D>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1;
@@ -93,8 +96,12 @@ public final class Mappers {
     private final HelenusProperty p3;
     private final HelenusProperty p4;

-    public Mapper4(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
-        HelenusPropertyNode p3, HelenusPropertyNode p4) {
+    public Mapper4(
+        ColumnValueProvider provider,
+        HelenusPropertyNode p1,
+        HelenusPropertyNode p2,
+        HelenusPropertyNode p3,
+        HelenusPropertyNode p4) {
       this.provider = provider;
       this.p1 = p1.getProperty();
       this.p2 = p2.getProperty();
@@ -104,18 +111,27 @@ public final class Mappers {
     @Override
     public Fun.Tuple4<A, B, C, D> apply(Row row) {
-      return new Fun.Tuple4<A, B, C, D>(provider.getColumnValue(row, 0, p1), provider.getColumnValue(row, 1, p2),
-          provider.getColumnValue(row, 2, p3), provider.getColumnValue(row, 3, p4));
+      return new Fun.Tuple4<A, B, C, D>(
+          provider.getColumnValue(row, 0, p1),
+          provider.getColumnValue(row, 1, p2),
+          provider.getColumnValue(row, 2, p3),
+          provider.getColumnValue(row, 3, p4));
     }
   }

-  public final static class Mapper5<A, B, C, D, E> implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {
+  public static final class Mapper5<A, B, C, D, E>
+      implements Function<Row, Fun.Tuple5<A, B, C, D, E>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1, p2, p3, p4, p5;

-    public Mapper5(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
-        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5) {
+    public Mapper5(
+        ColumnValueProvider provider,
+        HelenusPropertyNode p1,
+        HelenusPropertyNode p2,
+        HelenusPropertyNode p3,
+        HelenusPropertyNode p4,
+        HelenusPropertyNode p5) {
       this.provider = provider;
       this.p1 = p1.getProperty();
       this.p2 = p2.getProperty();
@@ -126,19 +142,29 @@ public final class Mappers {
     @Override
     public Fun.Tuple5<A, B, C, D, E> apply(Row row) {
-      return new Fun.Tuple5<A, B, C, D, E>(provider.getColumnValue(row, 0, p1),
-          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
-          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5));
+      return new Fun.Tuple5<A, B, C, D, E>(
+          provider.getColumnValue(row, 0, p1),
+          provider.getColumnValue(row, 1, p2),
+          provider.getColumnValue(row, 2, p3),
+          provider.getColumnValue(row, 3, p4),
+          provider.getColumnValue(row, 4, p5));
     }
   }

-  public final static class Mapper6<A, B, C, D, E, F> implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {
+  public static final class Mapper6<A, B, C, D, E, F>
+      implements Function<Row, Fun.Tuple6<A, B, C, D, E, F>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1, p2, p3, p4, p5, p6;

-    public Mapper6(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
-        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6) {
+    public Mapper6(
+        ColumnValueProvider provider,
+        HelenusPropertyNode p1,
+        HelenusPropertyNode p2,
+        HelenusPropertyNode p3,
+        HelenusPropertyNode p4,
+        HelenusPropertyNode p5,
+        HelenusPropertyNode p6) {
       this.provider = provider;
       this.p1 = p1.getProperty();
       this.p2 = p2.getProperty();
@@ -150,20 +176,30 @@ public final class Mappers {
     @Override
     public Fun.Tuple6<A, B, C, D, E, F> apply(Row row) {
-      return new Fun.Tuple6<A, B, C, D, E, F>(provider.getColumnValue(row, 0, p1),
-          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
-          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
-          provider.getColumnValue(row, 5, p6));
+      return new Fun.Tuple6<A, B, C, D, E, F>(
+          provider.getColumnValue(row, 0, p1),
+          provider.getColumnValue(row, 1, p2),
+          provider.getColumnValue(row, 2, p3),
+          provider.getColumnValue(row, 3, p4),
+          provider.getColumnValue(row, 4, p5),
+          provider.getColumnValue(row, 5, p6));
     }
   }

-  public final static class Mapper7<A, B, C, D, E, F, G> implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {
+  public static final class Mapper7<A, B, C, D, E, F, G>
+      implements Function<Row, Fun.Tuple7<A, B, C, D, E, F, G>> {

     private final ColumnValueProvider provider;
     private final HelenusProperty p1, p2, p3, p4, p5, p6, p7;

-    public Mapper7(ColumnValueProvider provider, HelenusPropertyNode p1, HelenusPropertyNode p2,
-        HelenusPropertyNode p3, HelenusPropertyNode p4, HelenusPropertyNode p5, HelenusPropertyNode p6,
-        HelenusPropertyNode p7) {
+    public Mapper7(
+        ColumnValueProvider provider,
+        HelenusPropertyNode p1,
+        HelenusPropertyNode p2,
+        HelenusPropertyNode p3,
+        HelenusPropertyNode p4,
+        HelenusPropertyNode p5,
+        HelenusPropertyNode p6,
+        HelenusPropertyNode p7) {
       this.provider = provider;
       this.p1 = p1.getProperty();
@@ -177,11 +213,14 @@ public final class Mappers {
     @Override
     public Fun.Tuple7<A, B, C, D, E, F, G> apply(Row row) {
-      return new Fun.Tuple7<A, B, C, D, E, F, G>(provider.getColumnValue(row, 0, p1),
-          provider.getColumnValue(row, 1, p2), provider.getColumnValue(row, 2, p3),
-          provider.getColumnValue(row, 3, p4), provider.getColumnValue(row, 4, p5),
-          provider.getColumnValue(row, 5, p6), provider.getColumnValue(row, 6, p7));
+      return new Fun.Tuple7<A, B, C, D, E, F, G>(
+          provider.getColumnValue(row, 0, p1),
+          provider.getColumnValue(row, 1, p2),
+          provider.getColumnValue(row, 2, p3),
+          provider.getColumnValue(row, 3, p4),
+          provider.getColumnValue(row, 4, p5),
+          provider.getColumnValue(row, 5, p6),
+          provider.getColumnValue(row, 6, p7));
     }
   }
 }

File: Operator.java

@@ -19,7 +19,6 @@ import java.util.HashMap;
 import java.util.Map;

 public enum Operator {
-
   EQ("=="),

   IN("in"),
@@ -34,7 +33,7 @@ public enum Operator {
   private final String name;

-  private final static Map<String, Operator> indexByName = new HashMap<String, Operator>();
+  private static final Map<String, Operator> indexByName = new HashMap<String, Operator>();

   static {
     for (Operator fo : Operator.values()) {
@@ -53,5 +52,4 @@ public enum Operator {
   public static Operator findByOperator(String name) {
     return indexByName.get(name);
   }
-
 }

File: Ordered.java

@@ -1,10 +1,8 @@
 package net.helenus.core;

-import java.util.Objects;
-
 import com.datastax.driver.core.querybuilder.Ordering;
 import com.datastax.driver.core.querybuilder.QueryBuilder;
-
+import java.util.Objects;
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.mapping.ColumnType;
 import net.helenus.mapping.MappingUtil;
@@ -34,16 +32,13 @@ public final class Ordered {
     }

     switch (direction) {
-      case ASC :
+      case ASC:
         return QueryBuilder.asc(propNode.getColumnName());
-      case DESC :
+      case DESC:
         return QueryBuilder.desc(propNode.getColumnName());
     }

     throw new HelenusMappingException("invalid direction " + direction);
   }
-
 }

File: Postulate.java

@@ -17,7 +17,6 @@ package net.helenus.core;
 import com.datastax.driver.core.querybuilder.Clause;
 import com.datastax.driver.core.querybuilder.QueryBuilder;
-
 import net.helenus.core.reflect.HelenusPropertyNode;
 import net.helenus.mapping.value.ColumnValuePreparer;
 import net.helenus.support.HelenusMappingException;
@@ -39,38 +38,36 @@ public final class Postulate<V> {
   public Clause getClause(HelenusPropertyNode node, ColumnValuePreparer valuePreparer) {

     switch (operator) {
-      case EQ :
-        return QueryBuilder.eq(node.getColumnName(),
-            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
+      case EQ:
+        return QueryBuilder.eq(
+            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

-      case IN :
+      case IN:
         Object[] preparedValues = new Object[values.length];
         for (int i = 0; i != values.length; ++i) {
           preparedValues[i] = valuePreparer.prepareColumnValue(values[i], node.getProperty());
         }
         return QueryBuilder.in(node.getColumnName(), preparedValues);

-      case LT :
-        return QueryBuilder.lt(node.getColumnName(),
-            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
+      case LT:
+        return QueryBuilder.lt(
+            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

-      case LTE :
-        return QueryBuilder.lte(node.getColumnName(),
-            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
+      case LTE:
+        return QueryBuilder.lte(
+            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

-      case GT :
-        return QueryBuilder.gt(node.getColumnName(),
-            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
+      case GT:
+        return QueryBuilder.gt(
+            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

-      case GTE :
-        return QueryBuilder.gte(node.getColumnName(),
-            valuePreparer.prepareColumnValue(values[0], node.getProperty()));
+      case GTE:
+        return QueryBuilder.gte(
+            node.getColumnName(), valuePreparer.prepareColumnValue(values[0], node.getProperty()));

-      default :
+      default:
         throw new HelenusMappingException("unknown filter operation " + operator);
     }
   }

   @Override
@@ -92,11 +89,8 @@ public final class Postulate<V> {
       b.append(String.valueOf(values[i]));
     }

     return b.append(')').toString();
-
     }
-
     return operator.getName() + values[0];
   }
-
 }

File: PropertyValueValidator.java

@@ -20,5 +20,4 @@ import net.helenus.mapping.HelenusProperty;
 public interface PropertyValueValidator {

   void validate(HelenusProperty prop, Object value);
-
 }

File: Query.java

@@ -15,24 +15,17 @@
  */
 package net.helenus.core;

+import com.datastax.driver.core.querybuilder.BindMarker;
+import com.datastax.driver.core.querybuilder.QueryBuilder;
 import java.util.List;
 import java.util.Map;
 import java.util.Objects;

-import com.datastax.driver.core.querybuilder.BindMarker;
-import com.datastax.driver.core.querybuilder.QueryBuilder;
-
 import net.helenus.mapping.OrderingDirection;

-/**
- * Sugar methods for the queries
- *
- */
+/** Sugar methods for the queries */
 public final class Query {

-  private Query() {
-  }
+  private Query() {}

   public static BindMarker marker() {
     return QueryBuilder.bindMarker();
@@ -83,7 +76,6 @@ public final class Query {
       public V get() {
         return listGetter.get().get(index);
       }
-
     };
   }
@@ -97,8 +89,6 @@ public final class Query {
       public V get() {
        return mapGetter.get().get(k);
      }
-
     };
   }
-
 }

File: SchemaUtil.java

@@ -15,14 +15,12 @@
  */
 package net.helenus.core;

-import java.util.*;
-import java.util.stream.Collectors;
-
 import com.datastax.driver.core.*;
 import com.datastax.driver.core.IndexMetadata;
 import com.datastax.driver.core.schemabuilder.*;
 import com.datastax.driver.core.schemabuilder.Create.Options;
-
+import java.util.*;
+import java.util.stream.Collectors;
 import net.helenus.mapping.*;
 import net.helenus.mapping.ColumnType;
 import net.helenus.mapping.type.OptionalColumnMetadata;
@@ -31,8 +29,7 @@ import net.helenus.support.HelenusMappingException;

 public final class SchemaUtil {

-  private SchemaUtil() {
-  }
+  private SchemaUtil() {}

   public static RegularStatement use(String keyspace, boolean forceQuote) {
     if (forceQuote) {
@@ -55,23 +52,31 @@ public final class SchemaUtil {
       ColumnType columnType = prop.getColumnType();

       if (columnType == ColumnType.PARTITION_KEY || columnType == ColumnType.CLUSTERING_COLUMN) {
-        throw new HelenusMappingException("primary key columns are not supported in UserDefinedType for "
-            + prop.getPropertyName() + " in entity " + entity);
+        throw new HelenusMappingException(
+            "primary key columns are not supported in UserDefinedType for "
+                + prop.getPropertyName()
+                + " in entity "
+                + entity);
       }

       try {
         prop.getDataType().addColumn(create, prop.getColumnName());
       } catch (IllegalArgumentException e) {
-        throw new HelenusMappingException("invalid column name '" + prop.getColumnName() + "' in entity '"
-            + entity.getName().getName() + "'", e);
+        throw new HelenusMappingException(
+            "invalid column name '"
+                + prop.getColumnName()
+                + "' in entity '"
+                + entity.getName().getName()
+                + "'",
+            e);
       }
     }

     return create;
   }

-  public static List<SchemaStatement> alterUserType(UserType userType, HelenusEntity entity,
-      boolean dropUnusedColumns) {
+  public static List<SchemaStatement> alterUserType(
+      UserType userType, HelenusEntity entity, boolean dropUnusedColumns) {

     if (entity.getType() != HelenusEntityType.UDT) {
       throw new HelenusMappingException("expected UDT entity " + entity);
@@ -80,13 +85,13 @@ public final class SchemaUtil {
     List<SchemaStatement> result = new ArrayList<SchemaStatement>();

     /**
-     * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType
-     * when it will exist
+     * TODO: In future replace SchemaBuilder.alterTable by SchemaBuilder.alterType when it will
+     * exist
      */
     Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

-    final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
+    final Set<String> visitedColumns =
+        dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

     for (HelenusProperty prop : entity.getOrderedProperties()) {
@@ -103,13 +108,13 @@ public final class SchemaUtil {
       }

       DataType dataType = userType.getFieldType(columnName);
-      SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
-          optional(columnName, dataType));
+      SchemaStatement stmt =
+          prop.getDataType()
+              .alterColumn(alter, prop.getColumnName(), optional(columnName, dataType));

       if (stmt != null) {
         result.add(stmt);
       }
-
     }

     if (dropUnusedColumns) {
@@ -117,13 +122,11 @@ public final class SchemaUtil {
         if (!visitedColumns.contains(field)) {
           result.add(alter.dropColumn(field));
         }
-
       }
-
     }

     return result;
   }

   public static SchemaStatement dropUserType(HelenusEntity entity) {
@@ -133,7 +136,6 @@ public final class SchemaUtil {
     }

     return SchemaBuilder.dropType(entity.getName().toCql()).ifExists();
   }
-
   public static SchemaStatement dropUserType(UserType type) {
@@ -166,20 +168,19 @@ public final class SchemaUtil {
       }
       prop.getDataType().addColumn(create, prop.getColumnName());
     }
-
     if (!clusteringColumns.isEmpty()) {
       Options options = create.withOptions();
-      clusteringColumns
-          .forEach(p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
+      clusteringColumns.forEach(
+          p -> options.clusteringOrder(p.getColumnName().toCql(), mapDirection(p.getOrdering())));
     }

     return create;
   }

-  public static List<SchemaStatement> alterTable(TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {
+  public static List<SchemaStatement> alterTable(
+      TableMetadata tmd, HelenusEntity entity, boolean dropUnusedColumns) {

     if (entity.getType() != HelenusEntityType.TABLE) {
       throw new HelenusMappingException("expected table entity " + entity);
@@ -189,7 +190,8 @@ public final class SchemaUtil {
     Alter alter = SchemaBuilder.alterTable(entity.getName().toCql());

-    final Set<String> visitedColumns = dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();
+    final Set<String> visitedColumns =
+        dropUnusedColumns ? new HashSet<String>() : Collections.<String>emptySet();

     for (HelenusProperty prop : entity.getOrderedProperties()) {
@@ -206,13 +208,12 @@ public final class SchemaUtil {
       }

       ColumnMetadata columnMetadata = tmd.getColumn(columnName);
-      SchemaStatement stmt = prop.getDataType().alterColumn(alter, prop.getColumnName(),
-          optional(columnMetadata));
+      SchemaStatement stmt =
+          prop.getDataType().alterColumn(alter, prop.getColumnName(), optional(columnMetadata));

       if (stmt != null) {
         result.add(stmt);
       }
-
     }

     if (dropUnusedColumns) {
@@ -220,7 +221,6 @@ public final class SchemaUtil {
       if (!visitedColumns.contains(cm.getName())) {
         result.add(alter.dropColumn(cm.getName()));
       }
-
     }
   }
@@ -235,7 +235,6 @@ public final class SchemaUtil {
   }

   return SchemaBuilder.dropTable(entity.getName().toCql()).ifExists();
 }
-
 public static SchemaStatement createIndex(HelenusProperty prop) {
@@ -254,20 +253,28 @@ public final class SchemaUtil {
 public static List<SchemaStatement> createIndexes(HelenusEntity entity) {
return entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()) return entity
.map(p -> SchemaUtil.createIndex(p)).collect(Collectors.toList()); .getOrderedProperties()
.stream()
.filter(p -> p.getIndexName().isPresent())
.map(p -> SchemaUtil.createIndex(p))
.collect(Collectors.toList());
} }
public static List<SchemaStatement> alterIndexes(TableMetadata tmd, HelenusEntity entity, public static List<SchemaStatement> alterIndexes(
boolean dropUnusedIndexes) { TableMetadata tmd, HelenusEntity entity, boolean dropUnusedIndexes) {
List<SchemaStatement> list = new ArrayList<SchemaStatement>(); List<SchemaStatement> list = new ArrayList<SchemaStatement>();
final Set<String> visitedColumns = dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet(); final Set<String> visitedColumns =
dropUnusedIndexes ? new HashSet<String>() : Collections.<String>emptySet();
entity.getOrderedProperties().stream().filter(p -> p.getIndexName().isPresent()).forEach(p -> {
entity
.getOrderedProperties()
.stream()
.filter(p -> p.getIndexName().isPresent())
.forEach(
p -> {
String columnName = p.getColumnName().getName(); String columnName = p.getColumnName().getName();
if (dropUnusedIndexes) { if (dropUnusedIndexes) {
@ -284,21 +291,20 @@ public final class SchemaUtil {
} else { } else {
list.add(createIndex(p)); list.add(createIndex(p));
} }
}); });
if (dropUnusedIndexes) { if (dropUnusedIndexes) {
tmd.getColumns().stream().filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName())) tmd.getColumns()
.forEach(c -> { .stream()
.filter(c -> tmd.getIndex(c.getName()) != null && !visitedColumns.contains(c.getName()))
.forEach(
c -> {
list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists()); list.add(SchemaBuilder.dropIndex(tmd.getIndex(c.getName()).getName()).ifExists());
}); });
} }
return list; return list;
} }
public static SchemaStatement dropIndex(HelenusProperty prop) { public static SchemaStatement dropIndex(HelenusProperty prop) {
@ -307,9 +313,9 @@ public final class SchemaUtil {
private static SchemaBuilder.Direction mapDirection(OrderingDirection o) { private static SchemaBuilder.Direction mapDirection(OrderingDirection o) {
switch (o) { switch (o) {
case ASC : case ASC:
return SchemaBuilder.Direction.ASC; return SchemaBuilder.Direction.ASC;
case DESC : case DESC:
return SchemaBuilder.Direction.DESC; return SchemaBuilder.Direction.DESC;
} }
throw new HelenusMappingException("unknown ordering " + o); throw new HelenusMappingException("unknown ordering " + o);
@ -319,9 +325,11 @@ public final class SchemaUtil {
throw new HelenusMappingException( throw new HelenusMappingException(
"only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '" "only primitive types and Set,List,Map collections and UserDefinedTypes are allowed, unknown type for property '"
+ prop.getPropertyName() + "' type is '" + prop.getJavaType() + "' in the entity " + prop.getPropertyName()
+ "' type is '"
+ prop.getJavaType()
+ "' in the entity "
+ prop.getEntity()); + prop.getEntity());
} }
private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) { private static OptionalColumnMetadata optional(final ColumnMetadata columnMetadata) {
@ -337,7 +345,6 @@ public final class SchemaUtil {
public DataType getType() { public DataType getType() {
return columnMetadata.getType(); return columnMetadata.getType();
} }
}; };
} }
return null; return null;
@ -356,10 +363,8 @@ public final class SchemaUtil {
public DataType getType() { public DataType getType() {
return dataType; return dataType;
} }
}; };
} }
return null; return null;
} }
} }
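
For context: the optional(...) helpers at the end of this file adapt two different inputs (a driver ColumnMetadata, or a raw name/DataType pair) to the single OptionalColumnMetadata view, so that alterColumn(...) can compare the desired schema against the live one uniformly. A minimal self-contained sketch of that adapter, with the driver's DataType simplified to String:

final class ColumnViews {
  interface OptionalColumnMetadata {
    String getName();
    String getType(); // the real interface exposes the driver's DataType
  }

  // Wrap a (name, type) pair; a null return means "column absent from the live
  // schema", which is what makes the alter path emit an ADD instead of an ALTER.
  static OptionalColumnMetadata optional(final String name, final String type) {
    if (type == null) return null;
    return new OptionalColumnMetadata() {
      public String getName() { return name; }
      public String getType() { return type; }
    };
  }
}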


@ -15,24 +15,22 @@
*/ */
package net.helenus.core; package net.helenus.core;
import brave.Tracer;
import com.codahale.metrics.MetricRegistry; import com.codahale.metrics.MetricRegistry;
import com.datastax.driver.core.*; import com.datastax.driver.core.*;
import com.google.common.util.concurrent.MoreExecutors; import com.google.common.util.concurrent.MoreExecutors;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;
import net.helenus.support.PackageUtil;
import brave.Tracer;
import java.io.IOException; import java.io.IOException;
import java.io.PrintStream; import java.io.PrintStream;
import java.util.*; import java.util.*;
import java.util.concurrent.Executor; import java.util.concurrent.Executor;
import java.util.concurrent.Executors; import java.util.concurrent.Executors;
import java.util.function.Consumer; import java.util.function.Consumer;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.value.ColumnValuePreparer;
import net.helenus.mapping.value.ColumnValueProvider;
import net.helenus.support.HelenusException;
import net.helenus.support.PackageUtil;
public final class SessionInitializer extends AbstractSessionOperations { public final class SessionInitializer extends AbstractSessionOperations {
@ -164,10 +162,11 @@ public final class SessionInitializer extends AbstractSessionOperations {
public SessionInitializer addPackage(String packageName) { public SessionInitializer addPackage(String packageName) {
try { try {
PackageUtil.getClasses(packageName).stream().filter(c -> c.isInterface() && !c.isAnnotation()) PackageUtil.getClasses(packageName)
.stream()
.filter(c -> c.isInterface() && !c.isAnnotation())
.forEach(initList::add); .forEach(initList::add);
} } catch (IOException | ClassNotFoundException e) {
catch (IOException | ClassNotFoundException e) {
throw new HelenusException("fail to add package " + packageName, e); throw new HelenusException("fail to add package " + packageName, e);
} }
return this; return this;
@ -227,8 +226,18 @@ public final class SessionInitializer extends AbstractSessionOperations {
public synchronized HelenusSession get() { public synchronized HelenusSession get() {
initialize(); initialize();
return new HelenusSession(session, usingKeyspace, registry, showCql, printStream, sessionRepository, executor, return new HelenusSession(
autoDdl == AutoDdl.CREATE_DROP, consistencyLevel, metricRegistry, zipkinTracer); session,
usingKeyspace,
registry,
showCql,
printStream,
sessionRepository,
executor,
autoDdl == AutoDdl.CREATE_DROP,
consistencyLevel,
metricRegistry,
zipkinTracer);
} }
private void initialize() { private void initialize() {
@ -241,42 +250,49 @@ public final class SessionInitializer extends AbstractSessionOperations {
UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns); UserTypeOperations userTypeOps = new UserTypeOperations(this, dropUnusedColumns);
switch (autoDdl) { switch (autoDdl) {
case CREATE_DROP: case CREATE_DROP:
// Drop tables first, otherwise a `DROP TYPE ...` will fail as the type is still referenced // Drop tables first, otherwise a `DROP TYPE ...` will fail as the type is still referenced
// by a table. // by a table.
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE) sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.dropTable(e)); .forEach(e -> tableOps.dropTable(e));
eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e)); eachUserTypeInReverseOrder(userTypeOps, e -> userTypeOps.dropUserType(e));
// FALLTHRU to CREATE case (read: the absence of a `break;` statement here is intentional!) // FALLTHRU to CREATE case (read: the absence of a `break;` statement here is intentional!)
case CREATE: case CREATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e)); eachUserTypeInOrder(userTypeOps, e -> userTypeOps.createUserType(e));
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE) sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.createTable(e)); .forEach(e -> tableOps.createTable(e));
break; break;
case VALIDATE: case VALIDATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e)); eachUserTypeInOrder(userTypeOps, e -> userTypeOps.validateUserType(getUserType(e), e));
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE) sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.validateTable(getTableMetadata(e), e)); .forEach(e -> tableOps.validateTable(getTableMetadata(e), e));
break; break;
case UPDATE: case UPDATE:
eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e)); eachUserTypeInOrder(userTypeOps, e -> userTypeOps.updateUserType(getUserType(e), e));
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.TABLE) sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.TABLE)
.forEach(e -> tableOps.updateTable(getTableMetadata(e), e)); .forEach(e -> tableOps.updateTable(getTableMetadata(e), e));
break; break;
} }
KeyspaceMetadata km = getKeyspaceMetadata(); KeyspaceMetadata km = getKeyspaceMetadata();
@ -284,28 +300,36 @@ public final class SessionInitializer extends AbstractSessionOperations {
for (UserType userType : km.getUserTypes()) { for (UserType userType : km.getUserTypes()) {
sessionRepository.addUserType(userType.getTypeName(), userType); sessionRepository.addUserType(userType.getTypeName(), userType);
} }
} }
private void eachUserTypeInOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) { private void eachUserTypeInOrder(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>(); Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>(); Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
sessionRepository.entities().stream().filter(e -> e.getType() == HelenusEntityType.UDT).forEach(e -> { sessionRepository
.entities()
.stream()
.filter(e -> e.getType() == HelenusEntityType.UDT)
.forEach(
e -> {
stack.clear(); stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action); eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
}); });
} }
private void eachUserTypeInReverseOrder(UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) { private void eachUserTypeInReverseOrder(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) {
ArrayDeque<HelenusEntity> deque = new ArrayDeque<>(); ArrayDeque<HelenusEntity> deque = new ArrayDeque<>();
eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e)); eachUserTypeInOrder(userTypeOps, e -> deque.addFirst(e));
deque.stream().forEach(e -> { deque
.stream()
.forEach(
e -> {
action.accept(e); action.accept(e);
}); });
/* /*
Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>(); Set<HelenusEntity> processedSet = new HashSet<HelenusEntity>();
Set<HelenusEntity> stack = new HashSet<HelenusEntity>(); Set<HelenusEntity> stack = new HashSet<HelenusEntity>();
@ -317,11 +341,15 @@ public final class SessionInitializer extends AbstractSessionOperations {
stack.clear(); stack.clear();
eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action); eachUserTypeInRecursion(e, processedSet, stack, userTypeOps, action);
}); });
*/ */
} }
private void eachUserTypeInRecursion(HelenusEntity e, Set<HelenusEntity> processedSet, Set<HelenusEntity> stack, private void eachUserTypeInRecursion(
UserTypeOperations userTypeOps, Consumer<? super HelenusEntity> action) { HelenusEntity e,
Set<HelenusEntity> processedSet,
Set<HelenusEntity> stack,
UserTypeOperations userTypeOps,
Consumer<? super HelenusEntity> action) {
stack.add(e); stack.add(e);
@ -338,19 +366,18 @@ public final class SessionInitializer extends AbstractSessionOperations {
action.accept(e); action.accept(e);
processedSet.add(e); processedSet.add(e);
} }
} }
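
The recursion above is a depth-first traversal: each UDT is emitted only after the UDTs it references, processedSet makes every type visit-once, and stack guards against reference cycles (the middle of the method is elided by this hunk). A self-contained sketch of the pattern, with the dependency lookup as a stand-in rather than the real property scan:

import java.util.*;
import java.util.function.Consumer;

final class UdtOrder {
  // Emit `node` after all of its dependencies (post-order), visiting each node
  // once and skipping anything already on the current path (cycle guard).
  static <T> void visit(
      T node, Map<T, List<T>> deps, Set<T> processed, Set<T> stack, Consumer<T> action) {
    if (!stack.add(node)) return; // already on this path: break the cycle
    for (T dep : deps.getOrDefault(node, Collections.<T>emptyList())) {
      if (!processed.contains(dep)) visit(dep, deps, processed, stack, action);
    }
    if (processed.add(node)) action.accept(node);
  }
}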
private KeyspaceMetadata getKeyspaceMetadata() { private KeyspaceMetadata getKeyspaceMetadata() {
if (keyspaceMetadata == null) { if (keyspaceMetadata == null) {
keyspaceMetadata = session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase()); keyspaceMetadata =
session.getCluster().getMetadata().getKeyspace(usingKeyspace.toLowerCase());
} }
return keyspaceMetadata; return keyspaceMetadata;
} }
private TableMetadata getTableMetadata(HelenusEntity entity) { private TableMetadata getTableMetadata(HelenusEntity entity) {
return getKeyspaceMetadata().getTable(entity.getName().getName()); return getKeyspaceMetadata().getTable(entity.getName().getName());
} }
private UserType getUserType(HelenusEntity entity) { private UserType getUserType(HelenusEntity entity) {


@ -15,11 +15,9 @@
*/ */
package net.helenus.core; package net.helenus.core;
import java.util.Collection;
import com.datastax.driver.core.UserType; import com.datastax.driver.core.UserType;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import java.util.Collection;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
public final class SessionRepository { public final class SessionRepository {
@ -32,7 +30,8 @@ public final class SessionRepository {
userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build(); userTypeMap = ImmutableMap.<String, UserType>builder().putAll(builder.getUserTypeMap()).build();
entityMap = ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build(); entityMap =
ImmutableMap.<Class<?>, HelenusEntity>builder().putAll(builder.getEntityMap()).build();
} }
public UserType findUserType(String name) { public UserType findUserType(String name) {
@ -42,5 +41,4 @@ public final class SessionRepository {
public Collection<HelenusEntity> entities() { public Collection<HelenusEntity> entities() {
return entityMap.values(); return entityMap.values();
} }
} }


@ -15,17 +15,15 @@
*/ */
package net.helenus.core; package net.helenus.core;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import com.datastax.driver.core.Session; import com.datastax.driver.core.Session;
import com.datastax.driver.core.UDTValue; import com.datastax.driver.core.UDTValue;
import com.datastax.driver.core.UserType; import com.datastax.driver.core.UserType;
import com.google.common.collect.HashMultimap; import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap; import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusEntityType; import net.helenus.mapping.HelenusEntityType;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
@ -35,7 +33,8 @@ import net.helenus.support.HelenusMappingException;
public final class SessionRepositoryBuilder { public final class SessionRepositoryBuilder {
private static final Optional<HelenusEntityType> OPTIONAL_UDT = Optional.of(HelenusEntityType.UDT); private static final Optional<HelenusEntityType> OPTIONAL_UDT =
Optional.of(HelenusEntityType.UDT);
private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>(); private final Map<Class<?>, HelenusEntity> entityMap = new HashMap<Class<?>, HelenusEntity>();
@ -45,7 +44,6 @@ public final class SessionRepositoryBuilder {
private final Session session; private final Session session;
SessionRepositoryBuilder(Session session) { SessionRepositoryBuilder(Session session) {
this.session = session; this.session = session;
} }
@ -85,7 +83,6 @@ public final class SessionRepositoryBuilder {
if (concurrentEntity == null) { if (concurrentEntity == null) {
addUserDefinedTypes(entity.getOrderedProperties()); addUserDefinedTypes(entity.getOrderedProperties());
} }
} }
public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) { public HelenusEntity add(Object dsl, Optional<HelenusEntityType> type) {
@ -101,7 +98,8 @@ public final class SessionRepositoryBuilder {
entity = helenusEntity; entity = helenusEntity;
if (type.isPresent() && entity.getType() != type.get()) { if (type.isPresent() && entity.getType() != type.get()) {
throw new HelenusMappingException("unexpected entity type " + entity.getType() + " for " + entity); throw new HelenusMappingException(
"unexpected entity type " + entity.getType() + " for " + entity);
} }
HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity); HelenusEntity concurrentEntity = entityMap.putIfAbsent(iface, entity);
@ -111,7 +109,6 @@ public final class SessionRepositoryBuilder {
} else { } else {
entity = concurrentEntity; entity = concurrentEntity;
} }
} }
return entity; return entity;
@ -140,13 +137,8 @@ public final class SessionRepositoryBuilder {
if (HelenusEntityType.UDT == prop.getEntity().getType()) { if (HelenusEntityType.UDT == prop.getEntity().getType()) {
userTypeUsesMap.put(prop.getEntity(), addedUserType); userTypeUsesMap.put(prop.getEntity(), addedUserType);
} }
} }
} }
} }
} }
} }


@ -15,11 +15,9 @@
*/ */
package net.helenus.core; package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.TableMetadata; import com.datastax.driver.core.TableMetadata;
import com.datastax.driver.core.schemabuilder.SchemaStatement; import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
@ -29,7 +27,8 @@ public final class TableOperations {
private final boolean dropUnusedColumns; private final boolean dropUnusedColumns;
private final boolean dropUnusedIndexes; private final boolean dropUnusedIndexes;
public TableOperations(AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) { public TableOperations(
AbstractSessionOperations sessionOps, boolean dropUnusedColumns, boolean dropUnusedIndexes) {
this.sessionOps = sessionOps; this.sessionOps = sessionOps;
this.dropUnusedColumns = dropUnusedColumns; this.dropUnusedColumns = dropUnusedColumns;
this.dropUnusedIndexes = dropUnusedIndexes; this.dropUnusedIndexes = dropUnusedIndexes;
@ -40,13 +39,11 @@ public final class TableOperations {
sessionOps.execute(SchemaUtil.createTable(entity), true); sessionOps.execute(SchemaUtil.createTable(entity), true);
executeBatch(SchemaUtil.createIndexes(entity)); executeBatch(SchemaUtil.createIndexes(entity));
} }
public void dropTable(HelenusEntity entity) { public void dropTable(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropTable(entity), true); sessionOps.execute(SchemaUtil.dropTable(entity), true);
} }
public void validateTable(TableMetadata tmd, HelenusEntity entity) { public void validateTable(TableMetadata tmd, HelenusEntity entity) {
@ -62,7 +59,10 @@ public final class TableOperations {
if (!list.isEmpty()) { if (!list.isEmpty()) {
throw new HelenusException( throw new HelenusException(
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list); "schema changed for entity "
+ entity.getMappingInterface()
+ ", apply this command: "
+ list);
} }
} }
@ -79,10 +79,9 @@ public final class TableOperations {
private void executeBatch(List<SchemaStatement> list) { private void executeBatch(List<SchemaStatement> list) {
list.forEach(s -> { list.forEach(
s -> {
sessionOps.execute(s, true); sessionOps.execute(s, true);
}); });
} }
} }


@ -1,13 +1,10 @@
package net.helenus.core; package net.helenus.core;
import com.diffplug.common.base.Errors; import com.diffplug.common.base.Errors;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.function.Function; import java.util.function.Function;
/** /** Encapsulates the concept of a "transaction" as a unit-of-work. */
* Encapsulates the concept of a "transaction" as a unit-of-work.
*/
public class UnitOfWork { public class UnitOfWork {
private final HelenusSession session; private final HelenusSession session;
@ -19,8 +16,8 @@ public class UnitOfWork {
} }
/** /**
* Marks the beginning of a transactional section of work. Will write a record * Marks the beginning of a transactional section of work. Will write a record to the shared
* to the shared write-ahead log. * write-ahead log.
* *
* @return the handle used to commit or abort the work. * @return the handle used to commit or abort the work.
*/ */
@ -34,8 +31,7 @@ public class UnitOfWork {
} }
/** /**
* Checks to see if the work performed between calling begin and now can be * Checks to see if the work performed between calling begin and now can be committed or not.
* committed or not.
* *
* @return a function from which to chain work that only happens when commit is successful * @return a function from which to chain work that only happens when commit is successful
* @throws ConflictingUnitOfWorkException when the work overlaps with other concurrent writers. * @throws ConflictingUnitOfWorkException when the work overlaps with other concurrent writers.
@ -51,9 +47,7 @@ public class UnitOfWork {
return Function.<Void>identity(); return Function.<Void>identity();
} }
/** /** Explicitly discard the work and mark it as such in the log. */
* Explicitly discard the work and mark it as such in the log.
*/
public void abort() { public void abort() {
// log.record(txn::abort) // log.record(txn::abort)
// cache.invalidateSince(txn::start time) // cache.invalidateSince(txn::start time)
@ -62,6 +56,4 @@ public class UnitOfWork {
public String describeConflicts() { public String describeConflicts() {
return "it's complex..."; return "it's complex...";
} }
} }
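
A hedged usage sketch of the API documented above; the constructor shape and the commit flow are assumptions, since this hunk only shows begin(), commit(), abort(), and describeConflicts():

import java.util.function.Function;

final class UnitOfWorkSketch {
  static void run(HelenusSession session) {
    UnitOfWork uow = new UnitOfWork(session); // constructor shape assumed
    uow.begin();
    try {
      // ... reads and writes performed through the session ...
      Function<Void, Void> onCommit = uow.commit(); // may throw ConflictingUnitOfWorkException
      onCommit.apply(null); // chain work that only runs after a successful commit
    } catch (ConflictingUnitOfWorkException e) {
      System.err.println(uow.describeConflicts());
      uow.abort();
    }
  }
}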


@ -15,11 +15,9 @@
*/ */
package net.helenus.core; package net.helenus.core;
import java.util.List;
import com.datastax.driver.core.UserType; import com.datastax.driver.core.UserType;
import com.datastax.driver.core.schemabuilder.SchemaStatement; import com.datastax.driver.core.schemabuilder.SchemaStatement;
import java.util.List;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
@ -36,13 +34,11 @@ public final class UserTypeOperations {
public void createUserType(HelenusEntity entity) { public void createUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.createUserType(entity), true); sessionOps.execute(SchemaUtil.createUserType(entity), true);
} }
public void dropUserType(HelenusEntity entity) { public void dropUserType(HelenusEntity entity) {
sessionOps.execute(SchemaUtil.dropUserType(entity), true); sessionOps.execute(SchemaUtil.dropUserType(entity), true);
} }
public void validateUserType(UserType userType, HelenusEntity entity) { public void validateUserType(UserType userType, HelenusEntity entity) {
@ -56,9 +52,11 @@ public final class UserTypeOperations {
if (!list.isEmpty()) { if (!list.isEmpty()) {
throw new HelenusException( throw new HelenusException(
"schema changed for entity " + entity.getMappingInterface() + ", apply this command: " + list); "schema changed for entity "
+ entity.getMappingInterface()
+ ", apply this command: "
+ list);
} }
} }
public void updateUserType(UserType userType, HelenusEntity entity) { public void updateUserType(UserType userType, HelenusEntity entity) {
@ -69,15 +67,13 @@ public final class UserTypeOperations {
} }
executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns)); executeBatch(SchemaUtil.alterUserType(userType, entity, dropUnusedColumns));
} }
private void executeBatch(List<SchemaStatement> list) { private void executeBatch(List<SchemaStatement> list) {
list.forEach(s -> { list.forEach(
s -> {
sessionOps.execute(s, true); sessionOps.execute(s, true);
}); });
} }
} }


@ -7,5 +7,4 @@ import java.lang.annotation.Target;
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.TYPE) @Target(ElementType.TYPE)
public @interface Cacheable { public @interface Cacheable {}
}


@ -4,7 +4,6 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; import java.lang.annotation.Target;
import net.helenus.core.ConflictingUnitOfWorkException; import net.helenus.core.ConflictingUnitOfWorkException;
@Retention(RetentionPolicy.RUNTIME) @Retention(RetentionPolicy.RUNTIME)


@ -2,7 +2,7 @@ package net.helenus.core.aspect;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Arrays; import java.util.Arrays;
import net.helenus.core.annotation.Retry;
import org.apache.commons.lang3.exception.ExceptionUtils; import org.apache.commons.lang3.exception.ExceptionUtils;
import org.aspectj.lang.ProceedingJoinPoint; import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around; import org.aspectj.lang.annotation.Around;
@ -13,8 +13,6 @@ import org.slf4j.LoggerFactory;
import org.springframework.core.annotation.AnnotationUtils; import org.springframework.core.annotation.AnnotationUtils;
import org.springframework.util.Assert; import org.springframework.util.Assert;
import net.helenus.core.annotation.Retry;
@Aspect @Aspect
public class RetryConcurrentUnitOfWorkAspect { public class RetryConcurrentUnitOfWorkAspect {
@ -39,8 +37,8 @@ public class RetryConcurrentUnitOfWorkAspect {
return tryProceeding(pjp, times, retryOn); return tryProceeding(pjp, times, retryOn);
} }
private Object tryProceeding(ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) private Object tryProceeding(
throws Throwable { ProceedingJoinPoint pjp, int times, Class<? extends Throwable>[] retryOn) throws Throwable {
try { try {
return proceed(pjp); return proceed(pjp);
} catch (Throwable throwable) { } catch (Throwable throwable) {


@ -17,12 +17,10 @@ package net.helenus.core.operation;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import net.helenus.core.*; import net.helenus.core.*;
public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>> public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperation<E, O>>
extends extends AbstractOperation<E, O> {
AbstractOperation<E, O> {
protected List<Filter<?>> filters = null; protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;
@ -107,5 +105,4 @@ public abstract class AbstractFilterOperation<E, O extends AbstractFilterOperati
} }
ifFilters.add(filter); ifFilters.add(filter);
} }
} }
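
This class and the two that follow use the self-referential generic O extends AbstractFilterOperation<E, O> so that fluent filter methods can return the concrete subtype instead of the abstract base. A minimal sketch of the idiom:

abstract class FilterOp<E, O extends FilterOp<E, O>> {
  @SuppressWarnings("unchecked")
  O where(String predicate) {
    // record the predicate, then hand back the concrete subtype for chaining
    return (O) this;
  }
}

final class SelectOp<E> extends FilterOp<E, SelectOp<E>> {}
// usage: new SelectOp<String>().where("a = 1").where("b = 2") keeps the SelectOp type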


@ -17,12 +17,11 @@ package net.helenus.core.operation;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import net.helenus.core.*; import net.helenus.core.*;
public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilterOptionalOperation<E, O>> public abstract class AbstractFilterOptionalOperation<
extends E, O extends AbstractFilterOptionalOperation<E, O>>
AbstractOptionalOperation<E, O> { extends AbstractOptionalOperation<E, O> {
protected List<Filter<?>> filters = null; protected List<Filter<?>> filters = null;
protected List<Filter<?>> ifFilters = null; protected List<Filter<?>> ifFilters = null;
@ -107,5 +106,4 @@ public abstract class AbstractFilterOptionalOperation<E, O extends AbstractFilte
} }
ifFilters.add(filter); ifFilters.add(filter);
} }
} }


@ -17,10 +17,10 @@ package net.helenus.core.operation;
import java.util.LinkedList; import java.util.LinkedList;
import java.util.List; import java.util.List;
import net.helenus.core.*; import net.helenus.core.*;
public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterStreamOperation<E, O>> public abstract class AbstractFilterStreamOperation<
E, O extends AbstractFilterStreamOperation<E, O>>
extends AbstractStreamOperation<E, O> { extends AbstractStreamOperation<E, O> {
protected List<Filter<?>> filters = null; protected List<Filter<?>> filters = null;
@ -106,5 +106,4 @@ public abstract class AbstractFilterStreamOperation<E, O extends AbstractFilterS
} }
ifFilters.add(filter); ifFilters.add(filter);
} }
} }


@ -16,12 +16,11 @@
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import java.util.concurrent.CompletableFuture;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import java.util.concurrent.CompletableFuture; public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>>
extends AbstractStatementOperation<E, O> {
public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> extends AbstractStatementOperation<E, O> {
public abstract E transform(ResultSet resultSet); public abstract E transform(ResultSet resultSet);
@ -42,7 +41,8 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
} }
public E sync() { public E sync() {
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly(); ResultSet resultSet =
sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
E result = transform(resultSet); E result = transform(resultSet);
if (cacheable()) { if (cacheable()) {
sessionOps.cache(getCacheKey(), result); sessionOps.cache(getCacheKey(), result);
@ -50,6 +50,7 @@ public abstract class AbstractOperation<E, O extends AbstractOperation<E, O>> ex
return result; return result;
} }
public CompletableFuture<E> async() { return CompletableFuture.supplyAsync(this::sync); } public CompletableFuture<E> async() {
return CompletableFuture.supplyAsync(this::sync);
}
} }
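
As the hunk above shows, async() is just the blocking sync() wrapped in CompletableFuture.supplyAsync, so both paths share one execute-and-transform implementation. A stripped-down sketch of that split:

import java.util.concurrent.CompletableFuture;

abstract class Op<E> {
  abstract E sync(); // blocking: execute the statement, transform the ResultSet, maybe cache

  CompletableFuture<E> async() {
    return CompletableFuture.supplyAsync(this::sync); // runs sync() on the common pool
  }
}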


@ -23,9 +23,8 @@ import com.datastax.driver.core.ResultSetFuture;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import net.helenus.core.AbstractSessionOperations;
import java.util.Optional; import java.util.Optional;
import net.helenus.core.AbstractSessionOperations;
public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>> public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOperation<E, O>>
extends AbstractStatementOperation<E, O> { extends AbstractStatementOperation<E, O> {
@ -42,7 +41,8 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() { public ListenableFuture<PreparedOptionalOperation<E>> prepareAsync() {
final O _this = (O) this; final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), return Futures.transform(
prepareStatementAsync(),
new Function<PreparedStatement, PreparedOptionalOperation<E>>() { new Function<PreparedStatement, PreparedOptionalOperation<E>>() {
@Override @Override
public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) { public PreparedOptionalOperation<E> apply(PreparedStatement preparedStatement) {
@ -53,13 +53,15 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
public Optional<E> sync() { public Optional<E> sync() {
Tracer tracer = this.sessionOps.getZipkinTracer(); Tracer tracer = this.sessionOps.getZipkinTracer();
final Span cassandraSpan = (tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null; final Span cassandraSpan =
(tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null;
if (cassandraSpan != null) { if (cassandraSpan != null) {
cassandraSpan.name("cassandra"); cassandraSpan.name("cassandra");
cassandraSpan.start(); cassandraSpan.start();
} }
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly(); ResultSet resultSet =
sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
Optional<E> result = transform(resultSet); Optional<E> result = transform(resultSet);
if (cassandraSpan != null) { if (cassandraSpan != null) {
@ -71,14 +73,18 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
public ListenableFuture<Optional<E>> async() { public ListenableFuture<Optional<E>> async() {
final Tracer tracer = this.sessionOps.getZipkinTracer(); final Tracer tracer = this.sessionOps.getZipkinTracer();
final Span cassandraSpan = (tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null; final Span cassandraSpan =
(tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null;
if (cassandraSpan != null) { if (cassandraSpan != null) {
cassandraSpan.name("cassandra"); cassandraSpan.name("cassandra");
cassandraSpan.start(); cassandraSpan.start();
} }
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues); ResultSetFuture resultSetFuture =
ListenableFuture<Optional<E>> future = Futures.transform(resultSetFuture, sessionOps.executeAsync(options(buildStatement()), showValues);
ListenableFuture<Optional<E>> future =
Futures.transform(
resultSetFuture,
new Function<ResultSet, Optional<E>>() { new Function<ResultSet, Optional<E>>() {
@Override @Override
public Optional<E> apply(ResultSet resultSet) { public Optional<E> apply(ResultSet resultSet) {
@ -88,9 +94,9 @@ public abstract class AbstractOptionalOperation<E, O extends AbstractOptionalOpe
} }
return result; return result;
} }
}, sessionOps.getExecutor()); },
sessionOps.getExecutor());
return future; return future;
} }
} }
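
Both sync() and async() above bracket the Cassandra call with an optional Zipkin child span. A sketch of that guard pattern; span.finish() is an assumption, since the hunks truncate where the span is closed:

import brave.Span;
import brave.Tracer;
import brave.propagation.TraceContext;
import java.util.function.Supplier;

final class TraceSketch {
  static <T> T traced(Tracer tracer, TraceContext ctx, Supplier<T> call) {
    Span span = (tracer != null && ctx != null) ? tracer.newChild(ctx) : null;
    if (span != null) {
      span.name("cassandra");
      span.start();
    }
    try {
      return call.get(); // execute the statement and transform the result
    } finally {
      if (span != null) span.finish(); // assumed; not shown in this hunk
    }
  }
}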


@ -17,9 +17,6 @@ package net.helenus.core.operation;
import brave.Tracer; import brave.Tracer;
import brave.propagation.TraceContext; import brave.propagation.TraceContext;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.datastax.driver.core.ConsistencyLevel; import com.datastax.driver.core.ConsistencyLevel;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
import com.datastax.driver.core.RegularStatement; import com.datastax.driver.core.RegularStatement;
@ -30,10 +27,10 @@ import com.datastax.driver.core.policies.FallthroughRetryPolicy;
import com.datastax.driver.core.policies.RetryPolicy; import com.datastax.driver.core.policies.RetryPolicy;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> { public abstract class AbstractStatementOperation<E, O extends AbstractStatementOperation<E, O>> {
@ -264,10 +261,8 @@ public abstract class AbstractStatementOperation<E, O extends AbstractStatementO
RegularStatement regularStatement = (RegularStatement) statement; RegularStatement regularStatement = (RegularStatement) statement;
return sessionOps.prepareAsync(regularStatement); return sessionOps.prepareAsync(regularStatement);
} }
throw new HelenusException("only RegularStatements can be prepared"); throw new HelenusException("only RegularStatements can be prepared");
} }
} }


@ -15,8 +15,6 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.stream.Stream;
import brave.Span; import brave.Span;
import brave.Tracer; import brave.Tracer;
import com.datastax.driver.core.PreparedStatement; import com.datastax.driver.core.PreparedStatement;
@ -25,7 +23,7 @@ import com.datastax.driver.core.ResultSetFuture;
import com.google.common.base.Function; import com.google.common.base.Function;
import com.google.common.util.concurrent.Futures; import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture; import com.google.common.util.concurrent.ListenableFuture;
import java.util.stream.Stream;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>> public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperation<E, O>>
@ -43,7 +41,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() { public ListenableFuture<PreparedStreamOperation<E>> prepareAsync() {
final O _this = (O) this; final O _this = (O) this;
return Futures.transform(prepareStatementAsync(), return Futures.transform(
prepareStatementAsync(),
new Function<PreparedStatement, PreparedStreamOperation<E>>() { new Function<PreparedStatement, PreparedStreamOperation<E>>() {
@Override @Override
public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) { public PreparedStreamOperation<E> apply(PreparedStatement preparedStatement) {
@ -54,13 +53,15 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
public Stream<E> sync() { public Stream<E> sync() {
Tracer tracer = this.sessionOps.getZipkinTracer(); Tracer tracer = this.sessionOps.getZipkinTracer();
final Span cassandraSpan = (tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null; final Span cassandraSpan =
(tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null;
if (cassandraSpan != null) { if (cassandraSpan != null) {
cassandraSpan.name("cassandra"); cassandraSpan.name("cassandra");
cassandraSpan.start(); cassandraSpan.start();
} }
ResultSet resultSet = sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly(); ResultSet resultSet =
sessionOps.executeAsync(options(buildStatement()), showValues).getUninterruptibly();
Stream<E> result = transform(resultSet); Stream<E> result = transform(resultSet);
if (cassandraSpan != null) { if (cassandraSpan != null) {
@ -72,14 +73,18 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
public ListenableFuture<Stream<E>> async() { public ListenableFuture<Stream<E>> async() {
Tracer tracer = this.sessionOps.getZipkinTracer(); Tracer tracer = this.sessionOps.getZipkinTracer();
final Span cassandraSpan = (tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null; final Span cassandraSpan =
(tracer != null && traceContext != null) ? tracer.newChild(traceContext) : null;
if (cassandraSpan != null) { if (cassandraSpan != null) {
cassandraSpan.name("cassandra"); cassandraSpan.name("cassandra");
cassandraSpan.start(); cassandraSpan.start();
} }
ResultSetFuture resultSetFuture = sessionOps.executeAsync(options(buildStatement()), showValues); ResultSetFuture resultSetFuture =
ListenableFuture<Stream<E>> future = Futures.transform(resultSetFuture, sessionOps.executeAsync(options(buildStatement()), showValues);
ListenableFuture<Stream<E>> future =
Futures.transform(
resultSetFuture,
new Function<ResultSet, Stream<E>>() { new Function<ResultSet, Stream<E>>() {
@Override @Override
public Stream<E> apply(ResultSet resultSet) { public Stream<E> apply(ResultSet resultSet) {
@ -89,8 +94,8 @@ public abstract class AbstractStreamOperation<E, O extends AbstractStreamOperati
} }
return result; return result;
} }
}, sessionOps.getExecutor()); },
sessionOps.getExecutor());
return future; return future;
} }
} }


@ -39,5 +39,4 @@ public final class BoundOperation<E> extends AbstractOperation<E, BoundOperation
public Statement buildStatement() { public Statement buildStatement() {
return boundStatement; return boundStatement;
} }
} }


@ -15,18 +15,19 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.Optional;
import com.datastax.driver.core.BoundStatement; import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement; import com.datastax.driver.core.Statement;
import java.util.Optional;
public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> { public final class BoundOptionalOperation<E>
extends AbstractOptionalOperation<E, BoundOptionalOperation<E>> {
private final BoundStatement boundStatement; private final BoundStatement boundStatement;
private final AbstractOptionalOperation<E, ?> delegate; private final AbstractOptionalOperation<E, ?> delegate;
public BoundOptionalOperation(BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) { public BoundOptionalOperation(
BoundStatement boundStatement, AbstractOptionalOperation<E, ?> operation) {
super(operation.sessionOps); super(operation.sessionOps);
this.boundStatement = boundStatement; this.boundStatement = boundStatement;
this.delegate = operation; this.delegate = operation;
@ -41,5 +42,4 @@ public final class BoundOptionalOperation<E> extends AbstractOptionalOperation<E
public Statement buildStatement() { public Statement buildStatement() {
return boundStatement; return boundStatement;
} }
} }


@ -15,18 +15,19 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.stream.Stream;
import com.datastax.driver.core.BoundStatement; import com.datastax.driver.core.BoundStatement;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Statement; import com.datastax.driver.core.Statement;
import java.util.stream.Stream;
public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, BoundStreamOperation<E>> { public final class BoundStreamOperation<E>
extends AbstractStreamOperation<E, BoundStreamOperation<E>> {
private final BoundStatement boundStatement; private final BoundStatement boundStatement;
private final AbstractStreamOperation<E, ?> delegate; private final AbstractStreamOperation<E, ?> delegate;
public BoundStreamOperation(BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) { public BoundStreamOperation(
BoundStatement boundStatement, AbstractStreamOperation<E, ?> operation) {
super(operation.sessionOps); super(operation.sessionOps);
this.boundStatement = boundStatement; this.boundStatement = boundStatement;
this.delegate = operation; this.delegate = operation;
@ -41,5 +42,4 @@ public final class BoundStreamOperation<E> extends AbstractStreamOperation<E, Bo
public Statement buildStatement() { public Statement buildStatement() {
return boundStatement; return boundStatement;
} }
} }


@ -20,7 +20,6 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select; import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Where; import com.datastax.driver.core.querybuilder.Select.Where;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter; import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
@ -74,9 +73,11 @@ public final class CountOperation extends AbstractFilterOperation<Long, CountOpe
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can count columns only in single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface()); "you can count columns only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
} }


@ -20,7 +20,6 @@ import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Delete; import com.datastax.driver.core.querybuilder.Delete;
import com.datastax.driver.core.querybuilder.Delete.Where; import com.datastax.driver.core.querybuilder.Delete.Where;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter; import net.helenus.core.Filter;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
@ -118,8 +117,11 @@ public final class DeleteOperation extends AbstractFilterOperation<ResultSet, De
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can delete rows only in single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface()); "you can delete rows only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
} }


@ -19,12 +19,13 @@ import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.Insert; import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import java.util.*;
import java.util.function.Function;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Getter; import net.helenus.core.Getter;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.reflect.DefaultPrimitiveTypes; import net.helenus.core.reflect.DefaultPrimitiveTypes;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.MappingUtil; import net.helenus.mapping.MappingUtil;
@ -33,14 +34,12 @@ import net.helenus.support.Fun;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
import java.util.*;
import java.util.function.Function;
public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> { public final class InsertOperation<T> extends AbstractOperation<T, InsertOperation<T>> {
private HelenusEntity entity; private HelenusEntity entity;
private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values = new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>(); private final List<Fun.Tuple2<HelenusPropertyNode, Object>> values =
new ArrayList<Fun.Tuple2<HelenusPropertyNode, Object>>();
private boolean ifNotExists; private boolean ifNotExists;
private Object pojo; private Object pojo;
@ -53,8 +52,12 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
this.ifNotExists = ifNotExists; this.ifNotExists = ifNotExists;
} }
public InsertOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, public InsertOperation(
Object pojo, Set<String> mutations, boolean ifNotExists) { AbstractSessionOperations sessionOperations,
HelenusEntity entity,
Object pojo,
Set<String> mutations,
boolean ifNotExists) {
super(sessionOperations); super(sessionOperations);
this.entity = entity; this.entity = entity;
@ -74,11 +77,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty()); HelenusPropertyNode node = new HelenusPropertyNode(prop, Optional.empty());
values.add(Fun.Tuple2.of(node, value)); values.add(Fun.Tuple2.of(node, value));
} }
} }
} }
} }
public InsertOperation<T> ifNotExists() { public InsertOperation<T> ifNotExists() {
@ -112,8 +112,7 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
values.forEach(t -> addPropertyNode(t._1)); values.forEach(t -> addPropertyNode(t._1));
if (values.isEmpty()) if (values.isEmpty()) return null;
return null;
if (entity == null) { if (entity == null) {
throw new HelenusMappingException("unknown entity"); throw new HelenusMappingException("unknown entity");
@ -125,7 +124,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
insert.ifNotExists(); insert.ifNotExists();
} }
values.forEach(t -> { values.forEach(
t -> {
insert.value(t._1.getColumnName(), t._2); insert.value(t._1.getColumnName(), t._2);
}); });
@ -157,7 +157,8 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
if (backingMap.containsKey(key)) { if (backingMap.containsKey(key)) {
// Some values may need to be converted (e.g. from String to Enum). This is done // Some values may need to be converted (e.g. from String to Enum). This is done
// within the BeanColumnValueProvider below. // within the BeanColumnValueProvider below.
Optional<Function<Object, Object>> converter = prop.getReadConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getReadConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
backingMap.put(key, converter.get().apply(backingMap.get(key))); backingMap.put(key, converter.get().apply(backingMap.get(key)));
} }
@ -203,8 +204,11 @@ public final class InsertOperation<T> extends AbstractOperation<T, InsertOperati
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can insert only single entity " + entity.getMappingInterface() throw new HelenusMappingException(
+ " or " + p.getEntity().getMappingInterface()); "you can insert only single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
} }
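
buildStatement() above folds each recorded (property, value) pair into a driver Insert. A hedged sketch of the equivalent QueryBuilder calls, with the table and column names invented for illustration:

import com.datastax.driver.core.querybuilder.Insert;
import com.datastax.driver.core.querybuilder.QueryBuilder;

final class InsertSketch {
  static Insert build() {
    Insert insert = QueryBuilder.insertInto("users"); // table name invented
    insert.value("id", 42);        // insert.value(name, value), as in the forEach above
    insert.value("name", "Alice");
    insert.ifNotExists();          // only when ifNotExists() was requested on the operation
    return insert;
  }
}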


@ -43,5 +43,4 @@ public final class PreparedOperation<E> {
public String toString() { public String toString() {
return preparedStatement.getQueryString(); return preparedStatement.getQueryString();
} }
} }


@ -23,7 +23,8 @@ public final class PreparedOptionalOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractOptionalOperation<E, ?> operation; private final AbstractOptionalOperation<E, ?> operation;
public PreparedOptionalOperation(PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) { public PreparedOptionalOperation(
PreparedStatement statement, AbstractOptionalOperation<E, ?> operation) {
this.preparedStatement = statement; this.preparedStatement = statement;
this.operation = operation; this.operation = operation;
} }
@ -43,5 +44,4 @@ public final class PreparedOptionalOperation<E> {
public String toString() { public String toString() {
return preparedStatement.getQueryString(); return preparedStatement.getQueryString();
} }
} }


@ -23,7 +23,8 @@ public final class PreparedStreamOperation<E> {
private final PreparedStatement preparedStatement; private final PreparedStatement preparedStatement;
private final AbstractStreamOperation<E, ?> operation; private final AbstractStreamOperation<E, ?> operation;
public PreparedStreamOperation(PreparedStatement statement, AbstractStreamOperation<E, ?> operation) { public PreparedStreamOperation(
PreparedStatement statement, AbstractStreamOperation<E, ?> operation) {
this.preparedStatement = statement; this.preparedStatement = statement;
this.operation = operation; this.operation = operation;
} }
@ -43,5 +44,4 @@ public final class PreparedStreamOperation<E> {
public String toString() { public String toString() {
return preparedStatement.getQueryString(); return preparedStatement.getQueryString();
} }
} }
View file
@ -15,13 +15,13 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import com.datastax.driver.core.ResultSet; public final class SelectFirstOperation<E>
import com.datastax.driver.core.querybuilder.BuiltStatement; extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperation<E, SelectFirstOperation<E>> {
private final SelectOperation<E> src; private final SelectOperation<E> src;
@ -46,5 +46,4 @@ public final class SelectFirstOperation<E> extends AbstractFilterOptionalOperati
public Optional<E> transform(ResultSet resultSet) { public Optional<E> transform(ResultSet resultSet) {
return src.transform(resultSet).findFirst(); return src.transform(resultSet).findFirst();
} }
} }
View file
@ -15,15 +15,13 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
public final class SelectFirstTransformingOperation<R, E> public final class SelectFirstTransformingOperation<R, E>
extends extends AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
AbstractFilterOptionalOperation<R, SelectFirstTransformingOperation<R, E>> {
private final SelectOperation<E> src; private final SelectOperation<E> src;
private final Function<E, R> fn; private final Function<E, R> fn;
@ -46,5 +44,4 @@ public final class SelectFirstTransformingOperation<R, E>
public Optional<R> transform(ResultSet resultSet) { public Optional<R> transform(ResultSet resultSet) {
return src.transform(resultSet).findFirst().map(fn); return src.transform(resultSet).findFirst().map(fn);
} }
} }
View file
@ -15,11 +15,6 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.Row; import com.datastax.driver.core.Row;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
@ -28,7 +23,10 @@ import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Select; import com.datastax.driver.core.querybuilder.Select;
import com.datastax.driver.core.querybuilder.Select.Selection; import com.datastax.driver.core.querybuilder.Select.Selection;
import com.datastax.driver.core.querybuilder.Select.Where; import com.datastax.driver.core.querybuilder.Select.Where;
import java.util.*;
import java.util.function.Function;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;
import net.helenus.core.*; import net.helenus.core.*;
import net.helenus.core.reflect.HelenusPropertyNode; import net.helenus.core.reflect.HelenusPropertyNode;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
@ -51,7 +49,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
public SelectOperation(AbstractSessionOperations sessionOperations) { public SelectOperation(AbstractSessionOperations sessionOperations) {
super(sessionOperations); super(sessionOperations);
this.rowMapper = new Function<Row, E>() { this.rowMapper =
new Function<Row, E>() {
@Override @Override
public E apply(Row source) { public E apply(Row source) {
@ -67,7 +66,6 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
return (E) Fun.ArrayTuple.of(arr); return (E) Fun.ArrayTuple.of(arr);
} }
}; };
} }
@ -75,23 +73,31 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
super(sessionOperations); super(sessionOperations);
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty())) entity
.getOrderedProperties()
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p)); .forEach(p -> this.props.add(p));
} }
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity, public SelectOperation(
AbstractSessionOperations sessionOperations,
HelenusEntity entity,
Function<Row, E> rowMapper) { Function<Row, E> rowMapper) {
super(sessionOperations); super(sessionOperations);
this.rowMapper = rowMapper; this.rowMapper = rowMapper;
entity.getOrderedProperties().stream().map(p -> new HelenusPropertyNode(p, Optional.empty())) entity
.getOrderedProperties()
.stream()
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
.forEach(p -> this.props.add(p)); .forEach(p -> this.props.add(p));
} }
public SelectOperation(AbstractSessionOperations sessionOperations, Function<Row, E> rowMapper, public SelectOperation(
AbstractSessionOperations sessionOperations,
Function<Row, E> rowMapper,
HelenusPropertyNode... props) { HelenusPropertyNode... props) {
super(sessionOperations); super(sessionOperations);
@ -107,8 +113,11 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
if (entity == null) { if (entity == null) {
entity = prop.getEntity(); entity = prop.getEntity();
} else if (entity != prop.getEntity()) { } else if (entity != prop.getEntity()) {
throw new HelenusMappingException("you can count records only from a single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface()); "you can count records only from a single entity "
+ entity.getMappingInterface()
+ " or "
+ prop.getEntity().getMappingInterface());
} }
} }
@ -128,11 +137,11 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
this.rowMapper = null; this.rowMapper = null;
return new SelectTransformingOperation<R, E>(this, (r) -> { return new SelectTransformingOperation<R, E>(
this,
(r) -> {
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity); Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
return (R) Helenus.map(entityClass, map); return (R) Helenus.map(entityClass, map);
}); });
} }
@ -182,8 +191,11 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
if (entity == null) { if (entity == null) {
entity = prop.getEntity(); entity = prop.getEntity();
} else if (entity != prop.getEntity()) { } else if (entity != prop.getEntity()) {
throw new HelenusMappingException("you can select columns only from a single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + prop.getEntity().getMappingInterface()); "you can select columns only from a single entity "
+ entity.getMappingInterface()
+ " or "
+ prop.getEntity().getMappingInterface());
} }
} }
@ -211,7 +223,8 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
} }
if (ifFilters != null && !ifFilters.isEmpty()) { if (ifFilters != null && !ifFilters.isEmpty()) {
logger.error("onlyIf conditions " + ifFilters + " would be ignored in the statement " + select); logger.error(
"onlyIf conditions " + ifFilters + " would be ignored in the statement " + select);
} }
if (allowFiltering) { if (allowFiltering) {
@ -227,16 +240,15 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
if (rowMapper != null) { if (rowMapper != null) {
return StreamSupport return StreamSupport.stream(
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false) Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false)
.map(rowMapper); .map(rowMapper);
} } else {
else {
return (Stream<E>) StreamSupport
.stream(Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED), false);
return (Stream<E>)
StreamSupport.stream(
Spliterators.spliteratorUnknownSize(resultSet.iterator(), Spliterator.ORDERED),
false);
} }
} }
@ -246,5 +258,4 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
} }
return ordering; return ordering;
} }
} }
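transform() above turns the driver's ResultSet iterator into a Stream via Spliterators; the same idiom in isolation, using only JDK types (the list of strings is a placeholder for the rows):

import java.util.Arrays;
import java.util.Iterator;
import java.util.Spliterator;
import java.util.Spliterators;
import java.util.stream.Stream;
import java.util.stream.StreamSupport;

public class IteratorToStreamSketch {
  public static void main(String[] args) {
    Iterator<String> rows = Arrays.asList("a", "b", "c").iterator();
    // Same idiom as SelectOperation.transform(): wrap an iterator of unknown
    // size into an ordered, sequential Stream, then map each element.
    Stream<String> mapped =
        StreamSupport.stream(
                Spliterators.spliteratorUnknownSize(rows, Spliterator.ORDERED), false)
            .map(String::toUpperCase);
    mapped.forEach(System.out::println); // A B C
  }
}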
View file
@ -15,15 +15,13 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
import java.util.function.Function; import java.util.function.Function;
import java.util.stream.Stream; import java.util.stream.Stream;
import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.BuiltStatement;
public final class SelectTransformingOperation<R, E> public final class SelectTransformingOperation<R, E>
extends extends AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
AbstractFilterStreamOperation<R, SelectTransformingOperation<R, E>> {
private final SelectOperation<E> src; private final SelectOperation<E> src;
private final Function<E, R> fn; private final Function<E, R> fn;
@ -46,5 +44,4 @@ public final class SelectTransformingOperation<R, E>
public Stream<R> transform(ResultSet resultSet) { public Stream<R> transform(ResultSet resultSet) {
return src.transform(resultSet).map(fn); return src.transform(resultSet).map(fn);
} }
} }
View file
@ -15,15 +15,13 @@
*/ */
package net.helenus.core.operation; package net.helenus.core.operation;
import java.util.*;
import java.util.function.Function;
import com.datastax.driver.core.ResultSet; import com.datastax.driver.core.ResultSet;
import com.datastax.driver.core.querybuilder.Assignment; import com.datastax.driver.core.querybuilder.Assignment;
import com.datastax.driver.core.querybuilder.BuiltStatement; import com.datastax.driver.core.querybuilder.BuiltStatement;
import com.datastax.driver.core.querybuilder.QueryBuilder; import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update; import com.datastax.driver.core.querybuilder.Update;
import java.util.*;
import java.util.function.Function;
import net.helenus.core.AbstractSessionOperations; import net.helenus.core.AbstractSessionOperations;
import net.helenus.core.Filter; import net.helenus.core.Filter;
import net.helenus.core.Getter; import net.helenus.core.Getter;
@ -47,7 +45,8 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
super(sessionOperations); super(sessionOperations);
} }
public UpdateOperation(AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) { public UpdateOperation(
AbstractSessionOperations sessionOperations, HelenusPropertyNode p, Object v) {
super(sessionOperations); super(sessionOperations);
Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty()); Object value = sessionOps.getValuePreparer().prepareColumnValue(v, p.getProperty());
@ -91,7 +90,6 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
addPropertyNode(p); addPropertyNode(p);
return this; return this;
} }
public <V> UpdateOperation decrement(Getter<V> counterGetter) { public <V> UpdateOperation decrement(Getter<V> counterGetter) {
@ -108,7 +106,6 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
addPropertyNode(p); addPropertyNode(p);
return this; return this;
} }
/* /*
@ -221,7 +218,8 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
Object valueObj = value; Object valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
List convertedList = (List) converter.get().apply(Immutables.listOf(value)); List convertedList = (List) converter.get().apply(Immutables.listOf(value));
valueObj = convertedList.get(0); valueObj = convertedList.get(0);
@ -236,7 +234,8 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
List valueObj = value; List valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
valueObj = (List) converter.get().apply(value); valueObj = (List) converter.get().apply(value);
} }
@ -313,7 +312,8 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Object valueObj = value; Object valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value)); Set convertedSet = (Set) converter.get().apply(Immutables.setOf(value));
valueObj = convertedSet.iterator().next(); valueObj = convertedSet.iterator().next();
@ -327,7 +327,8 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Set valueObj = value; Set valueObj = value;
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
valueObj = (Set) converter.get().apply(value); valueObj = (Set) converter.get().apply(value);
} }
@ -351,10 +352,11 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
Map<Object, Object> convertedMap = (Map<Object, Object>) converter.get() Map<Object, Object> convertedMap =
.apply(Immutables.mapOf(key, value)); (Map<Object, Object>) converter.get().apply(Immutables.mapOf(key, value));
for (Map.Entry<Object, Object> e : convertedMap.entrySet()) { for (Map.Entry<Object, Object> e : convertedMap.entrySet()) {
assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue())); assignments.add(QueryBuilder.put(p.getColumnName(), e.getKey(), e.getValue()));
} }
@ -374,7 +376,8 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter); HelenusPropertyNode p = MappingUtil.resolveMappingProperty(mapGetter);
HelenusProperty prop = p.getProperty(); HelenusProperty prop = p.getProperty();
Optional<Function<Object, Object>> converter = prop.getWriteConverter(sessionOps.getSessionRepository()); Optional<Function<Object, Object>> converter =
prop.getWriteConverter(sessionOps.getSessionRepository());
if (converter.isPresent()) { if (converter.isPresent()) {
Map convertedMap = (Map) converter.get().apply(map); Map convertedMap = (Map) converter.get().apply(map);
assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap)); assignments.add(QueryBuilder.putAll(p.getColumnName(), convertedMap));
@ -444,8 +447,11 @@ public final class UpdateOperation extends AbstractFilterOperation<ResultSet, Up
if (entity == null) { if (entity == null) {
entity = p.getEntity(); entity = p.getEntity();
} else if (entity != p.getEntity()) { } else if (entity != p.getEntity()) {
throw new HelenusMappingException("you can update columns only in single entity " throw new HelenusMappingException(
+ entity.getMappingInterface() + " or " + p.getEntity().getMappingInterface()); "you can update columns only in single entity "
+ entity.getMappingInterface()
+ " or "
+ p.getEntity().getMappingInterface());
} }
} }
} }
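The put/putAll hunks above feed converted values into the driver's QueryBuilder assignments; a standalone sketch of those two assignment forms (the table, column, and values are made up for illustration):

import com.datastax.driver.core.querybuilder.QueryBuilder;
import com.datastax.driver.core.querybuilder.Update;
import java.util.HashMap;
import java.util.Map;

public class MapUpdateSketch {
  public static void main(String[] args) {
    // Hypothetical table/column names; only the QueryBuilder calls mirror the diff.
    Update update = QueryBuilder.update("users");
    update.with(QueryBuilder.put("attributes", "theme", "dark")); // single entry, as in put(...)
    Map<String, String> more = new HashMap<>();
    more.put("lang", "en");
    update.with(QueryBuilder.putAll("attributes", more)); // whole map, as in putAll(...)
    update.where(QueryBuilder.eq("id", 42));
    System.out.println(update); // prints the generated CQL UPDATE statement
  }
}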
View file
@ -19,14 +19,20 @@ import java.util.HashMap;
import java.util.Map; import java.util.Map;
public enum DefaultPrimitiveTypes { public enum DefaultPrimitiveTypes {
BOOLEAN(boolean.class, false),
BOOLEAN(boolean.class, false), BYTE(byte.class, (byte) 0x0), CHAR(char.class, (char) 0x0), SHORT(short.class, BYTE(byte.class, (byte) 0x0),
(short) 0), INT(int.class, 0), LONG(long.class, 0L), FLOAT(float.class, 0.0f), DOUBLE(double.class, 0.0); CHAR(char.class, (char) 0x0),
SHORT(short.class, (short) 0),
INT(int.class, 0),
LONG(long.class, 0L),
FLOAT(float.class, 0.0f),
DOUBLE(double.class, 0.0);
private final Class<?> primitiveClass; private final Class<?> primitiveClass;
private final Object defaultValue; private final Object defaultValue;
private final static Map<Class<?>, DefaultPrimitiveTypes> map = new HashMap<Class<?>, DefaultPrimitiveTypes>(); private static final Map<Class<?>, DefaultPrimitiveTypes> map =
new HashMap<Class<?>, DefaultPrimitiveTypes>();
static { static {
for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) { for (DefaultPrimitiveTypes type : DefaultPrimitiveTypes.values()) {
@ -50,5 +56,4 @@ public enum DefaultPrimitiveTypes {
public Object getDefaultValue() { public Object getDefaultValue() {
return defaultValue; return defaultValue;
} }
} }
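The enum above keeps a reverse map from primitive class to constant, built once in a static initializer; the pattern reduced to a minimal sketch (the names here are illustrative, not the Helenus class):

import java.util.HashMap;
import java.util.Map;

enum PrimitiveDefaultsSketch {
  BOOLEAN(boolean.class, false),
  INT(int.class, 0),
  LONG(long.class, 0L);

  private final Class<?> primitiveClass;
  private final Object defaultValue;
  private static final Map<Class<?>, PrimitiveDefaultsSketch> map = new HashMap<>();

  static {
    // Enum constants are constructed before this block runs, so the
    // reverse index is populated exactly once, at class initialization.
    for (PrimitiveDefaultsSketch t : values()) {
      map.put(t.primitiveClass, t);
    }
  }

  PrimitiveDefaultsSketch(Class<?> primitiveClass, Object defaultValue) {
    this.primitiveClass = primitiveClass;
    this.defaultValue = defaultValue;
  }

  static Object defaultFor(Class<?> c) {
    PrimitiveDefaultsSketch t = map.get(c);
    return t == null ? null : t.defaultValue; // e.g. defaultFor(int.class) -> 0
  }
}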
View file
@ -7,5 +7,4 @@ public interface Drafted<T> extends MapExportable {
Set<String> mutated(); Set<String> mutated();
T build(); T build();
} }
View file
@ -25,5 +25,4 @@ public interface DslExportable {
HelenusEntity getHelenusMappingEntity(); HelenusEntity getHelenusMappingEntity();
HelenusPropertyNode getParentDslHelenusPropertyNode(); HelenusPropertyNode getParentDslHelenusPropertyNode();
} }
View file
@ -15,18 +15,13 @@
*/ */
package net.helenus.core.reflect; package net.helenus.core.reflect;
import java.lang.invoke.MethodHandle; import com.datastax.driver.core.*;
import java.lang.invoke.MethodHandles;
import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationHandler;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.HashMap; import java.util.HashMap;
import java.util.Map; import java.util.Map;
import java.util.Objects;
import java.util.Optional; import java.util.Optional;
import com.datastax.driver.core.*;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusMappingEntity; import net.helenus.mapping.HelenusMappingEntity;
@ -47,7 +42,11 @@ public class DslInvocationHandler<E> implements InvocationHandler {
private final Map<Method, Object> udtMap = new HashMap<Method, Object>(); private final Map<Method, Object> udtMap = new HashMap<Method, Object>();
private final Map<Method, Object> tupleMap = new HashMap<Method, Object>(); private final Map<Method, Object> tupleMap = new HashMap<Method, Object>();
public DslInvocationHandler(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata) { public DslInvocationHandler(
Class<E> iface,
ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
Metadata metadata) {
this.entity = new HelenusMappingEntity(iface, metadata); this.entity = new HelenusMappingEntity(iface, metadata);
this.parent = parent; this.parent = parent;
@ -62,8 +61,12 @@ public class DslInvocationHandler<E> implements InvocationHandler {
if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) { if (type instanceof UDTDataType && !UDTValue.class.isAssignableFrom(javaType)) {
Object childDsl = Helenus.dsl(javaType, classLoader, Object childDsl =
Optional.of(new HelenusPropertyNode(prop, parent)), metadata); Helenus.dsl(
javaType,
classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)),
metadata);
udtMap.put(prop.getGetterMethod(), childDsl); udtMap.put(prop.getGetterMethod(), childDsl);
} }
@ -71,16 +74,19 @@ public class DslInvocationHandler<E> implements InvocationHandler {
if (type instanceof DTDataType) { if (type instanceof DTDataType) {
DTDataType dataType = (DTDataType) type; DTDataType dataType = (DTDataType) type;
if (dataType.getDataType() instanceof TupleType && !TupleValue.class.isAssignableFrom(javaType)) { if (dataType.getDataType() instanceof TupleType
&& !TupleValue.class.isAssignableFrom(javaType)) {
Object childDsl = Helenus.dsl(javaType, classLoader, Object childDsl =
Optional.of(new HelenusPropertyNode(prop, parent)), metadata); Helenus.dsl(
javaType,
classLoader,
Optional.of(new HelenusPropertyNode(prop, parent)),
metadata);
tupleMap.put(prop.getGetterMethod(), childDsl); tupleMap.put(prop.getGetterMethod(), childDsl);
} }
} }
} }
} }
} }
@ -121,7 +127,6 @@ public class DslInvocationHandler<E> implements InvocationHandler {
return parent.get(); return parent.get();
} }
HelenusProperty prop = map.get(method); HelenusProperty prop = map.get(method);
if (prop == null) { if (prop == null) {
prop = entity.getProperty(methodName); prop = entity.getProperty(methodName);
@ -138,7 +143,6 @@ public class DslInvocationHandler<E> implements InvocationHandler {
if (childDsl != null) { if (childDsl != null) {
return childDsl; return childDsl;
} }
} }
if (type instanceof DTDataType) { if (type instanceof DTDataType) {
@ -146,9 +150,7 @@ public class DslInvocationHandler<E> implements InvocationHandler {
DataType dt = dataType.getDataType(); DataType dt = dataType.getDataType();
switch (dt.getName()) { switch (dt.getName()) {
case TUPLE:
case TUPLE :
Object childDsl = tupleMap.get(method); Object childDsl = tupleMap.get(method);
if (childDsl != null) { if (childDsl != null) {
@ -157,20 +159,18 @@ public class DslInvocationHandler<E> implements InvocationHandler {
break; break;
case SET : case SET:
return new SetDsl(new HelenusPropertyNode(prop, parent)); return new SetDsl(new HelenusPropertyNode(prop, parent));
case LIST : case LIST:
return new ListDsl(new HelenusPropertyNode(prop, parent)); return new ListDsl(new HelenusPropertyNode(prop, parent));
case MAP : case MAP:
return new MapDsl(new HelenusPropertyNode(prop, parent)); return new MapDsl(new HelenusPropertyNode(prop, parent));
default : default:
break; break;
} }
} }
throw new DslPropertyException(new HelenusPropertyNode(prop, parent)); throw new DslPropertyException(new HelenusPropertyNode(prop, parent));
@ -178,5 +178,4 @@ public class DslInvocationHandler<E> implements InvocationHandler {
throw new HelenusException("invalid method call " + method); throw new HelenusException("invalid method call " + method);
} }
} }
View file
@ -19,9 +19,7 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.*; import net.helenus.mapping.*;
import net.helenus.mapping.type.AbstractDataType; import net.helenus.mapping.type.AbstractDataType;
@ -61,7 +59,9 @@ public final class HelenusNamedProperty implements HelenusProperty {
} }
@Override @Override
public boolean caseSensitiveIndex() { return false; } public boolean caseSensitiveIndex() {
return false;
}
@Override @Override
public Class<?> getJavaType() { public Class<?> getJavaType() {
View file
@ -17,7 +17,6 @@ package net.helenus.core.reflect;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import net.helenus.mapping.HelenusEntity; import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
@ -102,7 +101,5 @@ public final class HelenusPropertyNode implements Iterable<HelenusProperty> {
next = node.next; next = node.next;
return node.prop; return node.prop;
} }
} }
} }
View file
@ -16,7 +16,6 @@
package net.helenus.core.reflect; package net.helenus.core.reflect;
import java.util.*; import java.util.*;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException; import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
View file
@ -19,7 +19,6 @@ import java.util.Collection;
import java.util.Map; import java.util.Map;
import java.util.Optional; import java.util.Optional;
import java.util.Set; import java.util.Set;
import net.helenus.mapping.HelenusProperty; import net.helenus.mapping.HelenusProperty;
import net.helenus.support.DslPropertyException; import net.helenus.support.DslPropertyException;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
@ -114,5 +113,4 @@ public final class MapDsl<K, V> implements Map<K, V> {
public String toString() { public String toString() {
return "MapDsl"; return "MapDsl";
} }
} }
View file
@ -22,5 +22,4 @@ public interface MapExportable {
public static final String TO_MAP_METHOD = "toMap"; public static final String TO_MAP_METHOD = "toMap";
Map<String, Object> toMap(); Map<String, Object> toMap();
} }
View file
@ -16,7 +16,6 @@
package net.helenus.core.reflect; package net.helenus.core.reflect;
import java.io.Serializable; import java.io.Serializable;
import java.lang.invoke.MethodHandle;
import java.lang.invoke.MethodHandles; import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationHandler; import java.lang.reflect.InvocationHandler;
@ -24,7 +23,6 @@ import java.lang.reflect.Method;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.Collections; import java.util.Collections;
import java.util.Map; import java.util.Map;
import net.helenus.mapping.annotation.Transient; import net.helenus.mapping.annotation.Transient;
import net.helenus.support.HelenusException; import net.helenus.support.HelenusException;
@ -45,13 +43,15 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
// https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/ // https://zeroturnaround.com/rebellabs/recognize-and-conquer-java-proxies-default-methods-and-method-handles/
// First, we need an instance of a private inner-class found in MethodHandles. // First, we need an instance of a private inner-class found in MethodHandles.
Constructor<MethodHandles.Lookup> constructor = MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, Constructor<MethodHandles.Lookup> constructor =
int.class); MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
constructor.setAccessible(true); constructor.setAccessible(true);
// Now we need to look up the interface's default method and invoke it via a special (non-virtual) call. // Now we need to look up the interface's default method and invoke it via a special (non-virtual) call.
final Class<?> declaringClass = method.getDeclaringClass(); final Class<?> declaringClass = method.getDeclaringClass();
Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE) Object result =
constructor
.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
.unreflectSpecial(method, declaringClass) .unreflectSpecial(method, declaringClass)
.bindTo(proxy) .bindTo(proxy)
.invokeWithArguments(args); .invokeWithArguments(args);
@ -116,10 +116,8 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
} }
return type.getDefaultValue(); return type.getDefaultValue();
} }
} }
return value; return value;
} }
} }
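The comment in this hunk links to a write-up of the Lookup-constructor trick for calling a default method from inside a proxy without infinite recursion; a runnable distillation of the same mechanism (works on Java 8; newer JDKs may require --add-opens for the setAccessible call):

import java.lang.invoke.MethodHandles;
import java.lang.reflect.Constructor;
import java.lang.reflect.Proxy;

public class DefaultMethodProxySketch {
  interface Greeter {
    default String greet(String name) {
      return "Hello, " + name;
    }
  }

  public static void main(String[] args) {
    Greeter proxy =
        (Greeter)
            Proxy.newProxyInstance(
                Greeter.class.getClassLoader(),
                new Class<?>[] {Greeter.class},
                (p, method, methodArgs) -> {
                  // Same trick as the diff: grab Lookup's private constructor so
                  // unreflectSpecial can make a non-virtual call to the default
                  // method, bypassing the proxy (which would otherwise recurse).
                  Constructor<MethodHandles.Lookup> ctor =
                      MethodHandles.Lookup.class.getDeclaredConstructor(Class.class, int.class);
                  ctor.setAccessible(true);
                  Class<?> declaringClass = method.getDeclaringClass();
                  return ctor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
                      .unreflectSpecial(method, declaringClass)
                      .bindTo(p)
                      .invokeWithArguments(methodArgs);
                });
    System.out.println(proxy.greet("world")); // Hello, world
  }
}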
View file
@ -15,22 +15,25 @@
*/ */
package net.helenus.core.reflect; package net.helenus.core.reflect;
import com.datastax.driver.core.Metadata;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.Optional; import java.util.Optional;
import com.datastax.driver.core.Metadata;
import net.helenus.core.DslInstantiator; import net.helenus.core.DslInstantiator;
public enum ReflectionDslInstantiator implements DslInstantiator { public enum ReflectionDslInstantiator implements DslInstantiator {
INSTANCE; INSTANCE;
@Override @Override
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public <E> E instantiate(Class<E> iface, ClassLoader classLoader, Optional<HelenusPropertyNode> parent, Metadata metadata) { public <E> E instantiate(
DslInvocationHandler<E> handler = new DslInvocationHandler<E>(iface, classLoader, parent, metadata); Class<E> iface,
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, DslExportable.class}, handler); ClassLoader classLoader,
Optional<HelenusPropertyNode> parent,
Metadata metadata) {
DslInvocationHandler<E> handler =
new DslInvocationHandler<E>(iface, classLoader, parent, metadata);
E proxy =
(E) Proxy.newProxyInstance(classLoader, new Class[] {iface, DslExportable.class}, handler);
return proxy; return proxy;
} }
} }
View file
@ -19,8 +19,7 @@ import net.helenus.support.HelenusMappingException;
public final class ReflectionInstantiator { public final class ReflectionInstantiator {
private ReflectionInstantiator() { private ReflectionInstantiator() {}
}
public static <T> T instantiateClass(Class<T> clazz) { public static <T> T instantiateClass(Class<T> clazz) {
@ -29,7 +28,5 @@ public final class ReflectionInstantiator {
} catch (InstantiationException | IllegalAccessException e) { } catch (InstantiationException | IllegalAccessException e) {
throw new HelenusMappingException("invalid class " + clazz, e); throw new HelenusMappingException("invalid class " + clazz, e);
} }
} }
} }
View file
@ -17,11 +17,9 @@ package net.helenus.core.reflect;
import java.lang.reflect.Proxy; import java.lang.reflect.Proxy;
import java.util.Map; import java.util.Map;
import net.helenus.core.MapperInstantiator; import net.helenus.core.MapperInstantiator;
public enum ReflectionMapperInstantiator implements MapperInstantiator { public enum ReflectionMapperInstantiator implements MapperInstantiator {
INSTANCE; INSTANCE;
@Override @Override
@ -29,9 +27,8 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) { public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src); MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class}, handler); E proxy =
(E) Proxy.newProxyInstance(classLoader, new Class[] {iface, MapExportable.class}, handler);
return proxy; return proxy;
} }
} }
View file
@ -18,7 +18,6 @@ package net.helenus.core.reflect;
import java.util.Collection; import java.util.Collection;
import java.util.Iterator; import java.util.Iterator;
import java.util.Set; import java.util.Set;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
public final class SetDsl<V> implements Set<V> { public final class SetDsl<V> implements Set<V> {
View file
@ -16,7 +16,6 @@
package net.helenus.mapping; package net.helenus.mapping;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import net.helenus.mapping.annotation.ClusteringColumn; import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.Column; import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.PartitionKey; import net.helenus.mapping.annotation.PartitionKey;
@ -103,15 +102,21 @@ public final class ColumnInformation {
private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) { private void ensureSingleColumnType(ColumnType columnTypeLocal, Method getter) {
if (columnTypeLocal != ColumnType.COLUMN) { if (columnTypeLocal != ColumnType.COLUMN) {
throw new HelenusMappingException("property can be annotated only by a single column type " + getter); throw new HelenusMappingException(
"property can be annotated only by a single column type " + getter);
} }
} }
@Override @Override
public String toString() { public String toString() {
return "ColumnInformation [columnName=" + columnName + ", columnType=" + columnType + ", ordinal=" + ordinal return "ColumnInformation [columnName="
+ ", ordering=" + ordering + "]"; + columnName
+ ", columnType="
+ columnType
+ ", ordinal="
+ ordinal
+ ", ordering="
+ ordering
+ "]";
} }
} }
View file
@ -16,5 +16,8 @@
package net.helenus.mapping; package net.helenus.mapping;
public enum ColumnType { public enum ColumnType {
PARTITION_KEY, CLUSTERING_COLUMN, STATIC_COLUMN, COLUMN; PARTITION_KEY,
CLUSTERING_COLUMN,
STATIC_COLUMN,
COLUMN;
} }
View file
@ -30,5 +30,4 @@ public interface HelenusEntity {
Collection<HelenusProperty> getOrderedProperties(); Collection<HelenusProperty> getOrderedProperties();
HelenusProperty getProperty(String name); HelenusProperty getProperty(String name);
} }
View file
@ -16,5 +16,7 @@
package net.helenus.mapping; package net.helenus.mapping;
public enum HelenusEntityType { public enum HelenusEntityType {
TABLE, TUPLE, UDT; TABLE,
TUPLE,
UDT;
} }
View file
@ -15,15 +15,11 @@
*/ */
package net.helenus.mapping; package net.helenus.mapping;
import java.lang.reflect.Method;
import java.util.*;
import com.datastax.driver.core.*; import com.datastax.driver.core.*;
import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableMap;
import java.lang.reflect.Method;
import com.google.common.collect.ImmutableSet; import java.util.*;
import com.google.common.reflect.TypeToken;
import net.helenus.config.HelenusSettings; import net.helenus.config.HelenusSettings;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.annotation.Cacheable; import net.helenus.core.annotation.Cacheable;
@ -56,7 +52,6 @@ public final class HelenusMappingEntity implements HelenusEntity {
HelenusSettings settings = Helenus.settings(); HelenusSettings settings = Helenus.settings();
List<Method> methods = new ArrayList<Method>(); List<Method> methods = new ArrayList<Method>();
methods.addAll(Arrays.asList(iface.getDeclaredMethods())); methods.addAll(Arrays.asList(iface.getDeclaredMethods()));
@ -80,9 +75,7 @@ public final class HelenusMappingEntity implements HelenusEntity {
propsBuilder.put(prop.getPropertyName(), prop); propsBuilder.put(prop.getPropertyName(), prop);
propsLocal.add(prop); propsLocal.add(prop);
} }
} }
} }
this.methods = methodsBuilder.build(); this.methods = methodsBuilder.build();
@ -134,19 +127,17 @@ public final class HelenusMappingEntity implements HelenusEntity {
private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) { private static IdentityName resolveName(Class<?> iface, HelenusEntityType type) {
switch (type) { switch (type) {
case TABLE:
case TABLE :
return MappingUtil.getTableName(iface, true); return MappingUtil.getTableName(iface, true);
case TUPLE : case TUPLE:
return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false); return IdentityName.of(MappingUtil.getDefaultEntityName(iface), false);
case UDT : case UDT:
return MappingUtil.getUserDefinedTypeName(iface, true); return MappingUtil.getUserDefinedTypeName(iface, true);
} }
throw new HelenusMappingException("invalid entity type " + type + " in " + type); throw new HelenusMappingException("invalid entity type " + type + " in " + type);
} }
private static HelenusEntityType autoDetectType(Class<?> iface) { private static HelenusEntityType autoDetectType(Class<?> iface) {
@ -155,35 +146,30 @@ public final class HelenusMappingEntity implements HelenusEntity {
if (null != iface.getDeclaredAnnotation(Table.class)) { if (null != iface.getDeclaredAnnotation(Table.class)) {
return HelenusEntityType.TABLE; return HelenusEntityType.TABLE;
} } else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
else if (null != iface.getDeclaredAnnotation(Tuple.class)) {
return HelenusEntityType.TUPLE; return HelenusEntityType.TUPLE;
} } else if (null != iface.getDeclaredAnnotation(UDT.class)) {
else if (null != iface.getDeclaredAnnotation(UDT.class)) {
return HelenusEntityType.UDT; return HelenusEntityType.UDT;
} }
throw new HelenusMappingException("entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface); throw new HelenusMappingException(
"entity must be annotated by @Table or @Tuple or @UserDefinedType " + iface);
} }
private void validateOrdinals() { private void validateOrdinals() {
switch (getType()) { switch (getType()) {
case TABLE:
case TABLE :
validateOrdinalsForTable(); validateOrdinalsForTable();
break; break;
case TUPLE : case TUPLE:
validateOrdinalsInTuple(); validateOrdinalsInTuple();
break; break;
default : default:
break; break;
} }
} }
private void validateOrdinalsForTable() { private void validateOrdinalsForTable() {
@ -198,51 +184,61 @@ public final class HelenusMappingEntity implements HelenusEntity {
int ordinal = prop.getOrdinal(); int ordinal = prop.getOrdinal();
switch (type) { switch (type) {
case PARTITION_KEY:
case PARTITION_KEY :
if (partitionKeys.get(ordinal)) { if (partitionKeys.get(ordinal)) {
throw new HelenusMappingException( throw new HelenusMappingException(
"detected two or more partition key columns with the same ordinal " + ordinal + " in " "detected two or more partition key columns with the same ordinal "
+ ordinal
+ " in "
+ prop.getEntity()); + prop.getEntity());
} }
partitionKeys.set(ordinal); partitionKeys.set(ordinal);
break; break;
case CLUSTERING_COLUMN : case CLUSTERING_COLUMN:
if (clusteringColumns.get(ordinal)) { if (clusteringColumns.get(ordinal)) {
throw new HelenusMappingException("detected two or clustering columns with the same ordinal " throw new HelenusMappingException(
+ ordinal + " in " + prop.getEntity()); "detected two or clustering columns with the same ordinal "
+ ordinal
+ " in "
+ prop.getEntity());
} }
clusteringColumns.set(ordinal); clusteringColumns.set(ordinal);
break; break;
default : default:
break; break;
} }
} }
} }
private void validateOrdinalsInTuple() { private void validateOrdinalsInTuple() {
boolean[] ordinals = new boolean[props.size()]; boolean[] ordinals = new boolean[props.size()];
getOrderedProperties().forEach(p -> { getOrderedProperties()
.forEach(
p -> {
int ordinal = p.getOrdinal(); int ordinal = p.getOrdinal();
if (ordinal < 0 || ordinal >= ordinals.length) { if (ordinal < 0 || ordinal >= ordinals.length) {
throw new HelenusMappingException("invalid ordinal " + ordinal + " found for property " throw new HelenusMappingException(
+ p.getPropertyName() + " in " + p.getEntity()); "invalid ordinal "
+ ordinal
+ " found for property "
+ p.getPropertyName()
+ " in "
+ p.getEntity());
} }
if (ordinals[ordinal]) { if (ordinals[ordinal]) {
throw new HelenusMappingException( throw new HelenusMappingException(
"detected two or more properties with the same ordinal " + ordinal + " in " + p.getEntity()); "detected two or more properties with the same ordinal "
+ ordinal
+ " in "
+ p.getEntity());
} }
ordinals[ordinal] = true; ordinals[ordinal] = true;
}); });
for (int i = 0; i != ordinals.length; ++i) { for (int i = 0; i != ordinals.length; ++i) {
@ -250,15 +246,18 @@ public final class HelenusMappingEntity implements HelenusEntity {
throw new HelenusMappingException("detected absent ordinal " + i + " in " + this); throw new HelenusMappingException("detected absent ordinal " + i + " in " + this);
} }
} }
} }
@Override @Override
public String toString() { public String toString() {
StringBuilder str = new StringBuilder(); StringBuilder str = new StringBuilder();
str.append(iface.getSimpleName()).append("(").append(name.getName()).append(") ") str.append(iface.getSimpleName())
.append(type.name().toLowerCase()).append(":\n"); .append("(")
.append(name.getName())
.append(") ")
.append(type.name().toLowerCase())
.append(":\n");
for (HelenusProperty prop : getOrderedProperties()) { for (HelenusProperty prop : getOrderedProperties()) {
str.append(prop.toString()); str.append(prop.toString());
@ -266,5 +265,4 @@ public final class HelenusMappingEntity implements HelenusEntity {
} }
return str.toString(); return str.toString();
} }
} }
View file
@ -15,15 +15,13 @@
*/ */
package net.helenus.mapping; package net.helenus.mapping;
import com.datastax.driver.core.Metadata;
import java.lang.annotation.Annotation; import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.lang.reflect.Type; import java.lang.reflect.Type;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import com.datastax.driver.core.Metadata;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.javatype.AbstractJavaType; import net.helenus.mapping.javatype.AbstractJavaType;
import net.helenus.mapping.javatype.MappingJavaTypes; import net.helenus.mapping.javatype.MappingJavaTypes;
@ -64,8 +62,9 @@ public final class HelenusMappingProperty implements HelenusProperty {
this.javaType = getter.getReturnType(); this.javaType = getter.getReturnType();
this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType); this.abstractJavaType = MappingJavaTypes.resolveJavaType(this.javaType);
this.dataType = abstractJavaType.resolveDataType(this.getter, this.genericJavaType, this.dataType =
this.columnInfo.getColumnType(), metadata); abstractJavaType.resolveDataType(
this.getter, this.genericJavaType, this.columnInfo.getColumnType(), metadata);
this.validators = MappingUtil.getValidators(getter); this.validators = MappingUtil.getValidators(getter);
} }
@ -169,14 +168,13 @@ public final class HelenusMappingProperty implements HelenusProperty {
ColumnType type = this.getColumnType(); ColumnType type = this.getColumnType();
switch (type) { switch (type) {
case PARTITION_KEY:
case PARTITION_KEY :
str.append("partition_key["); str.append("partition_key[");
str.append(this.getOrdinal()); str.append(this.getOrdinal());
str.append("] "); str.append("] ");
break; break;
case CLUSTERING_COLUMN : case CLUSTERING_COLUMN:
str.append("clustering_column["); str.append("clustering_column[");
str.append(this.getOrdinal()); str.append(this.getOrdinal());
str.append("] "); str.append("] ");
@ -186,13 +184,12 @@ public final class HelenusMappingProperty implements HelenusProperty {
} }
break; break;
case STATIC_COLUMN : case STATIC_COLUMN:
str.append("static "); str.append("static ");
break; break;
case COLUMN : case COLUMN:
break; break;
} }
Optional<IdentityName> idx = this.getIndexName(); Optional<IdentityName> idx = this.getIndexName();
@ -202,5 +199,4 @@ public final class HelenusMappingProperty implements HelenusProperty {
return str.toString(); return str.toString();
} }
} }
View file
@ -19,9 +19,7 @@ import java.lang.annotation.Annotation;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import java.util.Optional; import java.util.Optional;
import java.util.function.Function; import java.util.function.Function;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.SessionRepository; import net.helenus.core.SessionRepository;
import net.helenus.mapping.type.AbstractDataType; import net.helenus.mapping.type.AbstractDataType;
@ -54,5 +52,4 @@ public interface HelenusProperty {
Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository); Optional<Function<Object, Object>> getWriteConverter(SessionRepository repository);
ConstraintValidator<? extends Annotation, ?>[] getValidators(); ConstraintValidator<? extends Annotation, ?>[] getValidators();
} }
View file
@ -56,5 +56,4 @@ public final class IdentityName {
public String toString() { public String toString() {
return toCql(); return toCql();
} }
} }
View file
@ -20,10 +20,8 @@ import java.lang.reflect.Method;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Optional; import java.util.Optional;
import javax.validation.Constraint; import javax.validation.Constraint;
import javax.validation.ConstraintValidator; import javax.validation.ConstraintValidator;
import net.helenus.core.Getter; import net.helenus.core.Getter;
import net.helenus.core.Helenus; import net.helenus.core.Helenus;
import net.helenus.core.reflect.*; import net.helenus.core.reflect.*;
@ -37,10 +35,10 @@ import net.helenus.support.HelenusMappingException;
public final class MappingUtil { public final class MappingUtil {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS = new ConstraintValidator[0]; public static final ConstraintValidator<? extends Annotation, ?>[] EMPTY_VALIDATORS =
new ConstraintValidator[0];
private MappingUtil() { private MappingUtil() {}
}
public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) { public static ConstraintValidator<? extends Annotation, ?>[] getValidators(Method getterMethod) {
@ -55,9 +53,7 @@ public final class MappingUtil {
for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) { for (Annotation possibleConstraint : annotationType.getDeclaredAnnotations()) {
list = addValidators(possibleConstraint, list); list = addValidators(possibleConstraint, list);
} }
} }
if (list == null) { if (list == null) {
@ -67,8 +63,8 @@ public final class MappingUtil {
} }
} }
private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(Annotation constraintAnnotation, private static List<ConstraintValidator<? extends Annotation, ?>> addValidators(
List<ConstraintValidator<? extends Annotation, ?>> list) { Annotation constraintAnnotation, List<ConstraintValidator<? extends Annotation, ?>> list) {
Class<? extends Annotation> annotationType = constraintAnnotation.annotationType(); Class<? extends Annotation> annotationType = constraintAnnotation.annotationType();
@ -80,8 +76,8 @@ public final class MappingUtil {
for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) { for (Class<? extends ConstraintValidator<?, ?>> clazz : constraint.validatedBy()) {
ConstraintValidator<? extends Annotation, ?> validator = ReflectionInstantiator ConstraintValidator<? extends Annotation, ?> validator =
.instantiateClass(clazz); ReflectionInstantiator.instantiateClass(clazz);
((ConstraintValidator) validator).initialize(constraintAnnotation); ((ConstraintValidator) validator).initialize(constraintAnnotation);
@ -90,15 +86,11 @@ public final class MappingUtil {
} }
list.add(validator); list.add(validator);
} }
} }
} }
return list; return list;
} }
public static Optional<IdentityName> getIndexName(Method getterMethod) { public static Optional<IdentityName> getIndexName(Method getterMethod) {
@ -115,10 +107,11 @@ public final class MappingUtil {
if (indexName == null || indexName.isEmpty()) { if (indexName == null || indexName.isEmpty()) {
indexName = getDefaultColumnName(getterMethod); indexName = getDefaultColumnName(getterMethod);
} }
} }
return indexName != null ? Optional.of(new IdentityName(indexName, forceQuote)) : Optional.empty(); return indexName != null
? Optional.of(new IdentityName(indexName, forceQuote))
: Optional.empty();
} }
public static boolean caseSensitiveIndex(Method getterMethod) { public static boolean caseSensitiveIndex(Method getterMethod) {
@ -156,7 +149,6 @@ public final class MappingUtil {
} }
return new IdentityName(userTypeName, forceQuote); return new IdentityName(userTypeName, forceQuote);
} }
if (required) { if (required) {
@ -164,7 +156,6 @@ public final class MappingUtil {
} }
return null; return null;
} }
public static boolean isTuple(Class<?> iface) { public static boolean isTuple(Class<?> iface) {
@ -172,7 +163,6 @@ public final class MappingUtil {
Tuple tuple = iface.getDeclaredAnnotation(Tuple.class); Tuple tuple = iface.getDeclaredAnnotation(Tuple.class);
return tuple != null; return tuple != null;
} }
public static boolean isUDT(Class<?> iface) { public static boolean isUDT(Class<?> iface) {
@ -180,7 +170,6 @@ public final class MappingUtil {
UDT udt = iface.getDeclaredAnnotation(UDT.class); UDT udt = iface.getDeclaredAnnotation(UDT.class);
return udt != null; return udt != null;
} }
public static IdentityName getTableName(Class<?> iface, boolean required) { public static IdentityName getTableName(Class<?> iface, boolean required) {
@ -232,15 +221,13 @@ public final class MappingUtil {
continue; continue;
} }
if (iface.getDeclaredAnnotation(Table.class) != null || iface.getDeclaredAnnotation(UDT.class) != null if (iface.getDeclaredAnnotation(Table.class) != null
|| iface.getDeclaredAnnotation(UDT.class) != null
|| iface.getDeclaredAnnotation(Tuple.class) != null) { || iface.getDeclaredAnnotation(Tuple.class) != null) {
break; break;
} }
} }
} }
if (iface == null) { if (iface == null) {
@ -248,7 +235,6 @@ public final class MappingUtil {
} }
return iface; return iface;
} }
public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) { public static HelenusPropertyNode resolveMappingProperty(Getter<?> getter) {
@ -259,19 +245,13 @@ public final class MappingUtil {
if (childDsl instanceof DslExportable) { if (childDsl instanceof DslExportable) {
DslExportable e = (DslExportable) childDsl; DslExportable e = (DslExportable) childDsl;
return e.getParentDslHelenusPropertyNode(); return e.getParentDslHelenusPropertyNode();
} } else if (childDsl instanceof MapDsl) {
else if (childDsl instanceof MapDsl) {
MapDsl mapDsl = (MapDsl) childDsl; MapDsl mapDsl = (MapDsl) childDsl;
return mapDsl.getParent(); return mapDsl.getParent();
} } else if (childDsl instanceof ListDsl) {
else if (childDsl instanceof ListDsl) {
ListDsl listDsl = (ListDsl) childDsl; ListDsl listDsl = (ListDsl) childDsl;
return listDsl.getParent(); return listDsl.getParent();
} } else if (childDsl instanceof SetDsl) {
else if (childDsl instanceof SetDsl) {
SetDsl setDsl = (SetDsl) childDsl; SetDsl setDsl = (SetDsl) childDsl;
return setDsl.getParent(); return setDsl.getParent();
} }
@ -281,7 +261,5 @@ public final class MappingUtil {
} catch (DslPropertyException e) { } catch (DslPropertyException e) {
return e.getPropertyNode(); return e.getPropertyNode();
} }
} }
} }
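resolveMappingProperty above relies on the dsl proxy throwing DslPropertyException as control flow to discover which property a getter touched; the mechanism in miniature, with all names illustrative rather than Helenus API:

import java.lang.reflect.Proxy;
import java.util.function.Function;

public class PropertyCaptureSketch {
  // Carrier exception used purely for control flow, like DslPropertyException.
  static class PropertyException extends RuntimeException {
    final String name;
    PropertyException(String name) {
      this.name = name;
    }
  }

  interface Person {
    String firstName();
  }

  @SuppressWarnings("unchecked")
  static <T> T dsl(Class<T> iface) {
    // Every method call on the proxy throws, carrying the method name out.
    return (T)
        Proxy.newProxyInstance(
            iface.getClassLoader(),
            new Class<?>[] {iface},
            (proxy, method, args) -> {
              throw new PropertyException(method.getName());
            });
  }

  static <T> String resolve(Class<T> iface, Function<T, ?> getter) {
    try {
      getter.apply(dsl(iface));
      throw new IllegalStateException("getter did not touch a property");
    } catch (PropertyException e) {
      return e.name;
    }
  }

  public static void main(String[] args) {
    System.out.println(resolve(Person.class, Person::firstName)); // firstName
  }
}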
View file
@ -18,7 +18,6 @@ package net.helenus.mapping;
import net.helenus.support.HelenusMappingException; import net.helenus.support.HelenusMappingException;
public enum OrderingDirection { public enum OrderingDirection {
ASC("ASC"), ASC("ASC"),
DESC("DESC"); DESC("DESC");
@ -37,13 +36,10 @@ public enum OrderingDirection {
if (ASC.cql.equalsIgnoreCase(name)) { if (ASC.cql.equalsIgnoreCase(name)) {
return ASC; return ASC;
} } else if (DESC.cql.equalsIgnoreCase(name)) {
else if (DESC.cql.equalsIgnoreCase(name)) {
return DESC; return DESC;
} }
throw new HelenusMappingException("invalid ordering direction name " + name); throw new HelenusMappingException("invalid ordering direction name " + name);
} }
} }
View file
@ -18,12 +18,12 @@ package net.helenus.mapping;
import java.util.Comparator; import java.util.Comparator;
public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> { public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty> {
INSTANCE; INSTANCE;
public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) { public int compare(HelenusProperty thisVal, HelenusProperty anotherVal) {
int c = Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal()); int c =
Integer.compare(thisVal.getColumnType().ordinal(), anotherVal.getColumnType().ordinal());
if (c == 0) { if (c == 0) {
c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal()); c = Integer.compare(thisVal.getOrdinal(), anotherVal.getOrdinal());
@ -31,5 +31,4 @@ public enum TypeAndOrdinalColumnComparator implements Comparator<HelenusProperty
return c; return c;
} }
} }
View file
@ -19,43 +19,35 @@ import java.lang.annotation.ElementType;
import java.lang.annotation.Retention; import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy; import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target; import java.lang.annotation.Target;
import net.helenus.mapping.OrderingDirection; import net.helenus.mapping.OrderingDirection;
/** /**
* ClusteringColumn is the family column in the legacy Cassandra API. * ClusteringColumn is the family column in the legacy Cassandra API.
* *
* The purpose of this column is to have an additional dimension in the table. * <p>The purpose of this column is to have an additional dimension in the table. Both @PartitionKey
* Both @PartitionKey and @ClusteringColumn together are parts of the primary * and @ClusteringColumn together are parts of the primary key of the table. The primary difference
* key of the table. The primary difference between them is that the first one * between them is that the first one is used for routing purposes in order to locate a data node
* is used for routing purposes in order to locate a data node in the cluster, * in the cluster, whereas the second one is used inside the node to locate a piece of data on
* whereas the second one is used inside the node to locate a piece of data on * a concrete machine.
* a concrete machine. *
* *
* ClusteringColumn can be represented as a Key in a SortedMap that is fully stored * <p>ClusteringColumn can be represented as a Key in a SortedMap that is fully stored in a single node.
* in a single node. All developers must be careful when selecting fields for * All developers must be careful when selecting fields for clustering columns, because all data
* clustering columns, because all data inside this SortedMap must fit into one * inside this SortedMap must fit into one node.
* node.
* *
* ClusteringColumn can have more than one part and the order of parts is * <p>ClusteringColumn can have more than one part and the order of parts is important. This order
* important. This order defines the way Cassandra joins the parts and * defines the way Cassandra joins the parts and influences data retrieval operations. Each
* influences data retrieval operations. Each part can have an ordering property * part can have an ordering property that defines the default ascending or descending order of data. In
* that defines the default ascending or descending order of data. In case of two * case of two or more parts in select queries the developer needs to keep a consistent order of all
* or more parts in select queries the developer needs to keep a consistent order * parts as they are defined in the table.
* of all parts as they are defined in the table.
*
* For example, first part is ASC ordering, second is also ASC, so Cassandra
* will sort entries like this: a-a a-b b-a b-b. In this case we are able to run
* queries: ORDER BY first ASC, second ASC ORDER BY first DESC, second DESC
* WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY second DESC WHERE
* first=? AND second=?
*
* But, we cannot run queries: ORDER BY first DESC, second ASC ORDER BY first
* ASC, second DESC WHERE second=? ORDER BY first (ASC,DESC)
* *
* <p>For example, first part is ASC ordering, second is also ASC, so Cassandra will sort entries
* like this: a-a a-b b-a b-b. In this case we are able to run queries: ORDER BY first ASC, second ASC
* ORDER BY first DESC, second DESC WHERE first=? ORDER BY second ASC WHERE first=? ORDER BY second
* DESC WHERE first=? AND second=?
* *
* <p>But, we cannot run queries: ORDER BY first DESC, second ASC ORDER BY first ASC, second DESC
* WHERE second=? ORDER BY first (ASC,DESC)
*/ */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface ClusteringColumn {
@ -65,47 +57,40 @@ public @interface ClusteringColumn {
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * ClusteringColumn parts must be ordered in the @Table; it is a requirement of Cassandra.
   * Cassandra joins all parts into the final clustering key that is stored in the column family
   * name. Additionally all parts can have an ordering (ASC, DESC) that, together with the sequence
   * of the parts, determines the key comparison function, so Cassandra always stores column family
   * names in sorted order.
   *
   * <p>By default ordinal has the value 0, because in most cases a @Table has a single column for
   * the ClusteringColumn. If you have 2 or more parts of the ClusteringColumn, then you need to
   * use ordinal() to define the sequence of the parts.
   *
   * @return number used to sort clustering columns
   */
  int ordinal() default 0;

  /**
   * Default order of values in the ClusteringColumn. This ordering is used for comparison of the
   * clustering column values when Cassandra stores them in sorted order.
   *
   * <p>Default value is ascending order.
   *
   * @return ascending or descending order of clustering column values
   */
  OrderingDirection ordering() default OrderingDirection.ASC;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the column needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
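To make the ordering rules above concrete, here is a minimal sketch of an entity with a two-part clustering key. The Timeline interface, its table name and its columns are hypothetical; the annotations and their attributes are the ones defined in this file.

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.OrderingDirection;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

// All rows for one userId live in a single partition, sorted by
// (postedAt DESC, postId ASC) inside that partition.
@Table("timeline")
interface Timeline {

  @PartitionKey
  UUID userId();

  // First clustering part: newest entries first.
  @ClusteringColumn(ordinal = 0, ordering = OrderingDirection.DESC)
  Date postedAt();

  // Second clustering part: tie-breaker within a single timestamp.
  @ClusteringColumn(ordinal = 1)
  UUID postId();

  String text();
}

Against this layout, WHERE userId=? ORDER BY postedAt DESC, postId ASC follows the declared order and works; reversing only one of the two parts does not.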
@ -18,20 +18,16 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/**
 * Column annotation is used to define additional properties of the column in the entity mapping
 * interfaces: @Table, @UDT, @Tuple.
 *
 * <p>Column annotation can be used to override the default name of the column or to set up the
 * order of the columns in the mapping.
 *
 * <p>Usually for @Table and @UDT types it is not important to define the order of the columns, but
 * in @Tuple mapping it is required, because the tuple itself represents a sequence of types with a
 * particular order in the table's column.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -42,29 +38,25 @@ public @interface Column {
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * Ordinal will be used for ascending sorting of columns.
   *
   * <p>Default value is 0, because not all mapping entities require all fields to have unique
   * ordinals; only the @Tuple mapping entity requires all of them to be unique.
   *
   * @return number used to sort columns, usually for @Tuple only
   */
  int ordinal() default 0;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the column needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
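A short sketch of the two common uses, overriding the column name and quoting a reserved word; the Account interface and its names are hypothetical.

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Table;

@Table
interface Account {

  // Maps the property to an explicit column name instead of the default one.
  @Column("account_name")
  String name();

  // "order" is a CQL reserved word, so the generated queries must quote it.
  @Column(value = "order", forceQuote = true)
  int sortOrder();
}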
@ -16,111 +16,84 @@
package net.helenus.mapping.annotation;

import java.lang.annotation.*;
import javax.validation.Constraint;
import net.helenus.mapping.validator.*;
/**
 * Constraint annotations are used for data integrity, mostly for @java.lang.String types. The
 * place of the annotation is the particular method in the model interface.
 *
 * <p>None of them have an effect on selects and data retrieval operations.
 *
 * <p>Supported types: - @NotNull supports any @java.lang.Object type - All annotations
 * support @java.lang.String type
 */
public final class Constraints {

  private Constraints() {}
  /**
   * NotNull annotation is used to check that a value is not null before storing it.
   *
   * <p>Applicable to any @java.lang.Object.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NotNullValidator.class)
  public @interface NotNull {}
  /**
   * NotEmpty annotation is used to check that a value has text before storing it.
   *
   * <p>Also checks for null; it is a stricter annotation than @NotNull.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NotEmptyValidator.class)
  public @interface NotEmpty {}
  /**
   * Email annotation is used to check that a value is a valid email before storing it.
   *
   * <p>Can be used only for @CharSequence.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = EmailValidator.class)
  public @interface Email {}
  /**
   * Number annotation is used to check that all letters in a value are digits before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = NumberValidator.class)
  public @interface Number {}
  /**
   * Alphabet annotation is used to check that all letters in a value are in a specific alphabet
   * before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not check on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -132,21 +105,16 @@ public final class Constraints {
     *
     * @return alphabet characters in the string
     */
    String value();
  }
  /**
   * Length annotation is used to ensure that a value has an exact length before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and any array.
   *
   * <p>It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -154,19 +122,16 @@ public final class Constraints {
  public @interface Length {

    int value();
  }
  /**
   * MaxLength annotation is used to ensure that a value has a length less than or equal to some
   * threshold before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -174,19 +139,16 @@ public final class Constraints {
  public @interface MaxLength {

    int value();
  }
  /**
   * MinLength annotation is used to ensure that a value has a length greater than or equal to some
   * threshold before storing it.
   *
   * <p>Can be used for @java.lang.CharSequence, @ByteBuffer and byte[].
   *
   * <p>It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -194,55 +156,42 @@ public final class Constraints {
  public @interface MinLength {

    int value();
  }
  /**
   * LowerCase annotation is used to ensure that a value is in lower case before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = LowerCaseValidator.class)
  public @interface LowerCase {}
  /**
   * UpperCase annotation is used to ensure that a value is in upper case before storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  @Constraint(validatedBy = UpperCaseValidator.class)
  public @interface UpperCase {}
  /**
   * Pattern annotation is used to ensure that a value matches a given Java regex pattern before
   * storing it.
   *
   * <p>Can be used only for @java.lang.CharSequence.
   *
   * <p>It does not have an effect on selects and data retrieval operations.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -254,7 +203,6 @@ public final class Constraints {
     *
     * @return Java regex pattern
     */
    String value();

    /**
@ -262,9 +210,6 @@ public final class Constraints {
     *
     * @return Java regex flags
     */
    int flags();
  }
}
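A minimal sketch of how these constraints sit on model methods; the User interface and its fields are hypothetical. As noted above, validation fires on writes only, never on reads.

import net.helenus.mapping.annotation.Constraints;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface User {

  // Null or blank usernames are rejected before the write.
  @PartitionKey
  @Constraints.NotEmpty
  String username();

  // Must look like an email address.
  @Constraints.Email
  String email();

  // At most 160 characters.
  @Constraints.MaxLength(160)
  String bio();
}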
@ -18,23 +18,19 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;

/**
 * Index annotation is used under the specific column or method in an entity interface with
 * the @Table annotation.
 *
 * <p>The corresponding secondary index will be created in the underlying @Table for the specific
 * column.
 *
 * <p>Currently Cassandra supports only single-column indexes, so this index works only for a
 * single column.
 *
 * <p>Make sure that you use low-cardinality columns for this index; that is a requirement of
 * Cassandra. Low-cardinality field examples: gender, country, age, status, etc. High-cardinality
 * field examples: id, email, timestamp, UUID, etc.
 */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -45,18 +41,16 @@ public @interface Index {
   *
   * @return name of the index
   */
  String value() default "";

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the index needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;

  /**
@ -64,6 +58,5 @@ public @interface Index {
   *
   * @return true if the index comparisons should be case sensitive
   */
  boolean caseSensitive() default true;
}
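A sketch following the cardinality guidance above; the Customer interface and the index name are hypothetical.

import java.util.UUID;
import net.helenus.mapping.annotation.Index;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table
interface Customer {

  @PartitionKey
  UUID id();

  // Low-cardinality column: a reasonable secondary-index candidate.
  @Index("customer_country_idx")
  String country();

  // High-cardinality column: deliberately left unindexed.
  String email();
}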
@ -20,10 +20,8 @@ import java.lang.annotation.*;
/**
 * Inherited Entity annotation
 *
 * <p>Inherited Table annotation is used to indicate that the methods should also be mapped
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
@ -21,22 +21,17 @@ import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;

/**
 * PartitionKey annotation is used to define that a particular column is part of the partition key
 * in the table.
 *
 * <p>The partition key is the routing key. Cassandra uses it to find the primary data node in the
 * cluster that holds the data. Cassandra combines all parts of the partition key into a byte array
 * and then calculates a hash function using a well-distributed algorithm (by default MurMur3).
 * After that it uses the hash number as a token on the ring to find a virtual, and then a
 * physical, data server.
 *
 * <p>A @Table mapping entity is required to have at minimum one PartitionKey column. For @UDT
 * and @Tuple mapping entities the @PartitionKey annotation is not used.
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface PartitionKey {
@ -46,33 +41,28 @@ public @interface PartitionKey {
   *
   * @return name of the column
   */
  String value() default "";

  /**
   * PartitionKey parts must be ordered in the @Table; it is a requirement of Cassandra. That is
   * how the partition key calculation works: the column parts are joined in some order and the
   * final hash/token is calculated.
   *
   * <p>By default ordinal has the value 0, because in most cases a @Table has a single column
   * for @PartitionKey. If you have 2 or more parts of the PartitionKey, then you need to use
   * ordinal() to define the sequence of the parts.
   *
   * @return number used to sort columns in the PartitionKey
   */
  int ordinal() default 0;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the column needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
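To illustrate the ordinal() contract, a sketch of a composite partition key; the Reading interface is hypothetical. The token is computed over (region, sensorId) together, so both values are always needed to locate the partition.

import java.util.Date;
import java.util.UUID;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;

@Table("readings")
interface Reading {

  // Both parts below are hashed together into one token.
  @PartitionKey(ordinal = 0)
  String region();

  @PartitionKey(ordinal = 1)
  UUID sensorId();

  @ClusteringColumn
  Date takenAt();

  double value();
}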
@ -23,16 +23,13 @@ import java.lang.annotation.Target;
/**
 * StaticColumn annotation is used to define a static column in a Cassandra Table.
 *
 * <p>It has no effect in @UDT and @Tuple types, nor in @Table-s that do not
 * have @ClusteringColumn-s.
 *
 * <p>When using @ClusteringColumn we may repeat some information that is unique for a row. For
 * this purpose we can define the @StaticColumn annotation, which will create a static column in
 * the table.
 */
@Retention(value = RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface StaticColumn {
@ -42,7 +39,6 @@ public @interface StaticColumn {
   *
   * @return name of the column
   */
  String value() default "";

  /**
@ -50,18 +46,15 @@ public @interface StaticColumn {
   *
   * @return number used to sort columns in the PartitionKey
   */
  int ordinal() default 0;

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the column needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
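A sketch of the repeated-per-row case described above; the Message interface is hypothetical. The static column is stored once per partition instead of once per clustered row.

import java.util.UUID;
import net.helenus.mapping.annotation.ClusteringColumn;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.StaticColumn;
import net.helenus.mapping.annotation.Table;

@Table("messages")
interface Message {

  @PartitionKey
  UUID userId();

  @ClusteringColumn
  UUID messageId();

  // Shared by every row in the same userId partition.
  @StaticColumn
  String displayName();

  String body();
}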
@ -20,16 +20,13 @@ import java.lang.annotation.*;
/**
 * Entity annotation
 *
 * <p>Table annotation is used to define a Table mapping to some interface.
 *
 * <p>There are three types of Entity mapping annotations: @Table, @UDT, @Tuple.
 *
 * <p>For each @Table annotated interface Helenus will create/update/verify the Cassandra Table and
 * some indexes, if needed, on startup.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
@ -40,18 +37,15 @@ public @interface Table {
   *
   * @return name of the table
   */
  String value() default "";

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the table needs to be quoted.
   *
   * <p>Default value is false; we quote only selected names.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
@ -17,13 +17,8 @@ package net.helenus.mapping.annotation;
import java.lang.annotation.*;
/** Transient annotation is used to mark properties that need not be mapped to the database. */
@Documented
@Retention(RetentionPolicy.RUNTIME)
@Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface Transient {}
@ -20,20 +20,15 @@ import java.lang.annotation.*;
/**
 * Entity annotation
 *
 * <p>Tuple annotation is used to define a Tuple type mapping to some interface.
 *
 * <p>There are three types of Entity mapping annotations: @Table, @UDT, @Tuple.
 *
 * <p>Tuple is a fully embedded type; it is a sequence of underlying types where the order of the
 * sub-types is important, therefore all @Column-s must have ordinal() and only the @Column
 * annotation is supported for the underlying types.
 */
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE})
public @interface Tuple {}
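A sketch of the ordinal requirement; the Coordinates interface is hypothetical. Every @Column in a @Tuple carries an ordinal, because the tuple is an ordered sequence of types.

import net.helenus.mapping.annotation.Column;
import net.helenus.mapping.annotation.Tuple;

@Tuple
interface Coordinates {

  @Column(ordinal = 0)
  double latitude();

  @Column(ordinal = 1)
  double longitude();
}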
@ -15,211 +15,163 @@
*/
package net.helenus.mapping.annotation;

import com.datastax.driver.core.DataType;
import java.lang.annotation.*;
/**
 * Types annotations are used to clarify the Cassandra data type for a particular Java type.
 *
 * <p>Sometimes a single Java type can map to multiple Cassandra data types: - @String can
 * be @DataType.Name.ASCII or @DataType.Name.TEXT or @DataType.Name.VARCHAR - @Long can
 * be @DataType.Name.BIGINT or @DataType.Name.COUNTER
 *
 * <p>All those type annotations simplify the mapping between Java types and Cassandra data types.
 * They are not required; for each Java type there is a default Cassandra data type in Helenus, but
 * in some cases you want to control the mapping to make sure that the right Cassandra data type is
 * used.
 *
 * <p>For complex types like collections, UDT and Tuple types, all those annotations are used to
 * clarify the sub-type(s) or class/UDT names.
 *
 * <p>They have a significant effect on schema operations.
 */
public final class Types {

  private Types() {}
  /** Says to use @DataType.Name.ASCII data type in schema; Java type is @String */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Ascii {}

  /** Says to use @DataType.Name.BIGINT data type in schema; Java type is @Long */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Bigint {}

  /**
   * Says to use @DataType.Name.BLOB data type in schema; Java type is @ByteBuffer or @byte[].
   * Used by default.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Blob {}
  /**
   * Says to use @DataType.Name.LIST data type in schema with a specific sub-type; Java type
   * is @List
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: prepend, prependAll, setIdx, append, appendAll,
   * discard and discardAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface List {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT sub-type in the list, consider the @UDTList annotation.
     *
     * @return data type name of the value
     */
    DataType.Name value();
  }
  /**
   * Says to use @DataType.Name.MAP data type in schema with specific sub-types; Java type is @Map
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Map {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT key sub-type in the map, consider the @UDTKeyMap or @UDTMap
     * annotations.
     *
     * @return data type name of the key
     */
    DataType.Name key();

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT value sub-type in the map, consider the @UDTValueMap or @UDTMap
     * annotations.
     *
     * @return data type name of the value
     */
    DataType.Name value();
  }
  /**
   * Says to use @DataType.Name.COUNTER type in schema; Java type is @Long
   *
   * <p>For this type there are special operations: increment and decrement in @UpdateOperation.
   * You do not need to initialize the counter value; it will be done automatically by Cassandra.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Counter {}
  /**
   * Says to use @DataType.Name.SET data type in schema with a specific sub-type; Java type is @Set
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: add, addAll, remove and removeAll
   * in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Set {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT sub-type in the set, consider the @UDTSet annotation.
     *
     * @return data type name of the value
     */
    DataType.Name value();
  }
  /**
   * Says to use @DataType.Name.CUSTOM type in schema; Java type is @ByteBuffer or @byte[]
   *
   * <p>Used for custom user types that have a special implementation. Helenus does not deal with
   * this class directly for now; it is used only in serialized form.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -230,131 +182,95 @@ public final class Types {
     *
     * @return class name of the custom type implementation
     */
    String className();
  }
  /** Says to use @DataType.Name.TEXT type in schema; Java type is @String. Used by default. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Text {}

  /** Says to use @DataType.Name.TIMESTAMP type in schema; Java type is @Date. Used by default. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Timestamp {}

  /** Says to use @DataType.Name.TIMEUUID type in schema; Java type is @UUID or @Date */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Timeuuid {}
  /**
   * Says to use @DataType.Name.TUPLE type in schema; Java type is @TupleValue or a model interface
   * with the @Tuple annotation
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  public @interface Tuple {

    /**
     * If the Java type is @TupleValue then this field is required. Any Cassandra Tuple is a
     * sequence of Cassandra types. For now Helenus supports only simple data types in tuples
     * for the @TupleValue Java type.
     *
     * <p>In case the Java type is a model interface with the @Tuple annotation, then all methods
     * in this interface can have Types annotations that can be complex types as well.
     *
     * @return data type name sequence
     */
    DataType.Name[] value() default {};
  }
  /**
   * Says to use @DataType.Name.UDT type in schema; Java type is @UDTValue or a model interface
   * with the @UDT annotation
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface UDT {

    /**
     * If the Java type is @UDTValue then this field is required. Any Cassandra UDT has a name and
     * must be created before this use as a Cassandra Type.
     *
     * <p>This value is the UDT name of the Cassandra Type that was already created in the schema.
     *
     * <p>In case the Java type is a model interface with the @UDT annotation, this field is not
     * used, since the model interface defines the UserDefinedType with a specific name.
     *
     * @return UDT name
     */
    String value() default "";

    /**
     * Only used for the Java type @UDTValue.
     *
     * <p>In case the value() method returns a reserved word that can not be used as the name of
     * the UDT, then forceQuote will add additional quotes around this name in all CQL queries.
     *
     * <p>Default value is false.
     *
     * @return true if quotation is needed
     */
    boolean forceQuote() default false;
  }
  /**
   * Says to use @DataType.Name.MAP data type in schema with a specific UDT sub-type as the key and
   * a simple sub-type as the value; Java type is @Map
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -365,39 +281,32 @@ public final class Types {
     *
     * @return annotation of the UDT key
     */
    UDT key();

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT value sub-type in the map, consider the @UDTMap annotation.
     *
     * @return data type name of the value
     */
    DataType.Name value();
  }
  /**
   * Says to use @DataType.Name.LIST data type in schema with a specific UDT sub-type; Java type
   * is @List
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: prepend, prependAll, setIdx, append, appendAll,
   * discard and discardAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -408,28 +317,21 @@ public final class Types {
     *
     * @return annotation of the UDT value
     */
    UDT value();
  }
  /**
   * Says to use @DataType.Name.MAP data type in schema with specific UDT sub-types; Java type
   * is @Map
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -440,37 +342,28 @@ public final class Types {
     *
     * @return annotation of the UDT key
     */
    UDT key();

    /**
     * Clarification of the UDT data type used as the value sub-type in the collection.
     *
     * @return annotation of the UDT value
     */
    UDT value();
  }
  /**
   * Says to use @DataType.Name.SET data type in schema with a specific UDT sub-type; Java type
   * is @Set
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: add, addAll, remove and removeAll
   * in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
@ -481,76 +374,53 @@ public final class Types {
     *
     * @return annotation of the UDT value
     */
    UDT value();
  }
  /**
   * Says to use @DataType.Name.MAP data type in schema with a specific simple sub-type as the key
   * and a UDT sub-type as the value; Java type is @Map
   *
   * <p>Helenus does not allow using a specific implementation of the collection, therefore the
   * data retrieval operation result can be a collection with another implementation.
   *
   * <p>This annotation is usually used only for sub-type clarification, and only in case the
   * sub-type is a Java type that corresponds to multiple Cassandra data types.
   *
   * <p>For this type there are special operations: put and putAll in @UpdateOperation.
   */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface UDTValueMap {

    /**
     * Clarification of the sub-type data type used in the collection. It supports only simple
     * data types (not Collection, UDT or Tuple).
     *
     * <p>In case you need a UDT key sub-type in the map, consider the @UDTMap annotation.
     *
     * @return data type name of the key
     */
    DataType.Name key();

    /**
     * Clarification of the UDT data type used as the value sub-type in the collection.
     *
     * @return annotation of the UDT value
     */
    UDT value();
  }
  /** Says to use @DataType.Name.UUID type in schema; Java type is @UUID. Used by default. */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Uuid {}

  /** Says to use @DataType.Name.VARCHAR type in schema; Java type is @String */
  @Documented
  @Retention(RetentionPolicy.RUNTIME)
  @Target(value = {ElementType.METHOD, ElementType.ANNOTATION_TYPE})
  public @interface Varchar {}
}
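A sketch pinning the ambiguous mappings discussed above; the Page and PageHits interfaces are hypothetical, and the counter lives in its own table on the assumption that Cassandra does not mix counter and non-counter columns.

import com.datastax.driver.core.DataType;
import java.util.Map;
import net.helenus.mapping.annotation.PartitionKey;
import net.helenus.mapping.annotation.Table;
import net.helenus.mapping.annotation.Types;

@Table
interface Page {

  // String would default to TEXT; force ASCII instead.
  @PartitionKey
  @Types.Ascii
  String path();

  // Clarify both sub-types of the map column.
  @Types.Map(key = DataType.Name.TEXT, value = DataType.Name.BIGINT)
  Map<String, Long> visitsBySource();
}

@Table
interface PageHits {

  @PartitionKey
  @Types.Ascii
  String path();

  // Long would default to BIGINT; COUNTER enables increment/decrement updates.
  @Types.Counter
  Long hits();
}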
@ -18,14 +18,12 @@ package net.helenus.mapping.annotation;
/**
 * Entity annotation
 *
 * <p>UDT annotation is used to define the UDT (User Defined Type) mapping for some interface.
 *
 * <p>There are three types of Entity mapping annotations: @Table, @UDT, @Tuple.
 *
 * <p>For each annotated @UDT type Helenus will create/update/verify the Cassandra Type on startup.
 */
import java.lang.annotation.*;

@Inherited
@ -38,18 +36,15 @@ public @interface UDT {
   *
   * @return name of the UDT type
   */
  String value() default "";

  /**
   * For reserved words in Cassandra we need quotation in CQL queries. This property marks that the
   * name of the UDT type needs to be quoted.
   *
   * <p>The default value is false; only selected names are quoted.
   *
   * @return true if the name has to be quoted
   */
  boolean forceQuote() default false;
}
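A minimal sketch of a UDT mapping under these rules — the Address interface and its getters are hypothetical:

  @UDT("address")
  public interface Address {

    String street();

    String city();
  }

  // A name that collides with a reserved word would need quoting in generated CQL:
  @UDT(value = "order", forceQuote = true)
  public interface Order { /* ... */ }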

View file

@ -16,7 +16,6 @@
package net.helenus.mapping.convert;

import java.util.Map;
import net.helenus.core.Helenus;
import net.helenus.core.reflect.MapExportable;
import net.helenus.mapping.HelenusEntity;
@ -54,9 +53,7 @@ public abstract class AbstractEntityValueWriter<V> {
      if (prop != null) {
        writeColumn(outValue, value, prop);
      }
    }
  } else {
@ -68,11 +65,7 @@ public abstract class AbstractEntityValueWriter<V> {
        if (value != null) {
          writeColumn(outValue, value, prop);
        }
      }
    }
  }
}

View file

@ -19,7 +19,6 @@ import java.nio.ByteBuffer;
import java.util.function.Function;

public enum ByteArrayToByteBufferConverter implements Function<byte[], ByteBuffer> {
  INSTANCE;

  @Override
@ -31,5 +30,4 @@ public enum ByteArrayToByteBufferConverter implements Function<byte[], ByteBuffe
    return ByteBuffer.wrap(t);
  }
}

View file

@ -19,7 +19,6 @@ import java.nio.ByteBuffer;
import java.util.function.Function;

public enum ByteBufferToByteArrayConverter implements Function<ByteBuffer, byte[]> {
  INSTANCE;

  @Override
@ -31,5 +30,4 @@ public enum ByteBufferToByteArrayConverter implements Function<ByteBuffer, byte[
    return t.array();
  }
}
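Taken together, these two enum-singleton converters round-trip a byte array without copying (a sketch; ByteBuffer.wrap shares the backing array, so array() returns the original reference):

  byte[] data = {1, 2, 3};
  ByteBuffer buf = ByteArrayToByteBufferConverter.INSTANCE.apply(data);
  byte[] back = ByteBufferToByteArrayConverter.INSTANCE.apply(buf); // same array instance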

View file

@ -15,12 +15,10 @@
 */
package net.helenus.mapping.convert;

import com.google.common.base.CaseFormat;
import java.util.function.Function;

public enum CamelCaseToUnderscoreConverter implements Function<String, String> {
  INSTANCE;

  @Override
@ -32,5 +30,4 @@ public enum CamelCaseToUnderscoreConverter implements Function<String, String> {
    return CaseFormat.UPPER_CAMEL.to(CaseFormat.LOWER_UNDERSCORE, source);
  }
}
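Usage is a single call on the INSTANCE singleton (a sketch; the output shown follows Guava's documented CaseFormat behavior):

  String column = CamelCaseToUnderscoreConverter.INSTANCE.apply("FirstName"); // "first_name"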

View file

@ -18,16 +18,10 @@ package net.helenus.mapping.convert;
import java.util.Date;
import java.util.UUID;
import java.util.function.Function;
import net.helenus.support.Timeuuid;

/** Simple Date to TimeUUID Converter */
public enum DateToTimeuuidConverter implements Function<Date, UUID> {
  INSTANCE;

  @Override
@ -35,5 +29,4 @@ public enum DateToTimeuuidConverter implements Function<Date, UUID> {
    long milliseconds = source.getTime();
    return Timeuuid.of(milliseconds);
  }
}

View file

@ -17,18 +17,12 @@ package net.helenus.mapping.convert;
import java.util.function.Function;

/** Enum to String Converter */
public enum EnumToStringConverter implements Function<Enum, String> {
  INSTANCE;

  @Override
  public String apply(Enum source) {
    return source.name();
  }
}

View file

@ -17,7 +17,6 @@ package net.helenus.mapping.convert;
import java.util.Map;
import java.util.function.Function;
import net.helenus.core.Helenus;
import net.helenus.mapping.HelenusEntity;
import net.helenus.mapping.value.ColumnValueProvider;
@ -44,5 +43,4 @@ public class ProxyValueReader<T> implements Function<T, Object> {
    }
    return null;
  }
}

View file

@ -29,5 +29,4 @@ public class StringToEnumConverter implements Function<String, Enum> {
  public Enum apply(String source) {
    return Enum.valueOf(enumClass, source);
  }
}
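A brief sketch of the round trip these two converters provide (the Color enum is hypothetical, and this assumes StringToEnumConverter's constructor takes the target enum class, as its enumClass field suggests):

  enum Color { RED, GREEN }

  String name = EnumToStringConverter.INSTANCE.apply(Color.RED); // "RED"
  Enum back = new StringToEnumConverter(Color.class).apply(name); // Color.RED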

View file

@ -18,16 +18,13 @@ package net.helenus.mapping.convert;
import java.util.Date;
import java.util.UUID;
import java.util.function.Function;
import net.helenus.support.Timeuuid;

public enum TimeuuidToDateConverter implements Function<UUID, Date> {
  INSTANCE;

  @Override
  public Date apply(UUID source) {
    return new Date(Timeuuid.getTimestampMillis(source));
  }
}
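Assuming net.helenus.support.Timeuuid behaves as the two converters above indicate, Date-to-TimeUUID conversion is reversible at millisecond resolution (a sketch, not part of this commit):

  Date now = new Date();
  UUID id = DateToTimeuuidConverter.INSTANCE.apply(now);
  Date back = TimeuuidToDateConverter.INSTANCE.apply(id); // equals now at ms precision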

View file

@ -15,17 +15,16 @@
 */
package net.helenus.mapping.convert;

import com.datastax.driver.core.TupleType;
import com.datastax.driver.core.TupleValue;
import java.nio.ByteBuffer;
import java.util.function.Function;
import net.helenus.core.SessionRepository;
import net.helenus.mapping.HelenusProperty;
import net.helenus.mapping.value.TupleColumnValuePreparer;

public class TupleValueWriter extends AbstractEntityValueWriter<TupleValue>
    implements Function<Object, TupleValue> {

  private final TupleType tupleType;
  private final TupleColumnValuePreparer valuePreparer;

@ -56,5 +55,4 @@ public class TupleValueWriter extends AbstractEntityValueWriter<TupleValue> impl
    }
    return null;
  }
}

Some files were not shown because too many files have changed in this diff