WIP: Where I stopped when first building a ResultSet cache, then layering an Entity cache on top. Lots of tricky bits that were not falling into place.
This commit is contained in:
parent
933526b05b
commit
38ca5973a1
1 changed files with 36 additions and 3 deletions
|
@ -45,7 +45,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
protected List<Ordering> ordering = null;
|
protected List<Ordering> ordering = null;
|
||||||
protected Integer limit = null;
|
protected Integer limit = null;
|
||||||
protected boolean allowFiltering = false;
|
protected boolean allowFiltering = false;
|
||||||
|
protected boolean cacheResult = true;
|
||||||
|
|
||||||
|
protected AbstractCache cache;
|
||||||
|
|
||||||
public SelectOperation(AbstractSessionOperations sessionOperations) {
|
public SelectOperation(AbstractSessionOperations sessionOperations) {
|
||||||
super(sessionOperations);
|
super(sessionOperations);
|
||||||
|
@ -68,7 +70,6 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
return (E) Fun.ArrayTuple.of(arr);
|
return (E) Fun.ArrayTuple.of(arr);
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
public SelectOperation(AbstractSessionOperations sessionOperations, HelenusEntity entity) {
|
||||||
|
@ -81,6 +82,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||||
.forEach(p -> this.props.add(p));
|
.forEach(p -> this.props.add(p));
|
||||||
|
|
||||||
|
if (entity.isCacheable()) {
|
||||||
|
this.cache = sessionOps.cacheFor(CacheManager.Type.FETCH);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public SelectOperation(
|
public SelectOperation(
|
||||||
|
@ -97,6 +101,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
.map(p -> new HelenusPropertyNode(p, Optional.empty()))
|
||||||
.forEach(p -> this.props.add(p));
|
.forEach(p -> this.props.add(p));
|
||||||
|
|
||||||
|
if (entity.isCacheable()) {
|
||||||
|
this.cache = sessionOps.cacheFor(CacheManager.Type.FETCH);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public SelectOperation(
|
public SelectOperation(
|
||||||
|
@ -108,6 +115,9 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
this.rowMapper = rowMapper;
|
this.rowMapper = rowMapper;
|
||||||
Collections.addAll(this.props, props);
|
Collections.addAll(this.props, props);
|
||||||
|
|
||||||
|
if (props.length > 0 && props[0].getEntity().isCacheable()) {
|
||||||
|
this.cache = sessionOps.cacheFor(CacheManager.Type.FETCH);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
public CountOperation count() {
|
public CountOperation count() {
|
||||||
|
@ -140,13 +150,19 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
|
|
||||||
HelenusEntity entity = Helenus.entity(entityClass);
|
HelenusEntity entity = Helenus.entity(entityClass);
|
||||||
|
|
||||||
|
if (entity.isCacheable()) {
|
||||||
|
this.cache = sessionOps.cacheFor(CacheManager.Type.FETCH);
|
||||||
|
//TODO cache entity
|
||||||
|
}
|
||||||
|
|
||||||
this.rowMapper = null;
|
this.rowMapper = null;
|
||||||
|
|
||||||
return new SelectTransformingOperation<R, E>(
|
return new SelectTransformingOperation<R, E>(
|
||||||
this,
|
this,
|
||||||
(r) -> {
|
(r) -> {
|
||||||
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
|
Map<String, Object> map = new ValueProviderMap(r, sessionOps.getValueProvider(), entity);
|
||||||
return (R) Helenus.map(entityClass, map);
|
R result = (R) Helenus.map(entityClass, map);
|
||||||
|
return result;
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -180,6 +196,11 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
return this;
|
return this;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public SelectOperation<E> cache() {
|
||||||
|
this.cacheResult = true;
|
||||||
|
return this;
|
||||||
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
public BuiltStatement buildStatement() {
|
public BuiltStatement buildStatement() {
|
||||||
|
|
||||||
|
@ -187,12 +208,18 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
Selection selection = QueryBuilder.select();
|
Selection selection = QueryBuilder.select();
|
||||||
|
|
||||||
for (HelenusPropertyNode prop : props) {
|
for (HelenusPropertyNode prop : props) {
|
||||||
selection = selection.column(prop.getColumnName());
|
String col = prop.getColumnName();
|
||||||
|
selection = selection.column(col);
|
||||||
|
|
||||||
if (prop.getProperty().caseSensitiveIndex()) {
|
if (prop.getProperty().caseSensitiveIndex()) {
|
||||||
allowFiltering = true;
|
allowFiltering = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if (cache != null && cacheResult) {
|
||||||
|
selection = selection.ttl(col);
|
||||||
|
selection = selection.writeTime(col);
|
||||||
|
}
|
||||||
|
|
||||||
if (entity == null) {
|
if (entity == null) {
|
||||||
entity = prop.getEntity();
|
entity = prop.getEntity();
|
||||||
} else if (entity != prop.getEntity()) {
|
} else if (entity != prop.getEntity()) {
|
||||||
|
@ -257,6 +284,12 @@ public final class SelectOperation<E> extends AbstractFilterStreamOperation<E, S
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@Override
|
||||||
|
protected AbstractCache getCache() {
|
||||||
|
AbstractCache unitOfWorkCache = super.getCache();
|
||||||
|
return (unitOfWorkCache == null) ? this.cache : unitOfWorkCache;
|
||||||
|
}
|
||||||
|
|
||||||
private List<Ordering> getOrCreateOrdering() {
|
private List<Ordering> getOrCreateOrdering() {
|
||||||
if (ordering == null) {
|
if (ordering == null) {
|
||||||
ordering = new ArrayList<Ordering>();
|
ordering = new ArrayList<Ordering>();
|
||||||
|
|
Loading…
Reference in a new issue