Formatting.

This commit is contained in:
Greg Burd 2017-10-30 11:52:12 -04:00
parent 5605824886
commit dcea254fdb
6 changed files with 66 additions and 69 deletions

View file

@@ -3,7 +3,6 @@ package net.helenus.core;
import java.io.Serializable;
import java.util.*;
import net.helenus.mapping.value.ValueProviderMap;
import org.apache.commons.lang3.SerializationUtils;
import com.google.common.primitives.Primitives;

View file

@@ -92,9 +92,9 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
}
@Override
public String getPurpose() {
return purpose;
}
public String getPurpose() {
return purpose;
}
@Override
public UnitOfWork setPurpose(String purpose) {
@@ -284,7 +284,9 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
// Merge cache and statistics into parent if there is one.
parent.mergeCache(cache);
if (purpose != null) {parent.nestedPurposes.add(purpose);}
if (purpose != null) {
parent.nestedPurposes.add(purpose);
}
parent.cacheHits += cacheHits;
parent.cacheMisses += cacheMisses;
parent.databaseLookups += databaseLookups;
@@ -310,14 +312,14 @@ public abstract class AbstractUnitOfWork<E extends Exception> implements UnitOfW
public synchronized void abort() {
TreeTraverser<AbstractUnitOfWork<E>> traverser = TreeTraverser.using(node -> node::getChildNodes);
traverser.postOrderTraversal(this).forEach(uow -> {
uow.committed = false;
uow.aborted = true;
uow.committed = false;
uow.aborted = true;
});
// log.record(txn::abort)
// cache.invalidateSince(txn::start time)
if (!hasAborted()) {
committed = false;
aborted = true;
committed = false;
aborted = true;
elapsedTime.stop();
if (LOG.isInfoEnabled()) {
LOG.info(logTimers("aborted"));

View file

@@ -316,19 +316,19 @@ public final class HelenusSession extends AbstractSessionOperations implements C
Constructor<? extends UnitOfWork> ctor = clazz.getConstructor(HelenusSession.class, UnitOfWork.class);
UnitOfWork uow = ctor.newInstance(this, parent);
if (LOG.isInfoEnabled() && uow.getPurpose() == null) {
StringBuilder purpose = null;
StackTraceElement[] trace = Thread.currentThread().getStackTrace();
int frame = 2;
if (trace[2].getMethodName().equals("begin")) {
frame = 3;
} else if (trace[2].getClassName().equals(unitOfWorkClass.getName())) {
frame = 3;
}
purpose = new StringBuilder().append(trace[frame].getClassName()).append(".")
.append(trace[frame].getMethodName()).append("(").append(trace[frame].getFileName()).append(":")
.append(trace[frame].getLineNumber()).append(")");
uow.setPurpose(purpose.toString());
}
StringBuilder purpose = null;
StackTraceElement[] trace = Thread.currentThread().getStackTrace();
int frame = 2;
if (trace[2].getMethodName().equals("begin")) {
frame = 3;
} else if (trace[2].getClassName().equals(unitOfWorkClass.getName())) {
frame = 3;
}
purpose = new StringBuilder().append(trace[frame].getClassName()).append(".")
.append(trace[frame].getMethodName()).append("(").append(trace[frame].getFileName()).append(":")
.append(trace[frame].getLineNumber()).append(")");
uow.setPurpose(purpose.toString());
}
if (parent != null) {
parent.addNestedUnitOfWork(uow);
}

View file

@@ -2,11 +2,8 @@ package net.helenus.core.cache;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import net.helenus.core.reflect.MapExportable;
public class CacheUtil {
public static List<String[]> combinations(List<String> items) {
@@ -44,22 +41,19 @@ public class CacheUtil {
if (to == from) {
return to;
} else {
return from;
}
/*
// TODO(gburd): take ttl and writeTime into account when merging.
Map<String, Object> toValueMap = to instanceof MapExportable ? ((MapExportable) to).toMap() : null;
Map<String, Object> fromValueMap = to instanceof MapExportable ? ((MapExportable) from).toMap() : null;
if (toValueMap != null && fromValueMap != null) {
for (String key : fromValueMap.keySet()) {
if (toValueMap.containsKey(key) && toValueMap.get(key) != fromValueMap.get(key)) {
toValueMap.put(key, fromValueMap.get(key));
}
}
return from;
}
return to;
*/
/*
* // TODO(gburd): take ttl and writeTime into account when merging. Map<String,
* Object> toValueMap = to instanceof MapExportable ? ((MapExportable)
* to).toMap() : null; Map<String, Object> fromValueMap = to instanceof
* MapExportable ? ((MapExportable) from).toMap() : null;
*
* if (toValueMap != null && fromValueMap != null) { for (String key :
* fromValueMap.keySet()) { if (toValueMap.containsKey(key) &&
* toValueMap.get(key) != fromValueMap.get(key)) { toValueMap.put(key,
* fromValueMap.get(key)); } } } return to;
*/
}
public static String schemaName(List<Facet> facets) {

View file

@@ -55,19 +55,20 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
int.class);
constructor.setAccessible(true);
// Now we need to lookup and invoke special the default method on the interface class.
// Now we need to lookup and invoke special the default method on the interface
// class.
final Class<?> declaringClass = method.getDeclaringClass();
Object result = constructor.newInstance(declaringClass, MethodHandles.Lookup.PRIVATE)
.unreflectSpecial(method, declaringClass).bindTo(proxy).invokeWithArguments(args);
return result;
}
private Object writeReplace() {
return new SerializationProxy(this);
}
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
throw new InvalidObjectException("Proxy required.");
}
private Object writeReplace() {
return new SerializationProxy(this);
}
private void readObject(ObjectInputStream stream) throws InvalidObjectException {
throw new InvalidObjectException("Proxy required.");
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
@@ -108,12 +109,12 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
}
if ("writeReplace".equals(methodName)) {
return new SerializationProxy(this);
}
return new SerializationProxy(this);
}
if ("readObject".equals(methodName)) {
throw new InvalidObjectException("Proxy required.");
}
if ("readObject".equals(methodName)) {
throw new InvalidObjectException("Proxy required.");
}
if ("dsl".equals(methodName)) {
return Helenus.dsl(iface);
@@ -153,24 +154,24 @@ public class MapperInvocationHandler<E> implements InvocationHandler, Serializab
static class SerializationProxy implements Serializable {
private static final long serialVersionUID = -5617583940055969353L;
private static final long serialVersionUID = -5617583940055969353L;
private final Class<?> iface;
private final Map<String, Object> src;
private final Class<?> iface;
private final Map<String, Object> src;
public SerializationProxy(MapperInvocationHandler mapper) {
this.iface = mapper.iface;
if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size());
this.src.putAll(src);
} else {
this.src = mapper.src;
}
}
public SerializationProxy(MapperInvocationHandler mapper) {
this.iface = mapper.iface;
if (mapper.src instanceof ValueProviderMap) {
this.src = new HashMap<String, Object>(mapper.src.size());
this.src.putAll(src);
} else {
this.src = mapper.src;
}
}
Object readResolve() throws ObjectStreamException {
return Helenus.map(iface, src);
}
Object readResolve() throws ObjectStreamException {
return Helenus.map(iface, src);
}
}
}
}

View file

@@ -29,7 +29,8 @@ public enum ReflectionMapperInstantiator implements MapperInstantiator {
public <E> E instantiate(Class<E> iface, Map<String, Object> src, ClassLoader classLoader) {
MapperInvocationHandler<E> handler = new MapperInvocationHandler<E>(iface, src);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class, Serializable.class}, handler);
E proxy = (E) Proxy.newProxyInstance(classLoader, new Class[]{iface, MapExportable.class, Serializable.class},
handler);
return proxy;
}
}