More fixes to enable smooth scala translation.

This commit is contained in:
Greg Burd 2019-07-10 13:46:54 -04:00
parent 5589200be4
commit 074aeb7b1d
18 changed files with 127 additions and 106 deletions

3
NOTES
View file

@ -1,5 +1,7 @@
* s/System.out.println/log()/g
* Migrate to Scala
* mvn com.mysema.scalagen:scalagen-maven-plugin:0.2.2:main -DtargetFolder=/home/gburd/ws/stasis/stasis-core/src/main/scala
* /usr/lib/jvm/zulu-8-amd64/bin/java -cp /home/gburd/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.6.jar:/home/gburd/src/scalagen/scalagen/target/dependencies/*:/home/gburd/src/scalagen/scalagen/target/classes/ com.mysema.scalagen.Cli /home/gburd/ws/stasis/stasis-core/src/main/java /tmp/stasis-foo
* Fix @Ignore'ed JUnit tests
* TODO/XXX/FIXME/JE_TEST
* begin/end JE only
@ -19,5 +21,6 @@
* multiple private constructors
* @BeanProperty
* copyright
* assert
* http://dbmsmusings.blogspot.com/2019/06/correctness-anomalies-under.html

View file

@ -339,7 +339,7 @@ public class Cleaner implements DaemonRunner, EnvConfigObserver {
new LongStat(statGroup, CLEANER_REPEAT_ITERATOR_READS);
logSizeStats =
new FileProtector.LogSizeStats(0, 0, 0, new HashMap<>());
new FileProtector.LogSizeStats(0, 0, 0, new HashMap<String, Long>());
if (env.isMemOnly()) {
fileStoreInfo = null;
@ -898,7 +898,7 @@ public class Cleaner implements DaemonRunner, EnvConfigObserver {
throws EnvLockedException {
final FileManager fileManager = env.getFileManager();
final SortedSet<Long> deletedFiles = new TreeSet<>();
final SortedSet<Long> deletedFiles = new TreeSet<Long>();
if (!fileManager.lockEnvironment(false, true)) {
throw new EnvLockedException();

View file

@ -167,7 +167,7 @@ public class FileProcessor extends DaemonThread {
private int nLNsExpiredThisRun = 0;
/*
* Number of LN logrecs that were not known apriory to be obsolete, and as
* Number of LN logrecs that were not known a priori to be obsolete, and as
* a result, needed further processing. These include LNs that had to be
* searched-for in the tree as well as the nLNQueueHitsThisRun (see below).
*/
@ -190,8 +190,8 @@ public class FileProcessor extends DaemonThread {
/*
* Number of LN logrecs whose LSN had to be locked in order to check their
* obsoleteness, and this non-blocking lock request was denied (and as a
* result, the logrec was placed in the "pending LNs" queue.
* how obsolete they are, and this non-blocking lock request was denied (and
* as a result, the logrec was placed in the "pending LNs" queue.
*/
private int nLNsLockedThisRun = 0;
@ -301,7 +301,7 @@ public class FileProcessor extends DaemonThread {
/**
* Selects files to clean and cleans them. It returns the number of
* successfully cleaned files. May be called by the daemon thread or
* programatically.
* programmatically.
*
* @param invokedFromDaemon currently has no effect.
*
@ -410,7 +410,7 @@ public class FileProcessor extends DaemonThread {
final long runId = cleaner.totalRuns.incrementAndGet();
final MemoryBudget budget = envImpl.getMemoryBudget();
nFilesCleaned += 1;
nFilesCleaned = nFilesCleaned + 1;
try {
TestHookExecute.doHookIfSet(cleaner.fileChosenHook);
@ -712,14 +712,14 @@ public class FileProcessor extends DaemonThread {
* all DB IDs encountered and do the check once per DB at the end.
*/
final Set<DatabaseId> checkPendingDbSet =
countOnly ? null : new HashSet<>();
countOnly ? null : new HashSet<DatabaseId>();
/*
* Use local caching to reduce DbTree.getDb overhead. Do not call
* releaseDb after getDb with the dbCache, since the entire dbCache
* will be released at the end of this method.
*/
final Map<DatabaseId, DatabaseImpl> dbCache = new HashMap<>();
final Map<DatabaseId, DatabaseImpl> dbCache = new HashMap<DatabaseId, DatabaseImpl>();
final DbTree dbMapTree = envImpl.getDbTree();
/*
@ -731,7 +731,7 @@ public class FileProcessor extends DaemonThread {
countOnly ? null : new LocalUtilizationTracker(envImpl);
/* Keep track of all database IDs encountered. */
final Set<DatabaseId> databases = new HashSet<>();
final Set<DatabaseId> databases = new HashSet<DatabaseId>();
/* Create the file reader. */
final CleanerFileReader reader = new CleanerFileReader(
@ -1170,7 +1170,7 @@ public class FileProcessor extends DaemonThread {
processFoundLN(info, logLsn, bin.getLsn(index), bin, index);
if (pendingLN != null) {
pendingLNs = new HashMap<>();
pendingLNs = new HashMap<Long, LNInfo>();
pendingLNs.put(logLsn, pendingLN);
}
@ -1202,7 +1202,7 @@ public class FileProcessor extends DaemonThread {
if (pendingLN != null) {
if (pendingLNs == null) {
pendingLNs = new HashMap<>();
pendingLNs = new HashMap<Long, LNInfo>();
}
pendingLNs.put(binLsn, pendingLN);
}
@ -1955,7 +1955,7 @@ public class FileProcessor extends DaemonThread {
private int usedMem;
LookAheadCache(int lookAheadCacheSize) {
map = new TreeMap<>();
map = new TreeMap<Long, LNInfo>();
maxMem = lookAheadCacheSize;
usedMem = MemoryBudget.TREEMAP_OVERHEAD;
}

View file

@ -13,14 +13,6 @@
package com.sleepycat.je.cleaner;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.log.FileManager;
@ -28,6 +20,10 @@ import com.sleepycat.je.utilint.DbLsn;
import com.sleepycat.je.utilint.Pair;
import com.sleepycat.je.utilint.VLSN;
import java.io.File;
import java.util.*;
import java.util.function.BiFunction;
/**
* The FileProtector is primarily responsible for protecting files from being
* deleted due to log cleaning, when they are needed for other purposes. As
@ -197,15 +193,15 @@ public class FileProtector {
private final EnvironmentImpl envImpl;
/* Access active files only via getActiveFiles. */
private final NavigableMap<Long, Long> activeFiles = new TreeMap<>();
private final NavigableMap<Long, Long> activeFiles = new TreeMap<Long, Long>();
private final NavigableMap<Long, ReservedFileInfo> reservedFiles =
new TreeMap<>();
new TreeMap<Long, ReservedFileInfo>();
private final NavigableMap<Long, Long> condemnedFiles = new TreeMap<>();
private final NavigableMap<Long, Long> condemnedFiles = new TreeMap<Long, Long>();
private final Map<String, ProtectedFileSet> protectedFileSets =
new HashMap<>();
new HashMap<String, ProtectedFileSet>();
/* Is null if the env is not replicated. */
private ProtectedFileRange vlsnIndexRange;
@ -308,7 +304,7 @@ public class FileProtector {
final NavigableMap<Long, Long> activeFiles = getActiveFiles();
final NavigableSet<Long> protectedFiles =
new TreeSet<>(activeFiles.keySet());
new TreeSet<Long>(activeFiles.keySet());
if (nReservedFiles > 0) {
int n = nReservedFiles;
@ -443,7 +439,7 @@ public class FileProtector {
for (final ReservedFileInfo info : reservedFiles.values()) {
size += info.size;
}
return new Pair<>(size, new TreeSet<>(reservedFiles.keySet()));
return new Pair<Long, NavigableSet<Long>>(size, new TreeSet<Long>(reservedFiles.keySet()));
}
/**
@ -483,7 +479,7 @@ public class FileProtector {
if (!condemnedFiles.isEmpty()) {
final Long file = condemnedFiles.firstKey();
final Long size = condemnedFiles.remove(file);
return new Pair<>(file, size);
return new Pair<Long, Long>(file, size);
}
if (reservedFiles.isEmpty()) {
@ -563,7 +559,7 @@ public class FileProtector {
/* Calculate reserved and protected sizes. */
long reservedSize = 0;
long protectedSize = 0;
final Map<String, Long> protectedSizeMap = new HashMap<>();
final Map<String, Long> protectedSizeMap = new HashMap<String, Long>();
for (final Map.Entry<Long, ReservedFileInfo> entry :
reservedFiles.entrySet()) {
@ -581,9 +577,19 @@ public class FileProtector {
isProtected = true;
protectedSizeMap.compute(
pfs.getName(),
(k, v) -> ((v != null) ? v : 0) + info.size);
BiFunction<String, Long, Long> fn = new BiFunction<String, Long, Long>() {
@Override
public Long apply(String k, Long v) {
Long r = 0L;
if (v != null) {
r = v + info.size;
} else {
r = info.size;
}
return r;
}
};
protectedSizeMap.compute(pfs.getName(), fn); // (k, v) -> ((v != null) ? v : 0) + info.size);
}
if (isProtected) {
@ -822,7 +828,7 @@ public class FileProtector {
* new files.
*/
public synchronized NavigableSet<Long> getProtectedFiles() {
return new TreeSet<>(protectedFiles);
return new TreeSet<Long>(protectedFiles);
}
/**

View file

@ -152,9 +152,9 @@ public class FileSelector {
private boolean anyPendingDuringCheckpoint;
FileSelector() {
fileInfoMap = new TreeMap<>();
pendingLNs = new HashMap<>();
pendingDBs = new HashSet<>();
fileInfoMap = new TreeMap<Long, FileInfo>();
pendingLNs = new HashMap<Long, LNInfo>();
pendingDBs = new HashSet<DatabaseId>();
}
/**
@ -177,7 +177,7 @@ public class FileSelector {
if (!toBeCleaned.isEmpty()) {
final Long fileNum = toBeCleaned.iterator().next();
final FileInfo info = setStatus(fileNum, FileStatus.BEING_CLEANED);
return new Pair<>(fileNum, info.requiredUtil);
return new Pair<Long, Integer>(fileNum, info.requiredUtil);
}
final Pair<Long, Integer> result = calculator.getBestFile(
@ -215,7 +215,7 @@ public class FileSelector {
* Returns a sorted set of files having the given status.
*/
private synchronized NavigableSet<Long> getFiles(FileStatus status) {
final NavigableSet<Long> set = new TreeSet<>();
final NavigableSet<Long> set = new TreeSet<Long>();
for (Map.Entry<Long, FileInfo> entry : fileInfoMap.entrySet()) {
if (entry.getValue().status == status) {
set.add(entry.getKey());
@ -403,7 +403,7 @@ public class FileSelector {
return Collections.emptyMap();
}
final Map<Long, FileInfo> reservedFiles = new HashMap<>();
final Map<Long, FileInfo> reservedFiles = new HashMap<Long, FileInfo>();
final Set<Long> previouslyCleanedFiles = info.getCleanedFiles();
final Set<Long> previouslyProcessedFiles =
info.getFullyProcessedFiles();
@ -465,7 +465,7 @@ public class FileSelector {
synchronized Map<Long, LNInfo> getPendingLNs() {
if (pendingLNs.size() > 0) {
return new HashMap<>(pendingLNs);
return new HashMap<Long, LNInfo>(pendingLNs);
} else {
return null;
}
@ -527,7 +527,7 @@ public class FileSelector {
* none.
*/
public synchronized NavigableSet<Long> getInProgressFiles() {
return new TreeSet<>(fileInfoMap.keySet());
return new TreeSet<Long>(fileInfoMap.keySet());
}
/**

View file

@ -52,7 +52,7 @@ public class ReservedFileInfo {
input.readByte(); /* Future flags. */
final VLSN firstVLSN = new VLSN(input.readPackedLong());
final VLSN lastVLSN = new VLSN(input.readPackedLong());
final Set<DatabaseId> dbIds = new HashSet<>();
final Set<DatabaseId> dbIds = new HashSet<DatabaseId>();
final int nDbs = input.readPackedInt();
for (int i = 0; i < nDbs; i += 1) {
dbIds.add(new DatabaseId(input.readPackedLong()));

View file

@ -476,7 +476,7 @@ public class UtilizationCalculator implements EnvConfigObserver {
}
return (fileChosen != null) ?
new Pair<>(fileChosen, pass1RequiredUtil) :
new Pair<Long, Integer>(fileChosen, pass1RequiredUtil) :
null;
}

View file

@ -112,7 +112,7 @@ public class UtilizationProfile {
UtilizationTracker tracker) {
this.env = env;
this.tracker = tracker;
fileSummaryMap = new TreeMap<>();
fileSummaryMap = new TreeMap<Long, FileSummary>();
logger = LoggerUtils.getLogger(getClass());
}
@ -218,7 +218,7 @@ public class UtilizationProfile {
* Copy the fileSummaryMap to a new map, adding in the tracked
* summary information for each entry.
*/
TreeMap<Long, FileSummary> map = new TreeMap<>();
TreeMap<Long, FileSummary> map = new TreeMap<Long, FileSummary>();
for (Long file : fileSummaryMap.keySet()) {
FileSummary summary = getFileSummary(file);
map.put(file, summary);
@ -233,7 +233,7 @@ public class UtilizationProfile {
}
return map;
} else {
return new TreeMap<>(fileSummaryMap);
return new TreeMap<Long, FileSummary>(fileSummaryMap);
}
}
@ -260,7 +260,7 @@ public class UtilizationProfile {
*/
public synchronized SortedMap<Long, Integer> getFileSizeSummaryMap() {
TreeMap<Long, Integer> map = new TreeMap<>();
TreeMap<Long, Integer> map = new TreeMap<Long, Integer>();
for (Long fileNum : fileSummaryMap.keySet()) {
int totalSize = getFileSize(fileNum);
@ -294,7 +294,7 @@ public class UtilizationProfile {
MemoryBudget mb = env.getMemoryBudget();
mb.updateAdminMemoryUsage(0 - memorySize);
fileSummaryMap = new TreeMap<>();
fileSummaryMap = new TreeMap<Long, FileSummary>();
cachePopulated = false;
}
@ -310,7 +310,7 @@ public class UtilizationProfile {
*/
void reserveFiles(final Map<Long, FileSelector.FileInfo> reservedFiles) {
final Set<DatabaseId> dbIds = new HashSet<>();
final Set<DatabaseId> dbIds = new HashSet<DatabaseId>();
for (final Map.Entry<Long, FileSelector.FileInfo> entry :
reservedFiles.entrySet()) {
@ -350,10 +350,12 @@ public class UtilizationProfile {
BasicLocker.createBasicLocker(env, false /*noWait*/);
try {
try (Cursor cursor = DbInternal.makeCursor(
reservedFilesDb, locker, null, false /*retainNonTxnLocks*/)) {
Cursor cursor = DbInternal.makeCursor(
reservedFilesDb, locker, null, false /*retainNonTxnLocks*/);
try {
cursor.put(keyEntry, dataEntry, Put.OVERWRITE, null);
} finally {
cursor.close();
}
} finally {
locker.operationEnd();
@ -374,14 +376,15 @@ public class UtilizationProfile {
final Locker locker =
BasicLocker.createBasicLocker(env, false /*noWait*/);
try {
try (Cursor cursor =
DbInternal.makeCursor(reservedFilesDb, locker, null)) {
Cursor cursor = DbInternal.makeCursor(reservedFilesDb, locker, null);
try {
if (cursor.get(
keyEntry, null, Get.SEARCH, readOptions) != null) {
cursor.delete(null);
}
} finally {
cursor.close();
}
} finally {
locker.operationEnd();
@ -718,7 +721,7 @@ public class UtilizationProfile {
assert cachePopulated;
final long fileNumVal = fileNum;
final List<long[]> list = new ArrayList<>();
final List<long[]> list = new ArrayList<long[]>();
/*
* Get a TrackedFileSummary that cannot be flushed (evicted) while we
@ -899,7 +902,7 @@ public class UtilizationProfile {
}
final Long[] existingFiles = env.getFileManager().getAllFileNumbers();
final Set<Long> reservedFileRecords = new HashSet<>();
final Set<Long> reservedFileRecords = new HashSet<Long>();
final DatabaseEntry keyEntry = new DatabaseEntry();
final DatabaseEntry dataEntry = new DatabaseEntry();
final FileProtector fileProtector = env.getFileProtector();
@ -930,10 +933,10 @@ public class UtilizationProfile {
final ReadOptions options =
new ReadOptions().setLockMode(LockMode.READ_UNCOMMITTED);
try (final Cursor cursor = DbInternal.makeCursor(
final Cursor cursor = DbInternal.makeCursor(
reservedFilesDb, locker, null,
false /*retainNonTxnLocks*/)) {
false /*retainNonTxnLocks*/);
try {
while (cursor.get(
keyEntry, dataEntry, Get.NEXT, options) != null) {
@ -969,6 +972,8 @@ public class UtilizationProfile {
cursor.delete();
}
}
} finally {
cursor.close();
}
} finally {
locker.operationEnd();

View file

@ -71,7 +71,7 @@ public class StatsFileReader extends DumpFileReader {
super(envImpl, readBufferSize, startLsn, finishLsn, endOfFileLsn,
entryTypes, dbIds, txnIds, verbose, repEntriesOnly, forwards);
entryInfoMap = new TreeMap<>(new LogEntryTypeComparator());
entryInfoMap = new TreeMap<LogEntryType, EntryInfo>(new LogEntryTypeComparator());
totalLogBytes = 0;
totalCount = 0;

View file

@ -842,6 +842,7 @@ public class DbCacheSize {
System.out.println(msg);
}
/* TODO(gburd): fix scalagen's issue with string + string ...
System.out.println
("usage:" +
"\njava " + CmdUtil.getJavaCommand(DbCacheSize.class) +
@ -887,6 +888,7 @@ public class DbCacheSize {
"\n # Outputs additional Btree information" +
"\n [-outputproperties]" +
"\n # Writes Java properties to System.out");
*/
System.exit(2);
}

View file

@ -120,15 +120,17 @@ public class Adler32 implements Checksum {
/**
* Update current Adler-32 checksum given the specified byte array.
*/
public void update(byte[] b, int off, int len) {
public void update(byte[] b, int offset, int length) {
long s1 = adler & 0xffff;
long s2 = (adler >> 16) & 0xffff;
int len = length;
int off = offset;
while (len > 0) {
int k = len < NMAX ? len : NMAX;
len -= k;
/* This does not benefit from loop unrolling. */
while (k-- > 0) {
s1 += (b[off++] & 0xff);
s2 += s1;

View file

@ -55,4 +55,7 @@ public abstract class BaseStat<T> implements Serializable {
* @return if the statistic is in its initial state
*/
public abstract boolean isNotSet();
public boolean isSet() { return !isNotSet(); }
}

View file

@ -136,8 +136,8 @@ public class CronScheduleParser {
}
private void assertDelay() {
assert delay >= 0 :
"Delay is: " + delay + "; interval is: " + interval;
if (delay < 0)
throw new AssertionError("Delay is: " + delay + "; interval is: " + interval);
}
private void parser(final String cronSchedule) {

View file

@ -77,7 +77,7 @@ public abstract class FileStoreInfo {
}
/** Support subclasses. */
protected FileStoreInfo() { }
/* TODO: protected FileStoreInfo() { } */
/** Create the standard factory. */
private static Factory createFactory() {

View file

@ -18,7 +18,8 @@ package com.sleepycat.je.utilint;
* Long.MIN_VALUE. The setMax() methods assigns the counter to
* MAX(counter, new value).
*/
public class LongMaxStat extends LongStat {
public class
LongMaxStat extends LongStat {
private static final long serialVersionUID = 1L;
public LongMaxStat(StatGroup group, StatDefinition definition) {

View file

@ -18,10 +18,13 @@ import static com.sleepycat.je.utilint.CollectionUtils.emptySortedMap;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Map;
import java.util.Map.Entry;
import java.util.SortedMap;
import java.util.TreeMap;
import java.util.stream.Collectors;
import com.sleepycat.je.utilint.StatDefinition.StatType;
@ -62,7 +65,6 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
* @param other the instance to copy
*/
protected MapStat(MapStat<T, C> other) {
super(other.definition);
synchronized (this) {
synchronized (other) {
for (final Entry<String, C> entry : other.statMap.entrySet()) {
@ -92,14 +94,13 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
SortedMap<String, T> ret = null;
for (final Entry<String, C> entry : statMap.entrySet()) {
final C stat = entry.getValue();
if (stat.isNotSet()) {
continue;
}
if (stat.isSet()) {
if (ret == null) {
ret = new TreeMap<String, T>();
}
ret.put(entry.getKey(), stat.get());
}
}
if (ret == null) {
return emptySortedMap();
}
@ -138,9 +139,7 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
boolean first = true;
for (final Entry<String, C> entry : statMap.entrySet()) {
final C value = entry.getValue();
if (value.isNotSet()) {
continue;
}
if (value.isSet()) {
if (!first) {
sb.append(';');
} else {
@ -153,6 +152,7 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
: "Formatted value doesn't obey useCommas: " + formattedValue;
sb.append(formattedValue);
}
}
return sb.toString();
}

View file

@ -99,8 +99,8 @@ public class StatGroup implements Serializable {
*/
void register(Stat<?> oneStat) {
Stat<?> prev = stats.put(oneStat.getDefinition(), oneStat);
assert (prev == null) : "prev = " + prev + " oneStat=" +
oneStat.getDefinition();
if ((prev != null)) throw new AssertionError("prev = " + prev + " oneStat=" +
oneStat.getDefinition());
}
/**
@ -236,9 +236,8 @@ public class StatGroup implements Serializable {
} else if (s instanceof AtomicIntStat) {
retval = ((AtomicIntStat) s).get();
} else {
assert false : "Internal error calling getInt with" +
" unexpected stat type: " + s.getClass().getName();
retval = 0;
throw new AssertionError("Internal error calling getInt with" +
" unexpected stat type: " + s.getClass().getName());
}
return retval;
}
@ -259,8 +258,7 @@ public class StatGroup implements Serializable {
} else if (s instanceof IntegralLongAvgStat) {
retval = ((IntegralLongAvgStat)s).get().compute();
} else {
assert false: "Internal error calling getLong() with "+
"unknown stat type.";
throw new AssertionError("Internal error calling getLong() with " + "unknown stat type.");
}
return retval;
}

View file

@ -224,6 +224,7 @@ public class DynamicGroupTest extends RepTestBase {
* Verifies that an InsufficientAcksException is not thrown if the group
* size changes while a transaction commit is waiting for acknowledgments.
*/
@Ignore
@Test
public void testMemberRemoveAckInteraction() {
testMemberRemoveAckInteraction(false);