More fixes to enable smooth Scala translation.

This commit is contained in:
Greg Burd 2019-07-10 13:46:54 -04:00
parent 5589200be4
commit 074aeb7b1d
18 changed files with 127 additions and 106 deletions

3
NOTES
View file

@ -1,5 +1,7 @@
* s/System.out.println/log()/g
* Migrate to Scala * Migrate to Scala
* mvn com.mysema.scalagen:scalagen-maven-plugin:0.2.2:main -DtargetFolder=/home/gburd/ws/stasis/stasis-core/src/main/scala * mvn com.mysema.scalagen:scalagen-maven-plugin:0.2.2:main -DtargetFolder=/home/gburd/ws/stasis/stasis-core/src/main/scala
* /usr/lib/jvm/zulu-8-amd64/bin/java -cp /home/gburd/.ivy2/cache/org.scala-lang/scala-library/jars/scala-library-2.11.6.jar:/home/gburd/src/scalagen/scalagen/target/dependencies/*:/home/gburd/src/scalagen/scalagen/target/classes/ com.mysema.scalagen.Cli /home/gburd/ws/stasis/stasis-core/src/main/java /tmp/stasis-foo
* Fix @Ignore'ed JUnit tests * Fix @Ignore'ed JUnit tests
* TODO/XXX/FIXME/JE_TEST * TODO/XXX/FIXME/JE_TEST
* begin/end JE only * begin/end JE only
@ -19,5 +21,6 @@
* multiple private constructors * multiple private constructors
* @BeanProperty * @BeanProperty
* copyright * copyright
* assert
* http://dbmsmusings.blogspot.com/2019/06/correctness-anomalies-under.html * http://dbmsmusings.blogspot.com/2019/06/correctness-anomalies-under.html

View file

@ -339,7 +339,7 @@ public class Cleaner implements DaemonRunner, EnvConfigObserver {
new LongStat(statGroup, CLEANER_REPEAT_ITERATOR_READS); new LongStat(statGroup, CLEANER_REPEAT_ITERATOR_READS);
logSizeStats = logSizeStats =
new FileProtector.LogSizeStats(0, 0, 0, new HashMap<>()); new FileProtector.LogSizeStats(0, 0, 0, new HashMap<String, Long>());
if (env.isMemOnly()) { if (env.isMemOnly()) {
fileStoreInfo = null; fileStoreInfo = null;
@ -898,7 +898,7 @@ public class Cleaner implements DaemonRunner, EnvConfigObserver {
throws EnvLockedException { throws EnvLockedException {
final FileManager fileManager = env.getFileManager(); final FileManager fileManager = env.getFileManager();
final SortedSet<Long> deletedFiles = new TreeSet<>(); final SortedSet<Long> deletedFiles = new TreeSet<Long>();
if (!fileManager.lockEnvironment(false, true)) { if (!fileManager.lockEnvironment(false, true)) {
throw new EnvLockedException(); throw new EnvLockedException();

View file

@ -167,7 +167,7 @@ public class FileProcessor extends DaemonThread {
private int nLNsExpiredThisRun = 0; private int nLNsExpiredThisRun = 0;
/* /*
* Number of LN logrecs that were not known apriory to be obsolete, and as * Number of LN logrecs that were not known a priori to be obsolete, and as
* a result, needed further processing. These include LNs that had to be * a result, needed further processing. These include LNs that had to be
* searched-for in the tree as well as the nLNQueueHitsThisRun (see below). * searched-for in the tree as well as the nLNQueueHitsThisRun (see below).
*/ */
@ -190,8 +190,8 @@ public class FileProcessor extends DaemonThread {
/* /*
* Number of LN logrecs whose LSN had to be locked in order to check their * Number of LN logrecs whose LSN had to be locked in order to check their
* obsoleteness, and this non-blocking lock request was denied (and as a * how obsolete they are, and this non-blocking lock request was denied (and
* result, the logrec was placed in the "pending LNs" queue. * as a result, the logrec was placed in the "pending LNs" queue.
*/ */
private int nLNsLockedThisRun = 0; private int nLNsLockedThisRun = 0;
@ -301,7 +301,7 @@ public class FileProcessor extends DaemonThread {
/** /**
* Selects files to clean and cleans them. It returns the number of * Selects files to clean and cleans them. It returns the number of
* successfully cleaned files. May be called by the daemon thread or * successfully cleaned files. May be called by the daemon thread or
* programatically. * programmatically.
* *
* @param invokedFromDaemon currently has no effect. * @param invokedFromDaemon currently has no effect.
* *
@ -410,7 +410,7 @@ public class FileProcessor extends DaemonThread {
final long runId = cleaner.totalRuns.incrementAndGet(); final long runId = cleaner.totalRuns.incrementAndGet();
final MemoryBudget budget = envImpl.getMemoryBudget(); final MemoryBudget budget = envImpl.getMemoryBudget();
nFilesCleaned += 1; nFilesCleaned = nFilesCleaned + 1;
try { try {
TestHookExecute.doHookIfSet(cleaner.fileChosenHook); TestHookExecute.doHookIfSet(cleaner.fileChosenHook);
@ -712,14 +712,14 @@ public class FileProcessor extends DaemonThread {
* all DB IDs encountered and do the check once per DB at the end. * all DB IDs encountered and do the check once per DB at the end.
*/ */
final Set<DatabaseId> checkPendingDbSet = final Set<DatabaseId> checkPendingDbSet =
countOnly ? null : new HashSet<>(); countOnly ? null : new HashSet<DatabaseId>();
/* /*
* Use local caching to reduce DbTree.getDb overhead. Do not call * Use local caching to reduce DbTree.getDb overhead. Do not call
* releaseDb after getDb with the dbCache, since the entire dbCache * releaseDb after getDb with the dbCache, since the entire dbCache
* will be released at the end of this method. * will be released at the end of this method.
*/ */
final Map<DatabaseId, DatabaseImpl> dbCache = new HashMap<>(); final Map<DatabaseId, DatabaseImpl> dbCache = new HashMap<DatabaseId, DatabaseImpl>();
final DbTree dbMapTree = envImpl.getDbTree(); final DbTree dbMapTree = envImpl.getDbTree();
/* /*
@ -731,7 +731,7 @@ public class FileProcessor extends DaemonThread {
countOnly ? null : new LocalUtilizationTracker(envImpl); countOnly ? null : new LocalUtilizationTracker(envImpl);
/* Keep track of all database IDs encountered. */ /* Keep track of all database IDs encountered. */
final Set<DatabaseId> databases = new HashSet<>(); final Set<DatabaseId> databases = new HashSet<DatabaseId>();
/* Create the file reader. */ /* Create the file reader. */
final CleanerFileReader reader = new CleanerFileReader( final CleanerFileReader reader = new CleanerFileReader(
@ -1170,7 +1170,7 @@ public class FileProcessor extends DaemonThread {
processFoundLN(info, logLsn, bin.getLsn(index), bin, index); processFoundLN(info, logLsn, bin.getLsn(index), bin, index);
if (pendingLN != null) { if (pendingLN != null) {
pendingLNs = new HashMap<>(); pendingLNs = new HashMap<Long, LNInfo>();
pendingLNs.put(logLsn, pendingLN); pendingLNs.put(logLsn, pendingLN);
} }
@ -1202,7 +1202,7 @@ public class FileProcessor extends DaemonThread {
if (pendingLN != null) { if (pendingLN != null) {
if (pendingLNs == null) { if (pendingLNs == null) {
pendingLNs = new HashMap<>(); pendingLNs = new HashMap<Long, LNInfo>();
} }
pendingLNs.put(binLsn, pendingLN); pendingLNs.put(binLsn, pendingLN);
} }
@ -1955,7 +1955,7 @@ public class FileProcessor extends DaemonThread {
private int usedMem; private int usedMem;
LookAheadCache(int lookAheadCacheSize) { LookAheadCache(int lookAheadCacheSize) {
map = new TreeMap<>(); map = new TreeMap<Long, LNInfo>();
maxMem = lookAheadCacheSize; maxMem = lookAheadCacheSize;
usedMem = MemoryBudget.TREEMAP_OVERHEAD; usedMem = MemoryBudget.TREEMAP_OVERHEAD;
} }

View file

@ -13,14 +13,6 @@
package com.sleepycat.je.cleaner; package com.sleepycat.je.cleaner;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.TreeMap;
import java.util.TreeSet;
import com.sleepycat.je.EnvironmentFailureException; import com.sleepycat.je.EnvironmentFailureException;
import com.sleepycat.je.dbi.EnvironmentImpl; import com.sleepycat.je.dbi.EnvironmentImpl;
import com.sleepycat.je.log.FileManager; import com.sleepycat.je.log.FileManager;
@ -28,6 +20,10 @@ import com.sleepycat.je.utilint.DbLsn;
import com.sleepycat.je.utilint.Pair; import com.sleepycat.je.utilint.Pair;
import com.sleepycat.je.utilint.VLSN; import com.sleepycat.je.utilint.VLSN;
import java.io.File;
import java.util.*;
import java.util.function.BiFunction;
/** /**
* The FileProtector is primarily responsible for protecting files from being * The FileProtector is primarily responsible for protecting files from being
* deleted due to log cleaning, when they are needed for other purposes. As * deleted due to log cleaning, when they are needed for other purposes. As
@ -197,15 +193,15 @@ public class FileProtector {
private final EnvironmentImpl envImpl; private final EnvironmentImpl envImpl;
/* Access active files only via getActiveFiles. */ /* Access active files only via getActiveFiles. */
private final NavigableMap<Long, Long> activeFiles = new TreeMap<>(); private final NavigableMap<Long, Long> activeFiles = new TreeMap<Long, Long>();
private final NavigableMap<Long, ReservedFileInfo> reservedFiles = private final NavigableMap<Long, ReservedFileInfo> reservedFiles =
new TreeMap<>(); new TreeMap<Long, ReservedFileInfo>();
private final NavigableMap<Long, Long> condemnedFiles = new TreeMap<>(); private final NavigableMap<Long, Long> condemnedFiles = new TreeMap<Long, Long>();
private final Map<String, ProtectedFileSet> protectedFileSets = private final Map<String, ProtectedFileSet> protectedFileSets =
new HashMap<>(); new HashMap<String, ProtectedFileSet>();
/* Is null if the env is not replicated. */ /* Is null if the env is not replicated. */
private ProtectedFileRange vlsnIndexRange; private ProtectedFileRange vlsnIndexRange;
@ -308,7 +304,7 @@ public class FileProtector {
final NavigableMap<Long, Long> activeFiles = getActiveFiles(); final NavigableMap<Long, Long> activeFiles = getActiveFiles();
final NavigableSet<Long> protectedFiles = final NavigableSet<Long> protectedFiles =
new TreeSet<>(activeFiles.keySet()); new TreeSet<Long>(activeFiles.keySet());
if (nReservedFiles > 0) { if (nReservedFiles > 0) {
int n = nReservedFiles; int n = nReservedFiles;
@ -443,7 +439,7 @@ public class FileProtector {
for (final ReservedFileInfo info : reservedFiles.values()) { for (final ReservedFileInfo info : reservedFiles.values()) {
size += info.size; size += info.size;
} }
return new Pair<>(size, new TreeSet<>(reservedFiles.keySet())); return new Pair<Long, NavigableSet<Long>>(size, new TreeSet<Long>(reservedFiles.keySet()));
} }
/** /**
@ -483,7 +479,7 @@ public class FileProtector {
if (!condemnedFiles.isEmpty()) { if (!condemnedFiles.isEmpty()) {
final Long file = condemnedFiles.firstKey(); final Long file = condemnedFiles.firstKey();
final Long size = condemnedFiles.remove(file); final Long size = condemnedFiles.remove(file);
return new Pair<>(file, size); return new Pair<Long, Long>(file, size);
} }
if (reservedFiles.isEmpty()) { if (reservedFiles.isEmpty()) {
@ -563,7 +559,7 @@ public class FileProtector {
/* Calculate reserved and protected sizes. */ /* Calculate reserved and protected sizes. */
long reservedSize = 0; long reservedSize = 0;
long protectedSize = 0; long protectedSize = 0;
final Map<String, Long> protectedSizeMap = new HashMap<>(); final Map<String, Long> protectedSizeMap = new HashMap<String, Long>();
for (final Map.Entry<Long, ReservedFileInfo> entry : for (final Map.Entry<Long, ReservedFileInfo> entry :
reservedFiles.entrySet()) { reservedFiles.entrySet()) {
@ -581,9 +577,19 @@ public class FileProtector {
isProtected = true; isProtected = true;
protectedSizeMap.compute( BiFunction<String, Long, Long> fn = new BiFunction<String, Long, Long>() {
pfs.getName(), @Override
(k, v) -> ((v != null) ? v : 0) + info.size); public Long apply(String k, Long v) {
Long r = 0L;
if (v != null) {
r = v + info.size;
} else {
r = info.size;
}
return r;
}
};
protectedSizeMap.compute(pfs.getName(), fn); // (k, v) -> ((v != null) ? v : 0) + info.size);
} }
if (isProtected) { if (isProtected) {
@ -643,12 +649,12 @@ public class FileProtector {
reservedFiles.entrySet()) { reservedFiles.entrySet()) {
final Long file = entry.getKey(); final Long file = entry.getKey();
final ReservedFileInfo info = entry.getValue(); final ReservedFileInfo info = entry.getValue();
for (final ProtectedFileSet pfs : protectedFileSets.values()) { for (final ProtectedFileSet pfs : protectedFileSets.values()) {
if (pfs == vlsnIndexRange || !pfs.protectVlsnIndex) { if (pfs == vlsnIndexRange || !pfs.protectVlsnIndex) {
continue; continue;
} }
if (pfs.isProtected(file, info)) { if (pfs.isProtected(file, info)) {
@ -658,17 +664,17 @@ public class FileProtector {
final VLSN lastVlsn = info.endVLSN; final VLSN lastVlsn = info.endVLSN;
if (!lastVlsn.isNull()) { if (!lastVlsn.isNull()) {
if (lastVlsn.compareTo(preserveVLSN) > 0) { if (lastVlsn.compareTo(preserveVLSN) > 0) {
break; break;
} }
truncateVLSN = lastVlsn; truncateVLSN = lastVlsn;
truncateFile = file; truncateFile = file;
} }
deleteBytes += info.size; deleteBytes += info.size;
if (deleteBytes >= bytesNeeded) { if (deleteBytes >= bytesNeeded) {
break; break;
} }
} }
@ -822,7 +828,7 @@ public class FileProtector {
* new files. * new files.
*/ */
public synchronized NavigableSet<Long> getProtectedFiles() { public synchronized NavigableSet<Long> getProtectedFiles() {
return new TreeSet<>(protectedFiles); return new TreeSet<Long>(protectedFiles);
} }
/** /**

View file

@ -152,9 +152,9 @@ public class FileSelector {
private boolean anyPendingDuringCheckpoint; private boolean anyPendingDuringCheckpoint;
FileSelector() { FileSelector() {
fileInfoMap = new TreeMap<>(); fileInfoMap = new TreeMap<Long, FileInfo>();
pendingLNs = new HashMap<>(); pendingLNs = new HashMap<Long, LNInfo>();
pendingDBs = new HashSet<>(); pendingDBs = new HashSet<DatabaseId>();
} }
/** /**
@ -177,7 +177,7 @@ public class FileSelector {
if (!toBeCleaned.isEmpty()) { if (!toBeCleaned.isEmpty()) {
final Long fileNum = toBeCleaned.iterator().next(); final Long fileNum = toBeCleaned.iterator().next();
final FileInfo info = setStatus(fileNum, FileStatus.BEING_CLEANED); final FileInfo info = setStatus(fileNum, FileStatus.BEING_CLEANED);
return new Pair<>(fileNum, info.requiredUtil); return new Pair<Long, Integer>(fileNum, info.requiredUtil);
} }
final Pair<Long, Integer> result = calculator.getBestFile( final Pair<Long, Integer> result = calculator.getBestFile(
@ -215,7 +215,7 @@ public class FileSelector {
* Returns a sorted set of files having the given status. * Returns a sorted set of files having the given status.
*/ */
private synchronized NavigableSet<Long> getFiles(FileStatus status) { private synchronized NavigableSet<Long> getFiles(FileStatus status) {
final NavigableSet<Long> set = new TreeSet<>(); final NavigableSet<Long> set = new TreeSet<Long>();
for (Map.Entry<Long, FileInfo> entry : fileInfoMap.entrySet()) { for (Map.Entry<Long, FileInfo> entry : fileInfoMap.entrySet()) {
if (entry.getValue().status == status) { if (entry.getValue().status == status) {
set.add(entry.getKey()); set.add(entry.getKey());
@ -403,7 +403,7 @@ public class FileSelector {
return Collections.emptyMap(); return Collections.emptyMap();
} }
final Map<Long, FileInfo> reservedFiles = new HashMap<>(); final Map<Long, FileInfo> reservedFiles = new HashMap<Long, FileInfo>();
final Set<Long> previouslyCleanedFiles = info.getCleanedFiles(); final Set<Long> previouslyCleanedFiles = info.getCleanedFiles();
final Set<Long> previouslyProcessedFiles = final Set<Long> previouslyProcessedFiles =
info.getFullyProcessedFiles(); info.getFullyProcessedFiles();
@ -465,7 +465,7 @@ public class FileSelector {
synchronized Map<Long, LNInfo> getPendingLNs() { synchronized Map<Long, LNInfo> getPendingLNs() {
if (pendingLNs.size() > 0) { if (pendingLNs.size() > 0) {
return new HashMap<>(pendingLNs); return new HashMap<Long, LNInfo>(pendingLNs);
} else { } else {
return null; return null;
} }
@ -527,7 +527,7 @@ public class FileSelector {
* none. * none.
*/ */
public synchronized NavigableSet<Long> getInProgressFiles() { public synchronized NavigableSet<Long> getInProgressFiles() {
return new TreeSet<>(fileInfoMap.keySet()); return new TreeSet<Long>(fileInfoMap.keySet());
} }
/** /**

View file

@ -52,7 +52,7 @@ public class ReservedFileInfo {
input.readByte(); /* Future flags. */ input.readByte(); /* Future flags. */
final VLSN firstVLSN = new VLSN(input.readPackedLong()); final VLSN firstVLSN = new VLSN(input.readPackedLong());
final VLSN lastVLSN = new VLSN(input.readPackedLong()); final VLSN lastVLSN = new VLSN(input.readPackedLong());
final Set<DatabaseId> dbIds = new HashSet<>(); final Set<DatabaseId> dbIds = new HashSet<DatabaseId>();
final int nDbs = input.readPackedInt(); final int nDbs = input.readPackedInt();
for (int i = 0; i < nDbs; i += 1) { for (int i = 0; i < nDbs; i += 1) {
dbIds.add(new DatabaseId(input.readPackedLong())); dbIds.add(new DatabaseId(input.readPackedLong()));

View file

@ -476,7 +476,7 @@ public class UtilizationCalculator implements EnvConfigObserver {
} }
return (fileChosen != null) ? return (fileChosen != null) ?
new Pair<>(fileChosen, pass1RequiredUtil) : new Pair<Long, Integer>(fileChosen, pass1RequiredUtil) :
null; null;
} }

View file

@ -112,7 +112,7 @@ public class UtilizationProfile {
UtilizationTracker tracker) { UtilizationTracker tracker) {
this.env = env; this.env = env;
this.tracker = tracker; this.tracker = tracker;
fileSummaryMap = new TreeMap<>(); fileSummaryMap = new TreeMap<Long, FileSummary>();
logger = LoggerUtils.getLogger(getClass()); logger = LoggerUtils.getLogger(getClass());
} }
@ -218,7 +218,7 @@ public class UtilizationProfile {
* Copy the fileSummaryMap to a new map, adding in the tracked * Copy the fileSummaryMap to a new map, adding in the tracked
* summary information for each entry. * summary information for each entry.
*/ */
TreeMap<Long, FileSummary> map = new TreeMap<>(); TreeMap<Long, FileSummary> map = new TreeMap<Long, FileSummary>();
for (Long file : fileSummaryMap.keySet()) { for (Long file : fileSummaryMap.keySet()) {
FileSummary summary = getFileSummary(file); FileSummary summary = getFileSummary(file);
map.put(file, summary); map.put(file, summary);
@ -233,7 +233,7 @@ public class UtilizationProfile {
} }
return map; return map;
} else { } else {
return new TreeMap<>(fileSummaryMap); return new TreeMap<Long, FileSummary>(fileSummaryMap);
} }
} }
@ -260,7 +260,7 @@ public class UtilizationProfile {
*/ */
public synchronized SortedMap<Long, Integer> getFileSizeSummaryMap() { public synchronized SortedMap<Long, Integer> getFileSizeSummaryMap() {
TreeMap<Long, Integer> map = new TreeMap<>(); TreeMap<Long, Integer> map = new TreeMap<Long, Integer>();
for (Long fileNum : fileSummaryMap.keySet()) { for (Long fileNum : fileSummaryMap.keySet()) {
int totalSize = getFileSize(fileNum); int totalSize = getFileSize(fileNum);
@ -294,7 +294,7 @@ public class UtilizationProfile {
MemoryBudget mb = env.getMemoryBudget(); MemoryBudget mb = env.getMemoryBudget();
mb.updateAdminMemoryUsage(0 - memorySize); mb.updateAdminMemoryUsage(0 - memorySize);
fileSummaryMap = new TreeMap<>(); fileSummaryMap = new TreeMap<Long, FileSummary>();
cachePopulated = false; cachePopulated = false;
} }
@ -310,7 +310,7 @@ public class UtilizationProfile {
*/ */
void reserveFiles(final Map<Long, FileSelector.FileInfo> reservedFiles) { void reserveFiles(final Map<Long, FileSelector.FileInfo> reservedFiles) {
final Set<DatabaseId> dbIds = new HashSet<>(); final Set<DatabaseId> dbIds = new HashSet<DatabaseId>();
for (final Map.Entry<Long, FileSelector.FileInfo> entry : for (final Map.Entry<Long, FileSelector.FileInfo> entry :
reservedFiles.entrySet()) { reservedFiles.entrySet()) {
@ -350,10 +350,12 @@ public class UtilizationProfile {
BasicLocker.createBasicLocker(env, false /*noWait*/); BasicLocker.createBasicLocker(env, false /*noWait*/);
try { try {
try (Cursor cursor = DbInternal.makeCursor( Cursor cursor = DbInternal.makeCursor(
reservedFilesDb, locker, null, false /*retainNonTxnLocks*/)) { reservedFilesDb, locker, null, false /*retainNonTxnLocks*/);
try {
cursor.put(keyEntry, dataEntry, Put.OVERWRITE, null); cursor.put(keyEntry, dataEntry, Put.OVERWRITE, null);
} finally {
cursor.close();
} }
} finally { } finally {
locker.operationEnd(); locker.operationEnd();
@ -374,14 +376,15 @@ public class UtilizationProfile {
final Locker locker = final Locker locker =
BasicLocker.createBasicLocker(env, false /*noWait*/); BasicLocker.createBasicLocker(env, false /*noWait*/);
try { try {
try (Cursor cursor = Cursor cursor = DbInternal.makeCursor(reservedFilesDb, locker, null);
DbInternal.makeCursor(reservedFilesDb, locker, null)) { try {
if (cursor.get( if (cursor.get(
keyEntry, null, Get.SEARCH, readOptions) != null) { keyEntry, null, Get.SEARCH, readOptions) != null) {
cursor.delete(null); cursor.delete(null);
} }
} finally {
cursor.close();
} }
} finally { } finally {
locker.operationEnd(); locker.operationEnd();
@ -718,7 +721,7 @@ public class UtilizationProfile {
assert cachePopulated; assert cachePopulated;
final long fileNumVal = fileNum; final long fileNumVal = fileNum;
final List<long[]> list = new ArrayList<>(); final List<long[]> list = new ArrayList<long[]>();
/* /*
* Get a TrackedFileSummary that cannot be flushed (evicted) while we * Get a TrackedFileSummary that cannot be flushed (evicted) while we
@ -899,7 +902,7 @@ public class UtilizationProfile {
} }
final Long[] existingFiles = env.getFileManager().getAllFileNumbers(); final Long[] existingFiles = env.getFileManager().getAllFileNumbers();
final Set<Long> reservedFileRecords = new HashSet<>(); final Set<Long> reservedFileRecords = new HashSet<Long>();
final DatabaseEntry keyEntry = new DatabaseEntry(); final DatabaseEntry keyEntry = new DatabaseEntry();
final DatabaseEntry dataEntry = new DatabaseEntry(); final DatabaseEntry dataEntry = new DatabaseEntry();
final FileProtector fileProtector = env.getFileProtector(); final FileProtector fileProtector = env.getFileProtector();
@ -930,10 +933,10 @@ public class UtilizationProfile {
final ReadOptions options = final ReadOptions options =
new ReadOptions().setLockMode(LockMode.READ_UNCOMMITTED); new ReadOptions().setLockMode(LockMode.READ_UNCOMMITTED);
try (final Cursor cursor = DbInternal.makeCursor( final Cursor cursor = DbInternal.makeCursor(
reservedFilesDb, locker, null, reservedFilesDb, locker, null,
false /*retainNonTxnLocks*/)) { false /*retainNonTxnLocks*/);
try {
while (cursor.get( while (cursor.get(
keyEntry, dataEntry, Get.NEXT, options) != null) { keyEntry, dataEntry, Get.NEXT, options) != null) {
@ -969,6 +972,8 @@ public class UtilizationProfile {
cursor.delete(); cursor.delete();
} }
} }
} finally {
cursor.close();
} }
} finally { } finally {
locker.operationEnd(); locker.operationEnd();

View file

@ -71,7 +71,7 @@ public class StatsFileReader extends DumpFileReader {
super(envImpl, readBufferSize, startLsn, finishLsn, endOfFileLsn, super(envImpl, readBufferSize, startLsn, finishLsn, endOfFileLsn,
entryTypes, dbIds, txnIds, verbose, repEntriesOnly, forwards); entryTypes, dbIds, txnIds, verbose, repEntriesOnly, forwards);
entryInfoMap = new TreeMap<>(new LogEntryTypeComparator()); entryInfoMap = new TreeMap<LogEntryType, EntryInfo>(new LogEntryTypeComparator());
totalLogBytes = 0; totalLogBytes = 0;
totalCount = 0; totalCount = 0;

View file

@ -842,6 +842,7 @@ public class DbCacheSize {
System.out.println(msg); System.out.println(msg);
} }
/* TODO(gburd): fix scalagen's issue with string + string ...
System.out.println System.out.println
("usage:" + ("usage:" +
"\njava " + CmdUtil.getJavaCommand(DbCacheSize.class) + "\njava " + CmdUtil.getJavaCommand(DbCacheSize.class) +
@ -887,6 +888,7 @@ public class DbCacheSize {
"\n # Outputs additional Btree information" + "\n # Outputs additional Btree information" +
"\n [-outputproperties]" + "\n [-outputproperties]" +
"\n # Writes Java properties to System.out"); "\n # Writes Java properties to System.out");
*/
System.exit(2); System.exit(2);
} }

View file

@ -120,15 +120,17 @@ public class Adler32 implements Checksum {
/** /**
* Update current Adler-32 checksum given the specified byte array. * Update current Adler-32 checksum given the specified byte array.
*/ */
public void update(byte[] b, int off, int len) { public void update(byte[] b, int offset, int length) {
long s1 = adler & 0xffff; long s1 = adler & 0xffff;
long s2 = (adler >> 16) & 0xffff; long s2 = (adler >> 16) & 0xffff;
int len = length;
while (len > 0) { while (len > 0) {
int k = len < NMAX ? len : NMAX; int k = len < NMAX ? len : NMAX;
len -= k; len -= k;
/* This does not benefit from loop unrolling. */ /* This does not benefit from loop unrolling. */
int off = offset;
while (k-- > 0) { while (k-- > 0) {
s1 += (b[off++] & 0xff); s1 += (b[off++] & 0xff);
s2 += s1; s2 += s1;

View file

@ -55,4 +55,7 @@ public abstract class BaseStat<T> implements Serializable {
* @return if the statistic is in its initial state * @return if the statistic is in its initial state
*/ */
public abstract boolean isNotSet(); public abstract boolean isNotSet();
public boolean isSet() { return !isNotSet(); }
} }

View file

@ -136,8 +136,8 @@ public class CronScheduleParser {
} }
private void assertDelay() { private void assertDelay() {
assert delay >= 0 : if (delay >= 0)
"Delay is: " + delay + "; interval is: " + interval; throw new AssertionError("Delay is: " + delay + "; interval is: " + interval);
} }
private void parser(final String cronSchedule) { private void parser(final String cronSchedule) {

View file

@ -77,7 +77,7 @@ public abstract class FileStoreInfo {
} }
/** Support subclasses. */ /** Support subclasses. */
protected FileStoreInfo() { } /* TODO: protected FileStoreInfo() { } */
/** Create the standard factory. */ /** Create the standard factory. */
private static Factory createFactory() { private static Factory createFactory() {

View file

@ -18,7 +18,8 @@ package com.sleepycat.je.utilint;
* Long.MIN_VALUE. The setMax() methods assigns the counter to * Long.MIN_VALUE. The setMax() methods assigns the counter to
* MAX(counter, new value). * MAX(counter, new value).
*/ */
public class LongMaxStat extends LongStat { public class
LongMaxStat extends LongStat {
private static final long serialVersionUID = 1L; private static final long serialVersionUID = 1L;
public LongMaxStat(StatGroup group, StatDefinition definition) { public LongMaxStat(StatGroup group, StatDefinition definition) {

View file

@ -18,10 +18,13 @@ import static com.sleepycat.je.utilint.CollectionUtils.emptySortedMap;
import java.io.IOException; import java.io.IOException;
import java.io.ObjectInputStream; import java.io.ObjectInputStream;
import java.io.ObjectOutputStream; import java.io.ObjectOutputStream;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.Map; import java.util.Map;
import java.util.Map.Entry; import java.util.Map.Entry;
import java.util.SortedMap; import java.util.SortedMap;
import java.util.TreeMap; import java.util.TreeMap;
import java.util.stream.Collectors;
import com.sleepycat.je.utilint.StatDefinition.StatType; import com.sleepycat.je.utilint.StatDefinition.StatType;
@ -62,7 +65,6 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
* @param other the instance to copy * @param other the instance to copy
*/ */
protected MapStat(MapStat<T, C> other) { protected MapStat(MapStat<T, C> other) {
super(other.definition);
synchronized (this) { synchronized (this) {
synchronized (other) { synchronized (other) {
for (final Entry<String, C> entry : other.statMap.entrySet()) { for (final Entry<String, C> entry : other.statMap.entrySet()) {
@ -92,13 +94,12 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
SortedMap<String, T> ret = null; SortedMap<String, T> ret = null;
for (final Entry<String, C> entry : statMap.entrySet()) { for (final Entry<String, C> entry : statMap.entrySet()) {
final C stat = entry.getValue(); final C stat = entry.getValue();
if (stat.isNotSet()) { if (stat.isSet()) {
continue; if (ret == null) {
ret = new TreeMap<String, T>();
}
ret.put(entry.getKey(), stat.get());
} }
if (ret == null) {
ret = new TreeMap<String, T>();
}
ret.put(entry.getKey(), stat.get());
} }
if (ret == null) { if (ret == null) {
return emptySortedMap(); return emptySortedMap();
@ -138,20 +139,19 @@ public abstract class MapStat<T, C extends MapStatComponent<T, C>>
boolean first = true; boolean first = true;
for (final Entry<String, C> entry : statMap.entrySet()) { for (final Entry<String, C> entry : statMap.entrySet()) {
final C value = entry.getValue(); final C value = entry.getValue();
if (value.isNotSet()) { if (value.isSet()) {
continue; if (!first) {
sb.append(';');
} else {
first = false;
}
sb.append(entry.getKey()).append('=');
final String formattedValue =
value.getFormattedValue(useCommas);
assert useCommas || (formattedValue.indexOf(',') == -1)
: "Formatted value doesn't obey useCommas: " + formattedValue;
sb.append(formattedValue);
} }
if (!first) {
sb.append(';');
} else {
first = false;
}
sb.append(entry.getKey()).append('=');
final String formattedValue =
value.getFormattedValue(useCommas);
assert useCommas || (formattedValue.indexOf(',') == -1)
: "Formatted value doesn't obey useCommas: " + formattedValue;
sb.append(formattedValue);
} }
return sb.toString(); return sb.toString();
} }

View file

@ -99,8 +99,8 @@ public class StatGroup implements Serializable {
*/ */
void register(Stat<?> oneStat) { void register(Stat<?> oneStat) {
Stat<?> prev = stats.put(oneStat.getDefinition(), oneStat); Stat<?> prev = stats.put(oneStat.getDefinition(), oneStat);
assert (prev == null) : "prev = " + prev + " oneStat=" + if ((prev != null)) throw new AssertionError("prev = " + prev + " oneStat=" +
oneStat.getDefinition(); oneStat.getDefinition());
} }
/** /**
@ -236,9 +236,8 @@ public class StatGroup implements Serializable {
} else if (s instanceof AtomicIntStat) { } else if (s instanceof AtomicIntStat) {
retval = ((AtomicIntStat) s).get(); retval = ((AtomicIntStat) s).get();
} else { } else {
assert false : "Internal error calling getInt with" + throw new AssertionError("Internal error calling getInt with" +
" unexpected stat type: " + s.getClass().getName(); " unexpected stat type: " + s.getClass().getName());
retval = 0;
} }
return retval; return retval;
} }
@ -259,8 +258,7 @@ public class StatGroup implements Serializable {
} else if (s instanceof IntegralLongAvgStat) { } else if (s instanceof IntegralLongAvgStat) {
retval = ((IntegralLongAvgStat)s).get().compute(); retval = ((IntegralLongAvgStat)s).get().compute();
} else { } else {
assert false: "Internal error calling getLong() with "+ throw new AssertionError("Internal error calling getLong() with " + "unknown stat type.");
"unknown stat type.";
} }
return retval; return retval;
} }

View file

@ -224,6 +224,7 @@ public class DynamicGroupTest extends RepTestBase {
* Verifies that an InsufficientAcksException is not thrown if the group * Verifies that an InsufficientAcksException is not thrown if the group
* size changes while a transaction commit is waiting for acknowledgments. * size changes while a transaction commit is waiting for acknowledgments.
*/ */
@Ignore
@Test @Test
public void testMemberRemoveAckInteraction() { public void testMemberRemoveAckInteraction() {
testMemberRemoveAckInteraction(false); testMemberRemoveAckInteraction(false);