Merge pull request #375 from fyrz/RocksJava-ColumnFamilyOptions-Extension-3.6

[RocksJava] ColumnFamilyOptions extension 3.6
main
Yueh-Hsuan Chiang 10 years ago
commit 9a03da773c
  1. java/Makefile (11 lines changed)
  2. java/RocksDBColumnFamilySample.java (91 lines changed)
  3. java/org/rocksdb/ColumnFamilyDescriptor.java (58 lines changed)
  4. java/org/rocksdb/ColumnFamilyOptions.java (647 lines changed)
  5. java/org/rocksdb/Options.java (14 lines changed)
  6. java/org/rocksdb/RocksDB.java (71 lines changed)
  7. java/org/rocksdb/test/ColumnFamilyOptionsTest.java (229 lines changed)
  8. java/org/rocksdb/test/ColumnFamilyTest.java (23 lines changed)
  9. java/org/rocksdb/test/KeyMayExistTest.java (9 lines changed)
  10. java/org/rocksdb/test/MergeTest.java (38 lines changed)
  11. java/org/rocksdb/test/MixedOptionsTest.java (51 lines changed)
  12. java/org/rocksdb/test/OptionsTest.java (206 lines changed)
  13. java/org/rocksdb/test/ReadOnlyTest.java (18 lines changed)
  14. java/rocksjni/options.cc (8 lines changed)
  15. java/rocksjni/portal.h (28 lines changed)
  16. java/rocksjni/rocksjni.cc (74 lines changed)

@ -5,6 +5,7 @@ NATIVE_JAVA_CLASSES = org.rocksdb.AbstractComparator\
org.rocksdb.BlockBasedTableConfig\
org.rocksdb.BloomFilter\
org.rocksdb.ColumnFamilyHandle\
org.rocksdb.ColumnFamilyOptions\
org.rocksdb.Comparator\
org.rocksdb.ComparatorOptions\
org.rocksdb.DBOptions\
@ -69,6 +70,12 @@ sample: java
@rm -rf /tmp/rocksdbjni
@rm -rf /tmp/rocksdbjni_not_found
column_family_sample: java
javac -cp $(ROCKSDB_JAR) RocksDBColumnFamilySample.java
@rm -rf /tmp/rocksdbjni
java -ea -Djava.library.path=.:../ -cp ".:./*" -Xcheck:jni RocksDBColumnFamilySample /tmp/rocksdbjni
@rm -rf /tmp/rocksdbjni
test: java
@rm -rf /tmp/rocksdbjni_*
javac org/rocksdb/test/*.java
@ -77,14 +84,16 @@ test: java
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.BlockBasedTableConfigTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.DBOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ColumnFamilyTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ColumnFamilyOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.FilterTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.FlushTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.KeyMayExistTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MemTableTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MergeTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MixedOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.OptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.PlainTableConfigTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ReadOnlyTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.MergeTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.ReadOptionsTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.RocksIteratorTest
java -ea -Djava.library.path=.:../ -cp "$(ROCKSDB_JAR):.:./*" org.rocksdb.test.SnapshotTest

@ -0,0 +1,91 @@
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
import org.rocksdb.*;
import java.util.ArrayList;
import java.util.List;
public class RocksDBColumnFamilySample {
static {
RocksDB.loadLibrary();
}
public static void main(String[] args) throws RocksDBException {
if (args.length < 1) {
System.out.println(
"usage: RocksDBColumnFamilySample db_path");
return;
}
String db_path = args[0];
System.out.println("RocksDBColumnFamilySample");
RocksDB db = null;
Options options = null;
ColumnFamilyHandle columnFamilyHandle = null;
WriteBatch wb = null;
try {
options = new Options().setCreateIfMissing(true);
db = RocksDB.open(options, db_path);
assert(db != null);
// create column family
columnFamilyHandle = db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf", new ColumnFamilyOptions()));
assert(columnFamilyHandle != null);
} finally {
if (columnFamilyHandle != null) {
columnFamilyHandle.dispose();
}
if (db != null) {
db.close();
db = null;
}
}
// open DB with two column families
List<ColumnFamilyDescriptor> columnFamilyDescriptors = new ArrayList<>();
// have to open default column family
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(
RocksDB.DEFAULT_COLUMN_FAMILY, new ColumnFamilyOptions()));
// open the new one, too
columnFamilyDescriptors.add(new ColumnFamilyDescriptor(
"new_cf", new ColumnFamilyOptions()));
List<ColumnFamilyHandle> columnFamilyHandles = new ArrayList<>();
try {
db = RocksDB.open(new DBOptions(), db_path,
columnFamilyDescriptors, columnFamilyHandles);
assert(db != null);
// put and get from the default column family
db.put(columnFamilyHandles.get(0), new WriteOptions(),
"key".getBytes(), "value".getBytes());
String value = new String(db.get(columnFamilyHandles.get(0),
"key".getBytes()));
// atomic write
wb = new WriteBatch();
wb.put(columnFamilyHandles.get(0), "key2".getBytes(), "value2".getBytes());
wb.put(columnFamilyHandles.get(1), "key3".getBytes(), "value3".getBytes());
wb.remove(columnFamilyHandles.get(0), "key".getBytes());
db.write(new WriteOptions(), wb);
// drop column family
db.dropColumnFamily(columnFamilyHandles.get(1));
} finally {
for (ColumnFamilyHandle handle : columnFamilyHandles){
handle.dispose();
}
if (db != null) {
db.close();
}
if (wb != null) {
wb.dispose();
}
}
}
}

@ -0,0 +1,58 @@
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb;
/**
* <p>Describes a column family with a
* name and respective Options.</p>
*/
public class ColumnFamilyDescriptor {
/**
* <p>Creates a new Column Family using a name and default
* options.</p>
*
* @param columnFamilyName name of column family.
*/
public ColumnFamilyDescriptor(final String columnFamilyName){
this(columnFamilyName, new ColumnFamilyOptions());
}
/**
* <p>Creates a new Column Family using a name and custom
* options.</p>
*
* @param columnFamilyName name of column family.
* @param columnFamilyOptions options to be used with
* column family.
*/
public ColumnFamilyDescriptor(final String columnFamilyName,
final ColumnFamilyOptions columnFamilyOptions) {
columnFamilyName_ = columnFamilyName;
columnFamilyOptions_ = columnFamilyOptions;
}
/**
* Retrieve name of column family.
*
* @return column family name.
*/
public String columnFamilyName() {
return columnFamilyName_;
}
/**
* Retrieve assigned options instance.
*
* @return Options instance assigned to this instance.
*/
public ColumnFamilyOptions columnFamilyOptions() {
return columnFamilyOptions_;
}
private final String columnFamilyName_;
private final ColumnFamilyOptions columnFamilyOptions_;
}
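For orientation, a minimal sketch of the two constructors added above; the class name DescriptorSketch and the option values are illustrative, not part of this change:

import org.rocksdb.*;

public class DescriptorSketch {
  public static void main(String[] args) {
    RocksDB.loadLibrary();
    // name only: the descriptor falls back to a default ColumnFamilyOptions
    ColumnFamilyDescriptor plain = new ColumnFamilyDescriptor("new_cf");
    // name plus explicitly tuned options
    ColumnFamilyDescriptor tuned = new ColumnFamilyDescriptor("tuned_cf",
        new ColumnFamilyOptions().setMaxWriteBufferNumber(4));
    assert(plain.columnFamilyName().equals("new_cf"));
    assert(tuned.columnFamilyOptions().maxWriteBufferNumber() == 4);
  }
}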

@ -0,0 +1,647 @@
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb;
/**
* ColumnFamilyOptions to control the behavior of a column family. It will be used
* during the creation of a {@link org.rocksdb.RocksDB} (i.e., RocksDB.open()).
*
* If the {@link #dispose()} function is not called, then it will be GC'd
* automatically and native resources will be released as part of the process.
*/
public class ColumnFamilyOptions extends RocksObject
implements ColumnFamilyOptionsInterface {
static {
RocksDB.loadLibrary();
}
/**
* Construct ColumnFamilyOptions.
*
* This constructor will create (by allocating a block of memory)
* a {@code rocksdb::ColumnFamilyOptions} on the C++ side.
*/
public ColumnFamilyOptions() {
super();
newColumnFamilyOptions();
}
@Override
public ColumnFamilyOptions optimizeForPointLookup(
long blockCacheSizeMb) {
optimizeForPointLookup(nativeHandle_,
blockCacheSizeMb);
return this;
}
@Override
public ColumnFamilyOptions optimizeLevelStyleCompaction() {
optimizeLevelStyleCompaction(nativeHandle_,
DEFAULT_COMPACTION_MEMTABLE_MEMORY_BUDGET);
return this;
}
@Override
public ColumnFamilyOptions optimizeLevelStyleCompaction(
long memtableMemoryBudget) {
optimizeLevelStyleCompaction(nativeHandle_,
memtableMemoryBudget);
return this;
}
@Override
public ColumnFamilyOptions optimizeUniversalStyleCompaction() {
optimizeUniversalStyleCompaction(nativeHandle_,
DEFAULT_COMPACTION_MEMTABLE_MEMORY_BUDGET);
return this;
}
@Override
public ColumnFamilyOptions optimizeUniversalStyleCompaction(
long memtableMemoryBudget) {
optimizeUniversalStyleCompaction(nativeHandle_,
memtableMemoryBudget);
return this;
}
@Override
public ColumnFamilyOptions setComparator(BuiltinComparator builtinComparator) {
assert(isInitialized());
setComparatorHandle(nativeHandle_, builtinComparator.ordinal());
return this;
}
@Override
public ColumnFamilyOptions setComparator(AbstractComparator comparator) {
assert (isInitialized());
setComparatorHandle(nativeHandle_, comparator.nativeHandle_);
comparator_ = comparator;
return this;
}
@Override
public ColumnFamilyOptions setMergeOperatorName(String name) {
setMergeOperatorName(nativeHandle_, name);
return this;
}
@Override
public ColumnFamilyOptions setMergeOperator(MergeOperator mergeOperator) {
setMergeOperator(nativeHandle_, mergeOperator.newMergeOperatorHandle());
return this;
}
@Override
public ColumnFamilyOptions setWriteBufferSize(long writeBufferSize)
throws RocksDBException {
assert(isInitialized());
setWriteBufferSize(nativeHandle_, writeBufferSize);
return this;
}
@Override
public long writeBufferSize() {
assert(isInitialized());
return writeBufferSize(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxWriteBufferNumber(
int maxWriteBufferNumber) {
assert(isInitialized());
setMaxWriteBufferNumber(nativeHandle_, maxWriteBufferNumber);
return this;
}
@Override
public int maxWriteBufferNumber() {
assert(isInitialized());
return maxWriteBufferNumber(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMinWriteBufferNumberToMerge(
int minWriteBufferNumberToMerge) {
setMinWriteBufferNumberToMerge(nativeHandle_, minWriteBufferNumberToMerge);
return this;
}
@Override
public int minWriteBufferNumberToMerge() {
return minWriteBufferNumberToMerge(nativeHandle_);
}
@Override
public ColumnFamilyOptions useFixedLengthPrefixExtractor(int n) {
assert(isInitialized());
useFixedLengthPrefixExtractor(nativeHandle_, n);
return this;
}
@Override
public ColumnFamilyOptions setCompressionType(CompressionType compressionType) {
setCompressionType(nativeHandle_, compressionType.getValue());
return this;
}
@Override
public CompressionType compressionType() {
return CompressionType.values()[compressionType(nativeHandle_)];
}
@Override
public ColumnFamilyOptions setNumLevels(int numLevels) {
setNumLevels(nativeHandle_, numLevels);
return this;
}
@Override
public int numLevels() {
return numLevels(nativeHandle_);
}
@Override
public ColumnFamilyOptions setLevelZeroFileNumCompactionTrigger(
int numFiles) {
setLevelZeroFileNumCompactionTrigger(
nativeHandle_, numFiles);
return this;
}
@Override
public int levelZeroFileNumCompactionTrigger() {
return levelZeroFileNumCompactionTrigger(nativeHandle_);
}
@Override
public ColumnFamilyOptions setLevelZeroSlowdownWritesTrigger(
int numFiles) {
setLevelZeroSlowdownWritesTrigger(nativeHandle_, numFiles);
return this;
}
@Override
public int levelZeroSlowdownWritesTrigger() {
return levelZeroSlowdownWritesTrigger(nativeHandle_);
}
@Override
public ColumnFamilyOptions setLevelZeroStopWritesTrigger(int numFiles) {
setLevelZeroStopWritesTrigger(nativeHandle_, numFiles);
return this;
}
@Override
public int levelZeroStopWritesTrigger() {
return levelZeroStopWritesTrigger(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxMemCompactionLevel(
int maxMemCompactionLevel) {
setMaxMemCompactionLevel(nativeHandle_, maxMemCompactionLevel);
return this;
}
@Override
public int maxMemCompactionLevel() {
return maxMemCompactionLevel(nativeHandle_);
}
@Override
public ColumnFamilyOptions setTargetFileSizeBase(long targetFileSizeBase) {
setTargetFileSizeBase(nativeHandle_, targetFileSizeBase);
return this;
}
@Override
public long targetFileSizeBase() {
return targetFileSizeBase(nativeHandle_);
}
@Override
public ColumnFamilyOptions setTargetFileSizeMultiplier(int multiplier) {
setTargetFileSizeMultiplier(nativeHandle_, multiplier);
return this;
}
@Override
public int targetFileSizeMultiplier() {
return targetFileSizeMultiplier(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxBytesForLevelBase(
long maxBytesForLevelBase) {
setMaxBytesForLevelBase(nativeHandle_, maxBytesForLevelBase);
return this;
}
@Override
public long maxBytesForLevelBase() {
return maxBytesForLevelBase(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxBytesForLevelMultiplier(int multiplier) {
setMaxBytesForLevelMultiplier(nativeHandle_, multiplier);
return this;
}
@Override
public int maxBytesForLevelMultiplier() {
return maxBytesForLevelMultiplier(nativeHandle_);
}
@Override
public ColumnFamilyOptions setExpandedCompactionFactor(int expandedCompactionFactor) {
setExpandedCompactionFactor(nativeHandle_, expandedCompactionFactor);
return this;
}
@Override
public int expandedCompactionFactor() {
return expandedCompactionFactor(nativeHandle_);
}
@Override
public ColumnFamilyOptions setSourceCompactionFactor(int sourceCompactionFactor) {
setSourceCompactionFactor(nativeHandle_, sourceCompactionFactor);
return this;
}
@Override
public int sourceCompactionFactor() {
return sourceCompactionFactor(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxGrandparentOverlapFactor(
int maxGrandparentOverlapFactor) {
setMaxGrandparentOverlapFactor(nativeHandle_, maxGrandparentOverlapFactor);
return this;
}
@Override
public int maxGrandparentOverlapFactor() {
return maxGrandparentOverlapFactor(nativeHandle_);
}
@Override
public ColumnFamilyOptions setSoftRateLimit(double softRateLimit) {
setSoftRateLimit(nativeHandle_, softRateLimit);
return this;
}
@Override
public double softRateLimit() {
return softRateLimit(nativeHandle_);
}
@Override
public ColumnFamilyOptions setHardRateLimit(double hardRateLimit) {
setHardRateLimit(nativeHandle_, hardRateLimit);
return this;
}
@Override
public double hardRateLimit() {
return hardRateLimit(nativeHandle_);
}
@Override
public ColumnFamilyOptions setRateLimitDelayMaxMilliseconds(
int rateLimitDelayMaxMilliseconds) {
setRateLimitDelayMaxMilliseconds(
nativeHandle_, rateLimitDelayMaxMilliseconds);
return this;
}
@Override
public int rateLimitDelayMaxMilliseconds() {
return rateLimitDelayMaxMilliseconds(nativeHandle_);
}
@Override
public ColumnFamilyOptions setArenaBlockSize(long arenaBlockSize)
throws RocksDBException {
setArenaBlockSize(nativeHandle_, arenaBlockSize);
return this;
}
@Override
public long arenaBlockSize() {
return arenaBlockSize(nativeHandle_);
}
@Override
public ColumnFamilyOptions setDisableAutoCompactions(boolean disableAutoCompactions) {
setDisableAutoCompactions(nativeHandle_, disableAutoCompactions);
return this;
}
@Override
public boolean disableAutoCompactions() {
return disableAutoCompactions(nativeHandle_);
}
@Override
public ColumnFamilyOptions setPurgeRedundantKvsWhileFlush(
boolean purgeRedundantKvsWhileFlush) {
setPurgeRedundantKvsWhileFlush(
nativeHandle_, purgeRedundantKvsWhileFlush);
return this;
}
@Override
public boolean purgeRedundantKvsWhileFlush() {
return purgeRedundantKvsWhileFlush(nativeHandle_);
}
@Override
public ColumnFamilyOptions setCompactionStyle(CompactionStyle compactionStyle) {
setCompactionStyle(nativeHandle_, compactionStyle.getValue());
return this;
}
@Override
public CompactionStyle compactionStyle() {
return CompactionStyle.values()[compactionStyle(nativeHandle_)];
}
@Override
public ColumnFamilyOptions setVerifyChecksumsInCompaction(
boolean verifyChecksumsInCompaction) {
setVerifyChecksumsInCompaction(
nativeHandle_, verifyChecksumsInCompaction);
return this;
}
@Override
public boolean verifyChecksumsInCompaction() {
return verifyChecksumsInCompaction(nativeHandle_);
}
@Override
public ColumnFamilyOptions setFilterDeletes(boolean filterDeletes) {
setFilterDeletes(nativeHandle_, filterDeletes);
return this;
}
@Override
public boolean filterDeletes() {
return filterDeletes(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxSequentialSkipInIterations(long maxSequentialSkipInIterations) {
setMaxSequentialSkipInIterations(nativeHandle_, maxSequentialSkipInIterations);
return this;
}
@Override
public long maxSequentialSkipInIterations() {
return maxSequentialSkipInIterations(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMemTableConfig(MemTableConfig config)
throws RocksDBException {
memTableConfig_ = config;
setMemTableFactory(nativeHandle_, config.newMemTableFactoryHandle());
return this;
}
@Override
public String memTableFactoryName() {
assert(isInitialized());
return memTableFactoryName(nativeHandle_);
}
@Override
public ColumnFamilyOptions setTableFormatConfig(TableFormatConfig config) {
tableFormatConfig_ = config;
setTableFactory(nativeHandle_, config.newTableFactoryHandle());
return this;
}
@Override
public String tableFactoryName() {
assert(isInitialized());
return tableFactoryName(nativeHandle_);
}
@Override
public ColumnFamilyOptions setInplaceUpdateSupport(boolean inplaceUpdateSupport) {
setInplaceUpdateSupport(nativeHandle_, inplaceUpdateSupport);
return this;
}
@Override
public boolean inplaceUpdateSupport() {
return inplaceUpdateSupport(nativeHandle_);
}
@Override
public ColumnFamilyOptions setInplaceUpdateNumLocks(long inplaceUpdateNumLocks)
throws RocksDBException {
setInplaceUpdateNumLocks(nativeHandle_, inplaceUpdateNumLocks);
return this;
}
@Override
public long inplaceUpdateNumLocks() {
return inplaceUpdateNumLocks(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMemtablePrefixBloomBits(int memtablePrefixBloomBits) {
setMemtablePrefixBloomBits(nativeHandle_, memtablePrefixBloomBits);
return this;
}
@Override
public int memtablePrefixBloomBits() {
return memtablePrefixBloomBits(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMemtablePrefixBloomProbes(int memtablePrefixBloomProbes) {
setMemtablePrefixBloomProbes(nativeHandle_, memtablePrefixBloomProbes);
return this;
}
@Override
public int memtablePrefixBloomProbes() {
return memtablePrefixBloomProbes(nativeHandle_);
}
@Override
public ColumnFamilyOptions setBloomLocality(int bloomLocality) {
setBloomLocality(nativeHandle_, bloomLocality);
return this;
}
@Override
public int bloomLocality() {
return bloomLocality(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMaxSuccessiveMerges(long maxSuccessiveMerges)
throws RocksDBException {
setMaxSuccessiveMerges(nativeHandle_, maxSuccessiveMerges);
return this;
}
@Override
public long maxSuccessiveMerges() {
return maxSuccessiveMerges(nativeHandle_);
}
@Override
public ColumnFamilyOptions setMinPartialMergeOperands(int minPartialMergeOperands) {
setMinPartialMergeOperands(nativeHandle_, minPartialMergeOperands);
return this;
}
@Override
public int minPartialMergeOperands() {
return minPartialMergeOperands(nativeHandle_);
}
/**
* Release the memory allocated for the current instance
* on the C++ side.
*/
@Override protected void disposeInternal() {
assert(isInitialized());
disposeInternal(nativeHandle_);
}
private native void newColumnFamilyOptions();
private native void disposeInternal(long handle);
private native void optimizeForPointLookup(long handle,
long blockCacheSizeMb);
private native void optimizeLevelStyleCompaction(long handle,
long memtableMemoryBudget);
private native void optimizeUniversalStyleCompaction(long handle,
long memtableMemoryBudget);
private native void setComparatorHandle(long handle, int builtinComparator);
private native void setComparatorHandle(long optHandle, long comparatorHandle);
private native void setMergeOperatorName(
long handle, String name);
private native void setMergeOperator(
long handle, long mergeOperatorHandle);
private native void setWriteBufferSize(long handle, long writeBufferSize)
throws RocksDBException;
private native long writeBufferSize(long handle);
private native void setMaxWriteBufferNumber(
long handle, int maxWriteBufferNumber);
private native int maxWriteBufferNumber(long handle);
private native void setMinWriteBufferNumberToMerge(
long handle, int minWriteBufferNumberToMerge);
private native int minWriteBufferNumberToMerge(long handle);
private native void setCompressionType(long handle, byte compressionType);
private native byte compressionType(long handle);
private native void useFixedLengthPrefixExtractor(
long handle, int prefixLength);
private native void setNumLevels(
long handle, int numLevels);
private native int numLevels(long handle);
private native void setLevelZeroFileNumCompactionTrigger(
long handle, int numFiles);
private native int levelZeroFileNumCompactionTrigger(long handle);
private native void setLevelZeroSlowdownWritesTrigger(
long handle, int numFiles);
private native int levelZeroSlowdownWritesTrigger(long handle);
private native void setLevelZeroStopWritesTrigger(
long handle, int numFiles);
private native int levelZeroStopWritesTrigger(long handle);
private native void setMaxMemCompactionLevel(
long handle, int maxMemCompactionLevel);
private native int maxMemCompactionLevel(long handle);
private native void setTargetFileSizeBase(
long handle, long targetFileSizeBase);
private native long targetFileSizeBase(long handle);
private native void setTargetFileSizeMultiplier(
long handle, int multiplier);
private native int targetFileSizeMultiplier(long handle);
private native void setMaxBytesForLevelBase(
long handle, long maxBytesForLevelBase);
private native long maxBytesForLevelBase(long handle);
private native void setMaxBytesForLevelMultiplier(
long handle, int multiplier);
private native int maxBytesForLevelMultiplier(long handle);
private native void setExpandedCompactionFactor(
long handle, int expandedCompactionFactor);
private native int expandedCompactionFactor(long handle);
private native void setSourceCompactionFactor(
long handle, int sourceCompactionFactor);
private native int sourceCompactionFactor(long handle);
private native void setMaxGrandparentOverlapFactor(
long handle, int maxGrandparentOverlapFactor);
private native int maxGrandparentOverlapFactor(long handle);
private native void setSoftRateLimit(
long handle, double softRateLimit);
private native double softRateLimit(long handle);
private native void setHardRateLimit(
long handle, double hardRateLimit);
private native double hardRateLimit(long handle);
private native void setRateLimitDelayMaxMilliseconds(
long handle, int rateLimitDelayMaxMilliseconds);
private native int rateLimitDelayMaxMilliseconds(long handle);
private native void setArenaBlockSize(
long handle, long arenaBlockSize) throws RocksDBException;
private native long arenaBlockSize(long handle);
private native void setDisableAutoCompactions(
long handle, boolean disableAutoCompactions);
private native boolean disableAutoCompactions(long handle);
private native void setCompactionStyle(long handle, byte compactionStyle);
private native byte compactionStyle(long handle);
private native void setPurgeRedundantKvsWhileFlush(
long handle, boolean purgeRedundantKvsWhileFlush);
private native boolean purgeRedundantKvsWhileFlush(long handle);
private native void setVerifyChecksumsInCompaction(
long handle, boolean verifyChecksumsInCompaction);
private native boolean verifyChecksumsInCompaction(long handle);
private native void setFilterDeletes(
long handle, boolean filterDeletes);
private native boolean filterDeletes(long handle);
private native void setMaxSequentialSkipInIterations(
long handle, long maxSequentialSkipInIterations);
private native long maxSequentialSkipInIterations(long handle);
private native void setMemTableFactory(long handle, long factoryHandle);
private native String memTableFactoryName(long handle);
private native void setTableFactory(long handle, long factoryHandle);
private native String tableFactoryName(long handle);
private native void setInplaceUpdateSupport(
long handle, boolean inplaceUpdateSupport);
private native boolean inplaceUpdateSupport(long handle);
private native void setInplaceUpdateNumLocks(
long handle, long inplaceUpdateNumLocks) throws RocksDBException;
private native long inplaceUpdateNumLocks(long handle);
private native void setMemtablePrefixBloomBits(
long handle, int memtablePrefixBloomBits);
private native int memtablePrefixBloomBits(long handle);
private native void setMemtablePrefixBloomProbes(
long handle, int memtablePrefixBloomProbes);
private native int memtablePrefixBloomProbes(long handle);
private native void setBloomLocality(
long handle, int bloomLocality);
private native int bloomLocality(long handle);
private native void setMaxSuccessiveMerges(
long handle, long maxSuccessiveMerges) throws RocksDBException;
private native long maxSuccessiveMerges(long handle);
private native void setMinPartialMergeOperands(
long handle, int minPartialMergeOperands);
private native int minPartialMergeOperands(long handle);
MemTableConfig memTableConfig_;
TableFormatConfig tableFormatConfig_;
AbstractComparator comparator_;
}
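A short sketch, using only setters that appear above, of how the fluent ColumnFamilyOptions API chains; the class name and the chosen values are illustrative assumptions:

import org.rocksdb.*;

public class ColumnFamilyOptionsSketch {
  public static void main(String[] args) throws RocksDBException {
    ColumnFamilyOptions cfOpts = new ColumnFamilyOptions();
    try {
      // every setter returns this, so configuration calls can be chained
      cfOpts.setWriteBufferSize(16 * 1024 * 1024)
          .setMaxWriteBufferNumber(4)
          .setMinWriteBufferNumberToMerge(2)
          .setTargetFileSizeBase(64 * 1024 * 1024);
      assert(cfOpts.maxWriteBufferNumber() == 4);
    } finally {
      // release the native rocksdb::ColumnFamilyOptions explicitly
      cfOpts.dispose();
    }
  }
}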

@ -29,6 +29,20 @@ public class Options extends RocksObject
env_ = RocksEnv.getDefault();
}
/**
* Construct options for opening a RocksDB, reusing database options
* and column family options.
*
* @param dbOptions {@link org.rocksdb.DBOptions} instance
* @param columnFamilyOptions {@link org.rocksdb.ColumnFamilyOptions}
* instance
*/
public Options(DBOptions dbOptions, ColumnFamilyOptions columnFamilyOptions) {
super();
newOptions(dbOptions.nativeHandle_, columnFamilyOptions.nativeHandle_);
env_ = RocksEnv.getDefault();
}
@Override
public Options setCreateIfMissing(boolean flag) {
assert(isInitialized());
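For clarity, a minimal sketch of the new constructor in use, composing a full Options object from separate DBOptions and ColumnFamilyOptions instances (MixedOptionsTest below exercises the same path); the class name and values are placeholders:

import org.rocksdb.*;

public class CombinedOptionsSketch {
  public static void main(String[] args) {
    DBOptions dbOpts = new DBOptions().setCreateIfMissing(true);
    ColumnFamilyOptions cfOpts = new ColumnFamilyOptions().setMaxWriteBufferNumber(4);
    // combine database-wide and per-column-family settings into one Options object
    Options options = new Options(dbOpts, cfOpts);
    // options can now be passed to RocksDB.open(options, path)
    options.dispose();
    cfOpts.dispose();
    dbOpts.dispose();
  }
}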

@ -16,6 +16,7 @@ import org.rocksdb.util.Environment;
* indicates something went wrong on the RocksDB library side and the call failed.
*/
public class RocksDB extends RocksObject {
public static final String DEFAULT_COLUMN_FAMILY = "default";
public static final int NOT_FOUND = -1;
private static final String[] compressionLibs_ = {
"snappy", "z", "bzip2", "lz4", "lz4hc"};
@ -123,21 +124,22 @@ public class RocksDB extends RocksObject {
* </p>
*
* @param path the path to the rocksdb.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
*
* @throws org.rocksdb.RocksDBException
* @see Options#setCreateIfMissing(boolean)
* @see DBOptions#setCreateIfMissing(boolean)
*/
public static RocksDB open(String path, List<String> columnFamilyNames,
public static RocksDB open(String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles) throws RocksDBException {
// This allows using the RocksJNI default DBOptions instead of
// the C++ one.
Options options = new Options();
return open(options, path, columnFamilyNames, columnFamilyHandles);
DBOptions options = new DBOptions();
return open(options, path, columnFamilyDescriptors, columnFamilyHandles);
}
/**
@ -196,24 +198,25 @@ public class RocksDB extends RocksObject {
* <p>
* ColumnFamily handles are disposed when the RocksDB instance is disposed.</p>
*
* @param options {@link org.rocksdb.Options} instance.
* @param options {@link org.rocksdb.DBOptions} instance.
* @param path the path to the rocksdb.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
*
* @throws org.rocksdb.RocksDBException
* @see Options#setCreateIfMissing(boolean)
* @see DBOptions#setCreateIfMissing(boolean)
*/
public static RocksDB open(Options options, String path, List<String> columnFamilyNames,
public static RocksDB open(DBOptions options, String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles)
throws RocksDBException {
RocksDB db = new RocksDB();
List<Long> cfReferences = db.open(options.nativeHandle_, path,
columnFamilyNames, columnFamilyNames.size());
for (int i=0; i<columnFamilyNames.size(); i++) {
columnFamilyDescriptors, columnFamilyDescriptors.size());
for (int i = 0; i < columnFamilyDescriptors.size(); i++) {
columnFamilyHandles.add(new ColumnFamilyHandle(db, cfReferences.get(i)));
}
db.storeOptionsInstance(options);
@ -244,19 +247,21 @@ public class RocksDB extends RocksObject {
* options.
*
* @param path the path to the RocksDB.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
* @throws RocksDBException
*/
public static RocksDB openReadOnly(String path, List<String> columnFamilyNames,
public static RocksDB openReadOnly(String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles) throws RocksDBException {
// This allows using the RocksJNI default DBOptions instead of
// the C++ one.
Options options = new Options();
return openReadOnly(options, path, columnFamilyNames, columnFamilyHandles);
DBOptions options = new DBOptions();
return openReadOnly(options, path, columnFamilyDescriptors,
columnFamilyHandles);
}
/**
@ -297,25 +302,26 @@ public class RocksDB extends RocksObject {
* options instance have been closed. If the user doesn't call options dispose
* explicitly, then this options instance will be GC'd automatically.</p>
*
* @param options {@link Options} instance.
* @param options {@link DBOptions} instance.
* @param path the path to the RocksDB.
* @param columnFamilyNames list of column family names
* @param columnFamilyDescriptors list of column family descriptors
* @param columnFamilyHandles will be filled with ColumnFamilyHandle instances
* on open.
* @return a {@link RocksDB} instance on success, null if the specified
* {@link RocksDB} can not be opened.
* @throws RocksDBException
*/
public static RocksDB openReadOnly(Options options, String path,
List<String> columnFamilyNames, List<ColumnFamilyHandle> columnFamilyHandles)
public static RocksDB openReadOnly(DBOptions options, String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
List<ColumnFamilyHandle> columnFamilyHandles)
throws RocksDBException {
// when non-default Options is used, keeping an Options reference
// in RocksDB can prevent Java from GC'ing it during the lifetime of
// the currently-created RocksDB.
RocksDB db = new RocksDB();
List<Long> cfReferences = db.openROnly(options.nativeHandle_, path,
columnFamilyNames, columnFamilyNames.size());
for (int i=0; i<columnFamilyNames.size(); i++) {
columnFamilyDescriptors, columnFamilyDescriptors.size());
for (int i=0; i<columnFamilyDescriptors.size(); i++) {
columnFamilyHandles.add(new ColumnFamilyHandle(db, cfReferences.get(i)));
}
@ -337,7 +343,7 @@ public class RocksDB extends RocksObject {
return RocksDB.listColumnFamilies(options.nativeHandle_, path);
}
private void storeOptionsInstance(Options options) {
private void storeOptionsInstance(DBOptionsInterface options) {
options_ = options;
}
@ -1059,14 +1065,15 @@ public class RocksDB extends RocksObject {
* allocates a ColumnFamilyHandle within an internal structure.
* The ColumnFamilyHandle is automatically disposed with DB disposal.
*
* @param columnFamilyName Name of column family to be created.
* @param columnFamilyDescriptor column family to be created.
* @return {@link org.rocksdb.ColumnFamilyHandle} instance
* @see RocksDBException
*/
public ColumnFamilyHandle createColumnFamily(String columnFamilyName)
public ColumnFamilyHandle createColumnFamily(
ColumnFamilyDescriptor columnFamilyDescriptor)
throws RocksDBException {
return new ColumnFamilyHandle(this, createColumnFamily(nativeHandle_,
options_.nativeHandle_, columnFamilyName));
columnFamilyDescriptor));
}
/**
@ -1130,15 +1137,17 @@ public class RocksDB extends RocksObject {
protected native void open(
long optionsHandle, String path) throws RocksDBException;
protected native List<Long> open(long optionsHandle, String path,
List<String> columnFamilyNames, int columnFamilyNamesLength)
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
int columnFamilyDescriptorsLength)
throws RocksDBException;
protected native static List<byte[]> listColumnFamilies(
long optionsHandle, String path) throws RocksDBException;
protected native void openROnly(
long optionsHandle, String path) throws RocksDBException;
protected native List<Long> openROnly(
long optionsHandle, String path, List<String> columnFamilyNames,
int columnFamilyNamesLength) throws RocksDBException;
long optionsHandle, String path,
List<ColumnFamilyDescriptor> columnFamilyDescriptors,
int columnFamilyDescriptorsLength) throws RocksDBException;
protected native void put(
long handle, byte[] key, int keyLen,
byte[] value, int valueLen) throws RocksDBException;
@ -1231,13 +1240,13 @@ public class RocksDB extends RocksObject {
protected native void releaseSnapshot(
long nativeHandle, long snapshotHandle);
private native void disposeInternal(long handle);
private native long createColumnFamily(long handle, long opt_handle,
String name) throws RocksDBException;
private native long createColumnFamily(long handle,
ColumnFamilyDescriptor columnFamilyDescriptor) throws RocksDBException;
private native void dropColumnFamily(long handle, long cfHandle) throws RocksDBException;
private native void flush(long handle, long flushOptHandle)
throws RocksDBException;
private native void flush(long handle, long flushOptHandle,
long cfHandle) throws RocksDBException;
protected Options options_;
protected DBOptionsInterface options_;
}
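Tying the RocksDB.java changes together, a hedged sketch of the reworked entry points: open() now accepts a DBOptions plus a list of ColumnFamilyDescriptor, and createColumnFamily() takes a descriptor instead of a bare name. The class name, path, and the extra_cf name are placeholders:

import java.util.ArrayList;
import java.util.List;
import org.rocksdb.*;

public class OpenWithDescriptorsSketch {
  public static void main(String[] args) throws RocksDBException {
    RocksDB.loadLibrary();
    List<ColumnFamilyDescriptor> descriptors = new ArrayList<>();
    // the default column family always has to be opened explicitly
    descriptors.add(new ColumnFamilyDescriptor(RocksDB.DEFAULT_COLUMN_FAMILY));
    List<ColumnFamilyHandle> handles = new ArrayList<>();
    RocksDB db = RocksDB.open(new DBOptions().setCreateIfMissing(true),
        "/tmp/open_with_descriptors", descriptors, handles);
    // create a further column family after open, now via a descriptor
    ColumnFamilyHandle extra = db.createColumnFamily(
        new ColumnFamilyDescriptor("extra_cf", new ColumnFamilyOptions()));
    extra.dispose();
    for (ColumnFamilyHandle handle : handles) {
      handle.dispose();
    }
    db.close();
  }
}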

@ -0,0 +1,229 @@
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb.test;
import org.rocksdb.*;
import java.util.Random;
public class ColumnFamilyOptionsTest {
static {
RocksDB.loadLibrary();
}
public static void testCFOptions(ColumnFamilyOptionsInterface opt) {
Random rand = PlatformRandomHelper.
getPlatformSpecificRandomFactory();
{ // WriteBufferSize test
try {
long longValue = rand.nextLong();
opt.setWriteBufferSize(longValue);
assert(opt.writeBufferSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MaxWriteBufferNumber test
int intValue = rand.nextInt();
opt.setMaxWriteBufferNumber(intValue);
assert(opt.maxWriteBufferNumber() == intValue);
}
{ // MinWriteBufferNumberToMerge test
int intValue = rand.nextInt();
opt.setMinWriteBufferNumberToMerge(intValue);
assert(opt.minWriteBufferNumberToMerge() == intValue);
}
{ // NumLevels test
int intValue = rand.nextInt();
opt.setNumLevels(intValue);
assert(opt.numLevels() == intValue);
}
{ // LevelFileNumCompactionTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroFileNumCompactionTrigger(intValue);
assert(opt.levelZeroFileNumCompactionTrigger() == intValue);
}
{ // LevelSlowdownWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroSlowdownWritesTrigger(intValue);
assert(opt.levelZeroSlowdownWritesTrigger() == intValue);
}
{ // LevelStopWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroStopWritesTrigger(intValue);
assert(opt.levelZeroStopWritesTrigger() == intValue);
}
{ // MaxMemCompactionLevel test
int intValue = rand.nextInt();
opt.setMaxMemCompactionLevel(intValue);
assert(opt.maxMemCompactionLevel() == intValue);
}
{ // TargetFileSizeBase test
long longValue = rand.nextLong();
opt.setTargetFileSizeBase(longValue);
assert(opt.targetFileSizeBase() == longValue);
}
{ // TargetFileSizeMultiplier test
int intValue = rand.nextInt();
opt.setTargetFileSizeMultiplier(intValue);
assert(opt.targetFileSizeMultiplier() == intValue);
}
{ // MaxBytesForLevelBase test
long longValue = rand.nextLong();
opt.setMaxBytesForLevelBase(longValue);
assert(opt.maxBytesForLevelBase() == longValue);
}
{ // MaxBytesForLevelMultiplier test
int intValue = rand.nextInt();
opt.setMaxBytesForLevelMultiplier(intValue);
assert(opt.maxBytesForLevelMultiplier() == intValue);
}
{ // ExpandedCompactionFactor test
int intValue = rand.nextInt();
opt.setExpandedCompactionFactor(intValue);
assert(opt.expandedCompactionFactor() == intValue);
}
{ // SourceCompactionFactor test
int intValue = rand.nextInt();
opt.setSourceCompactionFactor(intValue);
assert(opt.sourceCompactionFactor() == intValue);
}
{ // MaxGrandparentOverlapFactor test
int intValue = rand.nextInt();
opt.setMaxGrandparentOverlapFactor(intValue);
assert(opt.maxGrandparentOverlapFactor() == intValue);
}
{ // SoftRateLimit test
double doubleValue = rand.nextDouble();
opt.setSoftRateLimit(doubleValue);
assert(opt.softRateLimit() == doubleValue);
}
{ // HardRateLimit test
double doubleValue = rand.nextDouble();
opt.setHardRateLimit(doubleValue);
assert(opt.hardRateLimit() == doubleValue);
}
{ // RateLimitDelayMaxMilliseconds test
int intValue = rand.nextInt();
opt.setRateLimitDelayMaxMilliseconds(intValue);
assert(opt.rateLimitDelayMaxMilliseconds() == intValue);
}
{ // ArenaBlockSize test
try {
long longValue = rand.nextLong();
opt.setArenaBlockSize(longValue);
assert(opt.arenaBlockSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // DisableAutoCompactions test
boolean boolValue = rand.nextBoolean();
opt.setDisableAutoCompactions(boolValue);
assert(opt.disableAutoCompactions() == boolValue);
}
{ // PurgeRedundantKvsWhileFlush test
boolean boolValue = rand.nextBoolean();
opt.setPurgeRedundantKvsWhileFlush(boolValue);
assert(opt.purgeRedundantKvsWhileFlush() == boolValue);
}
{ // VerifyChecksumsInCompaction test
boolean boolValue = rand.nextBoolean();
opt.setVerifyChecksumsInCompaction(boolValue);
assert(opt.verifyChecksumsInCompaction() == boolValue);
}
{ // FilterDeletes test
boolean boolValue = rand.nextBoolean();
opt.setFilterDeletes(boolValue);
assert(opt.filterDeletes() == boolValue);
}
{ // MaxSequentialSkipInIterations test
long longValue = rand.nextLong();
opt.setMaxSequentialSkipInIterations(longValue);
assert(opt.maxSequentialSkipInIterations() == longValue);
}
{ // InplaceUpdateSupport test
boolean boolValue = rand.nextBoolean();
opt.setInplaceUpdateSupport(boolValue);
assert(opt.inplaceUpdateSupport() == boolValue);
}
{ // InplaceUpdateNumLocks test
try {
long longValue = rand.nextLong();
opt.setInplaceUpdateNumLocks(longValue);
assert(opt.inplaceUpdateNumLocks() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MemtablePrefixBloomBits test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomBits(intValue);
assert(opt.memtablePrefixBloomBits() == intValue);
}
{ // MemtablePrefixBloomProbes test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomProbes(intValue);
assert(opt.memtablePrefixBloomProbes() == intValue);
}
{ // BloomLocality test
int intValue = rand.nextInt();
opt.setBloomLocality(intValue);
assert(opt.bloomLocality() == intValue);
}
{ // MaxSuccessiveMerges test
try {
long longValue = rand.nextLong();
opt.setMaxSuccessiveMerges(longValue);
assert(opt.maxSuccessiveMerges() == longValue);
} catch (RocksDBException e){
assert(false);
}
}
{ // MinPartialMergeOperands test
int intValue = rand.nextInt();
opt.setMinPartialMergeOperands(intValue);
assert(opt.minPartialMergeOperands() == intValue);
}
}
public static void main(String[] args) {
ColumnFamilyOptions opt = new ColumnFamilyOptions();
testCFOptions(opt);
opt.dispose();
System.out.println("Passed DBOptionsTest");
}
}

@ -22,6 +22,10 @@ public class ColumnFamilyTest {
RocksDB db = null;
Options options = new Options();
options.setCreateIfMissing(true);
DBOptions dbOptions = new DBOptions();
dbOptions.setCreateIfMissing(true);
try {
db = RocksDB.open(options, db_path);
} catch (RocksDBException e) {
@ -43,7 +47,8 @@ public class ColumnFamilyTest {
// Test createColumnFamily
try {
db.createColumnFamily("new_cf");
db.createColumnFamily(new ColumnFamilyDescriptor("new_cf",
new ColumnFamilyOptions()));
} catch (RocksDBException e) {
assert(false);
}
@ -67,14 +72,15 @@ public class ColumnFamilyTest {
}
// Test open database with column family names
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfNames =
new ArrayList<>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
new ArrayList<>();
cfNames.add(new ColumnFamilyDescriptor("default"));
cfNames.add(new ColumnFamilyDescriptor("new_cf"));
try {
db = RocksDB.open(options, db_path, cfNames, columnFamilyHandleList);
db = RocksDB.open(dbOptions, db_path, cfNames, columnFamilyHandleList);
assert(columnFamilyHandleList.size() == 2);
db.put("dfkey1".getBytes(), "dfvalue".getBytes());
db.put(columnFamilyHandleList.get(0), "dfkey2".getBytes(),
@ -100,7 +106,8 @@ public class ColumnFamilyTest {
// Test create write to and drop ColumnFamily
ColumnFamilyHandle tmpColumnFamilyHandle = null;
try {
tmpColumnFamilyHandle = db.createColumnFamily("tmpCF");
tmpColumnFamilyHandle = db.createColumnFamily(
new ColumnFamilyDescriptor("tmpCF", new ColumnFamilyOptions()));
db.put(tmpColumnFamilyHandle, "key".getBytes(), "value".getBytes());
db.dropColumnFamily(tmpColumnFamilyHandle);
tmpColumnFamilyHandle.dispose();
@ -215,8 +222,6 @@ public class ColumnFamilyTest {
.equals("value"));
} catch (RocksDBException e) {
assert(false);
} catch (IllegalArgumentException e) {
assert(false);
}
// Test multiget without correct number of column

@ -17,16 +17,17 @@ public class KeyMayExistTest {
public static void main(String[] args){
RocksDB db;
Options options = new Options();
DBOptions options = new DBOptions();
options.setCreateIfMissing(true)
.setCreateMissingColumnFamilies(true);
try {
// open database using cf names
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfNames =
new ArrayList<ColumnFamilyDescriptor>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
cfNames.add(new ColumnFamilyDescriptor("default"));
cfNames.add(new ColumnFamilyDescriptor("new_cf"));
db = RocksDB.open(options, DB_PATH, cfNames, columnFamilyHandleList);
assert(columnFamilyHandleList.size()==2);

@ -7,7 +7,6 @@ package org.rocksdb.test;
import java.util.List;
import java.util.ArrayList;
import java.util.Collections;
import org.rocksdb.*;
public class MergeTest {
@ -41,18 +40,22 @@ public class MergeTest {
public static void testCFStringOption()
throws InterruptedException, RocksDBException {
Options opt = new Options();
DBOptions opt = new DBOptions();
opt.setCreateIfMissing(true);
opt.setCreateMissingColumnFamilies(true);
opt.setMergeOperatorName("stringappend");
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfDescr =
new ArrayList<ColumnFamilyDescriptor>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
cfDescr.add(new ColumnFamilyDescriptor("default",
new ColumnFamilyOptions().setMergeOperatorName(
"stringappend")));
cfDescr.add(new ColumnFamilyDescriptor("default",
new ColumnFamilyOptions().setMergeOperatorName(
"stringappend")));
RocksDB db = RocksDB.open(opt, db_cf_path_string,
cfNames, columnFamilyHandleList);
cfDescr, columnFamilyHandleList);
// writing aa under key
db.put(columnFamilyHandleList.get(1),
@ -97,19 +100,23 @@ public class MergeTest {
public static void testCFOperatorOption()
throws InterruptedException, RocksDBException {
Options opt = new Options();
DBOptions opt = new DBOptions();
opt.setCreateIfMissing(true);
opt.setCreateMissingColumnFamilies(true);
StringAppendOperator stringAppendOperator = new StringAppendOperator();
opt.setMergeOperator(stringAppendOperator);
List<String> cfNames = new ArrayList<String>();
List<ColumnFamilyDescriptor> cfDescr =
new ArrayList<ColumnFamilyDescriptor>();
List<ColumnFamilyHandle> columnFamilyHandleList =
new ArrayList<ColumnFamilyHandle>();
cfNames.add("default");
cfNames.add("new_cf");
cfDescr.add(new ColumnFamilyDescriptor("default",
new ColumnFamilyOptions().setMergeOperator(
stringAppendOperator)));
cfDescr.add(new ColumnFamilyDescriptor("new_cf",
new ColumnFamilyOptions().setMergeOperator(
stringAppendOperator)));
RocksDB db = RocksDB.open(opt, db_path_operator,
cfNames, columnFamilyHandleList);
cfDescr, columnFamilyHandleList);
// writing aa under key
db.put(columnFamilyHandleList.get(1),
@ -121,7 +128,10 @@ public class MergeTest {
String strValue = new String(value);
// Test also with createColumnFamily
ColumnFamilyHandle columnFamilyHandle = db.createColumnFamily("new_cf2");
ColumnFamilyHandle columnFamilyHandle = db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf2",
new ColumnFamilyOptions().setMergeOperator(
new StringAppendOperator())));
// writing xx under cfkey2
db.put(columnFamilyHandle, "cfkey2".getBytes(), "xx".getBytes());
// merge yy under cfkey2

@ -0,0 +1,51 @@
// Copyright (c) 2014, Facebook, Inc. All rights reserved.
// This source code is licensed under the BSD-style license found in the
// LICENSE file in the root directory of this source tree. An additional grant
// of patent rights can be found in the PATENTS file in the same directory.
package org.rocksdb.test;
import org.rocksdb.*;
public class MixedOptionsTest {
static {
RocksDB.loadLibrary();
}
public static void main(String[] args) {
// Set a table factory and check the names
ColumnFamilyOptions cfOptions = new ColumnFamilyOptions();
cfOptions.setTableFormatConfig(new BlockBasedTableConfig().
setFilter(new BloomFilter()));
assert(cfOptions.tableFactoryName().equals(
"BlockBasedTable"));
cfOptions.setTableFormatConfig(new PlainTableConfig());
assert(cfOptions.tableFactoryName().equals("PlainTable"));
// Initialize an Options object from cf options and
// db options
DBOptions dbOptions = new DBOptions();
Options options = new Options(dbOptions, cfOptions);
assert(options.tableFactoryName().equals("PlainTable"));
// Free instances
options.dispose();
options = null;
cfOptions.dispose();
cfOptions = null;
dbOptions.dispose();
dbOptions = null;
System.gc();
System.runFinalization();
// Test Optimize for statements
cfOptions = new ColumnFamilyOptions();
cfOptions.optimizeUniversalStyleCompaction();
cfOptions.optimizeLevelStyleCompaction();
cfOptions.optimizeForPointLookup(1024);
options = new Options();
options.optimizeLevelStyleCompaction();
options.optimizeLevelStyleCompaction(400);
options.optimizeUniversalStyleCompaction();
options.optimizeUniversalStyleCompaction(400);
options.optimizeForPointLookup(1024);
options.prepareForBulkLoad();
System.out.println("Mixed options test passed");
}
}

@ -6,10 +6,7 @@
package org.rocksdb.test;
import java.util.Random;
import org.rocksdb.DBOptions;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.rocksdb.Options;
public class OptionsTest {
@ -23,208 +20,7 @@ public class OptionsTest {
getPlatformSpecificRandomFactory();
DBOptionsTest.testDBOptions(opt);
{ // WriteBufferSize test
try {
long longValue = rand.nextLong();
opt.setWriteBufferSize(longValue);
assert(opt.writeBufferSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MaxWriteBufferNumber test
int intValue = rand.nextInt();
opt.setMaxWriteBufferNumber(intValue);
assert(opt.maxWriteBufferNumber() == intValue);
}
{ // MinWriteBufferNumberToMerge test
int intValue = rand.nextInt();
opt.setMinWriteBufferNumberToMerge(intValue);
assert(opt.minWriteBufferNumberToMerge() == intValue);
}
{ // NumLevels test
int intValue = rand.nextInt();
opt.setNumLevels(intValue);
assert(opt.numLevels() == intValue);
}
{ // LevelFileNumCompactionTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroFileNumCompactionTrigger(intValue);
assert(opt.levelZeroFileNumCompactionTrigger() == intValue);
}
{ // LevelSlowdownWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroSlowdownWritesTrigger(intValue);
assert(opt.levelZeroSlowdownWritesTrigger() == intValue);
}
{ // LevelStopWritesTrigger test
int intValue = rand.nextInt();
opt.setLevelZeroStopWritesTrigger(intValue);
assert(opt.levelZeroStopWritesTrigger() == intValue);
}
{ // MaxMemCompactionLevel test
int intValue = rand.nextInt();
opt.setMaxMemCompactionLevel(intValue);
assert(opt.maxMemCompactionLevel() == intValue);
}
{ // TargetFileSizeBase test
long longValue = rand.nextLong();
opt.setTargetFileSizeBase(longValue);
assert(opt.targetFileSizeBase() == longValue);
}
{ // TargetFileSizeMultiplier test
int intValue = rand.nextInt();
opt.setTargetFileSizeMultiplier(intValue);
assert(opt.targetFileSizeMultiplier() == intValue);
}
{ // MaxBytesForLevelBase test
long longValue = rand.nextLong();
opt.setMaxBytesForLevelBase(longValue);
assert(opt.maxBytesForLevelBase() == longValue);
}
{ // MaxBytesForLevelMultiplier test
int intValue = rand.nextInt();
opt.setMaxBytesForLevelMultiplier(intValue);
assert(opt.maxBytesForLevelMultiplier() == intValue);
}
{ // ExpandedCompactionFactor test
int intValue = rand.nextInt();
opt.setExpandedCompactionFactor(intValue);
assert(opt.expandedCompactionFactor() == intValue);
}
{ // SourceCompactionFactor test
int intValue = rand.nextInt();
opt.setSourceCompactionFactor(intValue);
assert(opt.sourceCompactionFactor() == intValue);
}
{ // MaxGrandparentOverlapFactor test
int intValue = rand.nextInt();
opt.setMaxGrandparentOverlapFactor(intValue);
assert(opt.maxGrandparentOverlapFactor() == intValue);
}
{ // SoftRateLimit test
double doubleValue = rand.nextDouble();
opt.setSoftRateLimit(doubleValue);
assert(opt.softRateLimit() == doubleValue);
}
{ // HardRateLimit test
double doubleValue = rand.nextDouble();
opt.setHardRateLimit(doubleValue);
assert(opt.hardRateLimit() == doubleValue);
}
{ // RateLimitDelayMaxMilliseconds test
int intValue = rand.nextInt();
opt.setRateLimitDelayMaxMilliseconds(intValue);
assert(opt.rateLimitDelayMaxMilliseconds() == intValue);
}
{ // ArenaBlockSize test
try {
long longValue = rand.nextLong();
opt.setArenaBlockSize(longValue);
assert(opt.arenaBlockSize() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // DisableAutoCompactions test
boolean boolValue = rand.nextBoolean();
opt.setDisableAutoCompactions(boolValue);
assert(opt.disableAutoCompactions() == boolValue);
}
{ // PurgeRedundantKvsWhileFlush test
boolean boolValue = rand.nextBoolean();
opt.setPurgeRedundantKvsWhileFlush(boolValue);
assert(opt.purgeRedundantKvsWhileFlush() == boolValue);
}
{ // VerifyChecksumsInCompaction test
boolean boolValue = rand.nextBoolean();
opt.setVerifyChecksumsInCompaction(boolValue);
assert(opt.verifyChecksumsInCompaction() == boolValue);
}
{ // FilterDeletes test
boolean boolValue = rand.nextBoolean();
opt.setFilterDeletes(boolValue);
assert(opt.filterDeletes() == boolValue);
}
{ // MaxSequentialSkipInIterations test
long longValue = rand.nextLong();
opt.setMaxSequentialSkipInIterations(longValue);
assert(opt.maxSequentialSkipInIterations() == longValue);
}
{ // InplaceUpdateSupport test
boolean boolValue = rand.nextBoolean();
opt.setInplaceUpdateSupport(boolValue);
assert(opt.inplaceUpdateSupport() == boolValue);
}
{ // InplaceUpdateNumLocks test
try {
long longValue = rand.nextLong();
opt.setInplaceUpdateNumLocks(longValue);
assert(opt.inplaceUpdateNumLocks() == longValue);
} catch (RocksDBException e) {
assert(false);
}
}
{ // MemtablePrefixBloomBits test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomBits(intValue);
assert(opt.memtablePrefixBloomBits() == intValue);
}
{ // MemtablePrefixBloomProbes test
int intValue = rand.nextInt();
opt.setMemtablePrefixBloomProbes(intValue);
assert(opt.memtablePrefixBloomProbes() == intValue);
}
{ // BloomLocality test
int intValue = rand.nextInt();
opt.setBloomLocality(intValue);
assert(opt.bloomLocality() == intValue);
}
{ // MaxSuccessiveMerges test
try {
long longValue = rand.nextLong();
opt.setMaxSuccessiveMerges(longValue);
assert(opt.maxSuccessiveMerges() == longValue);
} catch (RocksDBException e){
assert(false);
}
}
{ // MinPartialMergeOperands test
int intValue = rand.nextInt();
opt.setMinPartialMergeOperands(intValue);
assert(opt.minPartialMergeOperands() == intValue);
}
ColumnFamilyOptionsTest.testCFOptions(opt);
opt.dispose();
System.out.println("Passed OptionsTest");

@ -34,12 +34,15 @@ public class ReadOnlyTest {
db2.close();
List<String> cfNames = new ArrayList<String>();
cfNames.add("default");
List<ColumnFamilyDescriptor> cfNames =
new ArrayList<ColumnFamilyDescriptor>();
cfNames.add(new ColumnFamilyDescriptor("default"));
db = RocksDB.open(DB_PATH, cfNames, columnFamilyHandleList);
columnFamilyHandleList.add(db.createColumnFamily("new_cf"));
columnFamilyHandleList.add(db.createColumnFamily("new_cf2"));
columnFamilyHandleList.add(db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf", new ColumnFamilyOptions())));
columnFamilyHandleList.add(db.createColumnFamily(
new ColumnFamilyDescriptor("new_cf2", new ColumnFamilyOptions())));
db.put(columnFamilyHandleList.get(2), "key2".getBytes(),
"value2".getBytes());
@ -47,9 +50,10 @@ public class ReadOnlyTest {
assert(db2.get("key2".getBytes())==null);
assert(db2.get(columnFamilyHandleList.get(0), "key2".getBytes())==null);
List<String> cfNewName = new ArrayList<String>();
cfNewName.add("default");
cfNewName.add("new_cf2");
List<ColumnFamilyDescriptor> cfNewName =
new ArrayList<ColumnFamilyDescriptor>();
cfNewName.add(new ColumnFamilyDescriptor("default"));
cfNewName.add(new ColumnFamilyDescriptor("new_cf2"));
db3 = RocksDB.openReadOnly(DB_PATH, cfNewName, db3ColumnFamilyHandleList);
assert(new String(db3.get(db3ColumnFamilyHandleList.get(1),
"key2".getBytes())).equals("value2"));

@ -12,9 +12,8 @@
#include <memory>
#include "include/org_rocksdb_Options.h"
//TODO(fyrz) to be commented in with options refactoring pull requests
#include "include/org_rocksdb_DBOptions.h"
//#include "include/org_rocksdb_ColumnFamilyOptions.h"
#include "include/org_rocksdb_ColumnFamilyOptions.h"
#include "include/org_rocksdb_WriteOptions.h"
#include "include/org_rocksdb_ReadOptions.h"
#include "include/org_rocksdb_ComparatorOptions.h"
@ -1740,9 +1739,8 @@ void Java_org_rocksdb_Options_prepareForBulkLoad(
*/
void Java_org_rocksdb_ColumnFamilyOptions_newColumnFamilyOptions(
JNIEnv* env, jobject jobj) {
// TODO(fyrz) needs to be enabled back when ColumnFamilyOptions are available
// rocksdb::ColumnFamilyOptions* op = new rocksdb::ColumnFamilyOptions();
// rocksdb::ColumnFamilyOptionsJni::setHandle(env, jobj, op);
rocksdb::ColumnFamilyOptions* op = new rocksdb::ColumnFamilyOptions();
rocksdb::ColumnFamilyOptionsJni::setHandle(env, jobj, op);
}
/*

@ -159,6 +159,34 @@ class DBOptionsJni {
}
};
class ColumnFamilyDescriptorJni {
public:
// Get the java class id of org.rocksdb.ColumnFamilyDescriptor
static jclass getColumnFamilyDescriptorClass(JNIEnv* env) {
jclass jclazz = env->FindClass("org/rocksdb/ColumnFamilyDescriptor");
assert(jclazz != nullptr);
return jclazz;
}
// Get the java method id of columnFamilyName
static jmethodID getColumnFamilyNameMethod(JNIEnv* env) {
static jmethodID mid = env->GetMethodID(
getColumnFamilyDescriptorClass(env),
"columnFamilyName", "()Ljava/lang/String;");
assert(mid != nullptr);
return mid;
}
// Get the java method id of columnFamilyOptions
static jmethodID getColumnFamilyOptionsMethod(JNIEnv* env) {
static jmethodID mid = env->GetMethodID(
getColumnFamilyDescriptorClass(env),
"columnFamilyOptions", "()Lorg/rocksdb/ColumnFamilyOptions;");
assert(mid != nullptr);
return mid;
}
};
class ColumnFamilyOptionsJni {
public:
// Get the java class id of org.rocksdb.ColumnFamilyOptions.

@ -69,7 +69,7 @@ void Java_org_rocksdb_RocksDB_openROnly__JLjava_lang_String_2(
jobject
Java_org_rocksdb_RocksDB_openROnly__JLjava_lang_String_2Ljava_util_List_2I(
JNIEnv* env, jobject jdb, jlong jopt_handle, jstring jdb_path,
jobject jcfname_list, jint jcfname_count) {
jobject jcfdesc_list, jint jcfdesc_count) {
auto opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
rocksdb::DB* db = nullptr;
const char* db_path = env->GetStringUTFChars(jdb_path, 0);
@ -79,23 +79,34 @@ jobject
std::vector<rocksdb::ColumnFamilyDescriptor> column_families;
std::vector<rocksdb::ColumnFamilyHandle* > handles;
// get iterator for cfnames
// get iterator for ColumnFamilyDescriptors
jobject iteratorObj = env->CallObjectMethod(
jcfname_list, rocksdb::ListJni::getIteratorMethod(env));
jcfdesc_list, rocksdb::ListJni::getIteratorMethod(env));
// iterate over cfnames and convert cfnames to
// ColumnFamilyDescriptor instances
// iterate over ColumnFamilyDescriptors
while (env->CallBooleanMethod(
iteratorObj, rocksdb::ListJni::getHasNextMethod(env)) == JNI_TRUE) {
jstring jstr = (jstring) env->CallObjectMethod(iteratorObj,
// get ColumnFamilyDescriptor
jobject jcf_descriptor = env->CallObjectMethod(iteratorObj,
rocksdb::ListJni::getNextMethod(env));
// get ColumnFamilyName
jstring jstr = (jstring) env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyNameMethod(
env));
// get CF Options
jobject jcf_opt_obj = env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyOptionsMethod(
env));
rocksdb::ColumnFamilyOptions* cfOptions =
rocksdb::ColumnFamilyOptionsJni::getHandle(env, jcf_opt_obj);
const char* cfname = env->GetStringUTFChars(jstr, 0);
// free allocated cfnames after call to open
cfnames_to_free.push_back(cfname);
jcfnames_for_free.push_back(jstr);
column_families.push_back(rocksdb::ColumnFamilyDescriptor(cfname,
*static_cast<rocksdb::ColumnFamilyOptions*>(opt)));
*cfOptions));
}
rocksdb::Status s = rocksdb::DB::OpenForReadOnly(*opt,
@ -141,7 +152,7 @@ jobject
*/
jobject Java_org_rocksdb_RocksDB_open__JLjava_lang_String_2Ljava_util_List_2I(
JNIEnv* env, jobject jdb, jlong jopt_handle, jstring jdb_path,
jobject jcfname_list, jint jcfname_count) {
jobject jcfdesc_list, jint jcfdesc_count) {
auto opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
rocksdb::DB* db = nullptr;
const char* db_path = env->GetStringUTFChars(jdb_path, 0);
@ -151,23 +162,34 @@ jobject Java_org_rocksdb_RocksDB_open__JLjava_lang_String_2Ljava_util_List_2I(
std::vector<rocksdb::ColumnFamilyDescriptor> column_families;
std::vector<rocksdb::ColumnFamilyHandle* > handles;
// get iterator for cfnames
// get iterator for ColumnFamilyDescriptors
jobject iteratorObj = env->CallObjectMethod(
jcfname_list, rocksdb::ListJni::getIteratorMethod(env));
jcfdesc_list, rocksdb::ListJni::getIteratorMethod(env));
// iterate over cfnames and convert cfnames to
// ColumnFamilyDescriptor instances
// iterate over ColumnFamilyDescriptors
while (env->CallBooleanMethod(
iteratorObj, rocksdb::ListJni::getHasNextMethod(env)) == JNI_TRUE) {
jstring jstr = (jstring) env->CallObjectMethod(iteratorObj,
// get ColumnFamilyDescriptor
jobject jcf_descriptor = env->CallObjectMethod(iteratorObj,
rocksdb::ListJni::getNextMethod(env));
// get ColumnFamilyName
jstring jstr = (jstring) env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyNameMethod(
env));
// get CF Options
jobject jcf_opt_obj = env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyOptionsMethod(
env));
rocksdb::ColumnFamilyOptions* cfOptions =
rocksdb::ColumnFamilyOptionsJni::getHandle(env, jcf_opt_obj);
const char* cfname = env->GetStringUTFChars(jstr, 0);
// free allocated cfnames after call to open
cfnames_to_free.push_back(cfname);
jcfnames_for_free.push_back(jstr);
column_families.push_back(rocksdb::ColumnFamilyDescriptor(cfname,
*static_cast<rocksdb::ColumnFamilyOptions*>(opt)));
*cfOptions));
}
rocksdb::Status s = rocksdb::DB::Open(*opt, db_path, column_families,
@ -1151,18 +1173,28 @@ jlongArray Java_org_rocksdb_RocksDB_iterators(
/*
* Class: org_rocksdb_RocksDB
* Method: createColumnFamily
* Signature: (JJLjava/lang/String;)J;
* Signature: (JLorg/rocksdb/ColumnFamilyDescriptor;)J;
*/
jlong Java_org_rocksdb_RocksDB_createColumnFamily(
JNIEnv* env, jobject jdb, jlong jdb_handle, jlong jopt_handle,
jstring jcfname) {
JNIEnv* env, jobject jdb, jlong jdb_handle,
jobject jcf_descriptor) {
rocksdb::ColumnFamilyHandle* handle;
const char* cfname = env->GetStringUTFChars(jcfname, 0);
auto db_handle = reinterpret_cast<rocksdb::DB*>(jdb_handle);
auto opt = reinterpret_cast<rocksdb::Options*>(jopt_handle);
jstring jstr = (jstring) env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyNameMethod(
env));
// get CF Options
jobject jcf_opt_obj = env->CallObjectMethod(jcf_descriptor,
rocksdb::ColumnFamilyDescriptorJni::getColumnFamilyOptionsMethod(
env));
rocksdb::ColumnFamilyOptions* cfOptions =
rocksdb::ColumnFamilyOptionsJni::getHandle(env, jcf_opt_obj);
const char* cfname = env->GetStringUTFChars(jstr, 0);
rocksdb::Status s = db_handle->CreateColumnFamily(
*static_cast<rocksdb::ColumnFamilyOptions*>(opt), cfname, &handle);
env->ReleaseStringUTFChars(jcfname, cfname);
*cfOptions, cfname, &handle);
env->ReleaseStringUTFChars(jstr, cfname);
if (s.ok()) {
return reinterpret_cast<jlong>(handle);
