fangzongwu / rust-rocksdb / Commits

Commit cc1db4a1
Authored by Kohei Watanabe on Sep 27, 2017; committed by Huachao Huang on Sep 27, 2017
Parent: 129f6c7d

Add type 'ColumnFamilyDescriptor' for 'open' related methods (#139)

Showing 18 changed files with 173 additions and 149 deletions
Cargo.toml                             +0   -1
Makefile                               +3   -4
src/main.rs                            +2   -2
src/merge_operator.rs                  +1   -3
src/rocksdb.rs                         +88  -80
src/rocksdb_options.rs                 +40  -1
tests/test.rs                          +2   -2
tests/test_column_family.rs            +5   -6
tests/test_compaction_filter.rs        +2   -4
tests/test_delete_files_in_range.rs    +1   -1
tests/test_delete_range.rs             +13  -13
tests/test_event_listener.rs           +0   -1
tests/test_ingest_external_file.rs     +1   -2
tests/test_iterator.rs                 +2   -4
tests/test_prefix_extractor.rs         +1   -2
tests/test_rocksdb_options.rs          +10  -19
tests/test_slice_transform.rs          +1   -2
tests/test_table_properties.rs         +1   -2
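Every DB::open_cf call site in the diffs below changes the same way: the parallel Vec<&str> and Vec<ColumnFamilyOptions> arguments collapse into one vector whose elements convert into the new ColumnFamilyDescriptor type. A minimal before/after sketch of that call-site change, assuming the crate root re-exports these types the way the tests' `use rocksdb::*` does; the path literal is only illustrative:

extern crate rocksdb;

use rocksdb::*;

fn main() {
    let mut opts = DBOptions::new();
    opts.create_if_missing(true);
    let cf_opts = ColumnFamilyOptions::new();

    // Before this commit: column family names and options travelled in two
    // parallel vectors that had to have matching lengths.
    //     DB::open_cf(opts, "/tmp/cfd_example", vec!["default"], vec![cf_opts]).unwrap();

    // After this commit: one vector of (name, options) pairs; each pair is
    // converted into a ColumnFamilyDescriptor via Into.
    let db = DB::open_cf(opts, "/tmp/cfd_example", vec![("default", cf_opts)]).unwrap();
    db.put(b"k1", b"v1").unwrap();
}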
Cargo.toml

@@ -22,7 +22,6 @@ portable = ["librocksdb_sys/portable"]
 sse = ["librocksdb_sys/sse"]

 [[test]]
 name = "test"
 path = "tests/test.rs"
Makefile

@@ -11,8 +11,8 @@ endef
 # $(call format-code-in,file-or-dir)
 define format-code-in
 	$(if $(filter %.rs, $1), \
 	$(call do-format-with-cmd, rustfmt, $1), \
 	cd $1 && $(call do-format-with-cmd, cargo fmt --))
 endef

 all: format build test
@@ -32,4 +32,4 @@ format:
 clean:
 	@cargo clean
 	@cd librocksdb_sys && cargo clean
\ No newline at end of file
src/main.rs

@@ -88,7 +88,7 @@ fn custom_merge() {
     let mut cf_opts = ColumnFamilyOptions::new();
     cf_opts.add_merge_operator("test operator", concat_merge);
     {
-        let db = DB::open_cf(opts, path, vec!["default"], vec![cf_opts]).unwrap();
+        let db = DB::open_cf(opts, path, vec![("default", cf_opts)]).unwrap();
         db.put(b"k1", b"a").unwrap();
         db.merge(b"k1", b"b").unwrap();
         db.merge(b"k1", b"c").unwrap();
@@ -160,7 +160,7 @@ mod tests {
         // let filter = new_bloom_filter(10);
         // opts.set_filter(filter);
-        DB::open_cf(opts, path, vec!["default"], vec![cf_opts]).unwrap()
+        DB::open_cf(opts, path, vec![("default", cf_opts)]).unwrap()
     }

     // TODO(tyler) unstable
src/merge_operator.rs

@@ -30,7 +30,6 @@ pub struct MergeOperatorCallback {
 pub extern "C" fn destructor_callback(raw_cb: *mut c_void) {
     // turn this back into a local variable so rust will reclaim it
     let _: Box<MergeOperatorCallback> = unsafe { mem::transmute(raw_cb) };
 }

 pub extern "C" fn name_callback(raw_cb: *mut c_void) -> *const c_char {
@@ -190,8 +189,7 @@ mod test {
         let db = DB::open_cf(
             opts,
             path.path().to_str().unwrap(),
-            vec!["default"],
-            vec![cf_opts],
+            vec![("default", cf_opts)],
         ).unwrap();
         let p = db.put(b"k1", b"a");
         assert!(p.is_ok());
src/rocksdb.rs
View file @
cc1db4a1
...
@@ -17,9 +17,9 @@ use crocksdb_ffi::{self, DBBackupEngine, DBCFHandle, DBCompressionType, DBInstan
...
@@ -17,9 +17,9 @@ use crocksdb_ffi::{self, DBBackupEngine, DBCFHandle, DBCompressionType, DBInstan
DBPinnableSlice
,
DBStatisticsHistogramType
,
DBStatisticsTickerType
,
DBPinnableSlice
,
DBStatisticsHistogramType
,
DBStatisticsTickerType
,
DBWriteBatch
};
DBWriteBatch
};
use
libc
::{
self
,
c_int
,
c_void
,
size_t
};
use
libc
::{
self
,
c_int
,
c_void
,
size_t
};
use
rocksdb_options
::{
ColumnFamily
Options
,
CompactOptions
,
DBOptions
,
EnvOptions
,
Flush
Options
,
use
rocksdb_options
::{
ColumnFamily
Descriptor
,
ColumnFamilyOptions
,
CompactOptions
,
DB
Options
,
HistogramData
,
IngestExternalFileOptions
,
ReadOptions
,
Restor
eOptions
,
EnvOptions
,
FlushOptions
,
HistogramData
,
IngestExternalFil
eOptions
,
UnsafeSnap
,
WriteOptions
};
ReadOptions
,
RestoreOptions
,
UnsafeSnap
,
WriteOptions
};
use
std
::{
fs
,
ptr
,
slice
};
use
std
::{
fs
,
ptr
,
slice
};
use
std
::
collections
::
BTreeMap
;
use
std
::
collections
::
BTreeMap
;
use
std
::
collections
::
btree_map
::
Entry
;
use
std
::
collections
::
btree_map
::
Entry
;
...
@@ -30,8 +30,6 @@ use std::path::Path;
...
@@ -30,8 +30,6 @@ use std::path::Path;
use
std
::
str
::
from_utf8
;
use
std
::
str
::
from_utf8
;
use
table_properties
::
TablePropertiesCollection
;
use
table_properties
::
TablePropertiesCollection
;
const
DEFAULT_COLUMN_FAMILY
:
&
'static
str
=
"default"
;
pub
struct
CFHandle
{
pub
struct
CFHandle
{
inner
:
*
mut
DBCFHandle
,
inner
:
*
mut
DBCFHandle
,
}
}
...
@@ -50,6 +48,25 @@ impl Drop for CFHandle {
...
@@ -50,6 +48,25 @@ impl Drop for CFHandle {
}
}
}
}
fn
ensure_default_cf_exists
<
'a
>
(
list
:
&
mut
Vec
<
ColumnFamilyDescriptor
<
'a
>>
)
{
let
contains
=
list
.iter
()
.any
(|
ref
cf
|
cf
.is_default
());
if
!
contains
{
list
.push
(
ColumnFamilyDescriptor
::
default
());
}
}
fn
split_descriptors
<
'a
>
(
list
:
Vec
<
ColumnFamilyDescriptor
<
'a
>>
,
)
->
(
Vec
<&
'a
str
>
,
Vec
<
ColumnFamilyOptions
>
)
{
let
mut
v1
=
Vec
::
with_capacity
(
list
.len
());
let
mut
v2
=
Vec
::
with_capacity
(
list
.len
());
for
d
in
list
{
v1
.push
(
d
.name
);
v2
.push
(
d
.options
);
}
(
v1
,
v2
)
}
fn
build_cstring_list
(
str_list
:
&
[
&
str
])
->
Vec
<
CString
>
{
fn
build_cstring_list
(
str_list
:
&
[
&
str
])
->
Vec
<
CString
>
{
str_list
str_list
.into_iter
()
.into_iter
()
...
@@ -327,96 +344,89 @@ impl DB {
...
@@ -327,96 +344,89 @@ impl DB {
}
}
pub
fn
open
(
opts
:
DBOptions
,
path
:
&
str
)
->
Result
<
DB
,
String
>
{
pub
fn
open
(
opts
:
DBOptions
,
path
:
&
str
)
->
Result
<
DB
,
String
>
{
DB
::
open_cf
(
opts
,
path
,
vec!
[],
vec!
[])
let
cfds
:
Vec
<&
str
>
=
vec!
[];
DB
::
open_cf
(
opts
,
path
,
cfds
)
}
}
pub
fn
open_cf
(
pub
fn
open_cf
<
'a
,
T
>
(
opts
:
DBOptions
,
path
:
&
str
,
cfds
:
Vec
<
T
>
)
->
Result
<
DB
,
String
>
opts
:
DBOptions
,
where
path
:
&
str
,
T
:
Into
<
ColumnFamilyDescriptor
<
'a
>>
,
cfs
:
Vec
<&
str
>
,
{
cf_opts
:
Vec
<
ColumnFamilyOptions
>
,
DB
::
open_cf_internal
(
opts
,
path
,
cfds
)
)
->
Result
<
DB
,
String
>
{
}
let
cpath
=
match
CString
::
new
(
path
.as_bytes
())
{
Ok
(
c
)
=>
c
,
fn
open_cf_internal
<
'a
,
T
>
(
opts
:
DBOptions
,
path
:
&
str
,
cfds
:
Vec
<
T
>
)
->
Result
<
DB
,
String
>
Err
(
_
)
=>
{
where
return
Err
(
T
:
Into
<
ColumnFamilyDescriptor
<
'a
>>
,
"Failed to convert path to CString when opening rocksdb"
.to_owned
(),
{
)
const
ERR_CONVERT_PATH
:
&
str
=
"Failed to convert path to CString when opening rocksdb"
;
}
const
ERR_NULL_DB_ONINIT
:
&
str
=
"Could not initialize database"
;
};
const
ERR_NULL_CF_HANDLE
:
&
str
=
"Received null column family handle from DB"
;
if
let
Err
(
e
)
=
fs
::
create_dir_all
(
&
Path
::
new
(
path
))
{
return
Err
(
format!
(
let
cpath
=
try!
(
CString
::
new
(
path
.as_bytes
())
.map_err
(|
_
|
ERR_CONVERT_PATH
.to_owned
()));
try!
(
fs
::
create_dir_all
(
&
Path
::
new
(
path
))
.map_err
(|
e
|
{
format!
(
"Failed to create rocksdb directory:
\
"Failed to create rocksdb directory:
\
src/rocksdb.rs:
\
src/rocksdb.rs:
\
{:?}"
,
{:?}"
,
e
e
));
)
}
}));
if
cfs
.len
()
!=
cf_opts
.len
()
{
return
Err
(
format!
(
"cfs.len() and cf_opts.len() not match."
));
let
mut
descs
=
cfds
.into_iter
()
.map
(|
t
|
t
.into
())
.collect
();
}
ensure_default_cf_exists
(
&
mut
descs
);
let
mut
cfs_v
=
cfs
;
let
mut
cf_opts_v
=
cf_opts
;
let
(
names
,
options
)
=
split_descriptors
(
descs
);
let
cstrings
=
build_cstring_list
(
&
names
);
let
(
db
,
cf_map
)
=
{
// Always open the default column family
let
cf_names
:
Vec
<*
const
_
>
=
cstrings
.iter
()
.map
(|
cs
|
cs
.as_ptr
())
.collect
();
if
!
cfs_v
.contains
(
&
DEFAULT_COLUMN_FAMILY
)
{
let
cf_handles
:
Vec
<
_
>
=
vec!
[
ptr
::
null_mut
();
cf_names
.len
()];
cfs_v
.push
(
DEFAULT_COLUMN_FAMILY
);
let
cf_options
:
Vec
<
_
>
=
options
cf_opts_v
.push
(
ColumnFamilyOptions
::
new
());
.iter
()
}
.map
(|
x
|
x
.inner
as
*
const
crocksdb_ffi
::
Options
)
.collect
();
// We need to store our CStrings in an intermediate vector
// so that their pointers remain valid.
let
db
=
{
let
c_cfs
=
build_cstring_list
(
&
cfs_v
);
let
db_options
=
opts
.inner
;
let
db_path
=
cpath
.as_ptr
();
let
cfnames
:
Vec
<*
const
_
>
=
c_cfs
.iter
()
.map
(|
cf
|
cf
.as_ptr
())
.collect
();
let
db_cfs_count
=
cf_names
.len
()
as
c_int
;
let
db_cf_ptrs
=
cf_names
.as_ptr
();
// These handles will be populated by DB.
let
db_cf_opts
=
cf_options
.as_ptr
();
let
cfhandles
:
Vec
<
_
>
=
cfs_v
.iter
()
.map
(|
_
|
ptr
::
null_mut
())
.collect
();
let
db_cf_handles
=
cf_handles
.as_ptr
();
unsafe
{
let
cfopts
:
Vec
<
_
>
=
cf_opts_v
.iter
()
.map
(|
x
|
x
.inner
as
*
const
crocksdb_ffi
::
Options
)
.collect
();
let
db
=
unsafe
{
ffi_try!
(
crocksdb_open_column_families
(
ffi_try!
(
crocksdb_open_column_families
(
opts
.inner
,
db_options
,
cpath
.as_ptr
()
,
db_path
,
cfs_v
.len
()
as
c_i
nt
,
db_cfs_cou
nt
,
cfnames
.as_ptr
()
,
db_cf_ptrs
,
cfopts
.as_ptr
()
,
db_cf_opts
,
cfhandles
.as_ptr
()
db_cf_handles
))
))
};
for
handle
in
&
cfhandles
{
if
handle
.is_null
()
{
return
Err
(
"Received null column family handle from DB."
.to_owned
());
}
}
}
let
mut
cf_map
=
BTreeMap
::
new
();
for
(
n
,
h
)
in
cfs_v
.iter
()
.zip
(
cfhandles
)
{
cf_map
.insert
((
*
n
)
.to_owned
(),
CFHandle
{
inner
:
h
});
}
if
db
.is_null
()
{
return
Err
(
"Could not initialize database."
.to_owned
());
}
(
db
,
cf_map
)
};
};
if
cf_handles
.iter
()
.any
(|
h
|
h
.is_null
())
{
return
Err
(
ERR_NULL_CF_HANDLE
.to_owned
());
}
if
db
.is_null
()
{
return
Err
(
ERR_NULL_DB_ONINIT
.to_owned
());
}
let
cfs
=
names
.into_iter
()
.zip
(
cf_handles
)
.map
(|(
s
,
h
)|
(
s
.to_owned
(),
CFHandle
{
inner
:
h
}))
.collect
();
Ok
(
DB
{
Ok
(
DB
{
inner
:
db
,
inner
:
db
,
cfs
:
cf
_map
,
cfs
:
cf
s
,
path
:
path
.to_owned
(),
path
:
path
.to_owned
(),
opts
:
opts
,
opts
:
opts
,
_cf_opts
:
cf_opts_v
,
_cf_opts
:
options
,
})
})
}
}
pub
fn
destroy
(
opts
:
&
DBOptions
,
path
:
&
str
)
->
Result
<
(),
String
>
{
pub
fn
destroy
(
opts
:
&
DBOptions
,
path
:
&
str
)
->
Result
<
(),
String
>
{
let
cpath
=
CString
::
new
(
path
.as_bytes
())
.unwrap
();
let
cpath
=
CString
::
new
(
path
.as_bytes
())
.unwrap
();
unsafe
{
unsafe
{
...
@@ -554,9 +564,7 @@ impl DB {
...
@@ -554,9 +564,7 @@ impl DB {
let
cname
=
match
CString
::
new
(
name
.as_bytes
())
{
let
cname
=
match
CString
::
new
(
name
.as_bytes
())
{
Ok
(
c
)
=>
c
,
Ok
(
c
)
=>
c
,
Err
(
_
)
=>
{
Err
(
_
)
=>
{
return
Err
(
return
Err
(
"Failed to convert path to CString when opening rocksdb"
.to_owned
())
"Failed to convert path to CString when opening rocksdb"
.to_owned
(),
)
}
}
};
};
let
cname_ptr
=
cname
.as_ptr
();
let
cname_ptr
=
cname
.as_ptr
();
...
...
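With the new helpers above, DB::open and DB::open_cf both funnel into open_cf_internal, and ensure_default_cf_exists appends a "default" descriptor whenever the caller did not name it, so the default column family is always opened. A sketch of the degenerate case, which mirrors what the new DB::open passes through; the temp-dir name is arbitrary, and tempdir is assumed to be available as it is for the crate's tests:

extern crate rocksdb;
extern crate tempdir;

use rocksdb::*;
use tempdir::TempDir;

fn main() {
    let dir = TempDir::new("_rust_rocksdb_descriptor_sketch").expect("");
    let path = dir.path().to_str().unwrap();

    let mut opts = DBOptions::new();
    opts.create_if_missing(true);

    // An empty descriptor list is exactly what DB::open now forwards;
    // ensure_default_cf_exists() pushes ColumnFamilyDescriptor::default(),
    // so "default" is still opened and ordinary writes work.
    let cfds: Vec<&str> = vec![];
    let db = DB::open_cf(opts, path, cfds).unwrap();
    db.put(b"k1", b"v1").unwrap();
}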
src/rocksdb_options.rs
View file @
cc1db4a1
...
@@ -15,7 +15,6 @@
...
@@ -15,7 +15,6 @@
use
compaction_filter
::{
new_compaction_filter
,
CompactionFilter
,
CompactionFilterHandle
};
use
compaction_filter
::{
new_compaction_filter
,
CompactionFilter
,
CompactionFilterHandle
};
use
comparator
::{
self
,
compare_callback
,
ComparatorCallback
};
use
comparator
::{
self
,
compare_callback
,
ComparatorCallback
};
use
crocksdb_ffi
::{
self
,
DBBlockBasedTableOptions
,
DBCompactOptions
,
DBCompressionType
,
use
crocksdb_ffi
::{
self
,
DBBlockBasedTableOptions
,
DBCompactOptions
,
DBCompressionType
,
DBFlushOptions
,
DBInfoLogLevel
,
DBInstance
,
DBRateLimiter
,
DBReadOptions
,
DBFlushOptions
,
DBInfoLogLevel
,
DBInstance
,
DBRateLimiter
,
DBReadOptions
,
DBRecoveryMode
,
DBRestoreOptions
,
DBSnapshot
,
DBStatisticsHistogramType
,
DBRecoveryMode
,
DBRestoreOptions
,
DBSnapshot
,
DBStatisticsHistogramType
,
...
@@ -1161,6 +1160,46 @@ impl ColumnFamilyOptions {
...
@@ -1161,6 +1160,46 @@ impl ColumnFamilyOptions {
}
}
}
}
// ColumnFamilyDescriptor is a pair of column family's name and options.
pub
struct
ColumnFamilyDescriptor
<
'a
>
{
pub
name
:
&
'a
str
,
pub
options
:
ColumnFamilyOptions
,
}
impl
<
'a
>
ColumnFamilyDescriptor
<
'a
>
{
const
DEFAULT_COLUMN_FAMILY
:
&
'static
str
=
"default"
;
pub
fn
new
(
name
:
&
'a
str
,
options
:
ColumnFamilyOptions
)
->
Self
{
ColumnFamilyDescriptor
{
name
,
options
}
}
pub
fn
is_default
(
&
self
)
->
bool
{
self
.name
==
Self
::
DEFAULT_COLUMN_FAMILY
}
}
impl
Default
for
ColumnFamilyDescriptor
<
'static
>
{
fn
default
()
->
Self
{
let
name
=
Self
::
DEFAULT_COLUMN_FAMILY
;
let
options
=
ColumnFamilyOptions
::
new
();
ColumnFamilyDescriptor
::
new
(
name
,
options
)
}
}
impl
<
'a
>
From
<&
'a
str
>
for
ColumnFamilyDescriptor
<
'a
>
{
fn
from
(
name
:
&
'a
str
)
->
Self
{
let
options
=
ColumnFamilyOptions
::
new
();
ColumnFamilyDescriptor
::
new
(
name
,
options
)
}
}
impl
<
'a
>
From
<
(
&
'a
str
,
ColumnFamilyOptions
)
>
for
ColumnFamilyDescriptor
<
'a
>
{
fn
from
(
tuple
:
(
&
'a
str
,
ColumnFamilyOptions
))
->
Self
{
let
(
name
,
options
)
=
tuple
;
ColumnFamilyDescriptor
::
new
(
name
,
options
)
}
}
pub
struct
FlushOptions
{
pub
struct
FlushOptions
{
pub
inner
:
*
mut
DBFlushOptions
,
pub
inner
:
*
mut
DBFlushOptions
,
}
}
...
...
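The Default and From impls above are what keep call sites terse: a bare &str, a (&str, ColumnFamilyOptions) tuple, or the explicit constructor all yield a descriptor. A sketch of the equivalent spellings, assuming ColumnFamilyDescriptor is re-exported at the crate root alongside ColumnFamilyOptions:

extern crate rocksdb;

use rocksdb::*;

fn main() {
    // Three ways to describe the default column family with fresh options.
    let d1 = ColumnFamilyDescriptor::default();
    let d2 = ColumnFamilyDescriptor::new("default", ColumnFamilyOptions::new());
    let d3: ColumnFamilyDescriptor = ("default", ColumnFamilyOptions::new()).into();
    assert!(d1.is_default() && d2.is_default() && d3.is_default());

    // A bare name converts too, picking up ColumnFamilyOptions::new().
    let d4: ColumnFamilyDescriptor = "cf1".into();
    assert!(!d4.is_default());
}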
tests/test.rs

-extern crate rocksdb;
-extern crate tempdir;
 extern crate byteorder;
 extern crate crc;
+extern crate rocksdb;
+extern crate tempdir;

 mod test_iterator;
 mod test_multithreaded;
tests/test_column_family.rs

@@ -27,7 +27,7 @@ pub fn test_column_family() {
         opts.create_if_missing(true);
         let mut cf_opts = ColumnFamilyOptions::new();
         cf_opts.add_merge_operator("test operator", test_provided_merge);
-        let mut db = DB::open_cf(opts, path_str, vec!["default"], vec![cf_opts]).unwrap();
+        let mut db = DB::open_cf(opts, path_str, vec![("default", cf_opts)]).unwrap();
         let cf_opts = ColumnFamilyOptions::new();
         match db.create_cf("cf1", cf_opts) {
             Ok(_) => println!("cf1 created successfully"),
@@ -42,7 +42,7 @@ pub fn test_column_family() {
     {
         let mut cf_opts = ColumnFamilyOptions::new();
         cf_opts.add_merge_operator("test operator", test_provided_merge);
-        match DB::open_cf(DBOptions::new(), path_str, vec!["default"], vec![cf_opts]) {
+        match DB::open_cf(DBOptions::new(), path_str, vec![("default", cf_opts)]) {
             Ok(_) => panic!(
                 "should not have opened DB successfully without \
                  specifying column
@@ -59,7 +59,7 @@ pub fn test_column_family() {
     {
         let mut cf_opts = ColumnFamilyOptions::new();
         cf_opts.add_merge_operator("test operator", test_provided_merge);
-        match DB::open_cf(DBOptions::new(), path_str, vec!["cf1"], vec![cf_opts]) {
+        match DB::open_cf(DBOptions::new(), path_str, vec![("cf1", cf_opts)]) {
             Ok(_) => println!("successfully opened db with column family"),
             Err(e) => panic!("failed to open db with column family: {}", e),
         }
@@ -68,7 +68,7 @@ pub fn test_column_family() {
     {
         let mut cf_opts = ColumnFamilyOptions::new();
         cf_opts.add_merge_operator("test operator", test_provided_merge);
-        let db = match DB::open_cf(DBOptions::new(), path_str, vec!["cf1"], vec![cf_opts]) {
+        let db = match DB::open_cf(DBOptions::new(), path_str, vec![("cf1", cf_opts)]) {
             Ok(db) => {
                 println!("successfully opened db with column family");
                 db
@@ -116,8 +116,7 @@ pub fn test_column_family() {
         let mut db = DB::open_cf(
             DBOptions::new(),
             path_str,
-            vec!["cf1"],
-            vec![ColumnFamilyOptions::new()],
+            vec![("cf1", ColumnFamilyOptions::new())],
         ).unwrap();
         match db.drop_cf("cf1") {
             Ok(_) => println!("cf1 successfully dropped."),
tests/test_compaction_filter.rs

@@ -59,8 +59,7 @@ fn test_compaction_filter() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let samples = vec![
         (b"key1".to_vec(), b"value1".to_vec()),
@@ -101,8 +100,7 @@ fn test_compaction_filter() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let _snap = db.snapshot();
     // Because ignore_snapshots is true, so all the keys will be compacted.
tests/test_delete_files_in_range.rs

@@ -22,7 +22,7 @@ fn initial_data(path: &str) -> DB {
     // DeleteFilesInRange ignore sst files in level 0,
     // this will makes all sst files fall into level 1.
     cf_opts.set_level_zero_file_num_compaction_trigger(1);
-    let db = DB::open_cf(opts, path, vec!["default"], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path, vec![("default", cf_opts)]).unwrap();
     for i in 0..3 {
         let k = format!("key{}", i);
         let v = format!("value{}", i);
tests/test_delete_range.rs

@@ -646,7 +646,7 @@ fn test_delete_range_prefix_bloom_case_1() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let samples_a = vec![
         (b"keya11111", b"value1"),
@@ -721,7 +721,7 @@ fn test_delete_range_prefix_bloom_case_2() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle = get_cf_handle(&db, cf).unwrap();
     let samples_a = vec![
@@ -777,7 +777,7 @@ fn test_delete_range_prefix_bloom_case_2() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db2 = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db2 = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle2 = get_cf_handle(&db2, cf).unwrap();
     db2.ingest_external_file_cf(handle2, &ingest_opt, &[test_sstfile_str])
@@ -814,7 +814,7 @@ fn test_delete_range_prefix_bloom_case_3() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle = get_cf_handle(&db, cf).unwrap();
     let samples_a = vec![
         (b"keya11111", b"value1"),
@@ -859,7 +859,7 @@ fn test_delete_range_prefix_bloom_case_3() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db2 = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db2 = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle2 = get_cf_handle(&db2, cf).unwrap();
     let samples_b = vec![(b"keyb22222", b"value2"), (b"keyc33333", b"value3")];
     for (k, v) in samples_b {
@@ -910,7 +910,7 @@ fn test_delete_range_prefix_bloom_case_4() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle = get_cf_handle(&db, cf).unwrap();
     let samples_a = vec![
         (b"keya11111", b"value1"),
@@ -955,7 +955,7 @@ fn test_delete_range_prefix_bloom_case_4() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db2 = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db2 = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle2 = get_cf_handle(&db2, cf).unwrap();
@@ -1009,7 +1009,7 @@ fn test_delete_range_prefix_bloom_case_5() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle = get_cf_handle(&db, cf).unwrap();
     let samples_a = vec![
         (b"keya11111", b"value1"),
@@ -1051,7 +1051,7 @@ fn test_delete_range_prefix_bloom_case_5() {
         .unwrap_or_else(|err| panic!(format!("{:?}", err)));
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
-    let db2 = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db2 = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle2 = get_cf_handle(&db2, cf).unwrap();
     let samples_b = vec![(b"keyd44444", b"value4"), (b"keye55555", b"value5")];
@@ -1101,7 +1101,7 @@ fn test_delete_range_prefix_bloom_case_6() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle = get_cf_handle(&db, cf).unwrap();
     let samples_a = vec![
         (b"keya11111", b"value1"),
@@ -1145,7 +1145,7 @@ fn test_delete_range_prefix_bloom_case_6() {
         .unwrap_or_else(|err| panic!(format!("{:?}", err)));
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
-    let db2 = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db2 = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle2 = get_cf_handle(&db2, cf).unwrap();
     let samples_b = vec![
@@ -1219,7 +1219,7 @@ fn test_delete_range_prefix_bloom_compact_case() {
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
     let cf = "default";
-    let db = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle = get_cf_handle(&db, cf).unwrap();
     let samples_a = vec![
         (b"keya11111", b"value1"),
@@ -1263,7 +1263,7 @@ fn test_delete_range_prefix_bloom_compact_case() {
         .unwrap_or_else(|err| panic!(format!("{:?}", err)));
     // Create prefix bloom filter for memtable.
     cf_opts.set_memtable_prefix_bloom_size_ratio(0.1 as f64);
-    let db2 = DB::open_cf(opts, path_str, vec![cf], vec![cf_opts]).unwrap();
+    let db2 = DB::open_cf(opts, path_str, vec![(cf, cf_opts)]).unwrap();
     let handle2 = get_cf_handle(&db2, cf).unwrap();
     let samples_b = vec![
tests/test_event_listener.rs

@@ -17,7 +17,6 @@ use rocksdb::*;
 use std::sync::Arc;
 use std::sync::atomic::*;
 use tempdir::TempDir;
 use test_ingest_external_file::gen_sst;

 #[derive(Default, Clone)]
tests/test_ingest_external_file.rs

@@ -12,7 +12,6 @@
 // limitations under the License.

 use rocksdb::*;
 use std::fs;
 use tempdir::TempDir;
@@ -157,7 +156,7 @@ fn test_ingest_external_file_new() {
     opts.create_if_missing(true);
     let mut cf_opts = ColumnFamilyOptions::new();
     cf_opts.add_merge_operator("merge operator", concat_merge);
-    let db = DB::open_cf(opts, path_str, vec!["default"], vec![cf_opts]).unwrap();
+    let db = DB::open_cf(opts, path_str, vec![("default", cf_opts)]).unwrap();
     let gen_path = TempDir::new("_rust_rocksdb_ingest_sst_gen_new").expect("");
     let test_sstfile = gen_path.path().join("test_sst_file_new");
     let test_sstfile_str = test_sstfile.to_str().unwrap();
tests/test_iterator.rs

@@ -272,8 +272,7 @@ fn test_total_order_seek() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let wopts = WriteOptions::new();
@@ -355,8 +354,7 @@ fn test_fixed_suffix_seek() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     db.put(b"k-eghe-5", b"a").unwrap();
     db.put(b"k-24yfae-6", b"a").unwrap();
tests/test_prefix_extractor.rs

@@ -77,8 +77,7 @@ fn test_prefix_extractor_compatibility() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let wopts = WriteOptions::new();
tests/test_rocksdb_options.rs

@@ -33,8 +33,7 @@ fn test_set_num_levels() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     drop(db);
 }
@@ -127,8 +126,7 @@ fn test_memtable_insert_hint_prefix_extractor() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let wopts = WriteOptions::new();
@@ -261,8 +259,7 @@ fn test_set_pin_l0_filter_and_index_blocks_in_cache() {
     DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
 }

 #[test]
@@ -276,8 +273,7 @@ fn test_pending_compaction_bytes_limit() {
     DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
 }
@@ -310,8 +306,7 @@ fn test_set_optimize_filters_for_hits() {
     DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
 }
@@ -330,8 +325,7 @@ fn test_get_block_cache_usage() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     for i in 0..200 {
@@ -355,8 +349,7 @@ fn test_set_level_compaction_dynamic_level_bytes() {
     DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
 }
@@ -412,8 +405,7 @@ fn test_set_compaction_pri() {
     DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
 }
@@ -488,8 +480,7 @@ fn test_bottommost_compression() {
     DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
 }
@@ -577,7 +568,7 @@ fn test_block_based_options() {
     let mut cfopts = ColumnFamilyOptions::new();
     cfopts.set_block_based_table_factory(&bopts);
-    let db = DB::open_cf(opts.clone(), path_str, vec!["default"], vec![cfopts]).unwrap();
+    let db = DB::open_cf(opts.clone(), path_str, vec![("default", cfopts)]).unwrap();
     // RocksDB use randomness for the read amplification statistics,
     // we should use a bigger enough value (> `bytes_per_bit`) to make
     // sure the statistics will not be 0.
tests/test_slice_transform.rs

@@ -51,8 +51,7 @@ fn test_slice_transform() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let samples = vec![
         (b"key_01".to_vec(), b"1".to_vec()),
tests/test_table_properties.rs

@@ -176,8 +176,7 @@ fn test_table_properties_collector_factory() {
     let db = DB::open_cf(
         opts,
         path.path().to_str().unwrap(),
-        vec!["default"],
-        vec![cf_opts],
+        vec![("default", cf_opts)],
     ).unwrap();
     let samples = vec![