Skip to content

Commit f2de908

Browse files
author
yangweijian
committed
fix CI fails
1 parent e8d9152 commit f2de908

File tree

9 files changed

+47
-54
lines changed

9 files changed

+47
-54
lines changed

librocksdb-sys/tests/ffi.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -24,8 +24,8 @@
2424
)]
2525

2626
use const_cstr::const_cstr;
27+
use haizhi_librocksdb_sys::*;
2728
use libc::*;
28-
use librocksdb_sys::*;
2929
use std::borrow::Cow;
3030
use std::env;
3131
use std::ffi::{CStr, CString};

src/checkpoint.rs

+8-10
Original file line numberDiff line numberDiff line change
@@ -18,12 +18,12 @@
1818
//! [1]: https://github.com/facebook/rocksdb/wiki/Checkpoints
1919
2020
use crate::AsColumnFamilyRef;
21-
use crate::{ffi, Error, DB};
22-
use libc::{c_char, int32_t};
21+
use crate::{ffi, Error};
22+
use libc::c_char;
2323

2424
use crate::db::DBInner;
2525
use crate::ffi_util::to_cpath;
26-
use crate::{ColumnFamily, DBCommon, ThreadMode};
26+
use crate::{DBCommon, ThreadMode};
2727
use std::ffi::{CStr, CString};
2828
use std::fs::File;
2929
use std::io::{Read, Write};
@@ -215,13 +215,11 @@ impl<'db> Checkpoint<'db> {
215215
export_dir: P,
216216
) -> Result<ExportImportFilesMetaData, Error> {
217217
let path = export_dir.as_ref();
218-
let cpath = if let Ok(c) = CString::new(path.to_string_lossy().as_bytes()) {
219-
c
220-
} else {
221-
return Err(Error::new(
222-
"Failed to convert path to CString when creating DB checkpoint".to_owned(),
223-
));
224-
};
218+
let cpath = CString::new(path.to_string_lossy().as_bytes()).map_err(|err| {
219+
Error::new(format!(
220+
"Failed to convert path to CString when creating DB checkpoint: {err}"
221+
))
222+
})?;
225223

226224
let inner: *mut ffi::rocksdb_export_import_files_metadata_t;
227225

src/db.rs

+6-8
Original file line numberDiff line numberDiff line change
@@ -2036,7 +2036,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
20362036
true,
20372037
true,
20382038
files_size_error_margin,
2039-
))
2039+
));
20402040
}
20412041
Ok(sizes)
20422042
}
@@ -2167,13 +2167,11 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
21672167
opts: &Options,
21682168
metadata: &ExportImportFilesMetaData,
21692169
) -> Result<*mut ffi::rocksdb_column_family_handle_t, Error> {
2170-
let cf_name = if let Ok(c) = CString::new(name.as_bytes()) {
2171-
c
2172-
} else {
2173-
return Err(Error::new(
2174-
"Failed to convert path to CString when creating cf".to_owned(),
2175-
));
2176-
};
2170+
let cf_name = CString::new(name.as_bytes()).map_err(|err| {
2171+
Error::new(format!(
2172+
"Failed to convert path to CString when creating cf: {err}"
2173+
))
2174+
})?;
21772175
Ok(unsafe {
21782176
ffi_try!(ffi::rocksdb_create_column_family_with_import(
21792177
self.inner.inner(),

src/properties.rs

+7-7
Original file line numberDiff line numberDiff line change
@@ -15,18 +15,18 @@ macro_rules! property {
1515
};
1616
}
1717

18-
/// "rocksdb.num-files-at-level<N>" - returns string containing the number
19-
/// of files at level <N>, where <N> is an ASCII representation of a
18+
/// "rocksdb.num-files-at-level`<N>`" - returns string containing the number
19+
/// of files at level `<N>`, where `<N>` is an ASCII representation of a
2020
/// level number (e.g., "0").
2121
pub fn num_files_at_level(level: usize) -> CString {
2222
unsafe { level_property("num-files-at-level", level) }
2323
}
2424

25-
/// "rocksdb.compression-ratio-at-level<N>" - returns string containing the
26-
/// compression ratio of data at level <N>, where <N> is an ASCII
25+
/// "rocksdb.compression-ratio-at-level`<N>`" - returns string containing the
26+
/// compression ratio of data at level `<N>`, where `<N>` is an ASCII
2727
/// representation of a level number (e.g., "0"). Here, compression
2828
/// ratio is defined as uncompressed data size / compressed file size.
29-
/// Returns "-1.0" if no open files at level <N>.
29+
/// Returns "-1.0" if no open files at level `<N>`.
3030
pub fn compression_ratio_at_level(level: usize) -> CString {
3131
unsafe { level_property("compression-ratio-at-level", level) }
3232
}
@@ -45,7 +45,7 @@ pub const SSTABLES: &CStr = property!("sstables");
4545
pub const CFSTATS: &CStr = property!("CFSTATS");
4646

4747
/// "rocksdb.cfstats-no-file-histogram" - returns a multi-line string with
48-
/// general column family stats per-level over db's lifetime ("L<n>"),
48+
/// general column family stats per-level over db's lifetime ("L`<n>`"),
4949
/// aggregated over db's lifetime ("Sum"), and aggregated over the
5050
/// interval since the last retrieval ("Int").
5151
/// It could also be used to return the stats in the format of the map.
@@ -192,7 +192,7 @@ pub const ESTIMATE_PENDING_COMPACTION_BYTES: &CStr = property!("estimate-pending
192192
/// of the aggregated table properties of the target column family.
193193
pub const AGGREGATED_TABLE_PROPERTIES: &CStr = property!("aggregated-table-properties");
194194

195-
/// "rocksdb.aggregated-table-properties-at-level<N>", same as the previous
195+
/// "rocksdb.aggregated-table-properties-at-level`<N>`", same as the previous
196196
/// one but only returns the aggregated table properties of the
197197
/// specified level "N" at the target column family.
198198
pub fn aggregated_table_properties_at_level(level: usize) -> CString {

src/transactions/transaction_db.rs

+1-1
Original file line numberDiff line numberDiff line change
@@ -300,8 +300,8 @@ impl<T: ThreadMode> TransactionDB<T> {
300300
let mut cnt = 0;
301301
let ptr = ffi::rocksdb_transactiondb_get_prepared_transactions(db, &mut cnt);
302302
let mut vec = vec![std::ptr::null_mut(); cnt];
303-
std::ptr::copy_nonoverlapping(ptr, vec.as_mut_ptr(), cnt);
304303
if !ptr.is_null() {
304+
std::ptr::copy_nonoverlapping(ptr, vec.as_mut_ptr(), cnt);
305305
ffi::rocksdb_free(ptr as *mut c_void);
306306
}
307307
vec

tests/test_approximate.rs

+16-16
Original file line numberDiff line numberDiff line change
@@ -4,16 +4,20 @@ use haizhi_rocksdb as rocksdb;
44

55
use rocksdb::Ranges;
66
use rocksdb::{ColumnFamilyDescriptor, Options, DB};
7+
8+
mod util;
9+
use util::DBPath;
10+
711
#[test]
812
fn test_approximate() {
9-
let path = "test1";
13+
let path = DBPath::new("test_approximate_test1");
1014
let cf_opts = Options::default();
1115
let cf1 = ColumnFamilyDescriptor::new("cf1", cf_opts.clone());
1216
let cf2 = ColumnFamilyDescriptor::new("cf2", cf_opts);
1317
let mut db_opts = Options::default();
1418
db_opts.create_missing_column_families(true);
1519
db_opts.create_if_missing(true);
16-
let db = DB::open_cf_descriptors(&db_opts, path, vec![cf1, cf2]).unwrap();
20+
let db = DB::open_cf_descriptors(&db_opts, &path, vec![cf1, cf2]).unwrap();
1721
//
1822
let a = 1.to_string();
1923
let start_key: &[u8] = a.as_ref();
@@ -23,25 +27,21 @@ fn test_approximate() {
2327
let cf2 = db.cf_handle("cf2").unwrap();
2428
for key in 0..10000 {
2529
if key % 2 == 1 {
26-
db.put_cf(cf1, key.to_string(), (key * 2).to_string())
30+
db.put_cf(&cf1, key.to_string(), (key * 2).to_string())
2731
.unwrap();
2832
} else {
29-
db.put_cf(cf2, key.to_string(), (key * 2).to_string())
33+
db.put_cf(&cf2, key.to_string(), (key * 2).to_string())
3034
.unwrap();
3135
}
3236
}
33-
db.flush_cf(cf1).unwrap();
34-
db.flush_cf(cf2).unwrap();
37+
db.flush_cf(&cf1).unwrap();
38+
db.flush_cf(&cf2).unwrap();
3539
std::thread::sleep(Duration::from_secs(2));
36-
println!(
37-
"start_key {:?}, end_key {:?}",
38-
start_key.clone(),
39-
end_key.clone()
40-
);
40+
println!("start_key {:?}, end_key {:?}", start_key, end_key,);
4141
let files_error_margin: f64 = 1.0;
4242
let f = db
4343
.get_approximate_sizes_with_option(
44-
cf1,
44+
&cf1,
4545
&[Ranges::new(start_key, end_key)],
4646
files_error_margin,
4747
)
@@ -53,18 +53,18 @@ fn test_approximate() {
5353

5454
for key in 0..10000 {
5555
if key % 2 == 1 {
56-
db.delete_cf(cf1, key.to_string()).unwrap();
56+
db.delete_cf(&cf1, key.to_string()).unwrap();
5757
} else {
58-
db.delete_cf(cf2, key.to_string()).unwrap();
58+
db.delete_cf(&cf2, key.to_string()).unwrap();
5959
}
6060
}
61-
db.flush_cf(cf1).unwrap();
61+
db.flush_cf(&cf1).unwrap();
6262
std::thread::sleep(Duration::from_secs(5));
6363
let none: Option<Vec<u8>> = None;
6464
db.compact_range(none.clone(), none);
6565
let f = db
6666
.get_approximate_sizes_with_option(
67-
cf1,
67+
&cf1,
6868
&[Ranges::new(start_key, end_key)],
6969
files_error_margin,
7070
)

tests/test_checkpoint.rs

+2-5
Original file line numberDiff line numberDiff line change
@@ -18,10 +18,7 @@ use pretty_assertions::assert_eq;
1818

1919
use haizhi_rocksdb as rocksdb;
2020

21-
use rocksdb::{
22-
checkpoint::{Checkpoint, ExportImportFilesMetaData},
23-
Options, DB,
24-
};
21+
use rocksdb::{checkpoint::Checkpoint, Options, DB};
2522
use util::DBPath;
2623

2724
#[test]
@@ -139,7 +136,7 @@ fn test_export_column_family() {
139136

140137
let export_path = DBPath::new(&format!("{}db1_backup", PATH_PREFIX));
141138
// let export_path = Path::new("db1_backup");
142-
let result = checkpoint.export_column_family(cf1, &export_path);
139+
let result = checkpoint.export_column_family(&cf1, &export_path);
143140
assert!(result.is_ok());
144141
let metadata = result.unwrap();
145142
// println!("metadata {:?}", metadata.save("save"));

tests/test_column_family.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -512,7 +512,7 @@ fn test_create_cf_with_import() {
512512

513513
let mut opts = Options::default();
514514
opts.create_if_missing(true);
515-
let mut origin_db = DB::open(&opts, &origin_db_path).unwrap();
515+
let mut origin_db = DB::open(&opts, origin_db_path).unwrap();
516516
// create two column families
517517
assert!(origin_db.create_cf("cf1", &opts).is_ok());
518518
assert!(origin_db.create_cf("cf2", &opts).is_ok());
@@ -537,7 +537,7 @@ fn test_create_cf_with_import() {
537537

538538
let export_path = format!("{}/db1_backup", PATH_PREFIX);
539539
let export_path = Path::new(&export_path);
540-
let result = checkpoint.export_column_family(cf1, &export_path);
540+
let result = checkpoint.export_column_family(&cf1, export_path);
541541
assert!(result.is_ok());
542542
drop(checkpoint);
543543

@@ -549,7 +549,7 @@ fn test_create_cf_with_import() {
549549
// new db from export path
550550
let recover_db_path = format!("{}/db1_recover", PATH_PREFIX);
551551
let recover_db_path = Path::new(&recover_db_path);
552-
let mut recover_db = DB::open(&opts, &recover_db_path).unwrap();
552+
let mut recover_db = DB::open(&opts, recover_db_path).unwrap();
553553
assert!(recover_db.cf_handle("cf1").is_none());
554554
assert!(recover_db.cf_handle("cf2").is_none());
555555
let result = recover_db.create_cf_with_import("cf1", &opts, &recover_metadata);

tests/test_comparator.rs

+3-3
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@ pub fn rocks_old_compare(one: &[u8], two: &[u8]) -> Ordering {
99
one.cmp(two)
1010
}
1111

12+
type CmpFn = dyn Fn(&[u8], &[u8]) -> Ordering;
13+
1214
/// create database add some values, and iterate over these
13-
pub fn write_to_db_with_comparator(
14-
compare_fn: Box<dyn Fn(&[u8], &[u8]) -> Ordering>,
15-
) -> Vec<String> {
15+
pub fn write_to_db_with_comparator(compare_fn: Box<CmpFn>) -> Vec<String> {
1616
let mut result_vec = Vec::new();
1717

1818
let path = "_path_for_rocksdb_storage";

0 commit comments

Comments
 (0)