remove sled, upgrade rocksdb and rusqlite to conduit's version

This commit is contained in:
Jonathan de Jong 2024-05-04 12:01:34 +02:00
parent 7cc4616fbf
commit 6ee110e5ba
10 changed files with 127 additions and 363 deletions

View file

@ -10,12 +10,20 @@ itertools = "0.10.1"
thiserror = "1.0.26"
anyhow = "1.0.42"
sled = { version = "0.34.6", features = ["compression", "no_metrics"], optional = true }
rusqlite = { version = "0.25.3", features = ["bundled"], optional = true }
rusqlite = { version = "0.31", features = ["bundled"], optional = true }
heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true }
rocksdb = { version = "0.17.0", features = ["multi-threaded-cf", "zstd"], optional = true }
persy = { version = "1.2", optional = true }
[dependencies.rocksdb]
package = "rust-rocksdb"
version = "0.25"
optional = true
features = [
"multi-threaded-cf",
"zstd",
"lz4",
]
[features]
default = []

View file

@ -4,8 +4,6 @@ pub mod heed;
pub mod persy;
#[cfg(feature = "rocksdb")]
pub mod rocksdb;
#[cfg(feature = "sled")]
pub mod sled;
#[cfg(feature = "sqlite")]
pub mod sqlite;

View file

@ -131,7 +131,10 @@ impl super::SegmentIter for RocksDBCFIter<'_> {
.db
.rocks
.iterator_cf(&self.0.cf(), rocksdb::IteratorMode::Start)
.map(|(k, v)| (Vec::from(k), Vec::from(v))),
.map(|r| {
let (k, v) = r.expect("we expect rocksdb to give us good rows only");
(Vec::from(k), Vec::from(v))
}),
)
}
}

View file

@ -1,73 +0,0 @@
use super::{Database, KVIter, Segment, SegmentIter};
use itertools::Itertools;
use sled::{Batch, Config, Db, Result, Tree};
use std::path::Path;
/// Open (or create) a sled database at `path` with on-disk compression enabled.
pub fn new_db<P: AsRef<Path>>(path: P) -> Result<Db> {
    let config = Config::default().path(path).use_compression(true);
    config.open()
}
/// Thin newtype wrapper around an opened sled [`Db`] handle.
pub struct SledDB(Db);

impl SledDB {
    /// Wrap an already-opened sled database.
    pub fn new(db: Db) -> Self {
        SledDB(db)
    }
}
// Name of the tree sled creates implicitly for every database; it is not a
// user-created segment, so `names()` filters it out.
const SLED_DEFAULT: &[u8] = "__sled__default".as_bytes();

impl Database for SledDB {
    /// List every user-created tree name, excluding sled's built-in default tree.
    fn names<'a>(&'a self) -> Vec<Vec<u8>> {
        let mut names = Vec::new();
        for name in self.0.tree_names() {
            if &name[..] != SLED_DEFAULT {
                names.push(name.to_vec());
            }
        }
        names
    }

    /// Open (or create) the tree called `name` and expose it as a generic
    /// [`Segment`]; returns `None` if sled fails to open the tree.
    fn segment(&mut self, name: Vec<u8>) -> Option<Box<dyn Segment>> {
        match self.0.open_tree(name) {
            Ok(tree) => Some(Box::new(tree) as Box<dyn Segment>),
            Err(_) => None,
        }
    }

    /// Force all pending writes to disk; panics if sled reports an error.
    fn flush(&mut self) {
        self.0.flush().unwrap();
    }
}
impl Segment for Tree {
    /// Stage every `(key, value)` pair from `batch` into a sled batch and
    /// apply it atomically to this tree.
    fn batch_insert(
        &mut self,
        batch: Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + '_>,
    ) -> anyhow::Result<()> {
        let mut staged = Batch::default();
        batch.for_each(|(key, value)| staged.insert(key, value));
        match self.apply_batch(staged) {
            Ok(()) => Ok(()),
            Err(e) => Err(e.into()),
        }
    }

    /// Borrow a cursor over every key/value pair stored in this tree.
    fn get_iter<'a>(&'a mut self) -> Box<dyn super::SegmentIter + 'a> {
        Box::new(SledTreeIter(self))
    }
}
/// Iterator adapter walking a sled [`Tree`] and yielding owned key/value pairs.
struct SledTreeIter<'a>(&'a mut Tree);

impl SegmentIter for SledTreeIter<'_> {
    /// Yield each row as owned byte vectors, silently dropping rows sled
    /// fails to read (best-effort iteration).
    fn iter<'a>(&'a mut self) -> KVIter<'a> {
        Box::new(
            self.0
                .iter()
                .filter_map(|row| row.ok())
                .map(|(k, v)| (k.to_vec(), v.to_vec())),
        )
    }
}

View file

@ -12,9 +12,8 @@ conduit_iface = { path = "../iface/", default-features = false }
thiserror = "1.0.26"
[features]
default = ["sled", "sqlite", "rocksdb"]
default = ["sqlite", "rocksdb"]
sled = ["conduit_iface/sled"]
persy = ["conduit_iface/persy"]
heed = ["conduit_iface/heed"]
sqlite = ["conduit_iface/sqlite"]

View file

@ -6,8 +6,6 @@ use std::{
};
enum Database {
#[cfg(feature = "sled")]
Sled(db::sled::SledDB),
#[cfg(feature = "heed")]
Heed(db::heed::HeedDB),
#[cfg(feature = "sqlite")]
@ -21,8 +19,6 @@ enum Database {
impl Database {
fn new(name: &str, path: PathBuf, config: Config) -> anyhow::Result<Self> {
Ok(match name {
#[cfg(feature = "sled")]
"sled" => Self::Sled(db::sled::SledDB::new(db::sled::new_db(path)?)),
#[cfg(feature = "heed")]
"heed" => Self::Heed(db::heed::HeedDB::new(db::heed::new_db(path)?)),
#[cfg(feature = "sqlite")]
@ -44,8 +40,6 @@ impl Deref for Database {
fn deref(&self) -> &Self::Target {
match self {
#[cfg(feature = "sled")]
Database::Sled(db) => db,
#[cfg(feature = "heed")]
Database::Heed(db) => db,
#[cfg(feature = "sqlite")]
@ -61,8 +55,6 @@ impl Deref for Database {
impl DerefMut for Database {
fn deref_mut(&mut self) -> &mut Self::Target {
match self {
#[cfg(feature = "sled")]
Database::Sled(db) => db,
#[cfg(feature = "heed")]
Database::Heed(db) => db,
#[cfg(feature = "sqlite")]
@ -76,8 +68,6 @@ impl DerefMut for Database {
}
const DATABASES: &[&str] = &[
#[cfg(feature = "sled")]
"sled",
#[cfg(feature = "heed")]
"heed",
#[cfg(feature = "sqlite")]
@ -89,7 +79,7 @@ const DATABASES: &[&str] = &[
];
fn main() -> anyhow::Result<()> {
let matches = App::new("Conduit Sled to Sqlite Migrator")
let matches = App::new("Conduit Generic Migrator")
.arg(
Arg::with_name("from_dir")
.short("s")

View file

@ -1,11 +0,0 @@
[package]
name = "conduit_sled_to_sqlite"
version = "0.1.0"
edition = "2018"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
clap = "2.33.3"
anyhow = "1.0.41"
conduit_iface = { path = "../iface/", features=["sled", "sqlite"] }

View file

@ -1,57 +0,0 @@
use std::path::Path;
use clap::{App, Arg};
use conduit_iface::db::{copy_database, sled, sqlite, Config};
/// Migrate a Conduit sled database into a sqlite database.
///
/// CLI: `DIR` (positional 1) is the source directory, defaulting to `"."`;
/// `NEW_DIR` (positional 2) is the destination directory, defaulting to `DIR`.
/// Both paths are canonicalized and must exist and be directories.
fn main() -> anyhow::Result<()> {
    let matches = App::new("Conduit Sled to Sqlite Migrator")
        .arg(
            Arg::with_name("DIR")
                .long_help("Sets the directory to grab the database from\nWill default to \".\"")
                .index(1),
        )
        .arg(
            Arg::with_name("NEW_DIR")
                .long_help("Sets the destination directory\nWill default to DIR")
                .index(2),
        )
        .get_matches();

    let source_dir = matches.value_of("DIR").unwrap_or(".");
    let dest_dir = matches.value_of("NEW_DIR");

    let source_dir = Path::new(source_dir).canonicalize()?;
    if !source_dir.is_dir() {
        return Err(anyhow::anyhow!("source path must be directory"));
    }

    // Destination falls back to the source directory when NEW_DIR is omitted.
    let dest_dir = match dest_dir {
        None => source_dir.clone(),
        Some(dir) => {
            let p = Path::new(dir).canonicalize()?;
            if !p.is_dir() {
                return Err(anyhow::anyhow!("destination path must be directory"));
            }
            p
        }
    };

    let mut sled = sled::SledDB::new(sled::new_db(source_dir)?);
    let mut sqlite = sqlite::SqliteDB::new(
        sqlite::new_conn(dest_dir)?,
        Config {
            ignore_broken_rows: false,
        },
    );

    // 1000 is forwarded to copy_database as-is (presumably a batch/chunk
    // size — confirm against copy_database's signature in conduit_iface).
    copy_database(&mut sled, &mut sqlite, 1000)?;

    Ok(())
}