Mirror of https://github.com/ShadowJonathan/conduit_toolbox.git, synced 2025-06-08 02:17:04 +03:00
add persy backend and make building optional
parent 0e0a025c37, commit 805baa0705
6 changed files with 276 additions and 19 deletions
@@ -7,9 +7,16 @@ edition = "2018"
 
 [dependencies]
 itertools = "0.10.1"
-sled = { version = "0.34.6", features = ["compression", "no_metrics"] }
-rusqlite = { version = "0.25.3", features = ["bundled"] }
-anyhow = "1.0.42"
-heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d" }
 thiserror = "1.0.26"
-rocksdb = { version = "0.17.0", features = ["multi-threaded-cf", "zstd"] }
+anyhow = "1.0.42"
+
+sled = { version = "0.34.6", features = ["compression", "no_metrics"], optional = true }
+rusqlite = { version = "0.25.3", features = ["bundled"], optional = true }
+heed = { git = "https://github.com/timokoesters/heed.git", rev = "f6f825da7fb2c758867e05ad973ef800a6fe1d5d", optional = true }
+rocksdb = { version = "0.17.0", features = ["multi-threaded-cf", "zstd"], optional = true }
+persy = { version = "1.2", optional = true }
+
+[features]
+default = ["sled", "persy", "heed", "sqlite", "rocksdb"]
+
+sqlite = ["rusqlite"]
@@ -1,6 +1,12 @@
+#[cfg(feature = "heed")]
 pub mod heed;
+#[cfg(feature = "persy")]
+pub mod persy;
+#[cfg(feature = "rocksdb")]
 pub mod rocksdb;
+#[cfg(feature = "sled")]
 pub mod sled;
+#[cfg(feature = "sqlite")]
 pub mod sqlite;
 
 use itertools::Itertools;
tools/iface/src/db/persy.rs (new file, 98 additions)
@@ -0,0 +1,98 @@
+use super::{Database, KVIter, Segment, SegmentIter};
+use persy::{ByteVec, Persy};
+use std::path::Path;
+
+pub fn new_db<P: AsRef<Path>>(path: P) -> anyhow::Result<PersyDB> {
+    let path = Path::new("./db.persy").join(path);
+
+    let persy = persy::OpenOptions::new()
+        .create(true)
+        .config(persy::Config::new())
+        .open(&path)?;
+
+    Ok(PersyDB { persy })
+}
+
+pub struct PersyDB {
+    persy: Persy,
+}
+
+impl Database for PersyDB {
+    fn names<'a>(&'a self) -> Vec<Vec<u8>> {
+        self.persy
+            .list_indexes()
+            .unwrap()
+            .iter()
+            .map(|(s, _)| s.as_bytes().to_vec())
+            .collect()
+    }
+
+    fn segment<'a>(&'a mut self, name: Vec<u8>) -> Option<Box<dyn Segment + 'a>> {
+        let string = String::from_utf8(name).unwrap();
+
+        if !self.persy.exists_index(&string).unwrap() {
+            use persy::ValueMode;
+
+            let mut tx = self.persy.begin().unwrap();
+            tx.create_index::<ByteVec, ByteVec>(&string, ValueMode::Replace)
+                .unwrap();
+            tx.prepare().unwrap().commit().unwrap();
+        }
+
+        Some(Box::new(PersySeg {
+            db: self,
+            name: string,
+        }))
+    }
+
+    fn flush(&mut self) {
+        // NOOP
+    }
+}
+
+pub struct PersySeg<'a> {
+    db: &'a mut PersyDB,
+    name: String,
+}
+
+impl<'r> Segment for PersySeg<'r> {
+    fn batch_insert<'a>(
+        &'a mut self,
+        batch: Box<dyn Iterator<Item = (Vec<u8>, Vec<u8>)> + 'a>,
+    ) -> anyhow::Result<()> {
+        let mut tx = self.db.persy.begin()?;
+        for (key, value) in batch {
+            tx.put::<ByteVec, ByteVec>(
+                &self.name,
+                ByteVec::from(key.clone()),
+                ByteVec::from(value),
+            )?;
+        }
+        tx.prepare()?.commit()?;
+
+        Ok(())
+    }
+
+    fn get_iter<'a>(&'a mut self) -> Box<dyn SegmentIter + 'a> {
+        Box::new(PersySegIter(self, &self.name))
+    }
+}
+
+pub struct PersySegIter<'a>(&'a PersySeg<'a>, &'a str);
+
+impl<'r> SegmentIter for PersySegIter<'r> {
+    fn iter<'a>(&'a mut self) -> KVIter<'a> {
+        Box::new(
+            self.0
+                .db
+                .persy
+                .range::<ByteVec, ByteVec, _>(self.1, ..)
+                .unwrap()
+                .filter_map(|(k, v)| {
+                    v.into_iter()
+                        .map(|val| ((*k).to_owned().into(), (*val).to_owned().into()))
+                        .next()
+                }),
+        )
+    }
+}
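For orientation only (not part of this commit): a minimal usage sketch of the new persy backend through the Database/Segment/SegmentIter traits it implements above. The demo function, the "demo" path, and the "test" segment name are made-up placeholders; it assumes the "persy" feature of conduit_iface is enabled and that KVIter yields the same (Vec<u8>, Vec<u8>) pairs that batch_insert consumes.

// Hypothetical sketch, not committed code: drive PersyDB via the iface traits.
use conduit_iface::db::{persy::new_db, Database, Segment, SegmentIter};

fn demo() -> anyhow::Result<()> {
    // Open (or create) a persy database via the new_db helper added above.
    let mut db = new_db("demo")?;

    // segment() creates the backing persy index on first use.
    let mut seg = db.segment(b"test".to_vec()).expect("segment");

    // batch_insert() writes all pairs inside one persy transaction.
    seg.batch_insert(Box::new(
        vec![(b"key".to_vec(), b"value".to_vec())].into_iter(),
    ))?;

    // get_iter().iter() walks the index back out as (key, value) pairs.
    for (k, v) in seg.get_iter().iter() {
        println!("{:?} => {:?}", k, v);
    }

    Ok(())
}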
@@ -8,5 +8,14 @@ edition = "2018"
 [dependencies]
 clap = "2.33.3"
 anyhow = "1.0.41"
-conduit_iface = { path = "../iface/" }
+conduit_iface = { path = "../iface/", default-features = false }
 thiserror = "1.0.26"
+
+[features]
+default = ["sled", "sqlite", "rocksdb"]
+
+sled = ["conduit_iface/sled"]
+persy = ["conduit_iface/persy"]
+heed = ["conduit_iface/heed"]
+sqlite = ["conduit_iface/sqlite"]
+rocksdb = ["conduit_iface/rocksdb"]
@@ -1,26 +1,36 @@
 use clap::{App, Arg};
-use conduit_iface::db::{
-    self, copy_database, heed::HeedDB, rocksdb::RocksDB, sled::SledDB, sqlite::SqliteDB,
-};
+use conduit_iface::db::{self, copy_database};
 use std::{
     ops::{Deref, DerefMut},
     path::{Path, PathBuf},
 };
 
 enum Database {
-    Sled(SledDB),
-    Sqlite(SqliteDB),
-    Heed(HeedDB),
-    Rocks(RocksDB),
+    #[cfg(feature = "sled")]
+    Sled(db::sled::SledDB),
+    #[cfg(feature = "heed")]
+    Heed(db::heed::HeedDB),
+    #[cfg(feature = "sqlite")]
+    Sqlite(db::sqlite::SqliteDB),
+    #[cfg(feature = "rocksdb")]
+    Rocks(db::rocksdb::RocksDB),
+    #[cfg(feature = "persy")]
+    Persy(db::persy::PersyDB),
 }
 
 impl Database {
     fn new(name: &str, path: PathBuf) -> anyhow::Result<Self> {
         Ok(match name {
-            "sled" => Self::Sled(SledDB::new(db::sled::new_db(path)?)),
-            "heed" => Self::Heed(HeedDB::new(db::heed::new_db(path)?)),
-            "sqlite" => Self::Sqlite(SqliteDB::new(db::sqlite::new_conn(path)?)),
+            #[cfg(feature = "sled")]
+            "sled" => Self::Sled(db::sled::SledDB::new(db::sled::new_db(path)?)),
+            #[cfg(feature = "heed")]
+            "heed" => Self::Heed(db::heed::HeedDB::new(db::heed::new_db(path)?)),
+            #[cfg(feature = "sqlite")]
+            "sqlite" => Self::Sqlite(db::sqlite::SqliteDB::new(db::sqlite::new_conn(path)?)),
+            #[cfg(feature = "rocksdb")]
             "rocks" => Self::Rocks(db::rocksdb::new_conn(path)?),
+            #[cfg(feature = "persy")]
+            "persy" => Self::Persy(db::persy::new_db(path)?),
            _ => panic!("unknown database type: {}", name),
         })
     }
@@ -31,10 +41,16 @@ impl Deref for Database
 
     fn deref(&self) -> &Self::Target {
         match self {
+            #[cfg(feature = "sled")]
             Database::Sled(db) => db,
-            Database::Sqlite(db) => db,
+            #[cfg(feature = "heed")]
             Database::Heed(db) => db,
+            #[cfg(feature = "sqlite")]
+            Database::Sqlite(db) => db,
+            #[cfg(feature = "rocksdb")]
             Database::Rocks(db) => db,
+            #[cfg(feature = "persy")]
+            Database::Persy(db) => db,
         }
     }
 }
@@ -42,15 +58,32 @@ impl Deref for Database
 impl DerefMut for Database {
     fn deref_mut(&mut self) -> &mut Self::Target {
         match self {
+            #[cfg(feature = "sled")]
             Database::Sled(db) => db,
-            Database::Sqlite(db) => db,
+            #[cfg(feature = "heed")]
             Database::Heed(db) => db,
+            #[cfg(feature = "sqlite")]
+            Database::Sqlite(db) => db,
+            #[cfg(feature = "rocksdb")]
             Database::Rocks(db) => db,
+            #[cfg(feature = "persy")]
+            Database::Persy(db) => db,
         }
     }
 }
 
-const DATABASES: &[&str] = &["heed", "sqlite", "sled", "rocks"];
+const DATABASES: &[&str] = &[
+    #[cfg(feature = "sled")]
+    "sled",
+    #[cfg(feature = "heed")]
+    "heed",
+    #[cfg(feature = "sqlite")]
+    "sqlite",
+    #[cfg(feature = "rocksdb")]
+    "rocks",
+    #[cfg(feature = "persy")]
+    "persy",
+];
 
 fn main() -> anyhow::Result<()> {
     let matches = App::new("Conduit Sled to Sqlite Migrator")