summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
Diffstat (limited to 'src')
-rw-r--r--src/backup_run.rs2
-rw-r--r--src/benchmark.rs6
-rw-r--r--src/bin/benchmark-index.rs7
-rw-r--r--src/bin/benchmark-indexedstore.rs8
-rw-r--r--src/bin/benchmark-null.rs2
-rw-r--r--src/bin/benchmark-store.rs4
-rw-r--r--src/bin/obnam-server.rs8
-rw-r--r--src/bin/obnam.rs39
-rw-r--r--src/chunk.rs24
-rw-r--r--src/chunker.rs12
-rw-r--r--src/chunkmeta.rs21
-rw-r--r--src/cipher.rs200
-rw-r--r--src/client.rs291
-rw-r--r--src/cmd/backup.rs2
-rw-r--r--src/cmd/chunk.rs64
-rw-r--r--src/cmd/init.rs8
-rw-r--r--src/cmd/mod.rs1
-rw-r--r--src/cmd/show_config.rs2
-rw-r--r--src/config.rs92
-rw-r--r--src/error.rs4
-rw-r--r--src/indexedstore.rs31
-rw-r--r--src/lib.rs1
-rw-r--r--src/passwords.rs15
-rw-r--r--src/store.rs12
24 files changed, 550 insertions, 306 deletions
diff --git a/src/backup_run.rs b/src/backup_run.rs
index 23c97f6..16d6700 100644
--- a/src/backup_run.rs
+++ b/src/backup_run.rs
@@ -41,7 +41,6 @@ pub type BackupResult<T> = Result<T, BackupError>;
impl<'a> InitialBackup<'a> {
pub fn new(config: &ClientConfig, client: &'a BackupClient) -> BackupResult<Self> {
let progress = BackupProgress::initial();
- let config = config.config();
Ok(Self {
client,
buffer_size: config.chunk_size,
@@ -81,7 +80,6 @@ impl<'a> InitialBackup<'a> {
impl<'a> IncrementalBackup<'a> {
pub fn new(config: &ClientConfig, client: &'a BackupClient) -> BackupResult<Self> {
- let config = config.config();
let policy = BackupPolicy::default();
Ok(Self {
client,
diff --git a/src/benchmark.rs b/src/benchmark.rs
index d214939..3c94f92 100644
--- a/src/benchmark.rs
+++ b/src/benchmark.rs
@@ -15,7 +15,7 @@ impl ChunkGenerator {
}
impl Iterator for ChunkGenerator {
- type Item = (ChunkId, String, ChunkMeta, DataChunk);
+ type Item = (ChunkId, String, DataChunk);
fn next(&mut self) -> Option<Self::Item> {
if self.next >= self.goal {
@@ -24,9 +24,9 @@ impl Iterator for ChunkGenerator {
let id = ChunkId::recreate(&format!("{}", self.next));
let checksum = id.sha256();
let meta = ChunkMeta::new(&checksum);
- let chunk = DataChunk::new(vec![]);
+ let chunk = DataChunk::new(vec![], meta);
self.next += 1;
- Some((id, checksum, meta, chunk))
+ Some((id, checksum, chunk))
}
}
}
diff --git a/src/bin/benchmark-index.rs b/src/bin/benchmark-index.rs
index 9baa327..b5a059c 100644
--- a/src/bin/benchmark-index.rs
+++ b/src/bin/benchmark-index.rs
@@ -60,7 +60,8 @@ fn create(chunks: &Path, num: u32) -> anyhow::Result<()> {
let mut index = Index::new(chunks)?;
let gen = ChunkGenerator::new(num);
- for (id, _, meta, _) in gen {
+ for (id, _, chunk) in gen {
+ let meta = (*chunk.meta()).clone();
index.insert_meta(id, meta)?;
}
@@ -82,8 +83,8 @@ fn lookup(index: &mut Index, num: u32) -> anyhow::Result<()> {
loop {
let gen = ChunkGenerator::new(num);
- for (_, _, meta, _) in gen {
- index.find_by_sha256(&meta.sha256())?;
+ for (_, _, chunk) in gen {
+ index.find_by_sha256(&chunk.meta().sha256())?;
done += 1;
if done >= num {
return Ok(());
diff --git a/src/bin/benchmark-indexedstore.rs b/src/bin/benchmark-indexedstore.rs
index acc3bd3..5cd3ff1 100644
--- a/src/bin/benchmark-indexedstore.rs
+++ b/src/bin/benchmark-indexedstore.rs
@@ -60,8 +60,8 @@ fn create(chunks: &Path, num: u32) -> anyhow::Result<()> {
let mut store = IndexedStore::new(chunks)?;
let gen = ChunkGenerator::new(num);
- for (_, _, meta, chunk) in gen {
- store.save(&meta, &chunk)?;
+ for (_, _, chunk) in gen {
+ store.save(&chunk)?;
}
Ok(())
@@ -82,8 +82,8 @@ fn lookup(index: &mut IndexedStore, num: u32) -> anyhow::Result<()> {
loop {
let gen = ChunkGenerator::new(num);
- for (_, _, meta, _) in gen {
- index.find_by_sha256(&meta.sha256())?;
+ for (_, _, chunk) in gen {
+ index.find_by_sha256(&chunk.meta().sha256())?;
done += 1;
if done >= num {
return Ok(());
diff --git a/src/bin/benchmark-null.rs b/src/bin/benchmark-null.rs
index 259a837..fc60a77 100644
--- a/src/bin/benchmark-null.rs
+++ b/src/bin/benchmark-null.rs
@@ -23,5 +23,5 @@ fn main() {
let opt = Opt::from_args();
let gen = ChunkGenerator::new(opt.num);
- for (_, _, _, _) in gen {}
+ for (_, _, _) in gen {}
}
diff --git a/src/bin/benchmark-store.rs b/src/bin/benchmark-store.rs
index f7c82b1..7896f9d 100644
--- a/src/bin/benchmark-store.rs
+++ b/src/bin/benchmark-store.rs
@@ -20,8 +20,8 @@ fn main() -> anyhow::Result<()> {
let gen = ChunkGenerator::new(opt.num);
let store = Store::new(&opt.chunks);
- for (id, _, meta, chunk) in gen {
- store.save(&id, &meta, &chunk)?;
+ for (id, _, chunk) in gen {
+ store.save(&id, &chunk)?;
}
Ok(())
diff --git a/src/bin/obnam-server.rs b/src/bin/obnam-server.rs
index 9a6540f..29ea9ff 100644
--- a/src/bin/obnam-server.rs
+++ b/src/bin/obnam-server.rs
@@ -109,9 +109,9 @@ pub async fn create_chunk(
}
};
- let chunk = DataChunk::new(data.to_vec());
+ let chunk = DataChunk::new(data.to_vec(), meta);
- let id = match store.save(&meta, &chunk) {
+ let id = match store.save(&chunk) {
Ok(id) => id,
Err(e) => {
error!("couldn't save: {}", e);
@@ -119,7 +119,7 @@ pub async fn create_chunk(
}
};
- info!("created chunk {}: {:?}", id, meta);
+ info!("created chunk {}", id);
Ok(ChunkResult::Created(id))
}
@@ -155,8 +155,6 @@ pub async fn search_chunks(
}
if key == "generation" && value == "true" {
store.find_generations().expect("SQL lookup failed")
- } else if key == "data" && value == "true" {
- store.find_file_chunks().expect("SQL lookup failed")
} else if key == "sha256" {
store.find_by_sha256(value).expect("SQL lookup failed")
} else {
diff --git a/src/bin/obnam.rs b/src/bin/obnam.rs
index cdb5179..c8da6c2 100644
--- a/src/bin/obnam.rs
+++ b/src/bin/obnam.rs
@@ -3,6 +3,7 @@ use log::{debug, error, info, LevelFilter};
use log4rs::append::file::FileAppender;
use log4rs::config::{Appender, Logger, Root};
use obnam::cmd::backup::Backup;
+use obnam::cmd::chunk::{DecryptChunk, EncryptChunk};
use obnam::cmd::get_chunk::GetChunk;
use obnam::cmd::init::Init;
use obnam::cmd::list::List;
@@ -20,28 +21,24 @@ const APPLICATION: &str = "obnam";
fn main() -> anyhow::Result<()> {
let opt = Opt::from_args();
- let config = load_config_without_passwords(&opt)?;
- setup_logging(&config.config().log)?;
+ let config = ClientConfig::read(&config_filename(&opt))?;
+ setup_logging(&config.log)?;
info!("client starts");
debug!("{:?}", opt);
debug!("configuration: {:#?}", config);
let result = match opt.cmd {
- Command::Init(x) => x.run(config.config()),
- _ => {
- let config = load_config_with_passwords(&opt)?;
- match opt.cmd {
- Command::Init(_) => panic!("this can't happen"),
- Command::Backup(x) => x.run(&config),
- Command::List(x) => x.run(&config),
- Command::ShowGeneration(x) => x.run(&config),
- Command::ListFiles(x) => x.run(&config),
- Command::Restore(x) => x.run(&config),
- Command::GetChunk(x) => x.run(&config),
- Command::Config(x) => x.run(&config),
- }
- }
+ Command::Init(x) => x.run(&config),
+ Command::Backup(x) => x.run(&config),
+ Command::List(x) => x.run(&config),
+ Command::ShowGeneration(x) => x.run(&config),
+ Command::ListFiles(x) => x.run(&config),
+ Command::Restore(x) => x.run(&config),
+ Command::GetChunk(x) => x.run(&config),
+ Command::Config(x) => x.run(&config),
+ Command::EncryptChunk(x) => x.run(&config),
+ Command::DecryptChunk(x) => x.run(&config),
};
if let Err(ref e) = result {
@@ -66,14 +63,6 @@ fn setup_logging(filename: &Path) -> anyhow::Result<()> {
Ok(())
}
-fn load_config_with_passwords(opt: &Opt) -> Result<ClientConfig, anyhow::Error> {
- Ok(ClientConfig::read_with_passwords(&config_filename(opt))?)
-}
-
-fn load_config_without_passwords(opt: &Opt) -> Result<ClientConfig, anyhow::Error> {
- Ok(ClientConfig::read_without_passwords(&config_filename(opt))?)
-}
-
fn config_filename(opt: &Opt) -> PathBuf {
match opt.config {
None => default_config(),
@@ -109,4 +98,6 @@ enum Command {
ShowGeneration(ShowGeneration),
GetChunk(GetChunk),
Config(ShowConfig),
+ EncryptChunk(EncryptChunk),
+ DecryptChunk(DecryptChunk),
}
diff --git a/src/chunk.rs b/src/chunk.rs
index 0eed38a..8631fd9 100644
--- a/src/chunk.rs
+++ b/src/chunk.rs
@@ -1,4 +1,6 @@
+use crate::checksummer::sha256;
use crate::chunkid::ChunkId;
+use crate::chunkmeta::ChunkMeta;
use serde::{Deserialize, Serialize};
use std::default::Default;
@@ -8,21 +10,27 @@ use std::default::Default;
///
/// A chunk also contains its associated metadata, except its
/// identifier.
-#[derive(Debug, Clone, Serialize, Deserialize)]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct DataChunk {
data: Vec<u8>,
+ meta: ChunkMeta,
}
impl DataChunk {
/// Construct a new chunk.
- pub fn new(data: Vec<u8>) -> Self {
- Self { data }
+ pub fn new(data: Vec<u8>, meta: ChunkMeta) -> Self {
+ Self { data, meta }
}
/// Return a chunk's data.
pub fn data(&self) -> &[u8] {
&self.data
}
+
+ /// Return a chunk's metadata.
+ pub fn meta(&self) -> &ChunkMeta {
+ &self.meta
+ }
}
#[derive(Default, Debug, Serialize, Deserialize)]
@@ -69,8 +77,12 @@ impl GenerationChunk {
self.chunk_ids.iter()
}
- pub fn to_data_chunk(&self) -> GenerationChunkResult<DataChunk> {
- let json = serde_json::to_string(self).map_err(GenerationChunkError::JsonGenerate)?;
- Ok(DataChunk::new(json.as_bytes().to_vec()))
+ pub fn to_data_chunk(&self, ended: &str) -> GenerationChunkResult<DataChunk> {
+ let json: String =
+ serde_json::to_string(self).map_err(GenerationChunkError::JsonGenerate)?;
+ let bytes = json.as_bytes().to_vec();
+ let sha = sha256(&bytes);
+ let meta = ChunkMeta::new_generation(&sha, ended);
+ Ok(DataChunk::new(bytes, meta))
}
}
diff --git a/src/chunker.rs b/src/chunker.rs
index eeeed8d..a7a39f1 100644
--- a/src/chunker.rs
+++ b/src/chunker.rs
@@ -31,7 +31,7 @@ impl Chunker {
}
}
- pub fn read_chunk(&mut self) -> ChunkerResult<Option<(ChunkMeta, DataChunk)>> {
+ pub fn read_chunk(&mut self) -> ChunkerResult<Option<DataChunk>> {
let mut used = 0;
loop {
@@ -52,18 +52,18 @@ impl Chunker {
let buffer = &self.buf.as_slice()[..used];
let hash = sha256(buffer);
let meta = ChunkMeta::new(&hash);
- let chunk = DataChunk::new(buffer.to_vec());
- Ok(Some((meta, chunk)))
+ let chunk = DataChunk::new(buffer.to_vec(), meta);
+ Ok(Some(chunk))
}
}
impl Iterator for Chunker {
- type Item = ChunkerResult<(ChunkMeta, DataChunk)>;
+ type Item = ChunkerResult<DataChunk>;
- fn next(&mut self) -> Option<ChunkerResult<(ChunkMeta, DataChunk)>> {
+ fn next(&mut self) -> Option<ChunkerResult<DataChunk>> {
match self.read_chunk() {
Ok(None) => None,
- Ok(Some((meta, chunk))) => Some(Ok((meta, chunk))),
+ Ok(Some(chunk)) => Some(Ok(chunk)),
Err(e) => Some(Err(e)),
}
}
diff --git a/src/chunkmeta.rs b/src/chunkmeta.rs
index 37e2ed5..73d9007 100644
--- a/src/chunkmeta.rs
+++ b/src/chunkmeta.rs
@@ -80,10 +80,20 @@ impl ChunkMeta {
&self.sha256
}
+ /// Deserialize from a textual JSON representation.
+ pub fn from_json(json: &str) -> Result<Self, serde_json::Error> {
+ serde_json::from_str(json)
+ }
+
/// Serialize as JSON.
pub fn to_json(&self) -> String {
serde_json::to_string(self).unwrap()
}
+
+ /// Serialize as JSON, as a byte vector.
+ pub fn to_json_vec(&self) -> Vec<u8> {
+ self.to_json().as_bytes().to_vec()
+ }
}
impl FromStr for ChunkMeta {
@@ -135,10 +145,19 @@ mod test {
}
#[test]
- fn json_roundtrip() {
+ fn generation_json_roundtrip() {
let meta = ChunkMeta::new_generation("abcdef", "2020-09-17T08:17:13+03:00");
let json = serde_json::to_string(&meta).unwrap();
let meta2 = serde_json::from_str(&json).unwrap();
assert_eq!(meta, meta2);
}
+
+ #[test]
+ fn data_json_roundtrip() {
+ let meta = ChunkMeta::new("abcdef");
+ let json = meta.to_json_vec();
+ let meta2 = serde_json::from_slice(&json).unwrap();
+ assert_eq!(meta, meta2);
+ assert_eq!(meta.to_json_vec(), meta2.to_json_vec());
+ }
}
diff --git a/src/cipher.rs b/src/cipher.rs
new file mode 100644
index 0000000..550fafd
--- /dev/null
+++ b/src/cipher.rs
@@ -0,0 +1,200 @@
+use crate::chunk::DataChunk;
+use crate::chunkmeta::ChunkMeta;
+use crate::passwords::Passwords;
+
+use aes_gcm::aead::{generic_array::GenericArray, Aead, NewAead, Payload};
+use aes_gcm::Aes256Gcm; // Or `Aes128Gcm`
+use rand::Rng;
+
+use std::str::FromStr;
+
+const CHUNK_V1: &[u8] = b"0001";
+
+pub struct EncryptedChunk {
+ ciphertext: Vec<u8>,
+ aad: Vec<u8>,
+}
+
+impl EncryptedChunk {
+ fn new(ciphertext: Vec<u8>, aad: Vec<u8>) -> Self {
+ Self { ciphertext, aad }
+ }
+
+ pub fn ciphertext(&self) -> &[u8] {
+ &self.ciphertext
+ }
+
+ pub fn aad(&self) -> &[u8] {
+ &self.aad
+ }
+}
+
+pub struct CipherEngine {
+ cipher: Aes256Gcm,
+}
+
+impl CipherEngine {
+ pub fn new(pass: &Passwords) -> Self {
+ let key = GenericArray::from_slice(pass.encryption_key());
+ Self {
+ cipher: Aes256Gcm::new(key),
+ }
+ }
+
+ pub fn encrypt_chunk(&self, chunk: &DataChunk) -> Result<EncryptedChunk, CipherError> {
+ // Payload with metadata as associated data, to be encrypted.
+ //
+ // The metadata will be stored in cleartext after encryption.
+ let aad = chunk.meta().to_json_vec();
+ let payload = Payload {
+ msg: chunk.data(),
+ aad: &aad,
+ };
+
+ // Unique random nonce for each encryption.
+ let nonce = Nonce::new();
+ let nonce_arr = GenericArray::from_slice(nonce.as_bytes());
+
+ // Encrypt the sensitive part.
+ let ciphertext = self
+ .cipher
+ .encrypt(nonce_arr, payload)
+ .map_err(CipherError::EncryptError)?;
+
+ // Construct the blob to be stored on the server.
+ let mut vec: Vec<u8> = vec![];
+ push_bytes(&mut vec, CHUNK_V1);
+ push_bytes(&mut vec, nonce.as_bytes());
+ push_bytes(&mut vec, &ciphertext);
+
+ Ok(EncryptedChunk::new(vec, aad))
+ }
+
+ pub fn decrypt_chunk(&self, bytes: &[u8], meta: &[u8]) -> Result<DataChunk, CipherError> {
+ // Does encrypted chunk start with the right version?
+ if !bytes.starts_with(CHUNK_V1) {
+ return Err(CipherError::UnknownChunkVersion);
+ }
+ let version_len = CHUNK_V1.len();
+ let bytes = &bytes[version_len..];
+
+ // Get nonce.
+ if bytes.len() < NONCE_SIZE {
+ return Err(CipherError::NoNonce);
+ }
+ let nonce = &bytes[..NONCE_SIZE];
+ let nonce = GenericArray::from_slice(nonce);
+ let ciphertext = &bytes[NONCE_SIZE..];
+
+ let payload = Payload {
+ msg: ciphertext,
+ aad: meta,
+ };
+
+ let payload = self
+ .cipher
+ .decrypt(nonce, payload)
+ .map_err(CipherError::DecryptError)?;
+ let payload = Payload::from(payload.as_slice());
+
+ let meta = std::str::from_utf8(meta)?;
+ let meta = ChunkMeta::from_str(&meta)?;
+
+ let chunk = DataChunk::new(payload.msg.to_vec(), meta);
+
+ Ok(chunk)
+ }
+}
+
+fn push_bytes(vec: &mut Vec<u8>, bytes: &[u8]) {
+ for byte in bytes.iter() {
+ vec.push(*byte);
+ }
+}
+
+#[derive(Debug, thiserror::Error)]
+pub enum CipherError {
+ #[error("failed to encrypt with AES-GCM: {0}")]
+ EncryptError(aes_gcm::Error),
+
+ #[error("encrypted chunk does not start with correct version")]
+ UnknownChunkVersion,
+
+ #[error("encrypted chunk does not have a complete nonce")]
+ NoNonce,
+
+ #[error("failed to decrypt with AES-GCM: {0}")]
+ DecryptError(aes_gcm::Error),
+
+ #[error("failed to parse decrypted data as a DataChunk: {0}")]
+ Parse(serde_yaml::Error),
+
+ #[error(transparent)]
+ Utf8Error(#[from] std::str::Utf8Error),
+
+ #[error("failed to parse JSON: {0}")]
+ JsonParse(#[from] serde_json::Error),
+}
+
+const NONCE_SIZE: usize = 12;
+
+#[derive(Debug)]
+struct Nonce {
+ nonce: Vec<u8>,
+}
+
+impl Nonce {
+ fn from_bytes(bytes: &[u8]) -> Self {
+ assert_eq!(bytes.len(), NONCE_SIZE);
+ Self {
+ nonce: bytes.to_vec(),
+ }
+ }
+
+ fn new() -> Self {
+ let mut bytes: Vec<u8> = vec![0; NONCE_SIZE];
+ let mut rng = rand::thread_rng();
+ for x in bytes.iter_mut() {
+ *x = rng.gen();
+ }
+ Self::from_bytes(&bytes)
+ }
+
+ fn as_bytes(&self) -> &[u8] {
+ &self.nonce
+ }
+}
+
+#[cfg(test)]
+mod test {
+ use crate::chunk::DataChunk;
+ use crate::chunkmeta::ChunkMeta;
+ use crate::cipher::CipherEngine;
+ use crate::passwords::Passwords;
+
+ #[test]
+ fn metadata_as_aad() {
+ let meta = ChunkMeta::new("dummy-checksum");
+ let meta_as_aad = meta.to_json_vec();
+ let chunk = DataChunk::new("hello".as_bytes().to_vec(), meta);
+ let pass = Passwords::new("secret");
+ let cipher = CipherEngine::new(&pass);
+ let enc = cipher.encrypt_chunk(&chunk).unwrap();
+
+ assert_eq!(meta_as_aad, enc.aad());
+ }
+
+ #[test]
+ fn round_trip() {
+ let meta = ChunkMeta::new("dummy-checksum");
+ let chunk = DataChunk::new("hello".as_bytes().to_vec(), meta);
+ let pass = Passwords::new("secret");
+
+ let cipher = CipherEngine::new(&pass);
+ let enc = cipher.encrypt_chunk(&chunk).unwrap();
+
+ let bytes: Vec<u8> = enc.ciphertext().to_vec();
+ let dec = cipher.decrypt_chunk(&bytes, enc.aad()).unwrap();
+ assert_eq!(chunk, dec);
+ }
+}
diff --git a/src/client.rs b/src/client.rs
index 0f8a72f..b1f9976 100644
--- a/src/client.rs
+++ b/src/client.rs
@@ -1,17 +1,18 @@
-use crate::checksummer::sha256;
use crate::chunk::DataChunk;
use crate::chunk::{GenerationChunk, GenerationChunkError};
use crate::chunker::{Chunker, ChunkerError};
use crate::chunkid::ChunkId;
use crate::chunkmeta::ChunkMeta;
-use crate::config::ClientConfig;
+use crate::cipher::{CipherEngine, CipherError};
+use crate::config::{ClientConfig, ClientConfigError};
use crate::fsentry::{FilesystemEntry, FilesystemKind};
use crate::generation::{FinishedGeneration, LocalGeneration, LocalGenerationError};
use crate::genlist::GenerationList;
use chrono::{DateTime, Local};
-use log::{debug, error, info, trace};
+use log::{debug, error, info};
use reqwest::blocking::Client;
+use reqwest::header::HeaderMap;
use std::collections::HashMap;
use std::fs::File;
use std::io::prelude::*;
@@ -22,6 +23,9 @@ pub enum ClientError {
#[error("Server response claimed it had created a chunk, but lacked chunk id")]
NoCreatedChunkId,
+ #[error("Server does not have {0}")]
+ NotFound(String),
+
#[error("Server does not have chunk {0}")]
ChunkNotFound(String),
@@ -35,6 +39,12 @@ pub enum ClientError {
WrongChecksum(ChunkId, String, String),
#[error(transparent)]
+ ClientConfigError(#[from] ClientConfigError),
+
+ #[error(transparent)]
+ CipherError(#[from] CipherError),
+
+ #[error(transparent)]
GenerationChunkError(#[from] GenerationChunkError),
#[error(transparent)]
@@ -74,21 +84,14 @@ pub enum ClientError {
pub type ClientResult<T> = Result<T, ClientError>;
pub struct BackupClient {
- client: Client,
- base_url: String,
+ chunk_client: ChunkClient,
}
impl BackupClient {
pub fn new(config: &ClientConfig) -> ClientResult<Self> {
info!("creating backup client with config: {:#?}", config);
- let config = config.config();
- let client = Client::builder()
- .danger_accept_invalid_certs(!config.verify_tls_cert)
- .build()
- .map_err(ClientError::ReqwestError)?;
Ok(Self {
- client,
- base_url: config.server_url.to_string(),
+ chunk_client: ChunkClient::new(config)?,
})
}
@@ -114,10 +117,9 @@ impl BackupClient {
info!("upload SQLite {}", filename.display());
let ids = self.read_file(filename, size)?;
let gen = GenerationChunk::new(ids);
- let data = gen.to_data_chunk()?;
- let meta = ChunkMeta::new_generation(&sha256(data.data()), &current_timestamp());
- let gen_id = self.upload_gen_chunk(meta.clone(), gen)?;
- info!("uploaded generation {}, meta {:?}", gen_id, meta);
+ let data = gen.to_data_chunk(&current_timestamp())?;
+ let gen_id = self.upload_chunk(data)?;
+ info!("uploaded generation {}", gen_id);
Ok(gen_id)
}
@@ -130,6 +132,86 @@ impl BackupClient {
Ok(chunk_ids)
}
+ pub fn has_chunk(&self, meta: &ChunkMeta) -> ClientResult<Option<ChunkId>> {
+ self.chunk_client.has_chunk(meta)
+ }
+
+ pub fn upload_chunk(&self, chunk: DataChunk) -> ClientResult<ChunkId> {
+ self.chunk_client.upload_chunk(chunk)
+ }
+
+ pub fn upload_new_file_chunks(&self, chunker: Chunker) -> ClientResult<Vec<ChunkId>> {
+ let mut chunk_ids = vec![];
+ for item in chunker {
+ let chunk = item?;
+ if let Some(chunk_id) = self.has_chunk(chunk.meta())? {
+ chunk_ids.push(chunk_id.clone());
+ info!("reusing existing chunk {}", chunk_id);
+ } else {
+ let chunk_id = self.upload_chunk(chunk)?;
+ chunk_ids.push(chunk_id.clone());
+ info!("created new chunk {}", chunk_id);
+ }
+ }
+
+ Ok(chunk_ids)
+ }
+
+ pub fn list_generations(&self) -> ClientResult<GenerationList> {
+ self.chunk_client.list_generations()
+ }
+
+ pub fn fetch_chunk(&self, chunk_id: &ChunkId) -> ClientResult<DataChunk> {
+ self.chunk_client.fetch_chunk(chunk_id)
+ }
+
+ fn fetch_generation_chunk(&self, gen_id: &str) -> ClientResult<GenerationChunk> {
+ let chunk_id = ChunkId::recreate(gen_id);
+ let chunk = self.fetch_chunk(&chunk_id)?;
+ let gen = GenerationChunk::from_data_chunk(&chunk)?;
+ Ok(gen)
+ }
+
+ pub fn fetch_generation(&self, gen_id: &str, dbname: &Path) -> ClientResult<LocalGeneration> {
+ let gen = self.fetch_generation_chunk(gen_id)?;
+
+ // Fetch the SQLite file, storing it in the named file.
+ let mut dbfile = File::create(&dbname)
+ .map_err(|err| ClientError::FileCreate(dbname.to_path_buf(), err))?;
+ for id in gen.chunk_ids() {
+ let chunk = self.fetch_chunk(id)?;
+ dbfile
+ .write_all(chunk.data())
+ .map_err(|err| ClientError::FileWrite(dbname.to_path_buf(), err))?;
+ }
+ info!("downloaded generation to {}", dbname.display());
+
+ let gen = LocalGeneration::open(dbname)?;
+ Ok(gen)
+ }
+}
+
+pub struct ChunkClient {
+ client: Client,
+ base_url: String,
+ cipher: CipherEngine,
+}
+
+impl ChunkClient {
+ pub fn new(config: &ClientConfig) -> ClientResult<Self> {
+ let pass = config.passwords()?;
+
+ let client = Client::builder()
+ .danger_accept_invalid_certs(!config.verify_tls_cert)
+ .build()
+ .map_err(ClientError::ReqwestError)?;
+ Ok(Self {
+ client,
+ base_url: config.server_url.to_string(),
+ cipher: CipherEngine::new(&pass),
+ })
+ }
+
fn base_url(&self) -> &str {
&self.base_url
}
@@ -139,44 +221,30 @@ impl BackupClient {
}
pub fn has_chunk(&self, meta: &ChunkMeta) -> ClientResult<Option<ChunkId>> {
- trace!("has_chunk: url={:?}", self.base_url());
- let req = self
- .client
- .get(&self.chunks_url())
- .query(&[("sha256", meta.sha256())])
- .build()
- .map_err(ClientError::ReqwestError)?;
+ let body = match self.get("", &[("sha256", meta.sha256())]) {
+ Ok((_, body)) => body,
+ Err(err) => return Err(err),
+ };
- let res = self.client.execute(req).map_err(ClientError::ChunkExists)?;
- debug!("has_chunk: status={}", res.status());
- let has = if res.status() != 200 {
- debug!("has_chunk: error from server");
- None
+ let hits: HashMap<String, ChunkMeta> =
+ serde_json::from_slice(&body).map_err(ClientError::JsonParse)?;
+ let mut iter = hits.iter();
+ let has = if let Some((chunk_id, _)) = iter.next() {
+ Some(chunk_id.into())
} else {
- let text = res.text().map_err(ClientError::ReqwestError)?;
- debug!("has_chunk: text={:?}", text);
- let hits: HashMap<String, ChunkMeta> =
- serde_json::from_str(&text).map_err(ClientError::JsonParse)?;
- debug!("has_chunk: hits={:?}", hits);
- let mut iter = hits.iter();
- if let Some((chunk_id, _)) = iter.next() {
- debug!("has_chunk: chunk_id={:?}", chunk_id);
- Some(chunk_id.into())
- } else {
- None
- }
+ None
};
- info!("has_chunk result: {:?}", has);
Ok(has)
}
- pub fn upload_chunk(&self, meta: ChunkMeta, chunk: DataChunk) -> ClientResult<ChunkId> {
+ pub fn upload_chunk(&self, chunk: DataChunk) -> ClientResult<ChunkId> {
+ let enc = self.cipher.encrypt_chunk(&chunk)?;
let res = self
.client
.post(&self.chunks_url())
- .header("chunk-meta", meta.to_json())
- .body(chunk.data().to_vec())
+ .header("chunk-meta", chunk.meta().to_json())
+ .body(enc.ciphertext().to_vec())
.send()
.map_err(ClientError::ReqwestError)?;
debug!("upload_chunk: res={:?}", res);
@@ -187,62 +255,13 @@ impl BackupClient {
} else {
return Err(ClientError::NoCreatedChunkId);
};
- info!("uploaded_chunk {} meta {:?}", chunk_id, meta);
+ info!("uploaded_chunk {}", chunk_id);
Ok(chunk_id)
}
- pub fn upload_gen_chunk(&self, meta: ChunkMeta, gen: GenerationChunk) -> ClientResult<ChunkId> {
- let res = self
- .client
- .post(&self.chunks_url())
- .header("chunk-meta", meta.to_json())
- .body(serde_json::to_string(&gen).map_err(ClientError::JsonGenerate)?)
- .send()
- .map_err(ClientError::ReqwestError)?;
- debug!("upload_chunk: res={:?}", res);
- let res: HashMap<String, String> = res.json().map_err(ClientError::ReqwestError)?;
- let chunk_id = if let Some(chunk_id) = res.get("chunk_id") {
- debug!("upload_chunk: id={}", chunk_id);
- chunk_id.parse().unwrap()
- } else {
- return Err(ClientError::NoCreatedChunkId);
- };
- info!("uploaded_generation chunk {}", chunk_id);
- Ok(chunk_id)
- }
-
- pub fn upload_new_file_chunks(&self, chunker: Chunker) -> ClientResult<Vec<ChunkId>> {
- let mut chunk_ids = vec![];
- for item in chunker {
- let (meta, chunk) = item?;
- if let Some(chunk_id) = self.has_chunk(&meta)? {
- chunk_ids.push(chunk_id.clone());
- info!("reusing existing chunk {}", chunk_id);
- } else {
- let chunk_id = self.upload_chunk(meta, chunk)?;
- chunk_ids.push(chunk_id.clone());
- info!("created new chunk {}", chunk_id);
- }
- }
-
- Ok(chunk_ids)
- }
-
pub fn list_generations(&self) -> ClientResult<GenerationList> {
- let url = format!("{}?generation=true", &self.chunks_url());
- trace!("list_generations: url={:?}", url);
- let req = self
- .client
- .get(&url)
- .build()
- .map_err(ClientError::ReqwestError)?;
- let res = self
- .client
- .execute(req)
- .map_err(ClientError::ReqwestError)?;
- debug!("list_generations: status={}", res.status());
- let body = res.bytes().map_err(ClientError::ReqwestError)?;
- debug!("list_generations: body={:?}", body);
+ let (_, body) = self.get("", &[("generation", "true")])?;
+
let map: HashMap<String, ChunkMeta> =
serde_yaml::from_slice(&body).map_err(ClientError::YamlParse)?;
debug!("list_generations: map={:?}", map);
@@ -254,77 +273,65 @@ impl BackupClient {
}
pub fn fetch_chunk(&self, chunk_id: &ChunkId) -> ClientResult<DataChunk> {
- info!("fetch chunk {}", chunk_id);
+ let (headers, body) = self.get(&format!("/{}", chunk_id), &[])?;
+ let meta = self.get_chunk_meta_header(chunk_id, &headers)?;
+
+ let meta_bytes = meta.to_json_vec();
+ let chunk = self.cipher.decrypt_chunk(&body, &meta_bytes)?;
+
+ Ok(chunk)
+ }
+
+ fn get(&self, path: &str, query: &[(&str, &str)]) -> ClientResult<(HeaderMap, Vec<u8>)> {
+ let url = format!("{}{}", &self.chunks_url(), path);
+ info!("GET {}", url);
- let url = format!("{}/{}", &self.chunks_url(), chunk_id);
+ // Build HTTP request structure.
let req = self
.client
.get(&url)
+ .query(query)
.build()
.map_err(ClientError::ReqwestError)?;
+
+ // Make HTTP request.
let res = self
.client
.execute(req)
.map_err(ClientError::ReqwestError)?;
+
+ // Did it work?
if res.status() != 200 {
- let err = ClientError::ChunkNotFound(chunk_id.to_string());
- error!("fetching chunk {} failed: {}", chunk_id, err);
- return Err(err);
+ return Err(ClientError::NotFound(path.to_string()));
}
- let headers = res.headers();
+ // Return headers and body.
+ let headers = res.headers().clone();
+ let body = res.bytes().map_err(ClientError::ReqwestError)?;
+ let body = body.to_vec();
+ Ok((headers, body))
+ }
+
+ fn get_chunk_meta_header(
+ &self,
+ chunk_id: &ChunkId,
+ headers: &HeaderMap,
+ ) -> ClientResult<ChunkMeta> {
let meta = headers.get("chunk-meta");
+
if meta.is_none() {
let err = ClientError::NoChunkMeta(chunk_id.clone());
error!("fetching chunk {} failed: {}", chunk_id, err);
return Err(err);
}
+
let meta = meta
.unwrap()
.to_str()
.map_err(ClientError::MetaHeaderToString)?;
- debug!("fetching chunk {}: meta={:?}", chunk_id, meta);
let meta: ChunkMeta = serde_json::from_str(meta).map_err(ClientError::JsonParse)?;
- debug!("fetching chunk {}: meta={:?}", chunk_id, meta);
-
- let body = res.bytes().map_err(ClientError::ReqwestError)?;
- let body = body.to_vec();
- let actual = sha256(&body);
- if actual != meta.sha256() {
- let err =
- ClientError::WrongChecksum(chunk_id.clone(), actual, meta.sha256().to_string());
- error!("fetching chunk {} failed: {}", chunk_id, err);
- return Err(err);
- }
-
- let chunk: DataChunk = DataChunk::new(body);
-
- Ok(chunk)
- }
-
- fn fetch_generation_chunk(&self, gen_id: &str) -> ClientResult<GenerationChunk> {
- let chunk_id = ChunkId::recreate(gen_id);
- let chunk = self.fetch_chunk(&chunk_id)?;
- let gen = GenerationChunk::from_data_chunk(&chunk)?;
- Ok(gen)
- }
-
- pub fn fetch_generation(&self, gen_id: &str, dbname: &Path) -> ClientResult<LocalGeneration> {
- let gen = self.fetch_generation_chunk(gen_id)?;
-
- // Fetch the SQLite file, storing it in the named file.
- let mut dbfile = File::create(&dbname)
- .map_err(|err| ClientError::FileCreate(dbname.to_path_buf(), err))?;
- for id in gen.chunk_ids() {
- let chunk = self.fetch_chunk(id)?;
- dbfile
- .write_all(chunk.data())
- .map_err(|err| ClientError::FileWrite(dbname.to_path_buf(), err))?;
- }
- info!("downloaded generation to {}", dbname.display());
- let gen = LocalGeneration::open(dbname)?;
- Ok(gen)
+ Ok(meta)
}
}
diff --git a/src/cmd/backup.rs b/src/cmd/backup.rs
index 0479844..22afd6e 100644
--- a/src/cmd/backup.rs
+++ b/src/cmd/backup.rs
@@ -60,7 +60,6 @@ fn initial_backup(
info!("fresh backup without a previous generation");
let newtemp = NamedTempFile::new()?;
let run = InitialBackup::new(config, &client)?;
- let config = config.config();
let mut all_warnings = vec![];
let count = {
let mut new = NascentGeneration::create(newtemp.path())?;
@@ -87,7 +86,6 @@ fn incremental_backup(
info!("incremental backup based on {}", old_ref);
let newtemp = NamedTempFile::new()?;
let mut run = IncrementalBackup::new(config, &client)?;
- let config = config.config();
let mut all_warnings = vec![];
let count = {
let oldtemp = NamedTempFile::new()?;
diff --git a/src/cmd/chunk.rs b/src/cmd/chunk.rs
new file mode 100644
index 0000000..e0e91b1
--- /dev/null
+++ b/src/cmd/chunk.rs
@@ -0,0 +1,64 @@
+use crate::chunk::DataChunk;
+use crate::chunkmeta::ChunkMeta;
+use crate::cipher::CipherEngine;
+use crate::config::ClientConfig;
+use crate::error::ObnamError;
+use std::path::PathBuf;
+use structopt::StructOpt;
+
+#[derive(Debug, StructOpt)]
+pub struct EncryptChunk {
+ #[structopt(parse(from_os_str))]
+ filename: PathBuf,
+
+ #[structopt(parse(from_os_str))]
+ output: PathBuf,
+
+ #[structopt()]
+ json: String,
+}
+
+impl EncryptChunk {
+ pub fn run(&self, config: &ClientConfig) -> Result<(), ObnamError> {
+ let pass = config.passwords()?;
+ let cipher = CipherEngine::new(&pass);
+
+ let meta = ChunkMeta::from_json(&self.json)?;
+
+ let cleartext = std::fs::read(&self.filename)?;
+ let chunk = DataChunk::new(cleartext, meta);
+ let encrypted = cipher.encrypt_chunk(&chunk)?;
+
+ std::fs::write(&self.output, encrypted.ciphertext())?;
+
+ Ok(())
+ }
+}
+
+#[derive(Debug, StructOpt)]
+pub struct DecryptChunk {
+ #[structopt(parse(from_os_str))]
+ filename: PathBuf,
+
+ #[structopt(parse(from_os_str))]
+ output: PathBuf,
+
+ #[structopt()]
+ json: String,
+}
+
+impl DecryptChunk {
+ pub fn run(&self, config: &ClientConfig) -> Result<(), ObnamError> {
+ let pass = config.passwords()?;
+ let cipher = CipherEngine::new(&pass);
+
+ let meta = ChunkMeta::from_json(&self.json)?;
+
+ let encrypted = std::fs::read(&self.filename)?;
+ let chunk = cipher.decrypt_chunk(&encrypted, &meta.to_json_vec())?;
+
+ std::fs::write(&self.output, chunk.data())?;
+
+ Ok(())
+ }
+}
diff --git a/src/cmd/init.rs b/src/cmd/init.rs
index cb61fba..08060f7 100644
--- a/src/cmd/init.rs
+++ b/src/cmd/init.rs
@@ -1,4 +1,4 @@
-use crate::config::ClientConfigWithoutPasswords;
+use crate::config::ClientConfig;
use crate::error::ObnamError;
use crate::passwords::{passwords_filename, Passwords};
use structopt::StructOpt;
@@ -12,11 +12,7 @@ pub struct Init {
}
impl Init {
- pub fn run(&self, config: &ClientConfigWithoutPasswords) -> Result<(), ObnamError> {
- if !config.encrypt {
- panic!("no encryption specified");
- }
-
+ pub fn run(&self, config: &ClientConfig) -> Result<(), ObnamError> {
let passphrase = match &self.insecure_passphrase {
Some(x) => x.to_string(),
None => rpassword::read_password_from_tty(Some(PROMPT)).unwrap(),
diff --git a/src/cmd/mod.rs b/src/cmd/mod.rs
index 890e176..bd101da 100644
--- a/src/cmd/mod.rs
+++ b/src/cmd/mod.rs
@@ -1,4 +1,5 @@
pub mod backup;
+pub mod chunk;
pub mod get_chunk;
pub mod init;
pub mod list;
diff --git a/src/cmd/show_config.rs b/src/cmd/show_config.rs
index 424e2ed..05e83c1 100644
--- a/src/cmd/show_config.rs
+++ b/src/cmd/show_config.rs
@@ -7,7 +7,7 @@ pub struct ShowConfig {}
impl ShowConfig {
pub fn run(&self, config: &ClientConfig) -> Result<(), ObnamError> {
- println!("{}", serde_json::to_string_pretty(&config.config())?);
+ println!("{}", serde_json::to_string_pretty(config)?);
Ok(())
}
}
diff --git a/src/config.rs b/src/config.rs
index 33e08a2..0d4e9de 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -16,78 +16,22 @@ struct TentativeClientConfig {
chunk_size: Option<usize>,
roots: Vec<PathBuf>,
log: Option<PathBuf>,
- encrypt: Option<bool>,
exclude_cache_tag_directories: Option<bool>,
}
#[derive(Debug, Serialize, Clone)]
-pub enum ClientConfig {
- Plain(ClientConfigWithoutPasswords),
- WithPasswords(ClientConfigWithoutPasswords, Passwords),
-}
-
-impl ClientConfig {
- pub fn read_without_passwords(filename: &Path) -> Result<Self, ClientConfigError> {
- let config = ClientConfigWithoutPasswords::read_config(filename)?;
- Ok(ClientConfig::Plain(config))
- }
-
- pub fn read_with_passwords(filename: &Path) -> Result<Self, ClientConfigError> {
- let config = ClientConfigWithoutPasswords::read_config(filename)?;
- if config.encrypt {
- let passwords = Passwords::load(&passwords_filename(filename))
- .map_err(ClientConfigError::PasswordsMissing)?;
- Ok(ClientConfig::WithPasswords(config, passwords))
- } else {
- Ok(ClientConfig::Plain(config))
- }
- }
-
- pub fn config(&self) -> &ClientConfigWithoutPasswords {
- match self {
- Self::Plain(config) => &config,
- Self::WithPasswords(config, _) => &config,
- }
- }
-}
-
-#[derive(Debug, Serialize, Clone)]
-pub struct ClientConfigWithoutPasswords {
+pub struct ClientConfig {
pub filename: PathBuf,
pub server_url: String,
pub verify_tls_cert: bool,
pub chunk_size: usize,
pub roots: Vec<PathBuf>,
pub log: PathBuf,
- pub encrypt: bool,
pub exclude_cache_tag_directories: bool,
}
-#[derive(Debug, thiserror::Error)]
-pub enum ClientConfigError {
- #[error("server_url is empty")]
- ServerUrlIsEmpty,
-
- #[error("No backup roots in config; at least one is needed")]
- NoBackupRoot,
-
- #[error("server URL doesn't use https: {0}")]
- NotHttps(String),
-
- #[error("No passwords are set: you may need to run 'obnam init': {0}")]
- PasswordsMissing(PasswordError),
-
- #[error("failed to read configuration file {0}: {1}")]
- Read(PathBuf, std::io::Error),
-
- #[error("failed to parse configuration file {0} as YAML: {1}")]
- YamlParse(PathBuf, serde_yaml::Error),
-}
-
-pub type ClientConfigResult<T> = Result<T, ClientConfigError>;
-
-impl ClientConfigWithoutPasswords {
- pub fn read_config(filename: &Path) -> ClientConfigResult<Self> {
+impl ClientConfig {
+ pub fn read(filename: &Path) -> ClientConfigResult<Self> {
trace!("read_config: filename={:?}", filename);
let config = std::fs::read_to_string(filename)
.map_err(|err| ClientConfigError::Read(filename.to_path_buf(), err))?;
@@ -102,12 +46,10 @@ impl ClientConfigWithoutPasswords {
.log
.map(|path| expand_tilde(&path))
.unwrap_or_else(|| PathBuf::from(DEVNULL));
- let encrypt = tentative.encrypt.or(Some(false)).unwrap();
let exclude_cache_tag_directories = tentative.exclude_cache_tag_directories.unwrap_or(true);
let config = Self {
chunk_size: tentative.chunk_size.or(Some(DEFAULT_CHUNK_SIZE)).unwrap(),
- encrypt,
filename: filename.to_path_buf(),
roots,
server_url: tentative.server_url,
@@ -132,8 +74,36 @@ impl ClientConfigWithoutPasswords {
}
Ok(())
}
+
+ pub fn passwords(&self) -> Result<Passwords, ClientConfigError> {
+ Passwords::load(&passwords_filename(&self.filename))
+ .map_err(ClientConfigError::PasswordsMissing)
+ }
}
+#[derive(Debug, thiserror::Error)]
+pub enum ClientConfigError {
+ #[error("server_url is empty")]
+ ServerUrlIsEmpty,
+
+ #[error("No backup roots in config; at least one is needed")]
+ NoBackupRoot,
+
+ #[error("server URL doesn't use https: {0}")]
+ NotHttps(String),
+
+ #[error("No passwords are set: you may need to run 'obnam init': {0}")]
+ PasswordsMissing(PasswordError),
+
+ #[error("failed to read configuration file {0}: {1}")]
+ Read(PathBuf, std::io::Error),
+
+ #[error("failed to parse configuration file {0} as YAML: {1}")]
+ YamlParse(PathBuf, serde_yaml::Error),
+}
+
+pub type ClientConfigResult<T> = Result<T, ClientConfigError>;
+
fn expand_tilde(path: &Path) -> PathBuf {
if path.starts_with("~/") {
if let Some(home) = std::env::var_os("HOME") {
diff --git a/src/error.rs b/src/error.rs
index 8241d5d..e4d77d3 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -1,4 +1,5 @@
use crate::backup_run::BackupError;
+use crate::cipher::CipherError;
use crate::client::ClientError;
use crate::cmd::restore::RestoreError;
use crate::config::ClientConfigError;
@@ -32,6 +33,9 @@ pub enum ObnamError {
NascentError(#[from] NascentError),
#[error(transparent)]
+ CipherError(#[from] CipherError),
+
+ #[error(transparent)]
LocalGenerationError(#[from] LocalGenerationError),
#[error(transparent)]
diff --git a/src/indexedstore.rs b/src/indexedstore.rs
index 7f67a1f..b05cfba 100644
--- a/src/indexedstore.rs
+++ b/src/indexedstore.rs
@@ -1,9 +1,8 @@
-use crate::chunk::{DataChunk, GenerationChunk, GenerationChunkError};
+use crate::chunk::{DataChunk, GenerationChunkError};
use crate::chunkid::ChunkId;
use crate::chunkmeta::ChunkMeta;
use crate::index::{Index, IndexError};
use crate::store::{Store, StoreError};
-use std::collections::HashSet;
use std::path::Path;
/// A store for chunks and their metadata.
@@ -40,10 +39,10 @@ impl IndexedStore {
Ok(Self { store, index })
}
- pub fn save(&mut self, meta: &ChunkMeta, chunk: &DataChunk) -> IndexedResult<ChunkId> {
+ pub fn save(&mut self, chunk: &DataChunk) -> IndexedResult<ChunkId> {
let id = ChunkId::new();
- self.store.save(&id, meta, chunk)?;
- self.insert_meta(&id, meta)?;
+ self.store.save(&id, chunk)?;
+ self.insert_meta(&id, chunk.meta())?;
Ok(id)
}
@@ -68,28 +67,6 @@ impl IndexedStore {
Ok(self.index.find_generations()?)
}
- pub fn find_file_chunks(&self) -> IndexedResult<Vec<ChunkId>> {
- let gen_ids = self.find_generations()?;
-
- let mut sql_chunks: HashSet<ChunkId> = HashSet::new();
- for id in gen_ids {
- let gen_chunk = self.store.load(&id)?;
- let gen = GenerationChunk::from_data_chunk(&gen_chunk)?;
- for sqlite_chunk_id in gen.chunk_ids() {
- sql_chunks.insert(sqlite_chunk_id.clone());
- }
- }
-
- let all_chunk_ids = self.index.all_chunks()?;
- let file_chunks = all_chunk_ids
- .iter()
- .filter(|id| !sql_chunks.contains(id))
- .cloned()
- .collect();
-
- Ok(file_chunks)
- }
-
pub fn remove(&mut self, id: &ChunkId) -> IndexedResult<()> {
self.index.remove_meta(id)?;
self.store.delete(id)?;
diff --git a/src/lib.rs b/src/lib.rs
index 82dab15..7d7afdc 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -7,6 +7,7 @@ pub mod chunk;
pub mod chunker;
pub mod chunkid;
pub mod chunkmeta;
+pub mod cipher;
pub mod client;
pub mod cmd;
pub mod config;
diff --git a/src/passwords.rs b/src/passwords.rs
index b8ca3f5..a1cf42e 100644
--- a/src/passwords.rs
+++ b/src/passwords.rs
@@ -8,18 +8,23 @@ use std::io::prelude::Write;
use std::os::unix::fs::PermissionsExt;
use std::path::{Path, PathBuf};
+const KEY_LEN: usize = 32; // Only size accepted by aead crate?
+
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct Passwords {
encryption: String,
- mac: String,
}
impl Passwords {
pub fn new(passphrase: &str) -> Self {
- Self {
- encryption: derive_password(passphrase),
- mac: derive_password(passphrase),
- }
+ let mut key = derive_password(passphrase);
+ let _ = key.split_off(KEY_LEN);
+ assert_eq!(key.len(), KEY_LEN);
+ Self { encryption: key }
+ }
+
+ pub fn encryption_key(&self) -> &[u8] {
+ self.encryption.as_bytes()
}
pub fn load(filename: &Path) -> Result<Self, PasswordError> {
diff --git a/src/store.rs b/src/store.rs
index fca2c13..bccecc7 100644
--- a/src/store.rs
+++ b/src/store.rs
@@ -1,6 +1,5 @@
use crate::chunk::DataChunk;
use crate::chunkid::ChunkId;
-use crate::chunkmeta::ChunkMeta;
use std::path::{Path, PathBuf};
/// Store chunks, with metadata, persistently.
@@ -43,23 +42,26 @@ impl Store {
}
/// Save a chunk into a store.
- pub fn save(&self, id: &ChunkId, meta: &ChunkMeta, chunk: &DataChunk) -> StoreResult<()> {
+ pub fn save(&self, id: &ChunkId, chunk: &DataChunk) -> StoreResult<()> {
let (dir, metaname, dataname) = &self.filenames(id);
if !dir.exists() {
std::fs::create_dir_all(dir)?;
}
- std::fs::write(&metaname, meta.to_json())?;
+ std::fs::write(&metaname, chunk.meta().to_json())?;
std::fs::write(&dataname, chunk.data())?;
Ok(())
}
/// Load a chunk from a store.
pub fn load(&self, id: &ChunkId) -> StoreResult<DataChunk> {
- let (_, _, dataname) = &self.filenames(id);
+ let (_, metaname, dataname) = &self.filenames(id);
+ let meta = std::fs::read(&metaname)?;
+ let meta = serde_json::from_slice(&meta)?;
+
let data = std::fs::read(&dataname)?;
- let data = DataChunk::new(data);
+ let data = DataChunk::new(data, meta);
Ok(data)
}