diff options
Diffstat (limited to 'src/client.rs')
-rw-r--r-- | src/client.rs | 291 |
1 file changed, 149 insertions, 142 deletions
diff --git a/src/client.rs b/src/client.rs index 0f8a72f..b1f9976 100644 --- a/src/client.rs +++ b/src/client.rs @@ -1,17 +1,18 @@ -use crate::checksummer::sha256; use crate::chunk::DataChunk; use crate::chunk::{GenerationChunk, GenerationChunkError}; use crate::chunker::{Chunker, ChunkerError}; use crate::chunkid::ChunkId; use crate::chunkmeta::ChunkMeta; -use crate::config::ClientConfig; +use crate::cipher::{CipherEngine, CipherError}; +use crate::config::{ClientConfig, ClientConfigError}; use crate::fsentry::{FilesystemEntry, FilesystemKind}; use crate::generation::{FinishedGeneration, LocalGeneration, LocalGenerationError}; use crate::genlist::GenerationList; use chrono::{DateTime, Local}; -use log::{debug, error, info, trace}; +use log::{debug, error, info}; use reqwest::blocking::Client; +use reqwest::header::HeaderMap; use std::collections::HashMap; use std::fs::File; use std::io::prelude::*; @@ -22,6 +23,9 @@ pub enum ClientError { #[error("Server response claimed it had created a chunk, but lacked chunk id")] NoCreatedChunkId, + #[error("Server does not have {0}")] + NotFound(String), + #[error("Server does not have chunk {0}")] ChunkNotFound(String), @@ -35,6 +39,12 @@ pub enum ClientError { WrongChecksum(ChunkId, String, String), #[error(transparent)] + ClientConfigError(#[from] ClientConfigError), + + #[error(transparent)] + CipherError(#[from] CipherError), + + #[error(transparent)] GenerationChunkError(#[from] GenerationChunkError), #[error(transparent)] @@ -74,21 +84,14 @@ pub enum ClientError { pub type ClientResult<T> = Result<T, ClientError>; pub struct BackupClient { - client: Client, - base_url: String, + chunk_client: ChunkClient, } impl BackupClient { pub fn new(config: &ClientConfig) -> ClientResult<Self> { info!("creating backup client with config: {:#?}", config); - let config = config.config(); - let client = Client::builder() - .danger_accept_invalid_certs(!config.verify_tls_cert) - .build() - .map_err(ClientError::ReqwestError)?; Ok(Self 
{ - client, - base_url: config.server_url.to_string(), + chunk_client: ChunkClient::new(config)?, }) } @@ -114,10 +117,9 @@ impl BackupClient { info!("upload SQLite {}", filename.display()); let ids = self.read_file(filename, size)?; let gen = GenerationChunk::new(ids); - let data = gen.to_data_chunk()?; - let meta = ChunkMeta::new_generation(&sha256(data.data()), &current_timestamp()); - let gen_id = self.upload_gen_chunk(meta.clone(), gen)?; - info!("uploaded generation {}, meta {:?}", gen_id, meta); + let data = gen.to_data_chunk(&current_timestamp())?; + let gen_id = self.upload_chunk(data)?; + info!("uploaded generation {}", gen_id); Ok(gen_id) } @@ -130,6 +132,86 @@ impl BackupClient { Ok(chunk_ids) } + pub fn has_chunk(&self, meta: &ChunkMeta) -> ClientResult<Option<ChunkId>> { + self.chunk_client.has_chunk(meta) + } + + pub fn upload_chunk(&self, chunk: DataChunk) -> ClientResult<ChunkId> { + self.chunk_client.upload_chunk(chunk) + } + + pub fn upload_new_file_chunks(&self, chunker: Chunker) -> ClientResult<Vec<ChunkId>> { + let mut chunk_ids = vec![]; + for item in chunker { + let chunk = item?; + if let Some(chunk_id) = self.has_chunk(chunk.meta())? 
{ + chunk_ids.push(chunk_id.clone()); + info!("reusing existing chunk {}", chunk_id); + } else { + let chunk_id = self.upload_chunk(chunk)?; + chunk_ids.push(chunk_id.clone()); + info!("created new chunk {}", chunk_id); + } + } + + Ok(chunk_ids) + } + + pub fn list_generations(&self) -> ClientResult<GenerationList> { + self.chunk_client.list_generations() + } + + pub fn fetch_chunk(&self, chunk_id: &ChunkId) -> ClientResult<DataChunk> { + self.chunk_client.fetch_chunk(chunk_id) + } + + fn fetch_generation_chunk(&self, gen_id: &str) -> ClientResult<GenerationChunk> { + let chunk_id = ChunkId::recreate(gen_id); + let chunk = self.fetch_chunk(&chunk_id)?; + let gen = GenerationChunk::from_data_chunk(&chunk)?; + Ok(gen) + } + + pub fn fetch_generation(&self, gen_id: &str, dbname: &Path) -> ClientResult<LocalGeneration> { + let gen = self.fetch_generation_chunk(gen_id)?; + + // Fetch the SQLite file, storing it in the named file. + let mut dbfile = File::create(&dbname) + .map_err(|err| ClientError::FileCreate(dbname.to_path_buf(), err))?; + for id in gen.chunk_ids() { + let chunk = self.fetch_chunk(id)?; + dbfile + .write_all(chunk.data()) + .map_err(|err| ClientError::FileWrite(dbname.to_path_buf(), err))?; + } + info!("downloaded generation to {}", dbname.display()); + + let gen = LocalGeneration::open(dbname)?; + Ok(gen) + } +} + +pub struct ChunkClient { + client: Client, + base_url: String, + cipher: CipherEngine, +} + +impl ChunkClient { + pub fn new(config: &ClientConfig) -> ClientResult<Self> { + let pass = config.passwords()?; + + let client = Client::builder() + .danger_accept_invalid_certs(!config.verify_tls_cert) + .build() + .map_err(ClientError::ReqwestError)?; + Ok(Self { + client, + base_url: config.server_url.to_string(), + cipher: CipherEngine::new(&pass), + }) + } + fn base_url(&self) -> &str { &self.base_url } @@ -139,44 +221,30 @@ impl BackupClient { } pub fn has_chunk(&self, meta: &ChunkMeta) -> ClientResult<Option<ChunkId>> { - trace!("has_chunk: 
url={:?}", self.base_url()); - let req = self - .client - .get(&self.chunks_url()) - .query(&[("sha256", meta.sha256())]) - .build() - .map_err(ClientError::ReqwestError)?; + let body = match self.get("", &[("sha256", meta.sha256())]) { + Ok((_, body)) => body, + Err(err) => return Err(err), + }; - let res = self.client.execute(req).map_err(ClientError::ChunkExists)?; - debug!("has_chunk: status={}", res.status()); - let has = if res.status() != 200 { - debug!("has_chunk: error from server"); - None + let hits: HashMap<String, ChunkMeta> = + serde_json::from_slice(&body).map_err(ClientError::JsonParse)?; + let mut iter = hits.iter(); + let has = if let Some((chunk_id, _)) = iter.next() { + Some(chunk_id.into()) } else { - let text = res.text().map_err(ClientError::ReqwestError)?; - debug!("has_chunk: text={:?}", text); - let hits: HashMap<String, ChunkMeta> = - serde_json::from_str(&text).map_err(ClientError::JsonParse)?; - debug!("has_chunk: hits={:?}", hits); - let mut iter = hits.iter(); - if let Some((chunk_id, _)) = iter.next() { - debug!("has_chunk: chunk_id={:?}", chunk_id); - Some(chunk_id.into()) - } else { - None - } + None }; - info!("has_chunk result: {:?}", has); Ok(has) } - pub fn upload_chunk(&self, meta: ChunkMeta, chunk: DataChunk) -> ClientResult<ChunkId> { + pub fn upload_chunk(&self, chunk: DataChunk) -> ClientResult<ChunkId> { + let enc = self.cipher.encrypt_chunk(&chunk)?; let res = self .client .post(&self.chunks_url()) - .header("chunk-meta", meta.to_json()) - .body(chunk.data().to_vec()) + .header("chunk-meta", chunk.meta().to_json()) + .body(enc.ciphertext().to_vec()) .send() .map_err(ClientError::ReqwestError)?; debug!("upload_chunk: res={:?}", res); @@ -187,62 +255,13 @@ impl BackupClient { } else { return Err(ClientError::NoCreatedChunkId); }; - info!("uploaded_chunk {} meta {:?}", chunk_id, meta); + info!("uploaded_chunk {}", chunk_id); Ok(chunk_id) } - pub fn upload_gen_chunk(&self, meta: ChunkMeta, gen: GenerationChunk) -> 
ClientResult<ChunkId> { - let res = self - .client - .post(&self.chunks_url()) - .header("chunk-meta", meta.to_json()) - .body(serde_json::to_string(&gen).map_err(ClientError::JsonGenerate)?) - .send() - .map_err(ClientError::ReqwestError)?; - debug!("upload_chunk: res={:?}", res); - let res: HashMap<String, String> = res.json().map_err(ClientError::ReqwestError)?; - let chunk_id = if let Some(chunk_id) = res.get("chunk_id") { - debug!("upload_chunk: id={}", chunk_id); - chunk_id.parse().unwrap() - } else { - return Err(ClientError::NoCreatedChunkId); - }; - info!("uploaded_generation chunk {}", chunk_id); - Ok(chunk_id) - } - - pub fn upload_new_file_chunks(&self, chunker: Chunker) -> ClientResult<Vec<ChunkId>> { - let mut chunk_ids = vec![]; - for item in chunker { - let (meta, chunk) = item?; - if let Some(chunk_id) = self.has_chunk(&meta)? { - chunk_ids.push(chunk_id.clone()); - info!("reusing existing chunk {}", chunk_id); - } else { - let chunk_id = self.upload_chunk(meta, chunk)?; - chunk_ids.push(chunk_id.clone()); - info!("created new chunk {}", chunk_id); - } - } - - Ok(chunk_ids) - } - pub fn list_generations(&self) -> ClientResult<GenerationList> { - let url = format!("{}?generation=true", &self.chunks_url()); - trace!("list_generations: url={:?}", url); - let req = self - .client - .get(&url) - .build() - .map_err(ClientError::ReqwestError)?; - let res = self - .client - .execute(req) - .map_err(ClientError::ReqwestError)?; - debug!("list_generations: status={}", res.status()); - let body = res.bytes().map_err(ClientError::ReqwestError)?; - debug!("list_generations: body={:?}", body); + let (_, body) = self.get("", &[("generation", "true")])?; + let map: HashMap<String, ChunkMeta> = serde_yaml::from_slice(&body).map_err(ClientError::YamlParse)?; debug!("list_generations: map={:?}", map); @@ -254,77 +273,65 @@ impl BackupClient { } pub fn fetch_chunk(&self, chunk_id: &ChunkId) -> ClientResult<DataChunk> { - info!("fetch chunk {}", chunk_id); + let 
(headers, body) = self.get(&format!("/{}", chunk_id), &[])?; + let meta = self.get_chunk_meta_header(chunk_id, &headers)?; + + let meta_bytes = meta.to_json_vec(); + let chunk = self.cipher.decrypt_chunk(&body, &meta_bytes)?; + + Ok(chunk) + } + + fn get(&self, path: &str, query: &[(&str, &str)]) -> ClientResult<(HeaderMap, Vec<u8>)> { + let url = format!("{}{}", &self.chunks_url(), path); + info!("GET {}", url); - let url = format!("{}/{}", &self.chunks_url(), chunk_id); + // Build HTTP request structure. let req = self .client .get(&url) + .query(query) .build() .map_err(ClientError::ReqwestError)?; + + // Make HTTP request. let res = self .client .execute(req) .map_err(ClientError::ReqwestError)?; + + // Did it work? if res.status() != 200 { - let err = ClientError::ChunkNotFound(chunk_id.to_string()); - error!("fetching chunk {} failed: {}", chunk_id, err); - return Err(err); + return Err(ClientError::NotFound(path.to_string())); } - let headers = res.headers(); + // Return headers and body. 
+ let headers = res.headers().clone(); + let body = res.bytes().map_err(ClientError::ReqwestError)?; + let body = body.to_vec(); + Ok((headers, body)) + } + + fn get_chunk_meta_header( + &self, + chunk_id: &ChunkId, + headers: &HeaderMap, + ) -> ClientResult<ChunkMeta> { let meta = headers.get("chunk-meta"); + if meta.is_none() { let err = ClientError::NoChunkMeta(chunk_id.clone()); error!("fetching chunk {} failed: {}", chunk_id, err); return Err(err); } + let meta = meta .unwrap() .to_str() .map_err(ClientError::MetaHeaderToString)?; - debug!("fetching chunk {}: meta={:?}", chunk_id, meta); let meta: ChunkMeta = serde_json::from_str(meta).map_err(ClientError::JsonParse)?; - debug!("fetching chunk {}: meta={:?}", chunk_id, meta); - - let body = res.bytes().map_err(ClientError::ReqwestError)?; - let body = body.to_vec(); - let actual = sha256(&body); - if actual != meta.sha256() { - let err = - ClientError::WrongChecksum(chunk_id.clone(), actual, meta.sha256().to_string()); - error!("fetching chunk {} failed: {}", chunk_id, err); - return Err(err); - } - - let chunk: DataChunk = DataChunk::new(body); - - Ok(chunk) - } - - fn fetch_generation_chunk(&self, gen_id: &str) -> ClientResult<GenerationChunk> { - let chunk_id = ChunkId::recreate(gen_id); - let chunk = self.fetch_chunk(&chunk_id)?; - let gen = GenerationChunk::from_data_chunk(&chunk)?; - Ok(gen) - } - - pub fn fetch_generation(&self, gen_id: &str, dbname: &Path) -> ClientResult<LocalGeneration> { - let gen = self.fetch_generation_chunk(gen_id)?; - - // Fetch the SQLite file, storing it in the named file. 
- let mut dbfile = File::create(&dbname) - .map_err(|err| ClientError::FileCreate(dbname.to_path_buf(), err))?; - for id in gen.chunk_ids() { - let chunk = self.fetch_chunk(id)?; - dbfile - .write_all(chunk.data()) - .map_err(|err| ClientError::FileWrite(dbname.to_path_buf(), err))?; - } - info!("downloaded generation to {}", dbname.display()); - let gen = LocalGeneration::open(dbname)?; - Ok(gen) + Ok(meta) } } |