summaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--Cargo.toml8
-rwxr-xr-xcheck4
-rw-r--r--config.yaml4
-rw-r--r--daemon.py84
-rw-r--r--obnam.md55
-rw-r--r--obnam.py134
-rw-r--r--obnam.yaml30
-rw-r--r--runcmd.py77
-rw-r--r--src/bin/obnam-server.rs224
-rw-r--r--src/chunk.rs2
-rw-r--r--src/chunkmeta.rs2
-rw-r--r--src/server.rs12
-rw-r--r--test.key27
-rw-r--r--test.pem17
14 files changed, 674 insertions, 6 deletions
diff --git a/Cargo.toml b/Cargo.toml
index 0f10594..5575cc8 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,10 +4,14 @@ version = "0.1.0"
authors = ["Lars Wirzenius <liw@liw.fi>"]
edition = "2018"
-# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
-
[dependencies]
anyhow = "1"
+bytes = "0.5"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
+serde_yaml = "0.8"
+structopt = "0.3"
+thiserror = "1"
+tokio = { version = "0.2", features = ["macros"] }
uuid = { version = "0.8", features = ["v4"] }
+warp = { version = "0.2", features = ["tls"] }
diff --git a/check b/check
index c3b0d8a..fe562df 100755
--- a/check
+++ b/check
@@ -21,4 +21,8 @@ cargo clippy $quiet
cargo fmt -- --check
$hideok cargo test $quiet
+sp-docgen obnam.md -o obnam.html
+sp-docgen obnam.md -o obnam.pdf
+$hideok sp-codegen obnam.md -o test.py --run
+
echo "Everything seems to be in order."
diff --git a/config.yaml b/config.yaml
new file mode 100644
index 0000000..4e1e57b
--- /dev/null
+++ b/config.yaml
@@ -0,0 +1,4 @@
+port: 8888
+chunks: /home/liw/tmp/chunks
+tls_key: test.key
+tls_cert: test.pem
diff --git a/daemon.py b/daemon.py
new file mode 100644
index 0000000..e223505
--- /dev/null
+++ b/daemon.py
@@ -0,0 +1,84 @@
+#############################################################################
+# Start and stop daemons, or background processes.
+
+
+import logging
+import os
+import signal
+import time
+
+
+# Start a process in the background.
+def start_daemon(ctx, name, argv):
+ runcmd = globals()["runcmd"]
+ exit_code_is = globals()["exit_code_is"]
+
+ logging.debug(f"Starting daemon {name}")
+ logging.debug(f" ctx={ctx.as_dict()}")
+ logging.debug(f" name={name}")
+ logging.debug(f" argv={argv}")
+
+ if "daemon" not in ctx.as_dict():
+ ctx["daemon"] = {}
+ assert name not in ctx["daemon"]
+ this = ctx["daemon"][name] = {
+ "pid-file": f"{name}.pid",
+ "stderr": f"{name}.stderr",
+ "stdout": f"{name}.stdout",
+ }
+ runcmd(
+ ctx,
+ [
+ "/usr/sbin/daemonize",
+ "-c",
+ os.getcwd(),
+ "-p",
+ this["pid-file"],
+ "-e",
+ this["stderr"],
+ "-o",
+ this["stdout"],
+ ]
+ + argv,
+ )
+
+ # Wait for a bit for daemon to start and maybe find a problem and die.
+ time.sleep(3)
+ if ctx["exit"] != 0:
+ logging.error(f"obnam-server stderr: {ctx['stderr']}")
+
+ exit_code_is(ctx, 0)
+ this["pid"] = int(open(this["pid-file"]).read().strip())
+ assert process_exists(this["pid"])
+
+ logging.debug(f"Started daemon {name}")
+ logging.debug(f" ctx={ctx.as_dict()}")
+
+
+# Stop a daemon.
+def stop_daemon(ctx, name):
+ logging.debug(f"Stopping daemon {name}")
+ logging.debug(f" ctx={ctx.as_dict()}")
+ logging.debug(f" ctx['daemon']={ctx.as_dict()['daemon']}")
+
+ this = ctx["daemon"][name]
+ terminate_process(this["pid"], signal.SIGKILL)
+
+
+# Does a process exist?
+def process_exists(pid):
+ try:
+ os.kill(pid, 0)
+ except ProcessLookupError:
+ return False
+ return True
+
+
+# Terminate process.
+def terminate_process(pid, signalno):
+ logging.debug(f"Terminating process {pid} with signal {signalno}")
+ try:
+ os.kill(pid, signalno)
+ except ProcessLookupError:
+ logging.debug("Process did not actually exist (anymore?)")
+ pass
diff --git a/obnam.md b/obnam.md
new file mode 100644
index 0000000..b95cbe7
--- /dev/null
+++ b/obnam.md
@@ -0,0 +1,55 @@
+# Acceptance criteria
+
+[Subplot]: https://subplot.liw.fi/
+
+This chapter documents detailed acceptance criteria and how they are
+verified as scenarios for the [Subplot][] tool.
+
+## Chunk server
+
+These scenarios verify that the chunk server works.
+
+### Chunk management
+
+This scenario verifies that a chunk can be uploaded and then
+retrieved, with its metadata, and then deleted. The chunk server has
+an API with just one endpoint, `/chunks`, and accepts the POST,
+GET, and DELETE operations on it.
+
+To create a chunk, we use POST.
+
+~~~scenario
+given a chunk server
+and a file data.dat containing some random data
+when I POST data.dat to /chunks, with chunk-meta: {"sha256":"abc"}
+then HTTP status code is 201
+and content-type is application/json
+and the JSON body has a field chunk_id, henceforth ID
+~~~
+
+To retrieve a chunk, we use GET, giving the chunk id in the path.
+
+~~~scenario
+when I GET /chunks/<ID>
+then HTTP status code is 200
+and content-type is application/octet-stream
+and chunk-meta is {"sha256":"abc","generation":null,"ended":null}
+and the body matches file data.dat
+~~~
+
+
+
+
+<!-- -------------------------------------------------------------------- -->
+
+
+---
+title: "Obnam2&mdash;a backup system"
+author: Lars Wirzenius
+bindings:
+ - obnam.yaml
+functions:
+ - obnam.py
+ - runcmd.py
+ - daemon.py
+...
diff --git a/obnam.py b/obnam.py
new file mode 100644
index 0000000..c827180
--- /dev/null
+++ b/obnam.py
@@ -0,0 +1,134 @@
+import logging
+import os
+import random
+import requests
+import shutil
+import socket
+import time
+import urllib3
+import yaml
+
+
+urllib3.disable_warnings()
+
+
+def start_chunk_server(ctx):
+ start_daemon = globals()["start_daemon"]
+ srcdir = globals()["srcdir"]
+
+ logging.debug(f"Starting obnam-server")
+
+ for x in ["test.pem", "test.key"]:
+ shutil.copy(os.path.join(srcdir, x), x)
+
+ chunks = "chunks"
+ os.mkdir(chunks)
+
+ config = {"chunks": chunks, "tls_key": "test.key", "tls_cert": "test.pem"}
+ port = config["port"] = random.randint(2000, 30000)
+ filename = "config.yaml"
+ yaml.safe_dump(config, stream=open(filename, "w"))
+ logging.debug(f"Picked randomly port for obnam-server: {config['port']}")
+ ctx["config"] = config
+
+ ctx["url"] = f"https://localhost:{port}"
+
+ start_daemon(ctx, "obnam-server", [_binary("obnam-server"), filename])
+
+ if not port_open("localhost", port, 5.0):
+ stderr = open(ctx["daemon"]["obnam-server"]["stderr"]).read()
+ logging.debug(f"Stderr from daemon: {stderr!r}")
+
+
+def stop_chunk_server(ctx):
+ logging.debug("Stopping obnam-server")
+ stop_daemon = globals()["stop_daemon"]
+ stop_daemon(ctx, "obnam-server")
+
+
+def create_file_with_random_data(ctx, filename=None):
+ N = 128
+ data = "".join(chr(random.randint(0, 255)) for i in range(N)).encode("UTF-8")
+ with open(filename, "wb") as f:
+ f.write(data)
+
+
+def post_file(ctx, filename=None, path=None, header=None, json=None):
+ url = f"{ctx['url']}/chunks"
+ headers = {header: json}
+ data = open(filename, "rb").read()
+ _request(ctx, requests.post, url, headers=headers, data=data)
+
+
+def get_chunk(ctx, var=None):
+ chunk_id = ctx["vars"][var]
+ url = f"{ctx['url']}/chunks/{chunk_id}"
+ _request(ctx, requests.get, url)
+
+
+def status_code_is(ctx, status=None):
+ assert_eq = globals()["assert_eq"]
+ assert_eq(ctx["http.status"], int(status))
+
+
+def header_is(ctx, header=None, value=None):
+ assert_eq = globals()["assert_eq"]
+ assert_eq(ctx["http.headers"][header], value)
+
+
+def remember_json_field(ctx, field=None, var=None):
+ v = ctx.get("vars", {})
+ v[var] = ctx["http.json"][field]
+ ctx["vars"] = v
+
+
+def body_matches_file(ctx, filename=None):
+ assert_eq = globals()["assert_eq"]
+ content = open(filename, "rb").read()
+ logging.debug(f"body_matches_file:")
+    logging.debug(f"  filename: {filename}")
+ logging.debug(f" content: {content!r}")
+ logging.debug(f" body: {ctx['http.raw']!r}")
+ assert_eq(ctx["http.raw"], content)
+
+
+# Name of Rust binary, debug-build.
+def _binary(name):
+ srcdir = globals()["srcdir"]
+ return os.path.abspath(os.path.join(srcdir, "target", "debug", name))
+
+
+# Wait for a port to be open
+def port_open(host, port, timeout):
+ logging.debug(f"Waiting for port localhost:{port} to be available")
+ started = time.time()
+ while time.time() < started + timeout:
+ try:
+ socket.create_connection((host, port), timeout=timeout)
+ return True
+ except socket.error:
+ pass
+ logging.error(f"Port localhost:{port} is not open")
+ return False
+
+
+# Make an HTTP request.
+def _request(ctx, method, url, headers=None, data=None):
+ r = method(url, headers=headers, data=data, verify=False)
+ ctx["http.status"] = r.status_code
+ ctx["http.headers"] = dict(r.headers)
+ try:
+ ctx["http.json"] = dict(r.json())
+ except ValueError:
+ ctx["http.json"] = None
+ ctx["http.raw"] = r.content
+ logging.debug("HTTP request:")
+ logging.debug(f" url: {url}")
+ logging.debug(f" header: {headers!r}")
+ logging.debug("HTTP response:")
+ logging.debug(f" status: {r.status_code}")
+ logging.debug(f" json: {ctx['http.json']!r}")
+ logging.debug(f" text: {r.content!r}")
+ if not r.ok:
+ stderr = open(ctx["daemon"]["obnam-server"]["stderr"], "rb").read()
+ logging.debug(f" server stderr: {stderr!r}")
diff --git a/obnam.yaml b/obnam.yaml
new file mode 100644
index 0000000..7acf581
--- /dev/null
+++ b/obnam.yaml
@@ -0,0 +1,30 @@
+- given: "a chunk server"
+ function: start_chunk_server
+ cleanup: stop_chunk_server
+
+- given: >
+ a file (?P<filename>\\S+) containing "(?P<data>.*)"
+ regex: true
+ function: create_file_with_given_data
+
+- given: "a file {filename} containing some random data"
+ function: create_file_with_random_data
+
+- when: "I POST (?P<filename>\\S+) to (?P<path>\\S+), with (?P<header>\\S+): (?P<json>.*)"
+ regex: true
+ function: post_file
+
+- when: "I GET /chunks/<{var}>"
+ function: get_chunk
+
+- then: "HTTP status code is {status}"
+ function: status_code_is
+
+- then: "{header} is {value}"
+ function: header_is
+
+- then: "the JSON body has a field {field}, henceforth {var}"
+ function: remember_json_field
+
+- then: "the body matches file {filename}"
+ function: body_matches_file
diff --git a/runcmd.py b/runcmd.py
new file mode 100644
index 0000000..7193c15
--- /dev/null
+++ b/runcmd.py
@@ -0,0 +1,77 @@
+# Some step implementations for running commands and capturing the result.
+
+import subprocess
+
+
+# Run a command, capture its stdout, stderr, and exit code in context.
+def runcmd(ctx, argv, **kwargs):
+ p = subprocess.Popen(argv, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs)
+ stdout, stderr = p.communicate("")
+ ctx["argv"] = argv
+ ctx["stdout"] = stdout.decode("utf-8")
+ ctx["stderr"] = stderr.decode("utf-8")
+ ctx["exit"] = p.returncode
+
+
+# Check that latest exit code captured by runcmd was a specific one.
+def exit_code_is(ctx, wanted):
+ if ctx.get("exit") != wanted:
+ print("context:", ctx.as_dict())
+ assert_eq(ctx.get("exit"), wanted)
+
+
+# Check that latest exit code captured by runcmd was not a specific one.
+def exit_code_is_not(ctx, unwanted):
+ if ctx.get("exit") == unwanted:
+ print("context:", ctx.as_dict())
+ assert_ne(ctx.get("exit"), unwanted)
+
+
+# Check that latest exit code captured by runcmd was zero.
+def exit_code_zero(ctx):
+ exit_code_is(ctx, 0)
+
+
+# Check that latest exit code captured by runcmd was not zero.
+def exit_code_nonzero(ctx):
+ exit_code_is_not(ctx, 0)
+
+
+# Check that stdout of latest runcmd contains a specific string.
+def stdout_contains(ctx, pattern=None):
+ stdout = ctx.get("stdout", "")
+ if pattern not in stdout:
+ print("pattern:", repr(pattern))
+ print("stdout:", repr(stdout))
+ print("ctx:", ctx.as_dict())
+ assert_eq(pattern in stdout, True)
+
+
+# Check that stdout of latest runcmd does not contain a specific string.
+def stdout_does_not_contain(ctx, pattern=None):
+ stdout = ctx.get("stdout", "")
+ if pattern in stdout:
+ print("pattern:", repr(pattern))
+ print("stdout:", repr(stdout))
+ print("ctx:", ctx.as_dict())
+ assert_eq(pattern not in stdout, True)
+
+
+# Check that stderr of latest runcmd contains a specific string.
+def stderr_contains(ctx, pattern=None):
+ stderr = ctx.get("stderr", "")
+ if pattern not in stderr:
+ print("pattern:", repr(pattern))
+ print("stderr:", repr(stderr))
+ print("ctx:", ctx.as_dict())
+ assert_eq(pattern in stderr, True)
+
+
+# Check that stderr of latest runcmd does not contain a specific string.
+def stderr_does_not_contain(ctx, pattern=None):
+ stderr = ctx.get("stderr", "")
+    if pattern in stderr:
+ print("pattern:", repr(pattern))
+ print("stderr:", repr(stderr))
+ print("ctx:", ctx.as_dict())
+ assert_eq(pattern not in stderr, True)
diff --git a/src/bin/obnam-server.rs b/src/bin/obnam-server.rs
new file mode 100644
index 0000000..40ea400
--- /dev/null
+++ b/src/bin/obnam-server.rs
@@ -0,0 +1,224 @@
+use bytes::Bytes;
+use obnam::{chunk::Chunk, chunkid::ChunkId, chunkmeta::ChunkMeta, index::Index, store::Store};
+use serde::{Deserialize, Serialize};
+use std::default::Default;
+use std::path::{Path, PathBuf};
+use std::sync::Arc;
+use structopt::StructOpt;
+use tokio::sync::Mutex;
+use warp::http::StatusCode;
+use warp::Filter;
+
+#[derive(Debug, StructOpt)]
+#[structopt(name = "obnam2-server", about = "Backup server")]
+struct Opt {
+ #[structopt(parse(from_os_str))]
+ config: PathBuf,
+}
+
+#[tokio::main]
+async fn main() -> anyhow::Result<()> {
+ let opt = Opt::from_args();
+ let config = Config::read_config(&opt.config).unwrap();
+ let config_bare = config.clone();
+ let config = Arc::new(Mutex::new(config));
+ let config = warp::any().map(move || Arc::clone(&config));
+
+ let index = Arc::new(Mutex::new(Index::default()));
+ let index = warp::any().map(move || Arc::clone(&index));
+
+ let create = warp::post()
+ .and(warp::path("chunks"))
+ .and(config.clone())
+ .and(index.clone())
+ .and(warp::header("chunk-meta"))
+ .and(warp::filters::body::bytes())
+ .and_then(create_chunk);
+
+ let fetch = warp::get()
+ .and(warp::path("chunks"))
+ .and(warp::path::param())
+ .and(config.clone())
+ .and_then(fetch_chunk);
+
+ // let search = warp::get()
+ // .and(warp::path("chunks"))
+ // .and(warp::query::<HashMap<String, String>>())
+ // .and(config.clone())
+ // .and(index.clone())
+ // .and_then(obnam::routes::search::search_chunks);
+
+ // let webroot = create.or(fetch).or(search);
+ let webroot = create.or(fetch);
+ warp::serve(webroot)
+ .tls()
+ .key_path(config_bare.tls_key)
+ .cert_path(config_bare.tls_cert)
+ .run(([127, 0, 0, 1], config_bare.port))
+ .await;
+ Ok(())
+}
+
+#[derive(Debug, Deserialize, Clone)]
+pub struct Config {
+ pub chunks: PathBuf,
+ pub port: u16,
+ pub tls_key: PathBuf,
+ pub tls_cert: PathBuf,
+}
+
+#[derive(Debug, thiserror::Error)]
+enum ConfigError {
+ #[error("Port number {0} too small, would require running as root")]
+ PortTooSmall(u16),
+
+ #[error("Directory for chunks {0} does not exist")]
+ ChunksDirNotFound(PathBuf),
+
+ #[error("TLS certificate {0} does not exist")]
+ TlsCertNotFound(PathBuf),
+
+ #[error("TLS key {0} does not exist")]
+ TlsKeyNotFound(PathBuf),
+}
+
+impl Config {
+ pub fn read_config(filename: &Path) -> anyhow::Result<Config> {
+ let config = std::fs::read_to_string(filename)?;
+ let config: Config = serde_yaml::from_str(&config)?;
+ config.check()?;
+ Ok(config)
+ }
+
+ pub fn check(&self) -> anyhow::Result<()> {
+ if self.port < 1024 {
+ return Err(ConfigError::PortTooSmall(self.port).into());
+ }
+ if !self.chunks.exists() {
+ return Err(ConfigError::ChunksDirNotFound(self.chunks.clone()).into());
+ }
+ if !self.tls_cert.exists() {
+ return Err(ConfigError::TlsCertNotFound(self.tls_cert.clone()).into());
+ }
+ if !self.tls_key.exists() {
+ return Err(ConfigError::TlsKeyNotFound(self.tls_key.clone()).into());
+ }
+ Ok(())
+ }
+}
+
+pub async fn create_chunk(
+ config: Arc<Mutex<Config>>,
+ index: Arc<Mutex<Index>>,
+ meta: String,
+ data: Bytes,
+) -> Result<impl warp::Reply, warp::Rejection> {
+ let id = ChunkId::new();
+ let config = config.lock().await;
+ let store = Store::new(&config.chunks);
+
+ let meta: ChunkMeta = match meta.parse() {
+ Ok(s) => s,
+ Err(_) => {
+ eprintln!("bad meta");
+ return Ok(ChunkResult::BadRequest);
+ }
+ };
+
+ let chunk = Chunk::new(meta.clone(), data.to_vec());
+
+ match store.save(&id, &chunk) {
+ Ok(_) => (),
+ Err(_) => {
+ eprintln!("no meta file");
+ return Ok(ChunkResult::InternalServerError);
+ }
+ }
+
+ let mut index = index.lock().await;
+ index.insert(id.clone(), "sha256", meta.sha256());
+ if meta.is_generation() {
+ index.insert_generation(id.clone());
+ }
+
+ Ok(ChunkResult::Created(id))
+}
+
+pub async fn fetch_chunk(
+ id: String,
+ config: Arc<Mutex<Config>>,
+) -> Result<impl warp::Reply, warp::Rejection> {
+ let config = config.lock().await;
+ let store = Store::new(&config.chunks);
+ let id: ChunkId = id.parse().unwrap();
+ match store.load(&id) {
+ Ok(chunk) => Ok(ChunkResult::Fetched(chunk)),
+ Err(_) => Err(warp::reject::not_found()),
+ }
+}
+
+enum ChunkResult {
+ Created(ChunkId),
+ Fetched(Chunk),
+ BadRequest,
+ InternalServerError,
+}
+
+#[derive(Debug, Serialize)]
+struct CreatedBody {
+ chunk_id: String,
+}
+
+impl warp::Reply for ChunkResult {
+ fn into_response(self) -> warp::reply::Response {
+ match self {
+ ChunkResult::Created(id) => {
+ let body = CreatedBody {
+ chunk_id: format!("{}", id),
+ };
+ let body = serde_json::to_string(&body).unwrap();
+ let mut r = warp::reply::Response::new(body.into());
+ r.headers_mut().insert(
+ warp::http::header::CONTENT_TYPE,
+ warp::http::header::HeaderValue::from_static("application/json"),
+ );
+ *r.status_mut() = StatusCode::CREATED;
+ r
+ }
+ ChunkResult::Fetched(chunk) => {
+ let mut r = warp::reply::Response::new(chunk.data().to_vec().into());
+ r.headers_mut().insert(
+ warp::http::header::CONTENT_TYPE,
+ warp::http::header::HeaderValue::from_static("application/octet-stream"),
+ );
+ r.headers_mut().insert(
+ "chunk-meta",
+ warp::http::header::HeaderValue::from_str(
+ &serde_json::to_string(&chunk.meta()).unwrap(),
+ )
+ .unwrap(),
+ );
+ *r.status_mut() = StatusCode::OK;
+ r
+ }
+ ChunkResult::BadRequest => {
+ let mut r = warp::reply::Response::new("".into());
+ r.headers_mut().insert(
+ warp::http::header::CONTENT_TYPE,
+ warp::http::header::HeaderValue::from_static("application/json"),
+ );
+ *r.status_mut() = StatusCode::BAD_REQUEST;
+ r
+ }
+ ChunkResult::InternalServerError => {
+ let mut r = warp::reply::Response::new("".into());
+ r.headers_mut().insert(
+ warp::http::header::CONTENT_TYPE,
+ warp::http::header::HeaderValue::from_static("application/json"),
+ );
+ *r.status_mut() = StatusCode::INTERNAL_SERVER_ERROR;
+ r
+ }
+ }
+ }
+}
diff --git a/src/chunk.rs b/src/chunk.rs
index 29d64a2..33d2d32 100644
--- a/src/chunk.rs
+++ b/src/chunk.rs
@@ -1,4 +1,5 @@
use crate::chunkmeta::ChunkMeta;
+use serde::Serialize;
/// Store an arbitrary chunk of data.
///
@@ -6,6 +7,7 @@ use crate::chunkmeta::ChunkMeta;
///
/// A chunk also contains its associated metadata, except its
/// identifier.
+#[derive(Debug, Serialize)]
pub struct Chunk {
meta: ChunkMeta,
data: Vec<u8>,
diff --git a/src/chunkmeta.rs b/src/chunkmeta.rs
index b48b9a3..7e0d6f3 100644
--- a/src/chunkmeta.rs
+++ b/src/chunkmeta.rs
@@ -35,7 +35,7 @@ use std::str::FromStr;
///
/// [ISO 8601]: https://en.wikipedia.org/wiki/ISO_8601
/// [SHA256]: https://en.wikipedia.org/wiki/SHA-2
-#[derive(Debug, Default, PartialEq, Serialize, Deserialize)]
+#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)]
pub struct ChunkMeta {
sha256: String,
// The remaining fields are Options so that JSON parsing doesn't
diff --git a/src/server.rs b/src/server.rs
index 6b8a064..7498050 100644
--- a/src/server.rs
+++ b/src/server.rs
@@ -1,3 +1,4 @@
+use crate::chunk::Chunk;
use crate::chunkid::ChunkId;
use crate::chunkmeta::ChunkMeta;
use serde::{Deserialize, Serialize};
@@ -21,15 +22,20 @@ impl Created {
}
/// Result of retrieving a chunk.
+
#[derive(Debug, Serialize)]
pub struct Fetched {
id: ChunkId,
- meta: ChunkMeta,
+ chunk: Chunk,
}
impl Fetched {
- pub fn new(id: ChunkId, meta: ChunkMeta) -> Self {
- Fetched { id, meta }
+ pub fn new(id: ChunkId, chunk: Chunk) -> Self {
+ Fetched { id, chunk }
+ }
+
+ pub fn to_json(&self) -> String {
+ serde_json::to_string(&self).unwrap()
}
}
diff --git a/test.key b/test.key
new file mode 100644
index 0000000..badc955
--- /dev/null
+++ b/test.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEA/UAeDo1v4vzLY3cBS2k7ueK4ViKSTrNOo0eXj8L3XVNQgf/t
+a4bmxxJrp5du+QK9jCTQzQpJaOgWxY8pOGDCzpjANATjLvAb77vYwIE/m+y98bAv
+SagIRLqUKaikPUk2ERT8xk8K/cbeJL1BgjOv2L/v3XRCquX/UavWn721PjH88CDU
+Fo2/SLtBHhGL1IBDXNPWzMxcr+qIDDefLGNROf7kCAB8kv/Lf+CkyghqMTRhtzRm
+Ny2+MOoVK0v5E5qsXa/2uaKz8f5SArnwLPLwPkSjm2GgAFhulZtQhbwVB6+xK7OH
+fuq5mNEehpDGK4TZf6iGbfu1ze8lQpmy5pW/UwIDAQABAoIBAGuEvT4QbmBAU1l3
+POV3WBQjXX+0Tcl79vb7fi1a6QqGRLfsoC/2piP+fhY5BLRTpYXKW5uxslkHsCNQ
+FIALFKoB0BhFuu5Copm1duDD/u2EJWBnHKx42onTbPQC1TfvzTSTZH4NMO1tcQLI
+FvfgWdf8JSowId3MqAlUm1pWgIxWdIe5p0jqbaXChs/yOXByKV/Pe0vVHjBq4foG
+TdiojDZapn3DxkjH5mJ4exZ7xvKs0hwN4DmgJ2kv8DL2BctupFpv+3Xt/Krhuakj
+gpcG43VPNJCnilb0Zse5pMtNZRbDMbvsvcTXY+CsLy7QefTwiewCAz5GwBA6YYwG
+5DftwwECgYEA/9+vzD8ApehJs1WahepG3Yw0iWKOzrdb83i4r/9Du3PTR5MtgK1m
+GSzCInWQf2OL+VdoShhssIxjPGGicj3Ir0GugRL5oMLu9/pxD0aHtqEcroP1okAN
+6731UblMA7BbNMpz6T79Cz9RebWPgDnuFMzD5DuiXccqTJvbwlPjnWkCgYEA/WAZ
+cvztUJRCoAO1NP92jBJYGOdds5skRaA6HUNLHV2WXkGAESCAYksuIsytOzHftwzq
+6UNn+yfFeO065AGl+X4n08/K6NpH/HQRtrNliNkxlRx2fT/vQtdZn/L8ppMrMEel
+1zSxZDpvvxDlz67z9qKXD2yzKGm+8QeHeCpOE1sCgYAZyaSBFDRvhBqxbUY2iub7
+ex7sXbeA0+/7eMY8TqC36rN3ejjSyCcLEHy8Vlbqz9CSn6GOHioOHQf/VOO3Wcy5
+BQieT5hDAIJvEZmFbZlN6aznVbtSz9vXT3zC8MbddE+s4V6POl7sXERM3OSKiswY
+4o43mV8CzojP0cFzM1s0SQKBgQDilUDM6tCSgbM/A3HV7iomQvVTd/v8dN2wAgRW
+TOpsXVft/Gc5abZFsHOWnTDBQe2M9dlRNn1TRZK7fHJwmeTCQ4X8TdNn/mPnqCh7
+GNzQz2IetXBku3sMbCZy+knj8LHrn/b3h+Cm//cUh1jsK8lxop6yJYPlvQMK6q/D
+zxk5BwKBgQCLUcgboMNevbWvjciWiRgl3G3Sp2FMnHBvGmKKzLO3hgAp93xD9Npu
+cAtwXDevGsg+UF3S3Ydfm0BQxaVCfJOxFa61iIr3iAvAqXFrmhnrv5iMIhTFoUkF
+PPf/sksgRTe/FuEL9deYsd9zrDgETn8xM/3WaJoqmSY8kMUGDvhuKA==
+-----END RSA PRIVATE KEY-----
diff --git a/test.pem b/test.pem
new file mode 100644
index 0000000..f6a9ab3
--- /dev/null
+++ b/test.pem
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE-----
+MIICrzCCAZcCFEVeEq1IBaVKfTcSfyZiVu/2YKfoMA0GCSqGSIb3DQEBCwUAMBQx
+EjAQBgNVBAMMCWxvY2FsaG9zdDAeFw0yMDA4MTEwODQ5NTdaFw0yMTA4MTEwODQ5
+NTdaMBQxEjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEP
+ADCCAQoCggEBAP1AHg6Nb+L8y2N3AUtpO7niuFYikk6zTqNHl4/C911TUIH/7WuG
+5scSa6eXbvkCvYwk0M0KSWjoFsWPKThgws6YwDQE4y7wG++72MCBP5vsvfGwL0mo
+CES6lCmopD1JNhEU/MZPCv3G3iS9QYIzr9i/7910Qqrl/1Gr1p+9tT4x/PAg1BaN
+v0i7QR4Ri9SAQ1zT1szMXK/qiAw3nyxjUTn+5AgAfJL/y3/gpMoIajE0Ybc0Zjct
+vjDqFStL+ROarF2v9rmis/H+UgK58Czy8D5Eo5thoABYbpWbUIW8FQevsSuzh37q
+uZjRHoaQxiuE2X+ohm37tc3vJUKZsuaVv1MCAwEAATANBgkqhkiG9w0BAQsFAAOC
+AQEAgR/mSq1dQmVZxi8jnZQDmaNit819MNgYVOozb2h+/s3ft1YbbkMZHActphcr
+DpR1INrMT6oY5rfW91Azy+czjcIxc9RRZyvXV2z+w48wxloV2XSm/4BwPa2RCKt7
+UVyrggiKBCSGbZAv2HXZDpAHFMqIFR2TMiMbH+ODRdJ7sUx0aN2lgKtooLwG93yz
+/BchITIeXLUfwo/gdpFqFBRLD9DfdYA1F3IuckOMrEjriH8N1s2K/Fjfn+rla3jG
+oEaUCzkv+UkvLdQ1Rm8m9jTJsfA1cTyQcGlMN06IUwekbMJJnDn5jsOO2OV2rjyn
+0X14t9KBImVYqhE2df6CkZqOrA==
+-----END CERTIFICATE-----