4 changes: 2 additions & 2 deletions Cargo.toml
@@ -75,11 +75,11 @@ codegen-units = 1
unsafe_code = "forbid"
missing_docs = "warn"
rust_2018_idioms = { level = "warn", priority = -1 }
bad_style = { level = "warn", priority = -1 }
trivial_casts = "warn"
unused_lifetimes = "warn"
unused_qualifications = "warn"
bad_style = "warn"
dead_code = "allow" # TODO: "warn"
dead_code = "warn"
improper_ctypes = "warn"
missing_copy_implementations = "warn"
missing_debug_implementations = "warn"
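
Note: this hunk appears to promote `dead_code` from "allow" (with a TODO) to "warn" and to give `bad_style` an explicit lint priority. As a minimal illustration (not part of this PR), the equivalent crate-level attribute and a snippet that would now warn:

#![warn(dead_code)] // equivalent of `dead_code = "warn"` under [lints.rust]

fn used() {}

// Private and never called: with the lint at "warn" this now produces a
// dead_code warning instead of being silently allowed.
fn never_called() {}

fn main() {
    used();
}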
42 changes: 0 additions & 42 deletions crates/core/src/backend.rs
@@ -483,48 +483,6 @@ pub trait ReadSource: Sync + Send {
fn entries(&self) -> Self::Iter;
}

/// Trait for backends that can write to a source.
///
/// This trait is implemented by all backends that can write data to a source.
pub trait WriteSource: Clone {
/// Create a new source.
///
/// # Type Parameters
///
/// * `P` - The type of the path.
///
/// # Arguments
///
/// * `path` - The path of the source.
/// * `node` - The node information of the source.
fn create<P: Into<PathBuf>>(&self, path: P, node: Node);

/// Set the metadata of a source.
///
/// # Type Parameters
///
/// * `P` - The type of the path.
///
/// # Arguments
///
/// * `path` - The path of the source.
/// * `node` - The node information of the source.
fn set_metadata<P: Into<PathBuf>>(&self, path: P, node: Node);

/// Write data to a source at the given offset.
///
/// # Type Parameters
///
/// * `P` - The type of the path.
///
/// # Arguments
///
/// * `path` - The path of the source.
/// * `offset` - The offset to write at.
/// * `data` - The data to write.
fn write_at<P: Into<PathBuf>>(&self, path: P, offset: u64, data: Bytes);
}
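
Note: the `WriteSource` trait above is removed by this PR. For readers tracking the API change, a hedged sketch of what an implementor looked like under the removed definition (the type name and the no-op bodies are illustrative, not from the codebase):

// Illustrative only: a no-op implementor of the removed `WriteSource` trait,
// assuming `Node`, `Bytes` and `PathBuf` are in scope as in backend.rs.
#[derive(Clone)]
struct NullWriteSource;

impl WriteSource for NullWriteSource {
    fn create<P: Into<PathBuf>>(&self, path: P, _node: Node) {
        let _path: PathBuf = path.into();
        // a real implementation would create the file or directory here
    }

    fn set_metadata<P: Into<PathBuf>>(&self, path: P, _node: Node) {
        let _path: PathBuf = path.into();
        // a real implementation would apply mode, ownership and timestamps here
    }

    fn write_at<P: Into<PathBuf>>(&self, path: P, offset: u64, data: Bytes) {
        let _path: PathBuf = path.into();
        let _ = (offset, data);
        // a real implementation would write `data` at `offset` within the file
    }
}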

/// The backends a repository can be initialized and operated on
///
/// # Note
1 change: 1 addition & 0 deletions crates/core/src/backend/local_destination.rs
@@ -40,6 +40,7 @@ pub enum LocalDestinationErrorKind {
DirectoryCreationFailed(std::io::Error),
/// file `{0:?}` should have a parent
FileDoesNotHaveParent(PathBuf),
#[cfg(any(target_os = "macos", target_os = "openbsd"))]
/// `DeviceID` could not be converted to other type `{target}` of device `{device}`: `{source}`
DeviceIdConversionFailed {
target: String,
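
Note: gating `DeviceIdConversionFailed` behind `cfg(any(target_os = "macos", target_os = "openbsd"))` means the variant only exists on those targets, so any code that constructs or matches it needs the same gate. A hedged, self-contained illustration (all names here are made up, not from this file):

// Illustrative only: a cfg-gated variant and a matching cfg-gated use site.
#[derive(Debug)]
enum ExampleError {
    #[cfg(any(target_os = "macos", target_os = "openbsd"))]
    DeviceIdConversionFailed { device: u64 },
    Other,
}

fn classify(device: u64) -> ExampleError {
    #[cfg(any(target_os = "macos", target_os = "openbsd"))]
    {
        // Without the cfg, this construction would not compile on targets
        // where the variant does not exist.
        if device == u64::MAX {
            return ExampleError::DeviceIdConversionFailed { device };
        }
    }
    let _ = device; // keeps the parameter used on targets without the variant
    ExampleError::Other
}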
20 changes: 0 additions & 20 deletions crates/core/src/blob.rs
@@ -3,7 +3,6 @@ pub(crate) mod tree;

use std::{cmp::Ordering, num::NonZeroU32};

use derive_more::Constructor;
use enum_map::{Enum, EnumMap};
use serde_derive::{Deserialize, Serialize};

@@ -118,21 +117,6 @@ macro_rules! impl_blobid {

impl_blobid!(DataId, BlobType::Data);

/// A `Blob` is a file that is stored in the backend.
///
/// It can be a `tree` or a `data` blob.
///
/// A `tree` blob is a file that contains a list of other blobs.
/// A `data` blob is a file that contains the actual data.
#[derive(Debug, PartialEq, Eq, Copy, Clone, Constructor)]
pub(crate) struct Blob {
/// The type of the blob
tpe: BlobType,

/// The id of the blob
id: BlobId,
}

/// `BlobLocation` contains information about a blob within a pack
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub struct BlobLocation {
@@ -190,10 +174,6 @@ impl<T> BlobLocations<T> {
self.blobs.iter().map(|bl| bl.0.length).sum()
}

pub fn data_length(&self) -> u32 {
self.blobs.iter().map(|bl| bl.0.data_length()).sum()
}

pub fn from_blob_location(location: BlobLocation, target: T) -> Self {
Self {
offset: location.offset,
19 changes: 1 addition & 18 deletions crates/core/src/blob/packer.rs
@@ -24,7 +24,7 @@ use crate::{
index::{IndexEntry, indexer::SharedIndexer},
repofile::{
configfile::ConfigFile,
indexfile::{IndexBlob, IndexPack},
indexfile::IndexPack,
packfile::{PackHeaderLength, PackHeaderRef, PackId},
snapshotfile::SnapshotSummary,
},
@@ -40,12 +40,6 @@ pub enum PackerErrorKind {
from: &'static str,
source: std::num::TryFromIntError,
},
/// Sending crossbeam message failed: `id`: `{id:?}`, `data`: `{data:?}` : `{source}`
SendingCrossbeamMessage {
id: BlobId,
data: Bytes,
source: crossbeam_channel::SendError<(Bytes, BlobId)>,
},
/// Sending crossbeam data message failed: `data`: `{data:?}`, `index_pack`: `{index_pack:?}` : `{source}`
SendingCrossbeamDataMessage {
data: Bytes,
@@ -822,13 +816,6 @@ pub struct CopyPackBlobs {
}

impl CopyPackBlobs {
pub fn from_index_blob(pack_id: PackId, blob: IndexBlob) -> Self {
Self {
pack_id,
locations: BlobLocations::from_blob_location(blob.location, blob.id),
}
}

pub fn from_index_entry(entry: IndexEntry, id: BlobId) -> Self {
Self {
pack_id: entry.pack,
@@ -864,8 +851,6 @@
be_src: BE,
/// The packer to write to.
packer: Packer<BE>,
/// The size limit of the pack file.
size_limit: u32,
/// the blob type
blob_type: BlobType,
}
@@ -896,11 +881,9 @@ impl<BE: DecryptFullBackend> BlobCopier<BE> {
pack_sizer: PackSizer,
) -> RusticResult<Self> {
let packer = Packer::new(be_dst, blob_type, indexer, pack_sizer)?;
let size_limit = pack_sizer.pack_size();
Ok(Self {
be_src,
packer,
size_limit,
blob_type,
})
}
15 changes: 0 additions & 15 deletions crates/core/src/blob/tree.rs
@@ -514,21 +514,6 @@
BE: DecryptReadBackend,
I: ReadGlobalIndex,
{
/// Creates a new `NodeStreamer`.
///
/// # Arguments
///
/// * `be` - The backend to read from.
/// * `node` - The node to start from.
///
/// # Errors
///
/// * If the tree ID is not found in the backend.
/// * If deserialization fails.
pub fn new(be: BE, index: &'a I, node: &Node) -> RusticResult<Self> {
Self::new_streamer(be, index, node, None, true)
}
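
Note: the removed `new` was only a thin wrapper, so former callers can use `new_streamer` directly; a hedged sketch based on the removed body (the two trailing arguments mirror what the wrapper passed; their meaning is defined by `new_streamer`, whose signature is not shown in this hunk):

// Before (removed convenience constructor):
// let streamer = NodeStreamer::new(be, index, node)?;
// After: call the remaining constructor with the defaults the wrapper used.
let streamer = NodeStreamer::new_streamer(be, index, node, None, true)?;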

/// Creates a new `NodeStreamer`.
///
/// # Arguments
8 changes: 0 additions & 8 deletions crates/core/src/blob/tree/modify.rs
@@ -29,7 +29,6 @@ pub enum ModifierChange {
#[derive(Debug)]
pub enum ModifierAction {
Change(ModifierChange),
WriteChangedTree(Tree),
Process(TreeId),
}

@@ -76,9 +75,6 @@ pub trait Visitor {
}
}

pub struct DefaultVisitor;
impl Visitor for DefaultVisitor {}
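
Note: `DefaultVisitor` is removed. Its impl block was empty, so it relied entirely on `Visitor`'s default method bodies; a caller that still needs a pass-through visitor can define one locally. A hedged sketch (the name is illustrative):

// Illustrative replacement for the removed DefaultVisitor: an empty impl
// that keeps all of the trait's default method implementations.
struct PassThroughVisitor;

impl Visitor for PassThroughVisitor {}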

pub struct TreeModifier<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> {
be: &'a BE,
index: &'a I,
@@ -112,10 +108,6 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> TreeModifier<'a, BE, I> {
let mut changed = false;
let tree = match visitor.pre_process(&path, id) {
ModifierAction::Change(change) => return Ok(change),
ModifierAction::WriteChangedTree(tree) => {
changed = true;
tree
}
ModifierAction::Process(id) => {
match visitor.pre_process_tree(Tree::from_backend(self.be, self.index, id))? {
TreeAction::ProcessChangedTree(tree) => {
20 changes: 6 additions & 14 deletions crates/core/src/blob/tree/rewrite.rs
@@ -47,8 +47,7 @@ pub struct RewriteTreesOptions {
}

#[derive(Debug)]
pub struct RewriteVisitor<'a, I: ReadGlobalIndex> {
index: &'a I,
pub struct RewriteVisitor {
overrides: Override,
node_modification: NodeModification,
all_trees: bool,
@@ -57,10 +56,9 @@ pub struct RewriteVisitor<'a, I: ReadGlobalIndex> {
summary: BTreeMap<TreeId, Summary>,
}

impl<'a, I: ReadGlobalIndex> RewriteVisitor<'a, I> {
pub fn new(index: &'a I, opts: &RewriteTreesOptions) -> RusticResult<Self> {
impl RewriteVisitor {
pub fn new(opts: &RewriteTreesOptions) -> RusticResult<Self> {
Ok(Self {
index,
overrides: opts.excludes.as_override()?,
node_modification: opts.node_modification.clone(),
all_trees: opts.all_trees,
@@ -71,7 +69,7 @@ impl<'a, I: ReadGlobalIndex> RewriteVisitor<'a, I> {
}
}

impl<I: ReadGlobalIndex> Visitor for RewriteVisitor<'_, I> {
impl Visitor for RewriteVisitor {
fn pre_process(&self, path: &PathBuf, id: TreeId) -> ModifierAction {
if self.unchanged.contains(&(path.clone(), id)) {
ModifierAction::Change(ModifierChange::Unchanged)
@@ -125,7 +123,7 @@ impl<I: ReadGlobalIndex> Visitor for RewriteVisitor<'_, I> {

pub struct Rewriter<'a, BE: DecryptWriteBackend, I: ReadGlobalIndex> {
modifier: TreeModifier<'a, BE, I>,
visitor: RewriteVisitor<'a, I>,
visitor: RewriteVisitor,
}

impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Rewriter<'a, BE, I> {
@@ -137,7 +135,7 @@ impl<'a, BE: DecryptFullBackend, I: ReadGlobalIndex> Rewriter<'a, BE, I> {
dry_run: bool,
) -> RusticResult<Self> {
let modifier = TreeModifier::new(be, index, config, dry_run)?;
let visitor = RewriteVisitor::new(index, opts)?;
let visitor = RewriteVisitor::new(opts)?;
Ok(Self { modifier, visitor })
}

@@ -189,10 +187,4 @@ impl Summary {
self.size += node.meta.size;
}
}

pub fn from_node(node: &Node) -> Self {
let mut summary = Self::default();
summary.update(node);
summary
}
}
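
Note: `Summary::from_node` is removed; its body only combined `Default` with the `update` method kept above, so call sites can be migrated mechanically. A hedged sketch mirroring the removed body (assumes `node: &Node`, as in the removed signature):

// Equivalent of the removed Summary::from_node(node):
let mut summary = Summary::default();
summary.update(node);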
29 changes: 11 additions & 18 deletions crates/core/src/chunker/rabin.rs
@@ -6,16 +6,8 @@ use rustic_cdc::{Polynom, Polynom64, Rabin64, RollingHash64};
use crate::error::{ErrorKind, RusticError, RusticResult};

pub(super) mod constants {
/// The Splitmask is used to determine if a chunk is a chunk boundary.
pub(super) const SIZE: usize = 1 << 20;
/// The size of a kilobyte.
pub(super) const KB: usize = 1024;
/// The size of a megabyte.
pub(super) const MB: usize = 1024 * KB;
/// The minimum size of a chunk.
pub(super) const MIN_SIZE: usize = 512 * KB;
/// The maximum size of a chunk.
pub(super) const MAX_SIZE: usize = 8 * MB;
/// Buffer size used for reading - TODO: Find out optimal size for best performance!
pub(super) const BUF_SIZE: usize = 4 * KB;
/// Random polynomial maximum tries.
@@ -331,18 +323,19 @@ mod tests {
use rand::prelude::*;
use std::io::{Cursor, repeat};

/// The Splitmask is used to determine if a chunk is a chunk boundary.
const SIZE: usize = 1 << 20;
/// The size of a megabyte.
const MB: usize = 1024 * constants::KB;
/// The minimum size of a chunk.
const MIN_SIZE: usize = 512 * constants::KB;
/// The maximum size of a chunk.
const MAX_SIZE: usize = 8 * MB;

fn chunker<R: Read + Send>(reader: R, size_hint: usize) -> ChunkIter<R> {
let poly = 0x003D_A335_8B4D_C173;
let rabin = Rabin64::new_with_polynom(6, &poly);
ChunkIter::new(
rabin,
constants::SIZE,
constants::MIN_SIZE,
constants::MAX_SIZE,
reader,
size_hint,
)
.unwrap()
ChunkIter::new(rabin, SIZE, MIN_SIZE, MAX_SIZE, reader, size_hint).unwrap()
}

#[test]
@@ -388,6 +381,6 @@ mod tests {
let mut chunker = chunker(&mut reader, usize::MAX);

let chunk = chunker.next().unwrap().unwrap();
assert_eq!(constants::MIN_SIZE, chunk.len());
assert_eq!(MIN_SIZE, chunk.len());
}
}
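
Note: the chunking constants now live in the test module only. A hedged usage sketch of the `chunker` helper above (not part of this PR; it assumes `Read` is in scope in the test module, as the helper's `R: Read + Send` bound suggests):

// Illustrative only: chunk 4 MiB of constant bytes and check the size bounds.
// Content-defined chunking keeps every chunk at most MAX_SIZE; only the final
// chunk may come out shorter than MIN_SIZE.
let reader = repeat(0u8).take((4 * MB) as u64);
let mut total = 0;
for chunk in chunker(reader, 4 * MB) {
    let chunk = chunk.unwrap();
    assert!(chunk.len() <= MAX_SIZE);
    total += chunk.len();
}
assert_eq!(total, 4 * MB);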
1 change: 1 addition & 0 deletions crates/core/src/commands/check.rs
@@ -1113,6 +1113,7 @@ mod tests {
);
}

#[test]
fn test_read_subset_n_m() {
let test_packs = test_packs(&mut rng());
let mut all_packs: BTreeSet<_> = test_packs.iter().map(|pack| pack.id).collect();
2 changes: 1 addition & 1 deletion crates/core/src/id.rs
@@ -128,7 +128,7 @@ impl Id {
pub fn parse_some(name: &str, tpe: FileType) -> Option<Self> {
name.parse()
.inspect_err(|err| {
debug!("ignoring {name} which is no ID while listing {tpe}: {err}",);
debug!("ignoring {name} which is no ID while listing {tpe}: {err}");
})
.ok()
}
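
Note: `parse_some` logs at debug level and skips names that are not valid IDs instead of returning an error. A hedged usage sketch (the `names` list and the `FileType::Snapshot` variant are assumptions for illustration):

// Illustrative only: keep the entries that parse as IDs and silently skip
// anything else encountered while listing.
let names = vec![String::from("not-an-id")];
let ids: Vec<Id> = names
    .iter()
    .filter_map(|name| Id::parse_some(name, FileType::Snapshot))
    .collect();
// "not-an-id" is not valid hex, so it is logged and skipped; `ids` stays empty.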
5 changes: 0 additions & 5 deletions crates/core/src/index/binarysorted.rs
@@ -104,11 +104,6 @@ impl IndexCollector {
&self.0[BlobType::Tree].packs
}

#[must_use]
pub fn data_packs(&self) -> &Vec<(PackId, u32)> {
&self.0[BlobType::Data].packs
}

// Turns Collector into an index by sorting the entries by ID.
#[must_use]
pub fn into_index(self) -> Index {
21 changes: 0 additions & 21 deletions crates/core/src/repofile/snapshotfile.rs
@@ -818,27 +818,6 @@ impl SnapshotFile {
Self::from_backend(be, &SnapshotId::from(id))
}

/// Get a list of [`SnapshotFile`]s from the backend by supplying a list of/parts of their Ids
///
/// # Arguments
///
/// * `be` - The backend to use
/// * `ids` - The list of (parts of the) ids of the snapshots
/// * `p` - A progress bar to use
///
/// # Errors
///
/// * If the string is not a valid hexadecimal string
/// * If no id could be found.
/// * If the id is not unique.
pub(crate) fn from_ids<B: DecryptReadBackend, T: AsRef<str>>(
be: &B,
ids: &[T],
p: &Progress,
) -> RusticResult<Vec<Self>> {
Self::update_from_ids(be, Vec::new(), ids, p)
}
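
Note: the removed `from_ids` simply delegated to `update_from_ids` with an empty starting list, so call sites can be migrated mechanically; a hedged sketch based on the removed body:

// Before (removed):
// let snaps = SnapshotFile::from_ids(be, ids, p)?;
// After: start from an empty snapshot list, exactly as the removed wrapper did.
let snaps = SnapshotFile::update_from_ids(be, Vec::new(), ids, p)?;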

/// Update a list of [`SnapshotFile`]s from the backend by supplying a list of/parts of their Ids
///
/// # Arguments