mirror of
https://github.com/fluencelabs/rust-libp2p
synced 2025-06-27 08:41:36 +00:00
Run rustfmt on the code
@@ -21,6 +21,7 @@
 use Datastore;
 use futures::Future;
 use futures::stream::{Stream, iter_ok};
+use parking_lot::Mutex;
 use query::{Query, naive_apply_query};
 use serde::Serialize;
 use serde::de::DeserializeOwned;
@@ -34,7 +35,6 @@ use std::io::Error as IoError;
 use std::io::ErrorKind as IoErrorKind;
 use std::io::Read;
 use std::path::PathBuf;
-use parking_lot::Mutex;
 use tempfile::NamedTempFile;
 
 /// Implementation of `Datastore` that uses a single plain JSON file.
@@ -52,8 +52,7 @@ impl<T> JsonFileDatastore<T>
     /// will attempt to load an existing set of values from it (which can result in an error).
     /// Otherwise if the path doesn't exist, a new empty datastore will be created.
     pub fn new<P>(path: P) -> Result<JsonFileDatastore<T>, IoError>
-    where
-        P: Into<PathBuf>,
+        where P: Into<PathBuf>
     {
         let path = path.into();
 
@@ -84,24 +83,18 @@ impl<T> JsonFileDatastore<T>
                         for (key, value) in map.into_iter() {
                             let value = match from_value(value) {
                                 Ok(v) => v,
-                                Err(err) => return Err(IoError::new(
-                                    IoErrorKind::InvalidData,
-                                    err,
-                                )),
+                                Err(err) => return Err(IoError::new(IoErrorKind::InvalidData, err)),
                             };
                             out.insert(key, value);
                         }
                         out
-                    },
+                    }
                     Ok(_) => {
-                        return Err(IoError::new(
-                            IoErrorKind::InvalidData,
-                            "expected JSON object",
-                        ));
+                        return Err(IoError::new(IoErrorKind::InvalidData, "expected JSON object"));
                     }
                     Err(err) => {
                         return Err(IoError::new(IoErrorKind::InvalidData, err));
-                    },
+                    }
                 }
             }
         };
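
For context on the hunk above: the loader walks the parsed JSON object, turns each value back into a Rust value with `from_value`, and reports any failure as an `InvalidData` I/O error. A minimal standalone sketch of that pattern, assuming `serde_json` and a plain `HashMap`; the `decode` helper and the `Vec<u8>` value type are invented for illustration, the real code deserializes into the datastore's generic `T`:

extern crate serde_json;

use serde_json::{from_value, Value};
use std::collections::HashMap;
use std::io::Error as IoError;
use std::io::ErrorKind as IoErrorKind;

// Decode a top-level JSON object into a map of typed values, reporting every
// failure as an `InvalidData` I/O error, just like the datastore loader above.
fn decode(json: &str) -> Result<HashMap<String, Vec<u8>>, IoError> {
    let parsed: Value = serde_json::from_str(json)
        .map_err(|err| IoError::new(IoErrorKind::InvalidData, err))?;
    match parsed {
        Value::Object(map) => {
            let mut out = HashMap::new();
            for (key, value) in map.into_iter() {
                let value = match from_value(value) {
                    Ok(v) => v,
                    Err(err) => return Err(IoError::new(IoErrorKind::InvalidData, err)),
                };
                out.insert(key, value);
            }
            Ok(out)
        }
        _ => Err(IoError::new(IoErrorKind::InvalidData, "expected JSON object")),
    }
}

fn main() {
    let decoded = decode(r#"{"foo": [1, 2, 3]}"#).unwrap();
    println!("{:?}", decoded); // {"foo": [1, 2, 3]}
}
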
@@ -120,15 +113,19 @@ impl<T> JsonFileDatastore<T>
     pub fn flush(&self) -> Result<(), IoError> {
         // Create a temporary file in the same directory as the destination, which avoids the
         // problem of having a file cleaner delete our file while we use it.
-        let self_path_parent = self.path.parent().ok_or(IoError::new(
+        let self_path_parent = self.path
+            .parent()
+            .ok_or(IoError::new(
             IoErrorKind::Other,
             "couldn't get parent directory of destination",
         ))?;
         let mut temporary_file = NamedTempFile::new_in(self_path_parent)?;
 
         let content = self.content.lock();
-        to_writer(&mut temporary_file,
-            &content.iter().map(|(k, v)| (k.clone(), to_value(v).unwrap())).collect::<Map<_, _>>())?; // TODO: panic!
+        to_writer(
+            &mut temporary_file,
+            &content.iter().map(|(k, v)| (k.clone(), to_value(v).unwrap())).collect::<Map<_, _>>(),
+        )?; // TODO: panic!
         temporary_file.sync_data()?;
 
         // Note that `persist` will fail if we try to persist across filesystems. However that
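
The `flush` hunk above is only a reformat, but the pattern it touches is worth spelling out: write the new contents into a `NamedTempFile` created next to the destination, sync it, then persist it over the old file, so the final rename never crosses a filesystem. A rough standalone sketch of that pattern with the `tempfile` crate; the `atomic_write` helper and the path used in `main` are invented for this example:

extern crate tempfile;

use std::fs;
use std::io::{Error as IoError, ErrorKind as IoErrorKind, Write};
use std::path::Path;
use tempfile::NamedTempFile;

// Write the new content to a temporary file that lives next to the destination,
// flush it to disk, then atomically move it over the old file. Readers either see
// the complete old content or the complete new content, never a half-written file.
fn atomic_write(dest: &Path, content: &[u8]) -> Result<(), IoError> {
    let parent = dest.parent().ok_or(IoError::new(
        IoErrorKind::Other,
        "couldn't get parent directory of destination",
    ))?;
    let mut temporary_file = NamedTempFile::new_in(parent)?;
    temporary_file.write_all(content)?;
    temporary_file.as_file().sync_data()?;
    // `persist` renames on the same filesystem, which is why the temporary file
    // was created in the destination's own directory.
    temporary_file.persist(dest).map_err(|err| err.error)?;
    Ok(())
}

fn main() {
    let path = std::env::temp_dir().join("datastore.json");
    atomic_write(&path, b"{}").unwrap();
    println!("wrote {:?}", path);
    fs::remove_file(&path).unwrap();
}
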
@@ -174,11 +171,7 @@ impl<T> Datastore<T> for JsonFileDatastore<T>
 
         let content_stream = iter_ok(content.iter().filter_map(|(key, value)| {
             // Skip values that are malformed.
-            let value = if keys_only {
-                Default::default()
-            } else {
-                value.clone()
-            };
+            let value = if keys_only { Default::default() } else { value.clone() };
 
             Some((key.clone(), value))
         }));
@@ -253,8 +246,7 @@ mod tests {
         datastore.put("bar1".into(), vec![0, 255, 127]);
         datastore.flush().unwrap();
 
-        let query = datastore
-            .query(Query {
+        let query = datastore.query(Query {
             prefix: "fo".into(),
             filters: vec![
                 Filter {
@@ -104,8 +104,7 @@ pub fn naive_apply_query<'a, S, V>(stream: S, query: Query<'a, V>)
 /// Skips the `skip` first element of a stream and only returns `limit` elements.
 #[inline]
 pub fn naive_apply_skip_limit<S, T>(stream: S, skip: u64, limit: u64) -> StreamTake<StreamSkip<S>>
-where
-    S: Stream<Item = (String, T), Error = IoError>,
+    where S: Stream<Item = (String, T), Error = IoError>
 {
     stream.skip(skip).take(limit)
 }
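
`naive_apply_skip_limit` is nothing more than `skip` followed by `take` on a futures 0.1 stream. A small self-contained sketch of the same combination on an in-memory stream of `(key, value)` pairs; the sample entries are made up:

extern crate futures;

use futures::{Future, Stream};
use futures::stream::iter_ok;
use std::io::Error as IoError;

fn main() {
    // An in-memory stand-in for the keyed stream the datastore query produces.
    let entries = vec![
        ("foo".to_string(), 1u32),
        ("foo1".to_string(), 2),
        ("bar".to_string(), 3),
        ("foo2".to_string(), 4),
    ];

    // Same shape as `naive_apply_skip_limit`: drop `skip` items, keep at most `limit`.
    let page: Vec<_> = iter_ok::<_, IoError>(entries)
        .skip(1)
        .take(2)
        .collect()
        .wait()
        .unwrap();
    println!("{:?}", page); // [("foo1", 2), ("bar", 3)]
}
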
@@ -113,8 +112,7 @@ where
 /// Filters the result of a stream to empty values if `keys_only` is true.
 #[inline]
 pub fn naive_apply_keys_only<S, T>(stream: S, keys_only: bool) -> NaiveKeysOnlyApply<S>
-where
-    S: Stream<Item = (String, T), Error = IoError>,
+    where S: Stream<Item = (String, T), Error = IoError>
 {
     NaiveKeysOnlyApply {
         keys_only: keys_only,
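
`naive_apply_keys_only` keeps every key but replaces the value with `Default::default()` when `keys_only` is set. Roughly the same effect, sketched with a plain `map` over a futures 0.1 stream; illustrative only, the real adapter is the `NaiveKeysOnlyApply` type reformatted in the hunks below:

extern crate futures;

use futures::{Future, Stream};
use futures::stream::iter_ok;
use std::io::Error as IoError;

fn main() {
    let keys_only = true;
    let entries = vec![("foo".to_string(), vec![1u8, 2]), ("bar".to_string(), vec![3])];

    // Keep the keys, blank out the values when only the keys are of interest.
    let out: Vec<(String, Vec<u8>)> = iter_ok::<_, IoError>(entries)
        .map(move |(key, value)| {
            if keys_only { (key, Default::default()) } else { (key, value) }
        })
        .collect()
        .wait()
        .unwrap();
    println!("{:?}", out); // [("foo", []), ("bar", [])]
}
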
@@ -130,8 +128,7 @@ pub struct NaiveKeysOnlyApply<S> {
 }
 
 impl<S, T> Stream for NaiveKeysOnlyApply<S>
-where
-    S: Stream<Item = (String, T), Error = IoError>,
+    where S: Stream<Item = (String, T), Error = IoError>,
     T: Default
 {
     type Item = (String, T);
@@ -153,13 +150,9 @@ where
 /// Filters the result of a stream to only keep the results with a prefix.
 #[inline]
 pub fn naive_apply_prefix<'a, S, T>(stream: S, prefix: Cow<'a, str>) -> NaivePrefixApply<'a, S>
-where
-    S: Stream<Item = (String, T), Error = IoError>,
+    where S: Stream<Item = (String, T), Error = IoError>
 {
-    NaivePrefixApply {
-        prefix: prefix,
-        stream: stream,
-    }
+    NaivePrefixApply { prefix: prefix, stream: stream }
 }
 
 /// Returned by `naive_apply_prefix`.
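
`naive_apply_prefix` keeps only the entries whose key starts with the given prefix. The same idea, sketched with a plain `filter` on a futures 0.1 stream; sample data invented:

extern crate futures;

use futures::{Future, Stream};
use futures::stream::iter_ok;
use std::io::Error as IoError;

fn main() {
    let entries = vec![
        ("foo".to_string(), 1u32),
        ("food".to_string(), 2),
        ("bar".to_string(), 3),
    ];
    let prefix = "fo".to_string();

    // Keep only the entries whose key starts with the prefix.
    let kept: Vec<_> = iter_ok::<_, IoError>(entries)
        .filter(move |&(ref key, _)| key.starts_with(&prefix))
        .collect()
        .wait()
        .unwrap();
    println!("{:?}", kept); // [("foo", 1), ("food", 2)]
}
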
@@ -170,8 +163,7 @@ pub struct NaivePrefixApply<'a, S> {
 }
 
 impl<'a, S, T> Stream for NaivePrefixApply<'a, S>
-where
-    S: Stream<Item = (String, T), Error = IoError>,
+    where S: Stream<Item = (String, T), Error = IoError>
 {
     type Item = (String, T);
     type Error = IoError;
@@ -195,19 +187,17 @@ where
 /// Applies orderings on the stream data. Will simply pass data through if the list of orderings
 /// is empty. Otherwise will need to collect.
 pub fn naive_apply_ordered<'a, S, I, V>(stream: S, orders_iter: I) -> NaiveApplyOrdered<'a, S, V>
-where
-    S: Stream<Item = (String, V), Error = IoError> + 'a,
+    where S: Stream<Item = (String, V), Error = IoError> + 'a,
     I: IntoIterator<Item = Order>,
     I::IntoIter: 'a,
-    V: Ord + 'static,
+    V: Ord + 'static
 {
     let orders_iter = orders_iter.into_iter();
     if orders_iter.size_hint().1 == Some(0) {
         return NaiveApplyOrdered { inner: NaiveApplyOrderedInner::PassThrough(stream) };
     }
 
-    let collected = stream
-        .collect()
+    let collected = stream.collect()
         .and_then(move |mut collected| {
             for order in orders_iter {
                 match order {
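
`naive_apply_ordered` passes the stream through untouched when there are no orderings; otherwise it has to buffer: collect everything, sort, and re-emit the sorted entries as a stream. A minimal sketch of that collect-then-sort branch, sorting by value ascending purely as an example, since the crate's actual `Order` variants are not visible in this diff:

extern crate futures;

use futures::{Future, Stream};
use futures::stream::iter_ok;
use std::io::Error as IoError;

fn main() {
    let entries = vec![("b".to_string(), 2u32), ("a".to_string(), 3), ("c".to_string(), 1)];

    // The ordering path cannot stay lazy: buffer everything, sort the buffer,
    // then turn it back into a stream for the rest of the pipeline.
    let ordered = iter_ok::<_, IoError>(entries)
        .collect()
        .map(|mut all| {
            all.sort_by(|a, b| a.1.cmp(&b.1));
            iter_ok::<_, IoError>(all)
        })
        .flatten_stream();

    let out: Vec<_> = ordered.collect().wait().unwrap();
    println!("{:?}", out); // [("c", 1), ("b", 2), ("a", 3)]
}
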
@@ -243,8 +233,7 @@ enum NaiveApplyOrderedInner<'a, S, T> {
 }
 
 impl<'a, S, V> Stream for NaiveApplyOrdered<'a, S, V>
-where
-    S: Stream<Item = (String, V), Error = IoError>,
+    where S: Stream<Item = (String, V), Error = IoError>
 {
     type Item = (String, V);
     type Error = IoError;
@@ -261,8 +250,7 @@ where
 /// Filters the result of a stream to apply a set of filters.
 #[inline]
 pub fn naive_apply_filters<'a, S, I, V>(stream: S, filters: I) -> NaiveFiltersApply<'a, S, I>
-where
-    S: Stream<Item = (String, V), Error = IoError>,
+    where S: Stream<Item = (String, V), Error = IoError>,
     I: Iterator<Item = Filter<'a, V>> + Clone,
     V: 'a
 {
@@ -282,13 +270,9 @@ pub struct NaiveFiltersApply<'a, S, I> {
 }
 
 impl<'a, S, I, T> Stream for NaiveFiltersApply<'a, S, I>
-where
-    S: Stream<
-        Item = (String, T),
-        Error = IoError,
-    >,
+    where S: Stream<Item = (String, T), Error = IoError>,
     I: Iterator<Item = Filter<'a, T>> + Clone,
-    T: Ord + 'a,
+    T: Ord + 'a
 {
     type Item = (String, T);
     type Error = IoError;