Extractor rewrite
This commit is contained in:
131
crates/pile-value/src/source/dir.rs
Normal file
131
crates/pile-value/src/source/dir.rs
Normal file
@@ -0,0 +1,131 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use pile_config::Label;
|
||||
use std::{path::PathBuf, sync::Arc};
|
||||
use tokio_stream::wrappers::ReceiverStream;
|
||||
use walkdir::WalkDir;
|
||||
|
||||
use crate::{
|
||||
source::{DataSource, misc::path_ts_latest},
|
||||
value::Item,
|
||||
};
|
||||
|
||||
/// A [DataSource] backed by a directory on the local filesystem.
#[derive(Debug)]
pub struct DirDataSource {
    /// Name of this source, cloned from configuration.
    pub name: Label,
    /// Root directory all keys are resolved against.
    pub dir: PathBuf,

    /// When true, `.toml` files are treated as sidecar metadata:
    /// they are hidden from `get`/`iter` and attached to their
    /// matching item instead.
    pub sidecars: bool,
}
|
||||
|
||||
impl DirDataSource {
|
||||
pub fn new(name: &Label, dir: PathBuf, sidecars: bool) -> Self {
|
||||
Self {
|
||||
name: name.clone(),
|
||||
dir,
|
||||
sidecars,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl DataSource for Arc<DirDataSource> {
|
||||
async fn get(&self, key: &str) -> Result<Option<Item>, std::io::Error> {
|
||||
let key = match key.parse::<PathBuf>() {
|
||||
Ok(x) => self.dir.join(x),
|
||||
Err(_) => return Ok(None),
|
||||
};
|
||||
|
||||
if !key.is_file() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
// Ignore toml files if sidecars are enabled
|
||||
if self.sidecars && key.extension().and_then(|x| x.to_str()) == Some("toml") {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
return Ok(Some(Item::File {
|
||||
source: Arc::clone(self),
|
||||
mime: mime_guess::from_path(&key).first_or_octet_stream(),
|
||||
path: key.clone(),
|
||||
sidecar: self.sidecars.then(|| {
|
||||
Box::new(Item::File {
|
||||
source: Arc::clone(self),
|
||||
mime: mime_guess::from_path(key.with_extension("toml")).first_or_octet_stream(),
|
||||
path: key.with_extension("toml"),
|
||||
sidecar: None,
|
||||
})
|
||||
}),
|
||||
}));
|
||||
}
|
||||
|
||||
fn iter(&self) -> ReceiverStream<Result<Item, std::io::Error>> {
|
||||
let (tx, rx) = tokio::sync::mpsc::channel(64);
|
||||
let source = Arc::clone(self);
|
||||
|
||||
let dir = self.dir.clone();
|
||||
tokio::task::spawn_blocking(move || {
|
||||
for entry in WalkDir::new(dir) {
|
||||
let entry = match entry {
|
||||
Err(e) => {
|
||||
let msg = format!("walkdir error: {e:?}");
|
||||
let err = e.into_io_error().unwrap_or(std::io::Error::other(msg));
|
||||
if tx.blocking_send(Err(err)).is_err() {
|
||||
return;
|
||||
}
|
||||
continue;
|
||||
}
|
||||
Ok(e) => e,
|
||||
};
|
||||
|
||||
if entry.file_type().is_dir() {
|
||||
continue;
|
||||
}
|
||||
|
||||
let path = entry.into_path();
|
||||
|
||||
let item = match path.extension().and_then(|x| x.to_str()) {
|
||||
None => continue,
|
||||
Some("toml") if source.sidecars => continue,
|
||||
Some(_) => Item::File {
|
||||
source: Arc::clone(&source),
|
||||
mime: mime_guess::from_path(&path).first_or_octet_stream(),
|
||||
path: path.clone(),
|
||||
|
||||
sidecar: source.sidecars.then(|| {
|
||||
Box::new(Item::File {
|
||||
source: Arc::clone(&source),
|
||||
mime: mime_guess::from_path(path.with_extension("toml"))
|
||||
.first_or_octet_stream(),
|
||||
path: path.with_extension("toml"),
|
||||
sidecar: None,
|
||||
})
|
||||
}),
|
||||
},
|
||||
};
|
||||
|
||||
if tx.blocking_send(Ok(item)).is_err() {
|
||||
return;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
ReceiverStream::new(rx)
|
||||
}
|
||||
|
||||
async fn latest_change(&self) -> Result<Option<DateTime<Utc>>, std::io::Error> {
|
||||
let mut ts: Option<DateTime<Utc>> = None;
|
||||
|
||||
if !self.dir.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let new = path_ts_latest(&self.dir)?;
|
||||
match (ts, new) {
|
||||
(_, None) => {}
|
||||
(None, Some(new)) => ts = Some(new),
|
||||
(Some(old), Some(new)) => ts = Some(old.max(new)),
|
||||
};
|
||||
|
||||
return Ok(ts);
|
||||
}
|
||||
}
|
||||
121
crates/pile-value/src/source/misc.rs
Normal file
121
crates/pile-value/src/source/misc.rs
Normal file
@@ -0,0 +1,121 @@
|
||||
use chrono::{DateTime, Utc};
|
||||
use std::fs;
|
||||
use std::path::Path;
|
||||
|
||||
/// Returns the age of a path as a [DateTime].
|
||||
/// - If the path doesn't exist, returns [None]
|
||||
/// - If it's a file, returns the modified time
|
||||
/// - If it's a directory, returns the LATEST modified time of all files within
|
||||
pub fn path_ts_latest(path: impl AsRef<Path>) -> Result<Option<DateTime<Utc>>, std::io::Error> {
|
||||
let path = path.as_ref();
|
||||
if !path.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let metadata = fs::metadata(path)?;
|
||||
|
||||
if metadata.is_file() {
|
||||
let modified = metadata.modified()?;
|
||||
Ok(Some(modified.into()))
|
||||
} else if metadata.is_dir() {
|
||||
find_latest_modified(path)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns the age of a path as a [DateTime].
|
||||
/// - If the path doesn't exist, returns [None]
|
||||
/// - If it's a file, returns the modified time
|
||||
/// - If it's a directory, returns the EARLIEST modified time of all files within
|
||||
pub fn path_ts_earliest(path: impl AsRef<Path>) -> Result<Option<DateTime<Utc>>, std::io::Error> {
|
||||
let path = path.as_ref();
|
||||
if !path.exists() {
|
||||
return Ok(None);
|
||||
}
|
||||
|
||||
let metadata = fs::metadata(path)?;
|
||||
|
||||
if metadata.is_file() {
|
||||
let modified = metadata.modified()?;
|
||||
Ok(Some(modified.into()))
|
||||
} else if metadata.is_dir() {
|
||||
find_earliest_modified(path)
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
fn find_latest_modified(dir: &Path) -> Result<Option<DateTime<Utc>>, std::io::Error> {
|
||||
let mut latest: Option<DateTime<Utc>> = None;
|
||||
|
||||
// Include the directory's own modification time
|
||||
let dir_metadata = fs::metadata(dir)?;
|
||||
if let Ok(modified) = dir_metadata.modified() {
|
||||
let dt: DateTime<Utc> = modified.into();
|
||||
latest = Some(dt);
|
||||
}
|
||||
|
||||
let entries = fs::read_dir(dir)?;
|
||||
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
let metadata = entry.metadata()?;
|
||||
|
||||
if metadata.is_file() {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
let dt: DateTime<Utc> = modified.into();
|
||||
latest = Some(match latest {
|
||||
Some(prev) if prev > dt => prev,
|
||||
_ => dt,
|
||||
});
|
||||
}
|
||||
} else if metadata.is_dir()
|
||||
&& let Some(dir_latest) = find_latest_modified(&path)?
|
||||
{
|
||||
latest = Some(match latest {
|
||||
Some(prev) if prev > dir_latest => prev,
|
||||
_ => dir_latest,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(latest);
|
||||
}
|
||||
|
||||
fn find_earliest_modified(dir: &Path) -> Result<Option<DateTime<Utc>>, std::io::Error> {
|
||||
let mut earliest: Option<DateTime<Utc>> = None;
|
||||
|
||||
// Include the directory's own modification time
|
||||
let dir_metadata = fs::metadata(dir)?;
|
||||
if let Ok(modified) = dir_metadata.modified() {
|
||||
let dt: DateTime<Utc> = modified.into();
|
||||
earliest = Some(dt);
|
||||
}
|
||||
|
||||
let entries = fs::read_dir(dir)?;
|
||||
|
||||
for entry in entries.flatten() {
|
||||
let path = entry.path();
|
||||
let metadata = entry.metadata()?;
|
||||
|
||||
if metadata.is_file() {
|
||||
if let Ok(modified) = metadata.modified() {
|
||||
let dt: DateTime<Utc> = modified.into();
|
||||
earliest = Some(match earliest {
|
||||
Some(prev) if prev < dt => prev,
|
||||
_ => dt,
|
||||
});
|
||||
}
|
||||
} else if metadata.is_dir()
|
||||
&& let Some(dir_earliest) = find_earliest_modified(&path)?
|
||||
{
|
||||
earliest = Some(match earliest {
|
||||
Some(prev) if prev < dir_earliest => prev,
|
||||
_ => dir_earliest,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
return Ok(earliest);
|
||||
}
|
||||
27
crates/pile-value/src/source/mod.rs
Normal file
27
crates/pile-value/src/source/mod.rs
Normal file
@@ -0,0 +1,27 @@
|
||||
mod dir;
|
||||
pub use dir::*;
|
||||
|
||||
mod s3;
|
||||
pub use s3::*;
|
||||
|
||||
pub mod misc;
|
||||
|
||||
use chrono::{DateTime, Utc};
|
||||
use tokio_stream::wrappers::ReceiverStream;
|
||||
|
||||
/// A read-only set of [Item]s.
pub trait DataSource {
    /// Get an item from this datasource
    ///
    /// Returns `Ok(None)` when `key` does not resolve to an item.
    fn get(
        &self,
        key: &str,
    ) -> impl Future<Output = Result<Option<crate::value::Item>, std::io::Error>> + Send;

    /// Iterate over all items in this source in an arbitrary order
    fn iter(&self) -> ReceiverStream<Result<crate::value::Item, std::io::Error>>;

    /// Return the time of the latest change to the data in this source
    ///
    /// `Ok(None)` means no timestamp could be determined.
    fn latest_change(
        &self,
    ) -> impl Future<Output = Result<Option<DateTime<Utc>>, std::io::Error>> + Send;
}
|
||||
255
crates/pile-value/src/source/s3.rs
Normal file
255
crates/pile-value/src/source/s3.rs
Normal file
@@ -0,0 +1,255 @@
|
||||
use aws_sdk_s3::config::{BehaviorVersion, Credentials, Region};
|
||||
use chrono::{DateTime, Utc};
|
||||
use pile_config::{Label, S3Credentials};
|
||||
use smartstring::{LazyCompact, SmartString};
|
||||
use std::sync::Arc;
|
||||
use tokio_stream::wrappers::ReceiverStream;
|
||||
|
||||
use crate::{source::DataSource, value::Item};
|
||||
|
||||
/// A [DataSource] backed by an S3 bucket (optionally under a key prefix).
#[derive(Debug)]
pub struct S3DataSource {
    /// Name of this source, cloned from configuration.
    pub name: Label,
    /// Bucket all keys are looked up in.
    pub bucket: SmartString<LazyCompact>,
    /// Optional key prefix used when listing objects.
    pub prefix: Option<SmartString<LazyCompact>>,
    /// When true, `.toml` objects are treated as sidecar metadata.
    pub sidecars: bool,
    /// Shared S3 client; wrapped in `Arc` so items can hold it cheaply.
    pub client: Arc<aws_sdk_s3::Client>,
}
|
||||
|
||||
impl S3DataSource {
    /// Build an S3-backed source.
    ///
    /// Constructs a dedicated client from `credentials` and `region`; when a
    /// custom `endpoint` is given (e.g. MinIO), path-style addressing is
    /// forced.
    ///
    /// # Errors
    /// Currently never fails; the `Result` keeps the signature stable should
    /// client construction become fallible.
    pub fn new(
        name: &Label,
        bucket: String,
        prefix: Option<String>,
        endpoint: Option<String>,
        region: String,
        credentials: &S3Credentials,
        sidecars: bool,
    ) -> Result<Self, std::io::Error> {
        let client = {
            let creds = Credentials::new(
                &credentials.access_key_id,
                &credentials.secret_access_key,
                None,
                None,
                "pile",
            );

            let mut s3_config = aws_sdk_s3::config::Builder::new()
                .behavior_version(BehaviorVersion::latest())
                .region(Region::new(region))
                .credentials_provider(creds);

            // Custom endpoints (MinIO etc.) usually require path-style URLs.
            if let Some(ep) = endpoint {
                s3_config = s3_config.endpoint_url(ep).force_path_style(true);
            }

            aws_sdk_s3::Client::from_conf(s3_config.build())
        };

        Ok(Self {
            name: name.clone(),
            bucket: bucket.into(),
            prefix: prefix.map(|x| x.into()),
            sidecars,
            client: Arc::new(client),
        })
    }

    /// Find the sidecar key for `key`, if one exists in the bucket.
    ///
    /// Probes `{key}.toml` first, then `{key with extension replaced}.toml`
    /// (skipped when it would be the same key).
    /// NOTE(review): any `head_object` failure — including transient network
    /// errors — is treated as "no sidecar"; confirm that is acceptable.
    async fn find_sidecar_key(&self, key: &str) -> Option<SmartString<LazyCompact>> {
        // First try {key}.toml
        let full_toml = format!("{key}.toml");
        if self
            .client
            .head_object()
            .bucket(self.bucket.as_str())
            .key(&full_toml)
            .send()
            .await
            .is_ok()
        {
            return Some(full_toml.into());
        }

        // Then try {key-with-extension-stripped}.toml
        let stripped = std::path::Path::new(key).with_extension("toml");
        if let Some(stripped_str) = stripped.to_str()
            && stripped_str != full_toml.as_str()
            && self
                .client
                .head_object()
                .bucket(self.bucket.as_str())
                .key(stripped_str)
                .send()
                .await
                .is_ok()
        {
            return Some(stripped_str.into());
        }

        None
    }

    /// Assemble an [Item::S3] for `key`, attaching a sidecar item when
    /// sidecars are enabled and a matching `.toml` object exists.
    async fn make_item(self: &Arc<Self>, key: impl Into<SmartString<LazyCompact>>) -> Item {
        let key: SmartString<LazyCompact> = key.into();
        let mime = mime_guess::from_path(key.as_str()).first_or_octet_stream();

        // Unlike the directory source, the sidecar is only attached if the
        // sidecar object actually exists (HEAD probe above).
        let sidecar = if self.sidecars {
            self.find_sidecar_key(key.as_str())
                .await
                .map(|sidecar_key| {
                    Box::new(Item::S3 {
                        source: Arc::clone(self),
                        mime: mime_guess::from_path(sidecar_key.as_str()).first_or_octet_stream(),
                        key: sidecar_key,
                        sidecar: None,
                    })
                })
        } else {
            None
        };

        Item::S3 {
            source: Arc::clone(self),
            mime,
            key,
            sidecar,
        }
    }
}
|
||||
|
||||
impl DataSource for Arc<S3DataSource> {
    /// Look up a single object by key.
    ///
    /// Returns `Ok(None)` for sidecar `.toml` keys (when sidecars are
    /// enabled) and for objects S3 reports as not found; other SDK errors
    /// are surfaced as `std::io::Error`.
    async fn get(&self, key: &str) -> Result<Option<Item>, std::io::Error> {
        if self.sidecars && key.ends_with(".toml") {
            return Ok(None);
        }

        // HEAD the object to check existence without downloading it.
        let result = self
            .client
            .head_object()
            .bucket(self.bucket.as_str())
            .key(key)
            .send()
            .await;

        match result {
            Err(sdk_err) => {
                // Only a genuine 404 maps to `None`; everything else is an
                // error the caller should see.
                let not_found = sdk_err
                    .as_service_error()
                    .map(|e| e.is_not_found())
                    .unwrap_or(false);
                if not_found {
                    return Ok(None);
                }
                Err(std::io::Error::other(sdk_err))
            }
            Ok(_) => Ok(Some(self.make_item(key).await)),
        }
    }

    /// Stream every object under `prefix` (if any) as an [Item].
    ///
    /// Pages through `list_objects_v2` using continuation tokens. A listing
    /// error is forwarded once on the channel and ends the stream.
    fn iter(&self) -> ReceiverStream<Result<Item, std::io::Error>> {
        let (tx, rx) = tokio::sync::mpsc::channel(64);
        let source = Arc::clone(self);

        tokio::spawn(async move {
            let mut continuation_token: Option<String> = None;

            loop {
                let mut req = source
                    .client
                    .list_objects_v2()
                    .bucket(source.bucket.as_str());

                if let Some(prefix) = &source.prefix {
                    req = req.prefix(prefix.as_str());
                }

                if let Some(token) = continuation_token {
                    req = req.continuation_token(token);
                }

                let resp = match req.send().await {
                    Err(e) => {
                        // Best effort: the receiver may already be gone.
                        let _ = tx.send(Err(std::io::Error::other(e))).await;
                        break;
                    }
                    Ok(resp) => resp,
                };

                let next_token = resp.next_continuation_token().map(ToOwned::to_owned);
                let is_truncated = resp.is_truncated().unwrap_or(false);

                for obj in resp.contents() {
                    // Keyless entries cannot be represented as items.
                    let key = match obj.key() {
                        Some(k) => k.to_owned(),
                        None => continue,
                    };

                    // Sidecar metadata objects never appear as items.
                    if source.sidecars && key.ends_with(".toml") {
                        continue;
                    }

                    let item = source.make_item(key).await;

                    // Receiver dropped => stop paging entirely.
                    if tx.send(Ok(item)).await.is_err() {
                        return;
                    }
                }

                if !is_truncated {
                    break;
                }
                continuation_token = next_token;
            }
        });

        ReceiverStream::new(rx)
    }

    /// Return the newest `LastModified` across all listed objects.
    ///
    /// NOTE(review): a listing failure yields `Ok(None)` rather than `Err`,
    /// unlike the directory source, which propagates I/O errors — confirm
    /// this asymmetry is intended.
    async fn latest_change(&self) -> Result<Option<DateTime<Utc>>, std::io::Error> {
        let mut ts: Option<DateTime<Utc>> = None;
        let mut continuation_token: Option<String> = None;

        loop {
            let mut req = self.client.list_objects_v2().bucket(self.bucket.as_str());

            if let Some(prefix) = &self.prefix {
                req = req.prefix(prefix.as_str());
            }

            if let Some(token) = continuation_token {
                req = req.continuation_token(token);
            }

            let resp = match req.send().await {
                Err(_) => return Ok(None),
                Ok(resp) => resp,
            };

            let next_token = resp.next_continuation_token().map(ToOwned::to_owned);
            let is_truncated = resp.is_truncated().unwrap_or(false);

            for obj in resp.contents() {
                if let Some(last_modified) = obj.last_modified() {
                    // `from_timestamp` returns None for out-of-range values;
                    // such entries are simply skipped.
                    let dt = DateTime::from_timestamp(
                        last_modified.secs(),
                        last_modified.subsec_nanos(),
                    );
                    if let Some(dt) = dt {
                        ts = Some(match ts {
                            None => dt,
                            Some(prev) => prev.max(dt),
                        });
                    }
                }
            }

            if !is_truncated {
                break;
            }
            continuation_token = next_token;
        }

        Ok(ts)
    }
}
|
||||
158
crates/pile-value/src/source/s3reader.rs
Normal file
158
crates/pile-value/src/source/s3reader.rs
Normal file
@@ -0,0 +1,158 @@
|
||||
use aws_sdk_s3::{error::SdkError, operation::get_object::GetObjectError};
|
||||
use mime::Mime;
|
||||
use std::io::{Error as IoError, Seek, SeekFrom, Write};
|
||||
use thiserror::Error;
|
||||
|
||||
use super::S3Client;
|
||||
use crate::retry;
|
||||
|
||||
/// Errors that can occur while reading an S3 object through [S3Reader].
#[derive(Debug, Error)]
#[expect(clippy::large_enum_variant)]
pub enum S3ReaderError {
    /// The `GetObject` request itself failed.
    #[error("sdk error")]
    SdkError(#[from] SdkError<GetObjectError>),

    /// The response body stream failed while being collected.
    #[error("byte stream error")]
    ByteStreamError(#[from] aws_sdk_s3::primitives::ByteStreamError),

    /// Local I/O failed (e.g. writing to the caller's sink).
    #[error("i/o error")]
    IoError(#[from] IoError),
}
|
||||
|
||||
/// Provides a [`std::io::Read`]-like interface to an S3 object. \
/// This doesn't actually implement [`std::io::Read`] because Read isn't async.
///
/// Also implements [`std::io::Seek`]
pub struct S3Reader {
    // Shared client wrapper (carries retry configuration).
    pub(super) client: S3Client,
    // Bucket and key identifying the object being read.
    pub(super) bucket: String,
    pub(super) key: String,

    // Current read position, in bytes from the start of the object.
    pub(super) cursor: u64,
    // Total object size in bytes, fixed at construction.
    pub(super) size: u64,
    // MIME type guessed from the key.
    pub(super) mime: Mime,
}
|
||||
|
||||
impl S3Reader {
|
||||
pub async fn read(&mut self, mut buf: &mut [u8]) -> Result<usize, S3ReaderError> {
|
||||
let len_left = self.size - self.cursor;
|
||||
if len_left == 0 || buf.is_empty() {
|
||||
return Ok(0);
|
||||
}
|
||||
|
||||
#[expect(clippy::unwrap_used)] // TODO: probably fits?
|
||||
let start_byte = usize::try_from(self.cursor).unwrap();
|
||||
|
||||
#[expect(clippy::unwrap_used)] // usize fits in u64
|
||||
let len_to_read = u64::try_from(buf.len()).unwrap().min(len_left);
|
||||
|
||||
#[expect(clippy::unwrap_used)] // must fit, we called min()
|
||||
let len_to_read = usize::try_from(len_to_read).unwrap();
|
||||
|
||||
let end_byte = start_byte + len_to_read - 1;
|
||||
|
||||
let b = retry!(
|
||||
self.client.retries,
|
||||
self.client
|
||||
.client
|
||||
.get_object()
|
||||
.bucket(self.bucket.as_str())
|
||||
.key(self.key.as_str())
|
||||
.range(format!("bytes={start_byte}-{end_byte}"))
|
||||
.send()
|
||||
.await
|
||||
)?;
|
||||
|
||||
// Looks like `bytes 31000000-31999999/33921176``
|
||||
// println!("{:?}", b.content_range);
|
||||
|
||||
let mut bytes = b.body.collect().await?.into_bytes();
|
||||
bytes.truncate(len_to_read);
|
||||
let l = bytes.len();
|
||||
|
||||
// Memory to memory writes are infallible
|
||||
#[expect(clippy::unwrap_used)]
|
||||
buf.write_all(&bytes).unwrap();
|
||||
|
||||
// Cannot fail, usize should always fit into u64
|
||||
#[expect(clippy::unwrap_used)]
|
||||
{
|
||||
self.cursor += u64::try_from(l).unwrap();
|
||||
}
|
||||
|
||||
return Ok(len_to_read);
|
||||
}
|
||||
|
||||
pub fn is_done(&self) -> bool {
|
||||
return self.cursor == self.size;
|
||||
}
|
||||
|
||||
pub fn mime(&self) -> &Mime {
|
||||
&self.mime
|
||||
}
|
||||
|
||||
/// Write the entire contents of this reader to `r`.
|
||||
///
|
||||
/// This method always downloads the whole object,
|
||||
/// and always preserves `self.cursor`.
|
||||
pub async fn download<W: Write>(&mut self, r: &mut W) -> Result<(), S3ReaderError> {
|
||||
let pos = self.stream_position()?;
|
||||
|
||||
const BUF_LEN: usize = 10_000_000;
|
||||
#[expect(clippy::unwrap_used)] // Cannot fail
|
||||
let mut buf: Box<[u8; BUF_LEN]> = vec![0u8; BUF_LEN].try_into().unwrap();
|
||||
|
||||
while !self.is_done() {
|
||||
let b = self.read(&mut buf[..]).await?;
|
||||
r.write_all(&buf[0..b])?;
|
||||
}
|
||||
|
||||
self.seek(SeekFrom::Start(pos))?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl Seek for S3Reader {
|
||||
fn seek(&mut self, pos: SeekFrom) -> std::io::Result<u64> {
|
||||
match pos {
|
||||
SeekFrom::Start(x) => self.cursor = x.min(self.size - 1),
|
||||
|
||||
// Cannot panic, we handle all cases
|
||||
#[expect(clippy::unwrap_used)]
|
||||
SeekFrom::Current(x) => {
|
||||
if x < 0 {
|
||||
if u64::try_from(x.abs()).unwrap() > self.cursor {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"cannot seek past start",
|
||||
));
|
||||
}
|
||||
self.cursor -= u64::try_from(x.abs()).unwrap();
|
||||
} else {
|
||||
self.cursor += u64::try_from(x).unwrap();
|
||||
}
|
||||
}
|
||||
|
||||
// Cannot panic, we handle all cases
|
||||
#[expect(clippy::unwrap_used)]
|
||||
SeekFrom::End(x) => {
|
||||
if x < 0 {
|
||||
if u64::try_from(x.abs()).unwrap() > self.size {
|
||||
return Err(std::io::Error::new(
|
||||
std::io::ErrorKind::InvalidInput,
|
||||
"cannot seek past start",
|
||||
));
|
||||
}
|
||||
// Cannot fail, is abs
|
||||
self.cursor = self.size - u64::try_from(x.abs()).unwrap();
|
||||
} else {
|
||||
// Cannot fail, is positive
|
||||
self.cursor = self.size + u64::try_from(x).unwrap();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
self.cursor = self.cursor.min(self.size - 1);
|
||||
return Ok(self.cursor);
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user