update api

This commit is contained in:
yggverse 2024-10-25 15:01:17 +03:00
parent 9793a67187
commit a8230bed37
9 changed files with 91 additions and 325 deletions

View file

@ -1,156 +0,0 @@
// Child modules: chunked byte storage (`buffer`) and error kinds (`error`)
pub mod buffer;
pub mod error;
// Re-exports so callers can use `input::Buffer` / `input::Error` directly
pub use buffer::Buffer;
pub use error::Error;
use gio::{prelude::InputStreamExt, Cancellable, InputStream};
use glib::Priority;
// Defaults
// Max bytes requested per stream read when the caller passes no `chunk` (256)
pub const DEFAULT_READ_CHUNK: usize = 0x100;
/// Reader that drains a `gio::InputStream` chunk-by-chunk into a `Buffer`
pub struct Input {
    // Accumulated chunks; returned to the caller when end of stream is reached
    buffer: Buffer,
    // Underlying GIO stream the bytes are read from
    stream: InputStream,
}
impl Input {
// Constructors
/// Create new `Input` from `gio::InputStream`
///
/// https://docs.gtk.org/gio/class.InputStream.html
pub fn new_from_stream(stream: InputStream) -> Self {
Self {
buffer: Buffer::new(),
stream,
}
}
// Actions
/// Synchronously read all bytes from `gio::InputStream` to `input::Buffer`
///
/// Options:
/// * `cancellable` https://docs.gtk.org/gio/class.Cancellable.html
/// * `chunk` max bytes to read per chunk (256 by default)
pub fn read_all(
mut self,
cancelable: Option<Cancellable>,
chunk: Option<usize>,
) -> Result<Buffer, Error> {
loop {
// Continue bytes reading
match self.stream.read_bytes(
match chunk {
Some(value) => value,
None => DEFAULT_READ_CHUNK,
},
match cancelable.clone() {
Some(value) => Some(value),
None => None::<Cancellable>,
}
.as_ref(),
) {
Ok(bytes) => {
// No bytes were read, end of stream
if bytes.len() == 0 {
return Ok(self.buffer);
}
// Save chunk to buffer
match self.buffer.push(bytes) {
Ok(_) => continue,
Err(buffer::Error::Overflow) => return Err(Error::BufferOverflow),
Err(_) => return Err(Error::BufferWrite),
};
}
Err(_) => return Err(Error::StreamChunkRead),
};
}
}
/// Asynchronously read all bytes from `gio::InputStream` to `input::Buffer`,
///
/// applies `callback` function on last byte reading complete.
///
/// Options:
/// * `cancellable` https://docs.gtk.org/gio/class.Cancellable.html (`None::<&Cancellable>` by default)
/// * `priority` e.g. https://docs.gtk.org/glib/const.PRIORITY_DEFAULT.html (`Priority::DEFAULT` by default)
/// * `chunk` optional max bytes to read per chunk (`DEFAULT_READ_CHUNK` by default)
/// * `callback` user function to apply on async iteration complete
pub fn read_all_async(
mut self,
cancelable: Option<Cancellable>,
priority: Option<Priority>,
chunk: Option<usize>,
callback: impl FnOnce(Result<Buffer, Error>) + 'static,
) {
// Continue bytes reading
self.stream.clone().read_bytes_async(
match chunk {
Some(value) => value,
None => DEFAULT_READ_CHUNK,
},
match priority {
Some(value) => value,
None => Priority::DEFAULT,
},
match cancelable.clone() {
Some(value) => Some(value),
None => None::<Cancellable>,
}
.as_ref(),
move |result| {
match result {
Ok(bytes) => {
// No bytes were read, end of stream
if bytes.len() == 0 {
return callback(Ok(self.buffer));
}
// Save chunk to buffer
match self.buffer.push(bytes) {
Err(buffer::Error::Overflow) => {
return callback(Err(Error::BufferOverflow))
}
// Other errors related to write issues @TODO test
Err(_) => return callback(Err(Error::BufferWrite)),
// Async function, nothing to return yet
_ => (),
};
// Continue bytes reading...
self.read_all_async(cancelable, priority, chunk, callback);
}
Err(_) => callback(Err(Error::StreamChunkReadAsync)),
}
},
);
}
// Setters
pub fn set_buffer(&mut self, buffer: Buffer) {
self.buffer = buffer;
}
pub fn set_stream(&mut self, stream: InputStream) {
self.stream = stream;
}
// Getters
/// Get reference to `Buffer`
pub fn buffer(&self) -> &Buffer {
&self.buffer
}
/// Get reference to `gio::InputStream`
pub fn stream(&self) -> &InputStream {
&self.stream
}
}

View file

@ -1,87 +0,0 @@
// Error kinds for buffer operations
pub mod error;
pub use error::Error;
use glib::Bytes;
// Initial capacity reserved in the chunk `Vec` when none is given (1024)
pub const DEFAULT_CAPACITY: usize = 0x400;
// Default `max_size` limit when none is given (0xfffff, ~1 MiB)
pub const DEFAULT_MAX_SIZE: usize = 0xfffff;
/// Growable collection of `glib::Bytes` chunks with an upper size limit
pub struct Buffer {
    // Chunks stored in arrival order
    bytes: Vec<Bytes>,
    // Limit compared against on `push`; per the constructor docs it is meant
    // as max total bytes — NOTE(review): `push` currently compares chunk count,
    // verify intended semantics
    max_size: usize,
}
impl Buffer {
    // Constructors

    /// Create new dynamically allocated `Buffer` with default `capacity` and `max_size` limit
    pub fn new() -> Self {
        Self::new_with_options(Some(DEFAULT_CAPACITY), Some(DEFAULT_MAX_SIZE))
    }

    /// Create new dynamically allocated `Buffer` with options
    ///
    /// Options:
    /// * `capacity` initial chunk slots reserved to reduce reallocations (`DEFAULT_CAPACITY` by default)
    /// * `max_size` max total bytes to prevent memory overflow (`DEFAULT_MAX_SIZE`, ~1 MiB, by default)
    pub fn new_with_options(capacity: Option<usize>, max_size: Option<usize>) -> Self {
        Self {
            bytes: Vec::with_capacity(capacity.unwrap_or(DEFAULT_CAPACITY)),
            max_size: max_size.unwrap_or(DEFAULT_MAX_SIZE),
        }
    }

    // Setters

    /// Set new `Buffer.max_size` value to prevent memory overflow
    ///
    /// Use `DEFAULT_MAX_SIZE` if `None` given.
    pub fn set_max_size(&mut self, value: Option<usize>) {
        self.max_size = value.unwrap_or(DEFAULT_MAX_SIZE);
    }

    // Actions

    /// Push `glib::Bytes` to `Buffer.bytes`
    ///
    /// Return `Error::Overflow` on `Buffer.max_size` reached,
    /// otherwise `Ok` with the new total byte count.
    pub fn push(&mut self, bytes: Bytes) -> Result<usize, Error> {
        // Total BYTES collected plus the incoming chunk.
        // Fix: the previous code used `self.bytes.len()`, which counts
        // stored chunks (Vec length), not bytes — so the `max_size`
        // byte limit was never enforced correctly.
        let total =
            self.bytes.iter().map(|chunk| chunk.len()).sum::<usize>() + bytes.len();
        // Validate overflow
        if total > self.max_size {
            return Err(Error::Overflow);
        }
        // Success
        self.bytes.push(bytes);
        Ok(total)
    }

    // Getters

    /// Get reference to bytes collected
    pub fn bytes(&self) -> &Vec<Bytes> {
        &self.bytes
    }

    /// Return copy of bytes as UTF-8 vector
    pub fn to_utf8(&self) -> Vec<u8> {
        self.bytes
            .iter()
            .flat_map(|chunk| chunk.iter())
            .copied()
            .collect()
    }
}

View file

@ -1,5 +0,0 @@
/// Error kinds produced by `Buffer` operations
///
/// Derives added so callers can log (`Debug`), propagate (`Clone`/`Copy`)
/// and compare (`PartialEq`/`Eq`) errors directly, e.g. in tests.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Error {
    /// `max_size` limit reached on `push`
    Overflow,
    /// Synchronous chunk read from the stream failed
    StreamChunkRead,
    /// Asynchronous chunk read from the stream failed
    StreamChunkReadAsync,
}

View file

@ -1,6 +0,0 @@
/// Error kinds produced by `Input` read operations
///
/// Derives added so callers can log (`Debug`), propagate (`Clone`/`Copy`)
/// and compare (`PartialEq`/`Eq`) errors directly, e.g. in tests.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum Error {
    /// Underlying buffer reached its `max_size` limit
    BufferOverflow,
    /// Underlying buffer failed to store a chunk for another reason
    BufferWrite,
    /// Synchronous chunk read from the stream failed
    StreamChunkRead,
    /// Asynchronous chunk read from the stream failed
    StreamChunkReadAsync,
}