Start major sheet refactoring
This commit is contained in:
parent 6cb7abad4c
commit 4ddfd75b2a

27  Cargo.lock  (generated)
@@ -1756,6 +1756,12 @@ dependencies = [
  "windows-sys 0.52.0",
 ]
 
+[[package]]
+name = "rustversion"
+version = "1.0.14"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
+
 [[package]]
 name = "ryu"
 version = "1.0.16"
@@ -1866,6 +1872,8 @@ dependencies = [
  "relm4-components",
  "relm4-icons",
  "sqlx",
+ "strum",
+ "strum_macros",
  "tokio",
  "walkdir",
 ]
@@ -2169,6 +2177,25 @@ version = "0.10.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
 
+[[package]]
+name = "strum"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "723b93e8addf9aa965ebe2d11da6d7540fa2283fcea14b3371ff055f7ba13f5f"
+
+[[package]]
+name = "strum_macros"
+version = "0.26.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18"
+dependencies = [
+ "heck",
+ "proc-macro2",
+ "quote",
+ "rustversion",
+ "syn 2.0.48",
+]
+
 [[package]]
 name = "subtle"
 version = "2.5.0"
Cargo.toml
@@ -27,5 +27,9 @@ blake3 = "1.5.0"
 dotenvy = "0.15.7"
 chrono = "0.4.33"
 
+strum = "0.26"
+strum_macros = "0.26"
+# strum = { version = "0.26", features = ["derive"] }
+
 [profile.dev.package.sqlx-macros]
 opt-level = 3
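Note (not part of the commit): Cargo.toml now pulls in strum and strum_macros as two separate dependencies, with the single-dependency form (strum plus its "derive" feature) left commented out. As a hedged sketch of the difference, assuming strum 0.26: the derive macros come from strum_macros, while the traits their generated impls rely on (such as IntoEnumIterator) live in strum; the "derive" feature would simply re-export the same macros through strum itself. The Example enum and main function below are illustrative, not from the commit.

// Sketch only: two-crate setup as added in Cargo.toml above.
use strum::IntoEnumIterator; // trait providing `iter()`
use strum_macros::EnumIter;  // derive macro generating the iterator impl

#[derive(Debug, EnumIter)]
enum Example {
    A,
    B,
}

fn main() {
    // Prints Example::A and Example::B in declaration order.
    for variant in Example::iter() {
        println!("{variant:?}");
    }
}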
src/database.rs
@@ -1,9 +1,9 @@
 use std::path::Path;
 
 use log::debug;
-use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
+use sqlx::{migrate::MigrateDatabase, sqlite::SqliteRow, Executor, Sqlite, SqlitePool};
 
-use crate::sheet::{OrphanFile, Sheet};
+use crate::sheet::{EnumSheet, Sheet};
 
 pub struct Database {
     connection: SqlitePool,
@@ -36,43 +36,35 @@ impl Database {
         Ok(connection)
     }
 
-    pub async fn _insert_sheet(&self, sheet: Sheet) -> sqlx::Result<()> {
-        sqlx::query(
-            "
-            INSERT INTO sheets (name, composer_id, path, file_size, file_hash, last_opened)
-            VALUES ($1, $2, $3, $4, $5, $6)
-            ",
-        )
-        .bind(sheet.name)
-        .bind(sheet.composer_id)
-        .bind(sheet.path.to_str().unwrap().to_string())
-        .bind(sheet.file_size as i32)
-        .bind(sheet.file_hash)
-        .bind(sheet.last_opened.timestamp())
-        .execute(&self.connection)
-        .await
-        .map(|_| ())
-    }
+    pub async fn insert_sheet(&self, sheet: &EnumSheet) -> sqlx::Result<()> {
+        sheet
+            .insert_to_database_query()
 
-    pub async fn update_sheet_path(&self, sheet: &Sheet) -> sqlx::Result<()> {
-        sqlx::query("UPDATE sheets SET path = $1 WHERE id = $2")
-            .bind(sheet.path.to_str().unwrap().to_string())
-            .bind(sheet.id)
             .execute(&self.connection)
             .await
             .map(|_| ())
     }
-    pub async fn update_sheet_last_opened(&self, sheet: &Sheet) -> sqlx::Result<()> {
-        sqlx::query("UPDATE sheets SET last_opened = $1 WHERE id = $2")
-            .bind(sheet.last_opened.timestamp())
-            .bind(sheet.id)
+    pub async fn update_sheet_path(&self, sheet: &EnumSheet) -> sqlx::Result<()> {
+        sheet
+            .update_path_in_database_query()
+            .execute(&self.connection)
+            .await
+            .map(|_| ())
+    }
+
+    pub async fn update_sheet_last_opened(&self, sheet: &EnumSheet) -> sqlx::Result<()> {
+        sheet
+            .update_last_opened_in_database_query()
             .execute(&self.connection)
             .await
             .map(|_| ())
             // TODO: check for success
     }
 
-    pub async fn fetch_all_sheets(&self) -> sqlx::Result<Vec<Sheet>> {
+    pub async fn fetch_all_sheets(&self) -> sqlx::Result<Vec<EnumSheet>> {
+        let mut stream = sqlx::query("SELECT * FROM users")
+            .map(|row: SqliteRow| {})
+            .fetch(&mut conn);
         sqlx::query_as::<_, Sheet>("SELECT * FROM sheets")
             .fetch_all(&self.connection)
             .await
@@ -118,4 +110,11 @@ impl Database {
             .fetch_all(&self.connection)
             .await
     }
+
+    pub fn get_executor(&self) -> E
+    where
+        E: Executor,
+    {
+        self.connection
+    }
 }
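Note (not part of the commit): the new get_executor stub does not compile as written, since the type parameter E is never declared and sqlx's Executor trait is implemented for references such as &SqlitePool rather than for an owned pool moved out of &self. A minimal sketch of a compiling accessor under those assumptions (the method name `executor` is illustrative):

use sqlx::SqlitePool;

pub struct Database {
    connection: SqlitePool,
}

impl Database {
    // &SqlitePool implements sqlx::Executor, so handing out a reference to the
    // pool is enough for callers that want to run queries against this Database.
    pub fn executor(&self) -> &SqlitePool {
        &self.connection
    }
}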
src/main.rs
@@ -1,5 +1,6 @@
 mod database;
 mod sheet;
+mod sheet_dao;
 mod ui;
 
 use std::{
@@ -12,7 +13,7 @@ use database::Database;
 use env_logger::Env;
 use log::{debug, error};
 use relm4::RelmApp;
-use sheet::{OrphanFile, Sheet};
+use sheet::Sheet;
 use walkdir::WalkDir;
 
 use crate::ui::app::AppModel;
254  src/sheet.rs
@@ -1,49 +1,134 @@
 use std::{
+    cmp::Ordering,
     fs,
     path::{Path, PathBuf},
 };
 
-use sqlx::{prelude::*, sqlite::SqliteRow};
-// use sqlx::{FromRow, sqlite::SqliteRow, sqlx::Row};
 use chrono::{DateTime, NaiveDateTime, Utc};
+use sqlx::{prelude::FromRow, sqlite::SqliteRow, QueryBuilder};
+use strum_macros::{EnumDiscriminants, EnumIter, EnumMessage};
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct Sheet {
-    pub id: i64,
-    pub name: String,
-    pub composer_id: i64,
-    pub path: PathBuf,
-    pub file_size: u64,
-    pub file_hash: String,
-    pub last_opened: DateTime<Utc>,
+pub trait PdfSheet {
+    fn get_pdf(&self) -> &Pdf;
 }
 
-impl FromRow<'_, SqliteRow> for Sheet {
-    fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
-        Ok(Self {
-            id: row.try_get("id")?,
-            name: row.try_get("name")?,
-            composer_id: row.try_get("composer_id")?,
-            path: row.try_get::<&str, _>("path")?.into(),
-            file_size: row.try_get::<i64, _>("file_size")? as u64,
-            file_hash: row.try_get("file_hash")?,
-            last_opened: NaiveDateTime::from_timestamp_opt(
-                row.try_get::<i64, _>("last_opened")?,
-                0,
-            )
-            .unwrap()
-            .and_utc(),
-        })
-    }
+#[derive(PartialEq, Eq, PartialOrd)]
+pub struct EnumSheet {
+    pub id: i64,
+    pub last_opened: I64DateTime,
+    pub kind: SheetKind,
+}
+
+#[derive(Debug, EnumDiscriminants)]
+#[strum_discriminants(derive(EnumIter, EnumMessage))]
+pub enum SheetKind {
+    #[strum_discriminants(strum(message = "sheets"))] // Message is the sqlite table name
+    Sheet {
+        pdf: Pdf,
+        name: String,
+        composer_id: i64,
+    },
+    #[strum_discriminants(strum(message = "orphans"))] // Message is the sqlite table name
+    Orphan { pdf: Pdf },
+    #[strum_discriminants(strum(message = "books"))] // Message is the sqlite table name
+    Book {
+        pdf: Pdf,
+        name: String,
+        composer_id: i64,
+        sheet_ids: Vec<i64>,
+    },
+    #[strum_discriminants(strum(message = "booksheets"))] // Message is the sqlite table name
+    BookSheet {
+        book_id: i64,
+        first_page: i64,
+        last_page: i64,
+    },
+}
+
+pub enum SheetKindTable {
+    Sheet,
+}
+
+pub trait AnySheet: Ord {}
+
+#[derive(sqlx::FromRow, PartialEq, Eq)]
+pub struct Sheet {
+    id: i64,
+    #[sqlx(flatten)]
+    pdf: Pdf,
+    #[sqlx(try_from = "i64")]
+    last_opened: I64DateTime,
+    name: String,
+    composer_id: i64,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
+pub struct Orphan {
+    id: i64,
+    #[sqlx(flatten)]
+    pdf: Pdf,
+    #[sqlx(try_from = "i64")]
+    last_opened: I64DateTime,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
+pub struct Book {
+    id: i64,
+    #[sqlx(flatten)]
+    pdf: Pdf,
+    #[sqlx(try_from = "i64")]
+    last_opened: I64DateTime,
+    name: String,
+    composer_id: i64,
+    sheet_ids: Vec<i64>,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
+pub struct BookSheet {
+    id: i64,
+    #[sqlx(try_from = "i64")]
+    last_opened: I64DateTime,
+    book_id: i64,
+    first_page: i64,
+    last_page: i64,
+}
+
+#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
+pub struct Pdf {
+    #[sqlx(try_from = "String")]
+    path: PathBuf,
+    file_size: u64,
+    file_hash: String,
 }
 
 #[derive(sqlx::FromRow)]
 pub struct Composer {
-    pub id: i64,
-    pub name: String,
+    id: i64,
+    name: String,
 }
 
-impl Sheet {
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct I64DateTime(DateTime<Utc>);
+
+impl TryFrom<i64> for I64DateTime {
+    type Error = String;
+
+    fn try_from(value: i64) -> Result<Self, Self::Error> {
+        Ok(I64DateTime(
+            NaiveDateTime::from_timestamp_opt(value, 0)
+                .ok_or("Failed converting i64 to DateTime")?
+                .and_utc(),
+        ))
+    }
+}
+
+impl From<I64DateTime> for i64 {
+    fn from(value: I64DateTime) -> Self {
+        value.0.timestamp()
+    }
+}
+
+impl Pdf {
     pub fn validate_path(&self, path: impl AsRef<Path>) -> std::io::Result<bool> {
         // First compare file size since it is faster than hashing
         let file_size = fs::metadata(path.as_ref())?.len();
@@ -59,24 +144,7 @@ impl Sheet {
     }
 }
 
-impl OrphanFile {
-    // TODO: fix duplication
-    pub fn validate_path(&self, path: impl AsRef<Path>) -> std::io::Result<bool> {
-        // First compare file size since it is faster than hashing
-        let file_size = fs::metadata(path.as_ref())?.len();
-        if file_size == self.file_size {
-            let file_content = fs::read(path.as_ref())?;
-            let file_hash = blake3::hash(&file_content);
-            if file_hash.to_string() == self.file_hash {
-                return Ok(true);
-            }
-        }
-
-        Ok(false)
-    }
-}
-
-impl TryFrom<PathBuf> for OrphanFile {
+impl TryFrom<PathBuf> for Pdf {
     type Error = std::io::Error;
 
     fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
@@ -84,38 +152,74 @@ impl TryFrom<PathBuf> for OrphanFile {
         let file_content = fs::read(path.as_path())?;
         let file_hash = blake3::hash(&file_content).to_string();
 
-        Ok(OrphanFile {
-            id: -1,
+        Ok(Pdf {
             path,
             file_size,
             file_hash,
-            last_opened: DateTime::default(),
         })
     }
 }
 
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct OrphanFile {
-    pub id: i64,
-    pub path: PathBuf,
-    pub file_size: u64,
-    pub file_hash: String,
-    pub last_opened: DateTime<Utc>,
-}
-
-impl FromRow<'_, SqliteRow> for OrphanFile {
-    fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
-        Ok(Self {
-            id: row.try_get("id")?,
-            path: row.try_get::<&str, _>("path")?.into(),
-            file_size: row.try_get::<i64, _>("file_size")? as u64,
-            file_hash: row.try_get("file_hash")?,
-            last_opened: NaiveDateTime::from_timestamp_opt(
-                row.try_get::<i64, _>("last_opened")?,
-                0,
-            )
-            .unwrap()
-            .and_utc(),
-        })
+impl PdfSheet for Sheet {
+    fn get_pdf(&self) -> &Pdf {
+        &self.pdf
     }
 }
+
+impl PdfSheet for Orphan {
+    fn get_pdf(&self) -> &Pdf {
+        &self.pdf
+    }
+}
+
+impl PdfSheet for Book {
+    fn get_pdf(&self) -> &Pdf {
+        &self.pdf
+    }
+}
+
+impl AnySheet for Sheet {}
+
+impl Ord for EnumSheet {
+    fn cmp(&self, other: &Self) -> Ordering {
+        self.last_opened.cmp(other.last_opened)
+    }
+}
+
+impl EnumSheet {
+    pub fn update_path_in_database_query(&self) -> String {
+        todo!()
+        // sqlx::query("UPDATE sheets SET path = $1 WHERE id = $2")
+        //     .bind(sheet.path.to_str().unwrap().to_string())
+        //     .bind(sheet.id)
+    }
+    pub fn update_last_opened_in_database_query(&self) -> String {
+        // sqlx::query("UPDATE sheets SET last_opened = $1 WHERE id = $2")
+        //     .bind(sheet.last_opened.timestamp())
+        //     .bind(sheet.id)
+        todo!()
+    }
+    pub fn insert_to_database_query(&self) -> String {
+        todo!()
+        // sqlx::query(
+        //     "
+        // INSERT INTO orphan_files (path, file_size, file_hash, last_opened)
+        // VALUES ($1, $2, $3, $4)
+        //     ",
+        // )
+        // .bind(file.path.to_str().unwrap().to_string())
+        // .bind(file.file_size as i32)
+        // .bind(file.file_hash.clone())
+        // .bind(file.last_opened.timestamp())
+    }
+
+    pub fn get_database_table_name(&self) -> &str {
+        todo!()
+    }
+}
+
+// impl PartialOrd for EnumSheet {
+//     fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+//         Some(self.cmp(other))
+//     }
+// }
100  src/sheet_dao.rs  (new file)
@@ -0,0 +1,100 @@
+use chrono::NaiveDateTime;
+use sqlx::{sqlite::SqliteRow, SqlitePool};
+
+use crate::sheet::{EnumSheet, Pdf, SheetKind, SheetKindDiscriminants};
+
+pub async fn insert_sheet(connection: SqlitePool, sheet: &EnumSheet) -> sqlx::Result<()> {
+    let table = sheet.kind.into::<SheetKindDiscriminants>().get_message();
+    sqlx::query(&format!(
+        "
+        INSERT INTO {} (path, file_size, file_hash, last_opened)
+        VALUES ($1, $2, $3, $4)
+        ",
+        table,
+    ))
+    .bind(sheet.pdf.path.to_str().unwrap().to_string())
+    .bind(sheet.pdf.file_size as i32)
+    .bind(sheet.pdf.file_hash.clone())
+    .bind(sheet.last_opened.timestamp())
+    .execute(&mut connection)
+    .await
+    .map(|_| ())
+}
+
+pub async fn update_sheet_path(connection: SqlitePool, sheet: &EnumSheet) -> sqlx::Result<()> {
+    let table = sheet.kind.into::<SheetKindDiscriminants>().get_message();
+    sqlx::query(&format!("UPDATE {} SET path = $1 WHERE id = $2", table))
+        .bind(sheet.kind.pdf.path.to_str().unwrap().to_string())
+        .bind(sheet.id)
+        .execute(&mut connection)
+        .await
+        .map(|_| ())
+    // TODO: check for success
+}
+
+pub async fn update_sheet_last_opened(
+    connection: SqlitePool,
+    sheet: &EnumSheet,
+) -> sqlx::Result<()> {
+    let table = sheet.kind.into::<SheetKindDiscriminants>().get_message();
+    sqlx::query(&format!(
+        "UPDATE {} SET last_opened = $1 WHERE id = $2",
+        table
+    ))
+    .bind(sheet.last_opened.timestamp())
+    .bind(sheet.id)
+    .execute(&mut connection)
+    .await
+    .map(|_| ())
+    // TODO: check for success
+}
+
+pub async fn fetch_all_sheets(&connection: SqlitePool) -> sqlx::Result<Vec<EnumSheet>> {
+    let mut sheets: Vec<EnumSheet> = Vec::new();
+
+    for kind in SheetKindDiscriminants::iter() {
+        let table = kind.get_message();
+
+        let mut sheets_of_kind = sqlx::query(&format!("SELECT * FROM {}", table))
+            .map(|row: SqliteRow| EnumSheet {
+                id: row.try_get("id")?,
+                last_opened: NaiveDateTime::from_timestamp_opt(
+                    row.try_get::<i64, _>("last_opened")?,
+                    0,
+                )
+                .unwrap()
+                .and_utc(),
+                kind: parse_kind_from_row(kind, row),
+            })
+            .fetch_all(&mut connection)
+            .await?;
+
+        sheets.append(&mut sheets_of_kind);
+    }
+
+    Ok(sheets)
+}
+
+fn parse_kind_from_row(kind: SheetKindDiscriminants, row: SqliteRow) -> sqlx::Result<SheetKind> {
+    Ok(match kind {
+        SheetKindDiscriminants::Sheet => SheetKind::Sheet {
+            name: row.try_get("name")?,
+            composer_id: row.try_get("composer_id")?,
+            pdf: Pdf::from_row(row)?,
+        },
+        SheetKindDiscriminants::Orphan => SheetKind::Orphan {
+            pdf: Pdf::from_row(row)?,
+        },
+        SheetKindDiscriminants::Book => SheetKind::Book {
+            name: row.try_get("name")?,
+            composer_id: row.try_get("composer_id")?,
+            pdf: Pdf::from_row(row)?,
+            sheet_ids: todo!(),
+        },
+        SheetKindDiscriminants::BookSheet => SheetKind::BookSheet {
+            book_id: row.try_get("book_id")?,
+            first_page: row.try_get("first_page")?,
+            last_page: row.try_get("last_page")?,
+        },
+    })
+}
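Note (not part of the commit): fetch_all_sheets above is clearly still in progress — the closure handed to .map() uses ? even though that expression never propagates the errors, and the &connection: SqlitePool signature does not take the pool by reference. As a hedged sketch of the row-mapping part only: sqlx's try_map accepts a fallible closure, and a &SqlitePool can be passed directly as the executor. The table and column names mirror the diff; the function itself is an assumption:

use sqlx::{sqlite::SqliteRow, Row, SqlitePool};

// Sketch: fallible row mapping with try_map, reading (id, last_opened) pairs
// from a `sheets` table whose `last_opened` column stores unix seconds.
pub async fn fetch_sheet_ids(pool: &SqlitePool) -> sqlx::Result<Vec<(i64, i64)>> {
    sqlx::query("SELECT id, last_opened FROM sheets")
        .try_map(|row: SqliteRow| {
            let id: i64 = row.try_get("id")?;
            let last_opened: i64 = row.try_get("last_opened")?;
            Ok((id, last_opened))
        })
        .fetch_all(pool)
        .await
}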