Start major sheet refactoring

This commit is contained in:
Julian Mutter 2024-02-10 00:58:40 +01:00
parent 6cb7abad4c
commit 4ddfd75b2a
6 changed files with 340 additions and 105 deletions

27
Cargo.lock generated
View File

@ -1756,6 +1756,12 @@ dependencies = [
"windows-sys 0.52.0",
]
[[package]]
name = "rustversion"
version = "1.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7ffc183a10b4478d04cbbbfc96d0873219d962dd5accaff2ffbd4ceb7df837f4"
[[package]]
name = "ryu"
version = "1.0.16"
@ -1866,6 +1872,8 @@ dependencies = [
"relm4-components",
"relm4-icons",
"sqlx",
"strum",
"strum_macros",
"tokio",
"walkdir",
]
@ -2169,6 +2177,25 @@ version = "0.10.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623"
[[package]]
name = "strum"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "723b93e8addf9aa965ebe2d11da6d7540fa2283fcea14b3371ff055f7ba13f5f"
[[package]]
name = "strum_macros"
version = "0.26.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18"
dependencies = [
"heck",
"proc-macro2",
"quote",
"rustversion",
"syn 2.0.48",
]
[[package]]
name = "subtle"
version = "2.5.0"

View File

@ -27,5 +27,9 @@ blake3 = "1.5.0"
dotenvy = "0.15.7"
chrono = "0.4.33"
strum = "0.26"
strum_macros = "0.26"
# strum = { version = "0.26", features = ["derive"] }
[profile.dev.package.sqlx-macros]
opt-level = 3

View File

@ -1,9 +1,9 @@
use std::path::Path;
use log::debug;
use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};
use sqlx::{migrate::MigrateDatabase, sqlite::SqliteRow, Executor, Sqlite, SqlitePool};
use crate::sheet::{OrphanFile, Sheet};
use crate::sheet::{EnumSheet, Sheet};
pub struct Database {
connection: SqlitePool,
@ -36,43 +36,35 @@ impl Database {
Ok(connection)
}
pub async fn _insert_sheet(&self, sheet: Sheet) -> sqlx::Result<()> {
sqlx::query(
"
INSERT INTO sheets (name, composer_id, path, file_size, file_hash, last_opened)
VALUES ($1, $2, $3, $4, $5, $6)
",
)
.bind(sheet.name)
.bind(sheet.composer_id)
.bind(sheet.path.to_str().unwrap().to_string())
.bind(sheet.file_size as i32)
.bind(sheet.file_hash)
.bind(sheet.last_opened.timestamp())
.execute(&self.connection)
.await
.map(|_| ())
}
pub async fn update_sheet_path(&self, sheet: &Sheet) -> sqlx::Result<()> {
sqlx::query("UPDATE sheets SET path = $1 WHERE id = $2")
.bind(sheet.path.to_str().unwrap().to_string())
.bind(sheet.id)
/// Inserts `sheet` into the database.
///
/// NOTE(review): `insert_to_database_query()` currently returns `String`
/// (and is `todo!()`), so calling `.execute(...)` on it cannot compile —
/// the query builders need to return bound `sqlx` queries instead.
pub async fn insert_sheet(&self, sheet: &EnumSheet) -> sqlx::Result<()> {
    sheet
        .insert_to_database_query()
        .execute(&self.connection)
        .await
        .map(|_| ())
}
pub async fn update_sheet_last_opened(&self, sheet: &Sheet) -> sqlx::Result<()> {
sqlx::query("UPDATE sheets SET last_opened = $1 WHERE id = $2")
.bind(sheet.last_opened.timestamp())
.bind(sheet.id)
/// Persists a changed file path for `sheet`.
///
/// NOTE(review): `update_path_in_database_query()` returns `String` and is
/// still `todo!()` — `.execute(...)` will not compile until it returns a
/// bound `sqlx` query.
pub async fn update_sheet_path(&self, sheet: &EnumSheet) -> sqlx::Result<()> {
    sheet
        .update_path_in_database_query()
        .execute(&self.connection)
        .await
        .map(|_| ())
}
/// Persists the last-opened timestamp of `sheet`.
///
/// NOTE(review): same issue as the siblings — the query builder is a
/// `todo!()` returning `String`, so `.execute(...)` cannot work yet.
pub async fn update_sheet_last_opened(&self, sheet: &EnumSheet) -> sqlx::Result<()> {
    sheet
        .update_last_opened_in_database_query()
        .execute(&self.connection)
        .await
        .map(|_| ())
    // TODO: check for success
}
pub async fn fetch_all_sheets(&self) -> sqlx::Result<Vec<Sheet>> {
pub async fn fetch_all_sheets(&self) -> sqlx::Result<Vec<EnumSheet>> {
let mut stream = sqlx::query("SELECT * FROM users")
.map(|row: SqliteRow| {})
.fetch(&mut conn);
sqlx::query_as::<_, Sheet>("SELECT * FROM sheets")
.fetch_all(&self.connection)
.await
@ -118,4 +110,11 @@ impl Database {
.fetch_all(&self.connection)
.await
}
pub fn get_executor(&self) -> E
where
E: Executor,
{
self.connection
}
}

View File

@ -1,5 +1,6 @@
mod database;
mod sheet;
mod sheet_dao;
mod ui;
use std::{
@ -12,7 +13,7 @@ use database::Database;
use env_logger::Env;
use log::{debug, error};
use relm4::RelmApp;
use sheet::{OrphanFile, Sheet};
use sheet::Sheet;
use walkdir::WalkDir;
use crate::ui::app::AppModel;

View File

@ -1,49 +1,134 @@
use std::{
cmp::Ordering,
fs,
path::{Path, PathBuf},
};
use sqlx::{prelude::*, sqlite::SqliteRow};
// use sqlx::{FromRow, sqlite::SqliteRow, sqlx::Row};
use chrono::{DateTime, NaiveDateTime, Utc};
use sqlx::{prelude::FromRow, sqlite::SqliteRow, QueryBuilder};
use strum_macros::{EnumDiscriminants, EnumIter, EnumMessage};
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Sheet {
pub id: i64,
pub name: String,
pub composer_id: i64,
pub path: PathBuf,
pub file_size: u64,
pub file_hash: String,
pub last_opened: DateTime<Utc>,
/// Implemented by every sheet kind that is backed by a pdf file on disk.
pub trait PdfSheet {
    /// Borrows the pdf-file metadata (path, file size, file hash).
    fn get_pdf(&self) -> &Pdf;
}
impl FromRow<'_, SqliteRow> for Sheet {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
Ok(Self {
id: row.try_get("id")?,
name: row.try_get("name")?,
composer_id: row.try_get("composer_id")?,
path: row.try_get::<&str, _>("path")?.into(),
file_size: row.try_get::<i64, _>("file_size")? as u64,
file_hash: row.try_get("file_hash")?,
last_opened: NaiveDateTime::from_timestamp_opt(
row.try_get::<i64, _>("last_opened")?,
0,
)
.unwrap()
.and_utc(),
})
}
/// A sheet of any kind, as stored in one of the kind-specific tables.
///
/// NOTE(review): the derived `PartialOrd` would compare `id` first, which
/// disagrees with the manual `Ord` impl below (last_opened only); it also
/// requires `PartialOrd` on `I64DateTime` and `SheetKind`, and `PartialEq`
/// on `SheetKind` — none of which are derived yet. Consider implementing
/// `PartialOrd` in terms of `Ord` instead of deriving it.
#[derive(PartialEq, Eq, PartialOrd)]
pub struct EnumSheet {
    // Row id in the kind-specific table.
    pub id: i64,
    // Stored in sqlite as unix seconds.
    pub last_opened: I64DateTime,
    // Kind-specific payload; also selects which table the row lives in.
    pub kind: SheetKind,
}
/// Kind-specific payload of an `EnumSheet`. Each variant maps to its own
/// sqlite table; the table name is attached to the generated discriminant
/// enum (`SheetKindDiscriminants`) as a strum "message".
#[derive(Debug, EnumDiscriminants)]
#[strum_discriminants(derive(EnumIter, EnumMessage))]
pub enum SheetKind {
    /// A single stand-alone sheet with its own pdf file.
    #[strum_discriminants(strum(message = "sheets"))] // Message is the sqlite table name
    Sheet {
        pdf: Pdf,
        name: String,
        composer_id: i64,
    },
    /// A pdf file with no metadata attached (yet).
    #[strum_discriminants(strum(message = "orphans"))] // Message is the sqlite table name
    Orphan { pdf: Pdf },
    /// A pdf containing several sheets; `sheet_ids` presumably reference
    /// `BookSheet` rows — TODO confirm against the schema.
    #[strum_discriminants(strum(message = "books"))] // Message is the sqlite table name
    Book {
        pdf: Pdf,
        name: String,
        composer_id: i64,
        sheet_ids: Vec<i64>,
    },
    /// A page range inside a book; carries no pdf file of its own.
    #[strum_discriminants(strum(message = "booksheets"))] // Message is the sqlite table name
    BookSheet {
        book_id: i64,
        first_page: i64,
        last_page: i64,
    },
}
// NOTE(review): looks like an abandoned draft — table names now live on
// `SheetKindDiscriminants` via strum messages. Consider removing it.
pub enum SheetKindTable {
    Sheet,
}

// Marker trait for sheet types; the `Ord` supertrait forces every
// implementor to define a total order.
// NOTE(review): `Sheet` (the only implementor below) does not implement
// `Ord` yet, so the impl will not compile as-is.
pub trait AnySheet: Ord {}
/// Row of the `sheets` table (a single stand-alone sheet).
///
/// NOTE(review): coexists with the `SheetKind::Sheet` variant — one of the
/// two representations is presumably transitional in this refactoring.
#[derive(sqlx::FromRow, PartialEq, Eq)]
pub struct Sheet {
    id: i64,
    // The pdf columns (path, file_size, file_hash) are decoded from the
    // same row via `#[sqlx(flatten)]`.
    #[sqlx(flatten)]
    pdf: Pdf,
    // Stored as unix seconds; decoded through `TryFrom<i64>`.
    #[sqlx(try_from = "i64")]
    last_opened: I64DateTime,
    name: String,
    composer_id: i64,
}
/// Row of the `orphans` table: a pdf file with no metadata attached yet.
#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
pub struct Orphan {
    id: i64,
    #[sqlx(flatten)]
    pdf: Pdf,
    // Stored as unix seconds; decoded through `TryFrom<i64>`.
    #[sqlx(try_from = "i64")]
    last_opened: I64DateTime,
}
/// Row of the `books` table: one pdf containing several sheets.
///
/// NOTE(review): `sqlx::FromRow` has no sqlite decoder for `Vec<i64>` —
/// `sheet_ids` will need `#[sqlx(skip)]` plus a separate query, or a join
/// table, before this derive compiles.
#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
pub struct Book {
    id: i64,
    #[sqlx(flatten)]
    pdf: Pdf,
    #[sqlx(try_from = "i64")]
    last_opened: I64DateTime,
    name: String,
    composer_id: i64,
    sheet_ids: Vec<i64>,
}
/// Row of the `booksheets` table: a page range inside a `Book`.
#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
pub struct BookSheet {
    id: i64,
    #[sqlx(try_from = "i64")]
    last_opened: I64DateTime,
    // Parent row in the `books` table.
    book_id: i64,
    first_page: i64,
    last_page: i64,
}
/// The pdf-file columns shared by sheets, orphans and books; embedded in
/// their rows via `#[sqlx(flatten)]`.
///
/// NOTE(review): sqlite integers are `i64`; sqlx provides no sqlite decode
/// for `u64`, so `file_size` likely needs to be `i64` with a checked
/// accessor.
#[derive(sqlx::FromRow, Debug, Clone, PartialEq, Eq)]
pub struct Pdf {
    // Stored as TEXT; converted via `TryFrom<String>`.
    #[sqlx(try_from = "String")]
    path: PathBuf,
    file_size: u64,
    // blake3 hash of the file contents, hex-encoded (see `TryFrom<PathBuf>`).
    file_hash: String,
}
#[derive(sqlx::FromRow)]
pub struct Composer {
pub id: i64,
pub name: String,
id: i64,
name: String,
}
impl Sheet {
/// Wrapper around `DateTime<Utc>` that converts to and from the `i64`
/// unix-seconds representation stored in sqlite.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct I64DateTime(DateTime<Utc>);
impl TryFrom<i64> for I64DateTime {
    type Error = String;

    /// Interprets `value` as unix seconds; fails for timestamps outside
    /// the representable range.
    fn try_from(value: i64) -> Result<Self, Self::Error> {
        match NaiveDateTime::from_timestamp_opt(value, 0) {
            Some(naive) => Ok(I64DateTime(naive.and_utc())),
            None => Err("Failed converting i64 to DateTime".to_string()),
        }
    }
}
impl From<I64DateTime> for i64 {
fn from(value: I64DateTime) -> Self {
value.0.timestamp()
}
}
impl Pdf {
pub fn validate_path(&self, path: impl AsRef<Path>) -> std::io::Result<bool> {
// First compare file size since it is faster than hashing
let file_size = fs::metadata(path.as_ref())?.len();
@ -59,24 +144,7 @@ impl Sheet {
}
}
impl OrphanFile {
// TODO: fix duplication
pub fn validate_path(&self, path: impl AsRef<Path>) -> std::io::Result<bool> {
// First compare file size since it is faster than hashing
let file_size = fs::metadata(path.as_ref())?.len();
if file_size == self.file_size {
let file_content = fs::read(path.as_ref())?;
let file_hash = blake3::hash(&file_content);
if file_hash.to_string() == self.file_hash {
return Ok(true);
}
}
Ok(false)
}
}
impl TryFrom<PathBuf> for OrphanFile {
impl TryFrom<PathBuf> for Pdf {
type Error = std::io::Error;
fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
@ -84,38 +152,74 @@ impl TryFrom<PathBuf> for OrphanFile {
let file_content = fs::read(path.as_path())?;
let file_hash = blake3::hash(&file_content).to_string();
Ok(OrphanFile {
id: -1,
Ok(Pdf {
path,
file_size,
file_hash,
last_opened: DateTime::default(),
})
}
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OrphanFile {
pub id: i64,
pub path: PathBuf,
pub file_size: u64,
pub file_hash: String,
pub last_opened: DateTime<Utc>,
}
impl FromRow<'_, SqliteRow> for OrphanFile {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
Ok(Self {
id: row.try_get("id")?,
path: row.try_get::<&str, _>("path")?.into(),
file_size: row.try_get::<i64, _>("file_size")? as u64,
file_hash: row.try_get("file_hash")?,
last_opened: NaiveDateTime::from_timestamp_opt(
row.try_get::<i64, _>("last_opened")?,
0,
)
.unwrap()
.and_utc(),
})
// The three pdf-backed kinds expose their file through `PdfSheet` so that
// path/hash validation can be written once against the trait.
impl PdfSheet for Sheet {
    fn get_pdf(&self) -> &Pdf {
        &self.pdf
    }
}

impl PdfSheet for Orphan {
    fn get_pdf(&self) -> &Pdf {
        &self.pdf
    }
}

impl PdfSheet for Book {
    fn get_pdf(&self) -> &Pdf {
        &self.pdf
    }
}

// NOTE(review): `AnySheet` requires `Ord`, but `Sheet` only derives
// `PartialEq`/`Eq` — this impl will not compile until `Sheet: Ord`.
impl AnySheet for Sheet {}
impl Ord for EnumSheet {
    /// Orders sheets by their last-opened time only, so a sorted list runs
    /// from least- to most-recently opened.
    fn cmp(&self, other: &Self) -> Ordering {
        // Compare the wrapped DateTime directly: `I64DateTime` itself does
        // not implement `Ord`, and `cmp` takes a reference, not a move
        // (the original passed `other.last_opened` by value).
        self.last_opened.0.cmp(&other.last_opened.0)
    }
}
impl EnumSheet {
    /// Builds the UPDATE statement that persists a changed file path.
    ///
    /// NOTE(review): returns `String`, but `database.rs` calls `.execute()`
    /// on the result — the return type will have to become a bound
    /// `sqlx::query::Query` (see the commented-out draft below).
    pub fn update_path_in_database_query(&self) -> String {
        todo!()
        // sqlx::query("UPDATE sheets SET path = $1 WHERE id = $2")
        //     .bind(sheet.path.to_str().unwrap().to_string())
        //     .bind(sheet.id)
    }

    /// Builds the UPDATE statement that persists the last-opened time.
    ///
    /// NOTE(review): same `String`-vs-query mismatch as above.
    pub fn update_last_opened_in_database_query(&self) -> String {
        // sqlx::query("UPDATE sheets SET last_opened = $1 WHERE id = $2")
        //     .bind(sheet.last_opened.timestamp())
        //     .bind(sheet.id)
        todo!()
    }

    /// Builds the INSERT statement for this sheet's kind-specific table.
    ///
    /// NOTE(review): same `String`-vs-query mismatch as above.
    pub fn insert_to_database_query(&self) -> String {
        todo!()
        // sqlx::query(
        //     "
        //     INSERT INTO orphan_files (path, file_size, file_hash, last_opened)
        //     VALUES ($1, $2, $3, $4)
        //     ",
        // )
        // .bind(file.path.to_str().unwrap().to_string())
        // .bind(file.file_size as i32)
        // .bind(file.file_hash.clone())
        // .bind(file.last_opened.timestamp())
    }

    /// Returns the sqlite table for this sheet's kind.
    ///
    /// NOTE(review): duplicates what the strum discriminant "message"
    /// already encodes — probably implementable as
    /// `SheetKindDiscriminants::from(&self.kind).get_message()`.
    pub fn get_database_table_name(&self) -> &str {
        todo!()
    }
}
// impl PartialOrd for EnumSheet {
// fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
// Some(self.cmp(other))
// }
// }

100
src/sheet_dao.rs Normal file
View File

@ -0,0 +1,100 @@
use chrono::NaiveDateTime;
use sqlx::{sqlite::SqliteRow, FromRow, Row, SqlitePool};
use strum::{EnumMessage, IntoEnumIterator};

use crate::sheet::{EnumSheet, I64DateTime, Pdf, SheetKind, SheetKindDiscriminants};
pub async fn insert_sheet(connection: SqlitePool, sheet: &EnumSheet) -> sqlx::Result<()> {
let table = sheet.kind.into::<SheetKindDiscriminants>().get_message();
sqlx::query(&format!(
"
INSERT INTO {} (path, file_size, file_hash, last_opened)
VALUES ($1, $2, $3, $4)
",
table,
))
.bind(sheet.pdf.path.to_str().unwrap().to_string())
.bind(sheet.pdf.file_size as i32)
.bind(sheet.pdf.file_hash.clone())
.bind(sheet.last_opened.timestamp())
.execute(&mut connection)
.await
.map(|_| ())
}
pub async fn update_sheet_path(connection: SqlitePool, sheet: &EnumSheet) -> sqlx::Result<()> {
let table = sheet.kind.into::<SheetKindDiscriminants>().get_message();
sqlx::query(&format!("UPDATE {} SET path = $1 WHERE id = $2", table))
.bind(sheet.kind.pdf.path.to_str().unwrap().to_string())
.bind(sheet.id)
.execute(&mut connection)
.await
.map(|_| ())
// TODO: check for success
}
pub async fn update_sheet_last_opened(
connection: SqlitePool,
sheet: &EnumSheet,
) -> sqlx::Result<()> {
let table = sheet.kind.into::<SheetKindDiscriminants>().get_message();
sqlx::query(&format!(
"UPDATE {} SET last_opened = $1 WHERE id = $2",
table
))
.bind(sheet.last_opened.timestamp())
.bind(sheet.id)
.execute(&mut connection)
.await
.map(|_| ())
// TODO: check for success
}
pub async fn fetch_all_sheets(&connection: SqlitePool) -> sqlx::Result<Vec<EnumSheet>> {
let mut sheets: Vec<EnumSheet> = Vec::new();
for kind in SheetKindDiscriminants::iter() {
let table = kind.get_message();
let mut sheets_of_kind = sqlx::query(&format!("SELECT * FROM {}", table))
.map(|row: SqliteRow| EnumSheet {
id: row.try_get("id")?,
last_opened: NaiveDateTime::from_timestamp_opt(
row.try_get::<i64, _>("last_opened")?,
0,
)
.unwrap()
.and_utc(),
kind: parse_kind_from_row(kind, row),
})
.fetch_all(&mut connection)
.await?;
sheets.append(&mut sheets_of_kind);
}
Ok(sheets)
}
/// Builds the kind-specific payload from a row of that kind's table.
fn parse_kind_from_row(kind: SheetKindDiscriminants, row: SqliteRow) -> sqlx::Result<SheetKind> {
    Ok(match kind {
        SheetKindDiscriminants::Sheet => SheetKind::Sheet {
            name: row.try_get("name")?,
            composer_id: row.try_get("composer_id")?,
            // `FromRow::from_row` borrows the row (`&SqliteRow`); passing
            // it by value, as before, would not type-check and would move
            // the row away from the `try_get` calls.
            pdf: Pdf::from_row(&row)?,
        },
        SheetKindDiscriminants::Orphan => SheetKind::Orphan {
            pdf: Pdf::from_row(&row)?,
        },
        SheetKindDiscriminants::Book => SheetKind::Book {
            name: row.try_get("name")?,
            composer_id: row.try_get("composer_id")?,
            pdf: Pdf::from_row(&row)?,
            // NOTE(review): the book's sheet-id list lives outside this
            // table and needs its own query; left unimplemented for now.
            sheet_ids: todo!(),
        },
        SheetKindDiscriminants::BookSheet => SheetKind::BookSheet {
            book_id: row.try_get("book_id")?,
            first_page: row.try_get("first_page")?,
            last_page: row.try_get("last_page")?,
        },
    })
}