Compare commits (38 commits)

94de09f429
bdc2e7a050
67d5dac0d1
d20bcf6a2d
808698dd1c
59864de6bd
dd36dab497
26133a692f
9a212a85ea
e201539219
d7379a2a9f
182675d14e
de00301c39
41f79a992e
367997839c
b01c08bdbe
c3e5db6889
e0feae0546
48f789ca83
35a47704db
f4080ca651
fae93ee352
011dab8d99
0e34138a6a
a7db35e8ac
ee8a887caa
9692b1a825
2ffd0bbbc3
58901dae37
cfb3cc9835
7cce6409b7
7b931fb033
3f5e621b6b
f73cc8f7ea
17f2985bb7
42b7d422a8
d3f2375995
4ddfd75b2a
Cargo.lock (generated, 1499 changed lines)
File diff suppressed because it is too large.
Cargo.toml (27 changed lines)
@@ -7,25 +7,34 @@ edition = "2021"
[dependencies]
# Core library
relm4 = "0.6.2"
relm4 = { version = "0.8.1" }
# relm4-macros = "0.6.2"
# Optional: reusable components
relm4-components = "0.6.2"
relm4-components = "0.8.1"
# Optional: icons
relm4-icons = { version = "0.6.0", features = ["plus"] }
relm4-icons = "0.8.2"

walkdir = "2" # For traversing directories recursively
opener = "0.6.1" # For opening files with the systems default application
opener = "0.7.1" # For opening files with the systems default application

log = "0.4.20"
env_logger = "0.10.1"
clap = { version = "4.4.6", features = ["derive"] }
log = "0.4.21"
env_logger = "0.11.3"
clap = { version = "4.5.4", features = ["derive"] }

sqlx = { version = "0.7", features = [ "runtime-tokio", "sqlite", "migrate", "macros" ] }
tokio = { version = "1", features = ["full"] }
blake3 = "1.5.0"
blake3 = "1.5.1"
dotenvy = "0.15.7"
chrono = "0.4.33"
chrono = "0.4.38"

strum = "0.26"
strum_macros = "0.26"
rand = "0.8.5"
xdg = "2.5.2"
toml = "0.8.19"
serde = { version = "1.0", features = ["derive"] }
anyhow = "1.0.93"
# strum = { version = "0.26", features = ["derive"] }

[profile.dev.package.sqlx-macros]
opt-level = 3
Readme.md (10 changed lines)
@@ -1,2 +1,12 @@
# Sheet Organizer
A simple tool for organizing and opening digital sheet music on a touch display, as part of a digital music stand.

## Dependencies
This tool supports editing PDFs using [Xournal++](https://github.com/xournalpp/xournalpp).

## Configuration
You can configure sheet-organizer using a file `config.toml` inside one of your XDG config directories (e.g. `~/.config/sheet-organizer/config.toml`).

```toml
working_directory = "~/my-sheets"
```
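The CLI argument introduced in src/main.rs later in this diff takes precedence over the value from `config.toml`. A condensed, standalone sketch of that resolution logic (the simplified `Config` struct and helper function here are illustrative stand-ins, not the project's actual code):

```rust
// Sketch: a working directory given on the command line overrides the one
// from config.toml, mirroring the resolution in src/main.rs of this diff.
use std::path::PathBuf;

struct Config {
    working_directory: Option<PathBuf>,
}

fn resolve_working_directory(config: Config, cli_arg: Option<PathBuf>) -> Option<PathBuf> {
    // CLI takes precedence over the configuration file.
    cli_arg.or(config.working_directory)
}

fn main() {
    let config = Config {
        working_directory: Some(PathBuf::from("/home/user/my-sheets")),
    };
    let from_cli = Some(PathBuf::from("/tmp/other-sheets"));
    assert_eq!(
        resolve_working_directory(config, from_cli),
        Some(PathBuf::from("/tmp/other-sheets"))
    );
}
```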
@@ -1,4 +1,8 @@
CREATE TABLE IF NOT EXISTS sheets (id integer primary key autoincrement, name TEXT, composer_id integer, path TEXT, file_size INTEGER, file_hash TEXT, last_opened INTEGER);
CREATE TABLE IF NOT EXISTS composers (id integer primary key autoincrement, name TEXT);
CREATE TABLE IF NOT EXISTS sheets (id INTEGER PRIMARY KEY AUTOINCREMENT,
last_opened INTEGER, name TEXT, composer_id INTEGER, first_page INTEGER, book_id INTEGER, path TEXT, file_size INTEGER, file_hash TEXT);
CREATE TABLE IF NOT EXISTS orphans (id INTEGER PRIMARY KEY AUTOINCREMENT,
last_opened INTEGER, path TEXT, file_size INTEGER, file_hash TEXT);
CREATE TABLE IF NOT EXISTS books (id INTEGER PRIMARY KEY AUTOINCREMENT,
last_opened INTEGER, name TEXT, composer_id INTEGER, sheet_ids TEXT, path TEXT, file_size INTEGER, file_hash TEXT);

CREATE TABLE IF NOT EXISTS orphan_files (id integer primary key autoincrement, path TEXT, file_size INTEGER, file_hash TEXT, last_opened INTEGER);
CREATE TABLE IF NOT EXISTS composers (id INTEGER primary key autoincrement, name TEXT);
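The new `books.sheet_ids` column stores the ids of a book's sheets as comma-separated TEXT; `sheet_ids_from_string` in src/sheet_dao.rs later in this diff parses it. A small standalone sketch of that round-trip (the serializing half is an assumption for illustration, since no writer for `sheet_ids` appears in this diff):

```rust
// Standalone sketch of the comma-separated `sheet_ids` TEXT column.
// Parsing follows sheet_ids_from_string in src/sheet_dao.rs (ids separated
// by ','); the serializer is an assumed counterpart, not project code.
fn sheet_ids_from_string(s: &str) -> Vec<i64> {
    s.trim()
        .split(',')
        .map(|id| id.trim().parse::<i64>().expect("invalid sheet id"))
        .collect()
}

fn sheet_ids_to_string(ids: &[i64]) -> String {
    ids.iter()
        .map(|id| id.to_string())
        .collect::<Vec<_>>()
        .join(",")
}

fn main() {
    let ids = sheet_ids_from_string("3,1,7");
    assert_eq!(ids, vec![3, 1, 7]);
    assert_eq!(sheet_ids_to_string(&ids), "3,1,7");
}
```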
flake.lock (generated, 82 changed lines)
@@ -1,45 +1,45 @@
{
"nodes": {
"naersk": {
"inputs": {
"nixpkgs": "nixpkgs"
},
"crane": {
"locked": {
"lastModified": 1698420672,
"narHash": "sha256-/TdeHMPRjjdJub7p7+w55vyABrsJlt5QkznPYy55vKA=",
"owner": "nix-community",
"repo": "naersk",
"rev": "aeb58d5e8faead8980a807c840232697982d47b9",
"lastModified": 1736101677,
"narHash": "sha256-iKOPq86AOWCohuzxwFy/MtC8PcSVGnrxBOvxpjpzrAY=",
"owner": "ipetkov",
"repo": "crane",
"rev": "61ba163d85e5adeddc7b3a69bb174034965965b2",
"type": "github"
},
"original": {
"owner": "nix-community",
"ref": "master",
"repo": "naersk",
"owner": "ipetkov",
"repo": "crane",
"type": "github"
}
},
"flake-utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
},
"nixpkgs": {
"locked": {
"lastModified": 1704842529,
"narHash": "sha256-OTeQA+F8d/Evad33JMfuXC89VMetQbsU4qcaePchGr4=",
"lastModified": 1736241350,
"narHash": "sha256-CHd7yhaDigUuJyDeX0SADbTM9FXfiWaeNyY34FL1wQU=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "eabe8d3eface69f5bb16c18f8662a702f50c20d5",
"type": "github"
},
"original": {
"id": "nixpkgs",
"type": "indirect"
}
},
"nixpkgs_2": {
"locked": {
"lastModified": 1704842529,
"narHash": "sha256-OTeQA+F8d/Evad33JMfuXC89VMetQbsU4qcaePchGr4=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "eabe8d3eface69f5bb16c18f8662a702f50c20d5",
"rev": "8c9fd3e564728e90829ee7dbac6edc972971cd0f",
"type": "github"
},
"original": {
@@ -51,9 +51,9 @@
},
"root": {
"inputs": {
"naersk": "naersk",
"nixpkgs": "nixpkgs_2",
"utils": "utils"
"crane": "crane",
"flake-utils": "flake-utils",
"nixpkgs": "nixpkgs"
}
},
"systems": {
@@ -70,24 +70,6 @@
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1701680307,
"narHash": "sha256-kAuep2h5ajznlPMD9rnQyffWG8EM/C73lejGofXvdM8=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "4022d587cbbfd70fe950c1e2083a02621806a725",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
flake.nix (139 changed lines)
@@ -1,38 +1,119 @@
{
description = "My own sheet-organizer using rust and relm4 (and nix)";

inputs = {
naersk.url = "github:nix-community/naersk/master";
nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
utils.url = "github:numtide/flake-utils";
flake-utils.url = "github:numtide/flake-utils";
crane.url = "github:ipetkov/crane";
};

outputs = { self, nixpkgs, utils, naersk }:
utils.lib.eachDefaultSystem (system:
let
pkgs = import nixpkgs { inherit system; };
naersk-lib = pkgs.callPackage naersk { };
outputs =
{
self,
nixpkgs,
flake-utils,
crane,
...
}:
let
packageOutputs = flake-utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};

# Needed at compile time (on build system)
nativeBuildInputs = with pkgs; [
gtk4
pkg-config
# wrapGAppsHook
];
# Needed at runtime (on run system)
buildInputs = with pkgs; [ ];
in rec {
defaultPackage = naersk-lib.buildPackage {
src = ./.;
inherit buildInputs;
inherit nativeBuildInputs;
};
devShell = with pkgs;
mkShell {
buildInputs =
[ cargo rustc rustfmt pre-commit rustPackages.clippy ];
# Without inheriting nativeBuildinputs, cargo build will fail but that is good since we want to use only nix build
# inherit nativeBuildInputs;
craneLib = crane.mkLib pkgs;

RUST_SRC_PATH = rustPlatform.rustLibSrc;
dbMigrationsFilter = path: _type: builtins.match ".*sql$" path != null;
dbMigrationsOrCargoFilter =
path: type: (dbMigrationsFilter path type) || (craneLib.filterCargoSources path type);

dbMigrations = pkgs.lib.cleanSourceWith {
src = craneLib.path ./db-migrations; # The original, unfiltered source
filter = dbMigrationsFilter;
};
});

# Common arguments can be set here to avoid repeating them later
# Note: changes here will rebuild all dependency crates
commonArgs = rec {
strictDeps = true; # When this is not set, all dependency crates will be compiled again
src = pkgs.lib.cleanSourceWith {
src = craneLib.path ./.; # The original, unfiltered source
filter = dbMigrationsOrCargoFilter;
};

# Add icons.toml to $src when compiling dependencies (needed by relm4-icons)
extraDummyScript = ''
cp --no-preserve=mode,ownership ${./icons.toml} $out/icons.toml
'';

nativeBuildInputs = with pkgs; [ pkg-config ];

buildInputs =
with pkgs;
[
gtk4
]
++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
# Additional darwin specific inputs can be set here
pkgs.libiconv
];
};

# Build *just* the cargo dependencies, so we can reuse
# all of that work (e.g. via cachix) when running in CI
cargoArtifacts = craneLib.buildDepsOnly (commonArgs);

# Run clippy (and deny all warnings) on the crate source,
# reusing the dependency artifacts (e.g. from build scripts or
# proc-macros) from above.
#
# Note that this is done as a separate derivation so it
# does not impact building just the crate by itself.
myCrateClippy = craneLib.cargoClippy (
commonArgs
// {
# Again we apply some extra arguments only to this derivation
# and not every where else. In this case we add some clippy flags
inherit cargoArtifacts;
cargoClippyExtraArgs = "--all-targets -- --deny warnings";
}
);

# Build the actual crate itself, reusing the dependency
# artifacts from above.
myCrate = craneLib.buildPackage (
commonArgs
// {
inherit cargoArtifacts;
}
// {
postInstall = ''
mkdir -p $out/share/applications
cp ${./sheet-organizer.desktop} $out/share/applications/sheet-organizer.desktop

mkdir -p $out/share/icons
cp ${./sheet-organizer.png} $out/share/icons/sheet-organizer.png
'';
}
);

# Also run the crate tests under cargo-tarpaulin so that we can keep
# track of code coverage
myCrateCoverage = craneLib.cargoTarpaulin (commonArgs // { inherit cargoArtifacts; });

in
{
packages.default = myCrate;
checks = {
inherit
# Build the crate as part of `nix flake check` for convenience
myCrate
myCrateClippy
myCrateCoverage
;
};
}
);
in
packageOutputs;
}
icons.toml (new file, 9 lines)
@@ -0,0 +1,9 @@
# Recommended: Specify your app ID *OR* your base resource path for more robust icon loading
base_resource_path = "/org/gtkrs/"

# List of icon names you found (shipped with this crate)
# Note: the file ending `-symbolic.svg` isn't part of the icon name.
icons = ["refresh", "edit", "arrow-sort-regular", "playlist-shuffle", "user-trash", "open-filled", "document-settings-filled"]

# Optional: Specify a folder containing your own SVG icons
# icon_folder = "my_svg_icons"
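Each name listed in `icons` above is exposed by relm4-icons as an `icon_names::*` constant once `relm4_icons::initialize_icons()` has run, which is how src/ui/app.rs later in this diff sets its button icons. A minimal sketch (the standalone helper function is illustrative, not part of the app; in the project the equivalent calls live in the app component's init):

```rust
// Minimal sketch: icon names declared in icons.toml become constants
// provided by relm4-icons after initialization.
use relm4::gtk;
use relm4::gtk::prelude::*;
use relm4_icons::icon_names;

fn build_refresh_button() -> gtk::Button {
    relm4_icons::initialize_icons(); // in the app this runs once during init()
    let button = gtk::Button::new();
    button.set_icon_name(icon_names::REFRESH); // the "refresh" entry from icons.toml
    button
}
```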
sheet-organizer.desktop (new file, 6 lines)
@@ -0,0 +1,6 @@
[Desktop Entry]
Type=Application
Terminal=false
Name=Sheet Organizer
Icon=sheet-organizer
Exec=sheet-organizer
sheet-organizer.png (new binary file, 293 KiB; binary file not shown)
src/config.rs (new file, 39 lines)
@@ -0,0 +1,39 @@
use anyhow::{anyhow, Context, Result};
use serde::Deserialize;
use std::fs;
use std::path::PathBuf;
use xdg::BaseDirectories;

#[derive(Debug, Deserialize)]
pub struct Config {
pub working_directory: Option<PathBuf>,
}

impl Config {
pub fn default() -> Config {
Config {
working_directory: None,
}
}
}

pub fn load_config(app_name: &str, file_name: &str) -> Result<Config> {
// Create an XDG base directories instance
let xdg_dirs =
BaseDirectories::with_prefix(app_name).context("Failed to initialize XDG directories")?;

let config_path = xdg_dirs
.place_config_file(file_name)
.context("Failed to determine configuration file path")?;

if !config_path.exists() {
return Err(anyhow!("No configuration file at {:?}", config_path));
}

let contents = fs::read_to_string(&config_path)
.with_context(|| format!("Failed to read configuration file at {:?}", config_path))?;

let config: Config = toml::from_str(&contents)
.with_context(|| format!("Failed to parse TOML configuration at {:?}", config_path))?;
Ok(config)
}
@@ -3,10 +3,8 @@ use std::path::Path;
use log::debug;
use sqlx::{migrate::MigrateDatabase, Sqlite, SqlitePool};

use crate::sheet::{OrphanFile, Sheet};

pub struct Database {
connection: SqlitePool,
pub connection: SqlitePool,
}

impl Database {
@@ -35,87 +33,4 @@ impl Database {
debug!("Connected to database");
Ok(connection)
}

pub async fn _insert_sheet(&self, sheet: Sheet) -> sqlx::Result<()> {
sqlx::query(
"
INSERT INTO sheets (name, composer_id, path, file_size, file_hash, last_opened)
VALUES ($1, $2, $3, $4, $5, $6)
",
)
.bind(sheet.name)
.bind(sheet.composer_id)
.bind(sheet.path.to_str().unwrap().to_string())
.bind(sheet.file_size as i32)
.bind(sheet.file_hash)
.bind(sheet.last_opened.timestamp())
.execute(&self.connection)
.await
.map(|_| ())
}

pub async fn update_sheet_path(&self, sheet: &Sheet) -> sqlx::Result<()> {
sqlx::query("UPDATE sheets SET path = $1 WHERE id = $2")
.bind(sheet.path.to_str().unwrap().to_string())
.bind(sheet.id)
.execute(&self.connection)
.await
.map(|_| ())
}
pub async fn update_sheet_last_opened(&self, sheet: &Sheet) -> sqlx::Result<()> {
sqlx::query("UPDATE sheets SET last_opened = $1 WHERE id = $2")
.bind(sheet.last_opened.timestamp())
.bind(sheet.id)
.execute(&self.connection)
.await
.map(|_| ())
// TODO: check for success
}

pub async fn fetch_all_sheets(&self) -> sqlx::Result<Vec<Sheet>> {
sqlx::query_as::<_, Sheet>("SELECT * FROM sheets")
.fetch_all(&self.connection)
.await
}

pub async fn insert_orphan_file(&self, file: &OrphanFile) -> sqlx::Result<i64> {
sqlx::query(
"
INSERT INTO orphan_files (path, file_size, file_hash, last_opened)
VALUES ($1, $2, $3, $4)
",
)
.bind(file.path.to_str().unwrap().to_string())
.bind(file.file_size as i32)
.bind(file.file_hash.clone())
.bind(file.last_opened.timestamp())
.execute(&self.connection)
.await
.map(|result| result.last_insert_rowid())
}

pub async fn update_orphan_file_path(&self, orphan: &OrphanFile) -> sqlx::Result<()> {
sqlx::query("UPDATE orphan_files SET path = $1 WHERE id = $2")
.bind(orphan.path.to_str().unwrap().to_string())
.bind(orphan.id)
.execute(&self.connection)
.await
.map(|_| ())
}

pub async fn update_orphan_last_opened(&self, orphan: &OrphanFile) -> sqlx::Result<()> {
sqlx::query("UPDATE orphan_files SET last_opened = $1 WHERE id = $2")
.bind(orphan.last_opened.timestamp())
.bind(orphan.id)
.execute(&self.connection)
.await
.map(|_| ())
// TODO: check for success
}

pub async fn fetch_all_orphan_files(&self) -> sqlx::Result<Vec<OrphanFile>> {
sqlx::query_as::<_, OrphanFile>("SELECT * FROM orphan_files")
.fetch_all(&self.connection)
.await
}
}
src/main.rs (166 changed lines)
@@ -1,168 +1,70 @@
mod config;
mod database;
mod sheet;
mod sheet_dao;
mod sheet_validation;
mod ui;

use std::{
path::{Path, PathBuf},
process,
};
use std::{path::PathBuf, process};

use clap::Parser;
use config::Config;
use database::Database;
use env_logger::Env;
use log::{debug, error};
use log::{error, warn};
use relm4::RelmApp;
use sheet::{OrphanFile, Sheet};
use walkdir::WalkDir;

use crate::ui::app::AppModel;
use crate::ui::app::{AppInitData, AppModel};

#[derive(Parser)]
#[command(author, version, about)]
struct Cli {
directory: PathBuf,
working_directory: Option<PathBuf>,
}

#[tokio::main]
async fn main() {
env_logger::Builder::from_env(Env::default().default_filter_or("debug")).init();

let mut config = match config::load_config("sheet-organizer", "config.toml") {
Ok(config) => config,
Err(err) => {
warn!("Could not get configuration: {:#}", err);
Config::default()
}
};

let cli = Cli::parse();
if !cli.directory.is_dir() {
error!("Sheet folder path is no dir or does not exist");
// Overwrite config by cli options if specified
if cli.working_directory.is_some() {
config.working_directory = cli.working_directory;
}

let working_directory = config.working_directory.unwrap_or_else(|| {
error!("No working directory specified, neither in config nor in cli. Exiting...");
process::exit(1);
});
if !working_directory.is_dir() {
error!(
"Working directory '{}' does not exist",
working_directory.to_string_lossy()
);
process::exit(1);
}

let database = Database::setup(cli.directory.join("database.sqlite"))
let database = Database::setup(working_directory.join("database.sqlite"))
.await
.unwrap();
// database.insert_sheet(Sheet::new_debug()).await.unwrap();
let sheets = database.fetch_all_sheets().await.unwrap();
let orphan_files = database.fetch_all_orphan_files().await.unwrap();

debug!("Validating sheets from database...");
let mut validation_result = validate_sheet_files(sheets, orphan_files, &cli.directory);
debug!("{}", validation_result.get_stats()); // TODO: handle invalidated files
for updated in validation_result.updated_sheets.iter() {
database.update_sheet_path(updated).await.unwrap();
}
for updated in validation_result.updated_orphan_files.iter() {
database.update_orphan_file_path(updated).await.unwrap();
}

let mut orphans = validation_result.validated_orphan_files;
orphans.append(&mut validation_result.updated_orphan_files);
debug!("Inserting unassigned files into orphan table...");
for unassigned in validation_result.unassigned_files {
let mut orphan = OrphanFile::try_from(unassigned).unwrap();
let id = database.insert_orphan_file(&orphan).await.unwrap();
orphan.id = id;
orphans.push(orphan);
}

let mut sheets = validation_result.validated_sheets;
sheets.append(&mut validation_result.updated_sheets);
let sheets = sheet_validation::load_and_validate_sheets(&database, &working_directory).await;

let app_init_data = AppInitData {
sheets,
orphans,
database,
directory: working_directory,
};

let app = RelmApp::new("de.frajul.sheet-organizer");
// Pass empty command line args to allow my own parsing
app.with_args(Vec::new())
.run_async::<AppModel>(app_init_data);
}

pub struct AppInitData {
sheets: Vec<Sheet>,
orphans: Vec<OrphanFile>,
database: Database,
}

pub struct FileValidationResult {
validated_sheets: Vec<Sheet>,
invalidated_sheets: Vec<Sheet>,
updated_sheets: Vec<Sheet>,

validated_orphan_files: Vec<OrphanFile>,
invalidated_orphan_files: Vec<OrphanFile>,
updated_orphan_files: Vec<OrphanFile>,

unassigned_files: Vec<PathBuf>,
}

impl FileValidationResult {
fn get_stats(&self) -> String {
format!("Validated sheets: {}\nInvalidated sheets: {}\nUpdated sheets: {}\nValidated orphan_files: {}\nInvalidated orphan_files: {}\nUpdated orphan_files: {}\nUnassigned files: {}",
self.validated_sheets.len(), self.invalidated_sheets.len(), self.updated_sheets.len(),
self.validated_orphan_files.len(), self.invalidated_orphan_files.len(), self.updated_orphan_files.len(), self.unassigned_files.len())
}
}

fn validate_sheet_files(
sheets: Vec<Sheet>,
orphan_files: Vec<OrphanFile>,
dir: impl AsRef<Path>,
) -> FileValidationResult {
// TODO: fix duplication
let (validated_sheets, mut invalidated_sheets): (Vec<_>, Vec<_>) = sheets
.into_iter()
.partition(|sheet| sheet.validate_path(&sheet.path).unwrap_or(false));
let (validated_orphan_files, mut invalidated_orphan_files): (Vec<_>, Vec<_>) =
orphan_files.into_iter().partition(|orphan_file| {
orphan_file
.validate_path(&orphan_file.path)
.unwrap_or(false)
});

let mut updated_sheets = Vec::new();
let mut updated_orphan_files = Vec::new();
let mut unassigned_files = Vec::new();

for pdf_file in WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|file| file.file_type().is_file())
.map(|file| file.into_path())
.filter(|path| {
path.extension()
.map(|s| s.to_string_lossy().to_ascii_lowercase() == "pdf")
.unwrap_or(false)
})
{
if let Some((i, _)) = invalidated_sheets
.iter()
.enumerate()
.find(|(_, sheet)| sheet.validate_path(&pdf_file).unwrap_or(false))
{
let mut sheet = invalidated_sheets.remove(i);
sheet.path = pdf_file;
updated_sheets.push(sheet);
} else if let Some((i, _)) = invalidated_orphan_files
.iter()
.enumerate()
.find(|(_, orphan_file)| orphan_file.validate_path(&pdf_file).unwrap_or(false))
{
let mut orphan_file = invalidated_orphan_files.remove(i);
orphan_file.path = pdf_file;
updated_orphan_files.push(orphan_file);
} else if !validated_sheets.iter().any(|sheet| sheet.path == pdf_file)
&& !validated_orphan_files
.iter()
.any(|orphan| orphan.path == pdf_file)
{
unassigned_files.push(pdf_file);
}
}

FileValidationResult {
validated_sheets,
invalidated_sheets,
updated_sheets,
validated_orphan_files,
invalidated_orphan_files,
updated_orphan_files,
unassigned_files,
}
}
src/sheet.rs (190 changed lines)
@@ -1,39 +1,94 @@
use std::{
cmp::Ordering,
ffi::OsStr,
fs,
path::{Path, PathBuf},
};

use sqlx::{prelude::*, sqlite::SqliteRow};
// use sqlx::{FromRow, sqlite::SqliteRow, sqlx::Row};
use chrono::{DateTime, NaiveDateTime, Utc};
use chrono::{DateTime, Utc};
use log::debug;
use strum_macros::{EnumDiscriminants, EnumIter};

pub trait PdfSheet {
fn get_pdf(&self) -> &Pdf;
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Sheet {
pub id: i64,
pub name: String,
pub composer_id: i64,
pub path: PathBuf,
pub file_size: u64,
pub file_hash: String,
pub last_opened: DateTime<Utc>,
pub last_opened: I64DateTime,
pub kind: SheetKind,
pub pdf: Pdf,
}

impl FromRow<'_, SqliteRow> for Sheet {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
Ok(Self {
id: row.try_get("id")?,
name: row.try_get("name")?,
composer_id: row.try_get("composer_id")?,
path: row.try_get::<&str, _>("path")?.into(),
file_size: row.try_get::<i64, _>("file_size")? as u64,
file_hash: row.try_get("file_hash")?,
last_opened: NaiveDateTime::from_timestamp_opt(
row.try_get::<i64, _>("last_opened")?,
0,
)
.unwrap()
.and_utc(),
})
#[derive(Debug, Clone, PartialEq, Eq, EnumDiscriminants)]
#[strum_discriminants(derive(EnumIter))]
pub enum SheetKind {
Sheet {
name: String,
composer_id: i64,
first_page: i64,
book_id: Option<i64>,
},
Orphan,
Book {
name: String,
composer_id: i64,
sheet_ids: Vec<i64>,
},
}

impl PartialOrd for Sheet {
fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
Some(self.cmp(other))
}
}

impl Ord for Sheet {
fn cmp(&self, other: &Self) -> Ordering {
self.last_opened.cmp(&other.last_opened)
}
}

impl Sheet {
pub fn construct_xopp_file_path(&self) -> PathBuf {
let mut xopp_path = self.pdf.path.with_extension("").into_os_string();
xopp_path.push(".xopp");
PathBuf::from(xopp_path)
}

pub fn construct_annotated_file_path(&self) -> PathBuf {
let mut annotated_path = self.pdf.path.with_extension("").into_os_string();
annotated_path.push("_annotated.pdf");
PathBuf::from(annotated_path)
}

pub fn open_file_or_annotated_version_if_exists(&self) {
let annotated_version = self.construct_annotated_file_path();
if annotated_version.exists() {
// TODO: open on first_page
opener::open(annotated_version).unwrap();
} else {
// TODO: open on first_page
opener::open(&self.pdf.path).unwrap();
}
}

pub fn is_part_of_book(&self) -> bool {
if let SheetKind::Sheet { book_id, .. } = &self.kind {
return book_id.is_some();
}
false
}
}

impl SheetKindDiscriminants {
pub fn get_database_table_name(&self) -> &str {
match self {
SheetKindDiscriminants::Sheet => "sheets",
SheetKindDiscriminants::Orphan => "orphans",
SheetKindDiscriminants::Book => "books",
}
}
}

@@ -43,7 +98,41 @@ pub struct Composer {
pub name: String,
}

impl Sheet {
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct I64DateTime(pub DateTime<Utc>);

impl TryFrom<i64> for I64DateTime {
type Error = String;

fn try_from(value: i64) -> Result<Self, Self::Error> {
Ok(I64DateTime(
DateTime::<Utc>::from_timestamp(value, 0).ok_or("Failed converting i64 to DateTime")?,
))
}
}

impl From<&I64DateTime> for i64 {
fn from(value: &I64DateTime) -> Self {
value.0.timestamp()
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Pdf {
pub path: PathBuf,
pub file_size: u64,
pub file_hash: String,
}

impl Pdf {
pub fn get_name(&self) -> &str {
self.path.file_name().unwrap().to_str().unwrap()
}

pub fn validate_own_path(&self) -> std::io::Result<bool> {
self.validate_path(&self.path)
}

pub fn validate_path(&self, path: impl AsRef<Path>) -> std::io::Result<bool> {
// First compare file size since it is faster than hashing
let file_size = fs::metadata(path.as_ref())?.len();
@@ -59,24 +148,7 @@ impl Sheet {
}
}

impl OrphanFile {
// TODO: fix duplication
pub fn validate_path(&self, path: impl AsRef<Path>) -> std::io::Result<bool> {
// First compare file size since it is faster than hashing
let file_size = fs::metadata(path.as_ref())?.len();
if file_size == self.file_size {
let file_content = fs::read(path.as_ref())?;
let file_hash = blake3::hash(&file_content);
if file_hash.to_string() == self.file_hash {
return Ok(true);
}
}

Ok(false)
}
}

impl TryFrom<PathBuf> for OrphanFile {
impl TryFrom<PathBuf> for Pdf {
type Error = std::io::Error;

fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
@@ -84,38 +156,10 @@ impl TryFrom<PathBuf> for OrphanFile {
let file_content = fs::read(path.as_path())?;
let file_hash = blake3::hash(&file_content).to_string();

Ok(OrphanFile {
id: -1,
Ok(Pdf {
path,
file_size,
file_hash,
last_opened: DateTime::default(),
})
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OrphanFile {
pub id: i64,
pub path: PathBuf,
pub file_size: u64,
pub file_hash: String,
pub last_opened: DateTime<Utc>,
}

impl FromRow<'_, SqliteRow> for OrphanFile {
fn from_row(row: &SqliteRow) -> sqlx::Result<Self> {
Ok(Self {
id: row.try_get("id")?,
path: row.try_get::<&str, _>("path")?.into(),
file_size: row.try_get::<i64, _>("file_size")? as u64,
file_hash: row.try_get("file_hash")?,
last_opened: NaiveDateTime::from_timestamp_opt(
row.try_get::<i64, _>("last_opened")?,
0,
)
.unwrap()
.and_utc(),
})
}
}
src/sheet_dao.rs (new file, 151 lines)
@@ -0,0 +1,151 @@
use crate::{
database::Database,
sheet::{Composer, I64DateTime, Pdf, Sheet, SheetKind, SheetKindDiscriminants},
};
use sqlx::{sqlite::SqliteRow, Row};
use std::path::{Path, PathBuf};
use strum::IntoEnumIterator;

pub async fn insert_file_as_orphan(
database: &Database,
file: impl AsRef<Path>,
) -> sqlx::Result<Sheet> {
let pdf = Pdf::try_from(file.as_ref().to_path_buf()).unwrap();
let last_opened = chrono::offset::Utc::now();

let result = sqlx::query(
"
INSERT INTO orphans (path, file_size, file_hash, last_opened)
VALUES ($1, $2, $3, $4)
",
)
.bind(pdf.path.to_str().unwrap().to_string())
.bind(pdf.file_size as i32)
.bind(pdf.file_hash.clone())
.bind(last_opened.timestamp())
.execute(&database.connection)
.await
.unwrap();

let id = result.last_insert_rowid();

Ok(Sheet {
id,
pdf,
last_opened: I64DateTime(last_opened),
kind: SheetKind::Orphan,
})
}

// pub async fn find_path_of_book(database: &Database, book_id: &i64) -> sqlx::Result<PathBuf> {
//     sqlx::query("SELECT path FROM books WHERE id = $1")
//         .bind(book_id)
//         .map(|row: SqliteRow| PathBuf::try_from(row.try_get::<String, _>("path").unwrap()).unwrap())
//         .fetch_one(&database.connection)
//         .await
// }

pub async fn update_sheet_path(database: &Database, sheet: &Sheet) -> sqlx::Result<()> {
// TODO: when updating book or sheet of book, update all
let sheet_kind = SheetKindDiscriminants::from(&sheet.kind);
let table = sheet_kind.get_database_table_name();
sqlx::query(&format!("UPDATE {} SET path = $1 WHERE id = $2", table))
.bind(sheet.pdf.path.to_str().unwrap().to_string())
.bind(sheet.id)
.execute(&database.connection)
.await
.map(|_| ())
}

pub async fn update_sheet_last_opened(database: &Database, sheet: &Sheet) -> sqlx::Result<()> {
let sheet_kind = SheetKindDiscriminants::from(&sheet.kind);
let table = sheet_kind.get_database_table_name();
sqlx::query(&format!(
"UPDATE {} SET last_opened = $1 WHERE id = $2",
table
))
.bind(i64::from(&sheet.last_opened))
.bind(sheet.id)
.execute(&database.connection)
.await
.map(|_| ())
}

pub async fn get_composer_by_id(database: &Database, id: i64) -> sqlx::Result<Composer> {
sqlx::query(&format!("SELECT * FROM {} WHERE id = {}", "composers", id))
.map(|row: SqliteRow| Composer {
id,
name: row.try_get("name").unwrap(),
})
.fetch_one(&database.connection)
.await
}

pub async fn remove_duplicate_sheets(database: &Database) -> sqlx::Result<()> {
for kind in SheetKindDiscriminants::iter() {
let table = kind.get_database_table_name();
sqlx::query(&format!(
"DELETE FROM {} WHERE id NOT IN (SELECT MIN(id) FROM {} GROUP BY file_hash)",
table, table
))
.execute(&database.connection)
.await?;
}
Ok(())
}

pub async fn fetch_all_sheets(database: &Database) -> sqlx::Result<Vec<Sheet>> {
let mut sheets: Vec<Sheet> = Vec::new();

for kind in SheetKindDiscriminants::iter() {
let table = kind.get_database_table_name();

let mut sheets_of_kind = sqlx::query(&format!("SELECT * FROM {}", table))
.map(|row: SqliteRow| Sheet {
id: row.try_get("id").unwrap(),
last_opened: I64DateTime::try_from(row.try_get::<i64, _>("last_opened").unwrap())
.unwrap(),
pdf: parse_pdf_from_row(&row).unwrap(),
kind: parse_kind_from_row(kind, row).unwrap(),
})
.fetch_all(&database.connection)
.await?;

sheets.append(&mut sheets_of_kind);
}

Ok(sheets)
}

fn parse_kind_from_row(kind: SheetKindDiscriminants, row: SqliteRow) -> sqlx::Result<SheetKind> {
Ok(match kind {
SheetKindDiscriminants::Sheet => SheetKind::Sheet {
name: row.try_get("name")?,
composer_id: row.try_get("composer_id")?,
first_page: row.try_get("first_page")?,
book_id: row.try_get("book_id").ok(),
},
SheetKindDiscriminants::Orphan => SheetKind::Orphan,
SheetKindDiscriminants::Book => SheetKind::Book {
name: row.try_get("name")?,
composer_id: row.try_get("composer_id")?,
sheet_ids: sheet_ids_from_string(row.try_get("sheet_ids").unwrap()),
},
})
}

fn sheet_ids_from_string(s: String) -> Vec<i64> {
s.trim()
.split(',')
.map(|s| s.parse::<i64>().unwrap())
.collect()
}

fn parse_pdf_from_row(row: &SqliteRow) -> sqlx::Result<Pdf> {
// TODO: use get instead of try_get???
Ok(Pdf {
path: PathBuf::from(row.try_get::<String, _>("path").unwrap()),
file_size: row.try_get::<i64, _>("file_size")? as u64,
file_hash: row.try_get("file_hash")?,
})
}
src/sheet_validation.rs (new file, 115 lines)
@@ -0,0 +1,115 @@
use std::path::{Path, PathBuf};

use log::debug;
use walkdir::WalkDir;

use crate::{
database::Database,
sheet::{Pdf, Sheet},
sheet_dao,
};

pub async fn load_and_validate_sheets(
database: &Database,
directory: impl AsRef<Path>,
) -> Vec<Sheet> {
sheet_dao::remove_duplicate_sheets(database).await.unwrap();

let sheets = sheet_dao::fetch_all_sheets(database).await.unwrap();

debug!("Validating sheets from database...");
let mut validation_result = validate_sheet_files(sheets, directory);
debug!("{}", validation_result.get_stats()); // TODO: handle invalidated files
for updated in validation_result.updated_sheets.iter() {
sheet_dao::update_sheet_path(database, updated)
.await
.unwrap();
}

let mut sheets = validation_result.validated_sheets;
sheets.append(&mut validation_result.updated_sheets);

debug!("Inserting unassigned files into orphan table...");
for unassigned in validation_result.unassigned_files {
let orphan = sheet_dao::insert_file_as_orphan(database, unassigned)
.await
.unwrap();
sheets.push(orphan);
}

sheets
}

pub struct FileValidationResult {
validated_sheets: Vec<Sheet>,
invalidated_sheets: Vec<Sheet>,
updated_sheets: Vec<Sheet>,

unassigned_files: Vec<PathBuf>,
}

impl FileValidationResult {
fn get_stats(&self) -> String {
format!("Validated sheets: {}\nInvalidated sheets: {}\nUpdated sheets: {}\nUnassigned files: {}",
self.validated_sheets.len(), self.invalidated_sheets.len(), self.updated_sheets.len(),
self.unassigned_files.len())
}
}

fn validate_sheet_files(sheets: Vec<Sheet>, dir: impl AsRef<Path>) -> FileValidationResult {
let (validated_sheets, mut invalidated_sheets): (Vec<_>, Vec<_>) = sheets
.into_iter()
.partition(|sheet| sheet.pdf.validate_own_path().unwrap_or(false));

let mut updated_sheets = Vec::new();
let mut unassigned_files = Vec::new();

// TODO: improve performance?
for pdf_file in find_all_pdfs_in_directory_recursive(dir) {
// Make sure annotated files are not handled (they are then only opened if existent)
if pdf_file
.file_name()
.unwrap()
.to_string_lossy()
.ends_with("_annotated.pdf")
{
continue;
}

if let Some((i, _)) = invalidated_sheets
.iter()
.enumerate()
.find(|(_, sheet)| sheet.pdf.validate_path(&pdf_file).unwrap_or(false))
{
let mut sheet = invalidated_sheets.remove(i);
let new_pdf = Pdf::try_from(pdf_file).unwrap();
sheet.pdf = new_pdf;
updated_sheets.push(sheet);
} else if !validated_sheets
.iter()
.any(|sheet| sheet.pdf.path == pdf_file)
{
unassigned_files.push(pdf_file);
}
}

FileValidationResult {
validated_sheets,
invalidated_sheets,
updated_sheets,
unassigned_files,
}
}

fn find_all_pdfs_in_directory_recursive(dir: impl AsRef<Path>) -> impl Iterator<Item = PathBuf> {
WalkDir::new(dir)
.into_iter()
.filter_map(|e| e.ok())
.filter(|file| file.file_type().is_file())
.map(|file| file.into_path())
.filter(|path| {
path.extension()
.map(|s| s.to_string_lossy().to_ascii_lowercase() == "pdf")
.unwrap_or(false)
})
}
src/ui/app.rs (210 changed lines)
@@ -1,32 +1,60 @@
use std::{path::PathBuf, process::Command, sync::Arc};

use chrono::Utc;
use gtk::prelude::*;
use relm4::{
component::{AsyncComponent, AsyncComponentParts},
component::{AsyncComponent, AsyncComponentParts, AsyncController},
gtk::{gdk, Adjustment},
prelude::*,
AsyncComponentSender,
};
use relm4_icons::icon_names;

use crate::{
database::Database,
ui::{mcdu::McduOutput, sheet_model::SheetModelType},
AppInitData,
database::{self, Database},
sheet::{I64DateTime, Sheet},
sheet_dao, sheet_validation,
ui::mcdu::McduOutput,
};

use super::{
mcdu::McduModel,
sheet_listing::{SheetListingInput, SheetListingModel},
sheet_edit_dialog::{SheetEditDialogInit, SheetEditDialogModel},
sheet_listing::{SheetListingInput, SheetListingModel, SheetListingOutput},
};

pub struct AppModel {
database: Database,
database: Arc<Database>,
directory: Arc<PathBuf>,
mcdu: Controller<McduModel>,
sheets_and_files_listing: Controller<SheetListingModel>,
sheets_listing: Controller<SheetListingModel>,
click_mode: ClickMode,
scroll_adjustment: Adjustment,
sheet_edit_dialog: Option<AsyncController<SheetEditDialogModel>>,
}

#[derive(Debug)]
pub enum ClickMode {
Open,
Edit,
Annotate,
}

#[derive(Debug)]
pub enum AppInput {
SearchStarted(String),
SheetPressed(SheetModelType),
SheetPressed(Sheet),
Refresh,
Sort,
Shuffle,
SetClickMode(ClickMode),
SheetListingContentsChanged,
}

pub struct AppInitData {
pub sheets: Vec<Sheet>,
pub database: Database,
pub directory: PathBuf,
}

#[relm4::component(pub, async)]
@@ -34,7 +62,7 @@ impl AsyncComponent for AppModel {
type Input = AppInput;
type Output = ();
type Init = AppInitData;
type CommandOutput = ();
type CommandOutput = Vec<Sheet>;

view! {
#[root]
@@ -48,10 +76,49 @@
gtk::Box {
set_orientation: gtk::Orientation::Vertical,
set_hexpand: true,
gtk::Box {
set_orientation: gtk::Orientation::Horizontal,
set_margin_all: 10,
set_spacing: 3,
gtk::Button {
set_icon_name: icon_names::REFRESH,
set_margin_end: 10,
connect_clicked[sender] => move |_| sender.input(AppInput::Refresh),
},
#[name = "button_sort"]
gtk::ToggleButton {
set_icon_name: icon_names::ARROW_SORT_REGULAR,
set_active: true,
connect_clicked[sender] => move |_| sender.input(AppInput::Sort),
},
gtk::ToggleButton {
set_icon_name: icon_names::PLAYLIST_SHUFFLE,
set_group: Some(&button_sort),
set_margin_end: 10,
connect_clicked[sender] => move |_| sender.input(AppInput::Shuffle),
},
#[name = "button_open"]
gtk::ToggleButton {
set_icon_name: icon_names::OPEN_FILLED,
set_active: true,
connect_clicked[sender] => move |button| if button.is_active() { sender.input(AppInput::SetClickMode(ClickMode::Open)) },
},
gtk::ToggleButton {
set_icon_name: icon_names::DOCUMENT_SETTINGS_FILLED,
set_group: Some(&button_open),
connect_clicked[sender] => move |button| if button.is_active() { sender.input(AppInput::SetClickMode(ClickMode::Edit)) },
},
gtk::ToggleButton {
set_icon_name: icon_names::EDIT,
set_group: Some(&button_open),
connect_clicked[sender] => move |button| if button.is_active() { sender.input(AppInput::SetClickMode(ClickMode::Annotate)) },
},
},
gtk::ScrolledWindow {
model.sheets_and_files_listing.widget(),
set_vexpand: true,
set_hexpand: true,
model.sheets_listing.widget(),
set_vexpand: true,
set_hexpand: true,
set_vadjustment: Some(&model.scroll_adjustment),
},
},
model.mcdu.widget() {
@@ -67,6 +134,12 @@
sender: AsyncComponentSender<Self>,
) -> AsyncComponentParts<Self> {
relm4_icons::initialize_icons();
gtk::init().unwrap();
let display = gdk::Display::default().unwrap();
let theme = gtk::IconTheme::for_display(&display);

theme.add_resource_path("/org/gtkrs/icons/");
// theme.add_resource_path("/org/gtkrs/icons/scalable/actions/");

let mcdu = McduModel::builder()
.launch(())
@@ -74,31 +147,25 @@ impl AsyncComponent for AppModel {
McduOutput::SearchStarted(query) => AppInput::SearchStarted(query),
});

let mut orphan_files: Vec<SheetModelType> = init_data
.orphans
.into_iter()
.map(|orphan| SheetModelType::Orphan { orphan })
.collect();
orphan_files.sort_by(|a, b| a.cmp(b).reverse());
let mut sheets = init_data.sheets;
sheets.sort_by(|a, b| a.cmp(b).reverse());

let mut sheets_and_files: Vec<SheetModelType> = init_data
.sheets
.into_iter()
.map(|sheet| SheetModelType::Sheet { sheet })
.chain(orphan_files)
.collect();
sheets_and_files.sort_by(|a, b| a.cmp(b).reverse());

let sheets_and_files_listing = SheetListingModel::builder()
.launch(sheets_and_files)
.forward(sender.input_sender(), |response| {
AppInput::SheetPressed(response.sheet_model_type)
});
let sheets_listing = SheetListingModel::builder().launch(sheets).forward(
sender.input_sender(),
|response| match response {
SheetListingOutput::SheetModelSelected(sheet) => AppInput::SheetPressed(sheet),
SheetListingOutput::ContentsChanged => AppInput::SheetListingContentsChanged,
},
);

let model = AppModel {
database: init_data.database,
database: Arc::new(init_data.database),
directory: Arc::new(init_data.directory),
mcdu,
sheets_and_files_listing,
sheets_listing,
click_mode: ClickMode::Open,
scroll_adjustment: Adjustment::builder().build(),
sheet_edit_dialog: None,
};

let widgets = view_output!();
@@ -109,34 +176,71 @@ impl AsyncComponent for AppModel {
async fn update(
&mut self,
message: Self::Input,
_sender: AsyncComponentSender<Self>,
_root: &Self::Root,
sender: AsyncComponentSender<Self>,
root: &Self::Root,
) {
// AppInput::SheetPressed(sheet) => opener::open(sheet).unwrap(),
match message {
AppInput::SearchStarted(query) => {
self.sheets_and_files_listing
self.sheets_listing
.emit(SheetListingInput::Query(query.clone()));
}
AppInput::SheetPressed(sheet_model_type) => {
opener::open(sheet_model_type.get_path()).unwrap();
match sheet_model_type {
SheetModelType::Orphan { mut orphan } => {
orphan.last_opened = Utc::now();
self.database
.update_orphan_last_opened(&orphan)
.await
.unwrap();
}
SheetModelType::Sheet { mut sheet } => {
sheet.last_opened = Utc::now();
self.database
.update_sheet_last_opened(&sheet)
.await
.unwrap();
AppInput::SheetPressed(sheet) => {
match self.click_mode {
ClickMode::Open => open_sheet(&sheet, &self.database).await,
ClickMode::Edit => {
self.sheet_edit_dialog = Some(
SheetEditDialogModel::builder()
.transient_for(root)
.launch(SheetEditDialogInit {
sheet,
database: Arc::clone(&self.database),
})
.forward(sender.input_sender(), |_| todo!()),
);
}
ClickMode::Annotate => annotate_sheet(&sheet).await,
};
}
AppInput::Refresh => {
let db = Arc::clone(&self.database);
let dir = Arc::clone(&self.directory);
sender.oneshot_command(async move {
sheet_validation::load_and_validate_sheets(&db, dir.as_ref()).await
});
}
AppInput::Sort => self.sheets_listing.emit(SheetListingInput::Sort),
AppInput::Shuffle => self.sheets_listing.emit(SheetListingInput::Shuffle),
AppInput::SetClickMode(click_mode) => self.click_mode = click_mode,
AppInput::SheetListingContentsChanged => self.scroll_adjustment.set_value(0.0),
}
}

async fn update_cmd(
&mut self,
message: Self::CommandOutput,
_sender: AsyncComponentSender<Self>,
_: &Self::Root,
) {
let mut sheets = message;
sheets.sort_by(|a, b| a.cmp(b).reverse());

self.sheets_listing
.emit(SheetListingInput::ReloadSheets(sheets));
}
}

async fn open_sheet(sheet: &Sheet, database: &Database) {
sheet.open_file_or_annotated_version_if_exists();
let mut sheet = sheet.to_owned();
sheet.last_opened = I64DateTime(Utc::now());
sheet_dao::update_sheet_last_opened(database, &sheet)
.await
.unwrap();
}

async fn annotate_sheet(sheet: &Sheet) {
Command::new("xournalpp")
.arg(&sheet.pdf.path)
.spawn()
.expect("failed to execute process");
}
@@ -60,7 +60,7 @@ impl SimpleComponent for McduModel {

fn init(
_init: Self::Init,
root: &Self::Root,
root: Self::Root,
sender: ComponentSender<Self>,
) -> ComponentParts<Self> {
let model = McduModel::new();
@@ -1,4 +1,5 @@
pub mod app;
pub mod mcdu;
pub mod sheet_edit_dialog;
pub mod sheet_listing;
pub mod sheet_model;
src/ui/sheet_edit_dialog.rs (new file, 256 lines)
@ -0,0 +1,256 @@
|
||||
use gtk::prelude::*;
|
||||
use std::sync::Arc;
|
||||
|
||||
use relm4::{
|
||||
component::{AsyncComponent, AsyncComponentParts, Connector},
|
||||
gtk::{
|
||||
gio::ListStore,
|
||||
glib::{self, GString, Type, Value},
|
||||
EntryBuffer, EntryCompletion,
|
||||
},
|
||||
prelude::*,
|
||||
AsyncComponentSender,
|
||||
};
|
||||
use relm4_components::alert::{Alert, AlertMsg, AlertSettings};
|
||||
|
||||
use crate::{database::Database, sheet::Sheet, sheet_dao};
|
||||
|
||||
pub struct SheetEditDialogModel {
|
||||
database: Arc<Database>,
|
||||
hidden: bool,
|
||||
sheet: Option<Sheet>,
|
||||
name_entry_buffer: EntryBuffer,
|
||||
composer_entry_buffer: EntryBuffer,
|
||||
composer_entry_completion: EntryCompletion,
|
||||
is_book: bool,
|
||||
book_sheets: Vec<(String, String, i64)>,
|
||||
alert_empty_fields: Connector<Alert>,
|
||||
}
|
||||
|
||||
pub struct SheetEditDialogInit {
|
||||
pub database: Arc<Database>,
|
||||
pub sheet: Sheet,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SheetEditDialogInput {
|
||||
Accept,
|
||||
Cancel,
|
||||
}
|
||||
|
||||
#[derive(Debug)]
|
||||
pub enum SheetEditDialogOutput {
|
||||
SheetEdited(Sheet),
|
||||
}
|
||||
|
||||
#[relm4::component(pub, async)]
|
||||
impl AsyncComponent for SheetEditDialogModel {
|
||||
type Init = SheetEditDialogInit;
|
||||
type Input = SheetEditDialogInput;
|
||||
type Output = SheetEditDialogOutput;
|
||||
type CommandOutput = ();
|
||||
|
||||
view! {
|
||||
gtk::Window {
|
||||
#[watch]
|
||||
set_visible: !model.hidden,
|
||||
set_modal: true,
|
||||
set_title: Some("Edit sheet"),
|
||||
set_default_width: 10,
|
||||
set_default_height: 10,
|
||||
gtk::Box {
|
||||
set_orientation: gtk::Orientation::Vertical,
|
||||
set_margin_all : 15,
|
||||
set_spacing: 5,
|
||||
gtk::Box {
|
||||
set_spacing: 10,
|
||||
gtk::Label {
|
||||
set_text: "Sheet name"
|
||||
},
|
||||
gtk::Entry {
|
||||
set_buffer: &model.name_entry_buffer,
|
||||
set_width_chars: 40,
|
||||
},
|
||||
},
|
||||
gtk::Box {
|
||||
set_spacing: 10,
|
||||
gtk::Label {
|
||||
set_text: "Sheet composer"
|
||||
},
|
||||
gtk::Entry {
|
||||
set_buffer: &model.composer_entry_buffer,
|
||||
set_completion: Some(&model.composer_entry_completion),
|
||||
set_hexpand: true,
|
||||
},
|
||||
},
|
||||
gtk::Box {
|
||||
set_spacing: 10,
|
||||
gtk::Label {
|
||||
set_text: "Book"
|
||||
},
|
||||
gtk::CheckButton {
|
||||
#[watch]
|
||||
set_active: model.is_book,
|
||||
},
|
||||
},
|
||||
gtk::Box {
|
||||
set_orientation: gtk::Orientation::Horizontal,
|
||||
set_margin_top: 10,
|
||||
set_spacing: 10,
|
||||
set_homogeneous: true,
|
||||
set_halign: gtk::Align::Center,
|
||||
set_hexpand: true,
|
||||
|
||||
gtk::Button {
|
||||
set_label: "Cancel",
|
||||
connect_clicked[sender] => move |_| sender.input(SheetEditDialogInput::Cancel)
|
||||
},
|
||||
gtk::Button {
|
||||
set_label : "Confirm",
|
||||
connect_clicked[sender] => move |_| sender.input(SheetEditDialogInput::Accept)
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
async fn init(
|
||||
params: Self::Init,
|
||||
root: Self::Root,
|
||||
sender: AsyncComponentSender<Self>,
|
||||
) -> AsyncComponentParts<Self> {
|
||||
let sheet = params.sheet;
|
||||
let mut sheet_name = String::new();
|
||||
let mut sheet_composer = String::new();
|
||||
let mut is_book = false;
|
||||
|
||||
match &sheet.kind {
|
||||
crate::sheet::SheetKind::Sheet {
|
||||
name, composer_id, ..
|
||||
} => {
|
||||
sheet_name = name.to_string();
|
||||
if let Ok(composer) =
|
||||
sheet_dao::get_composer_by_id(¶ms.database, *composer_id).await
|
||||
{
|
||||
sheet_composer = composer.name;
|
||||
}
|
||||
}
|
||||
crate::sheet::SheetKind::Orphan => {
|
||||
sheet_name = sheet.pdf.get_name().to_string();
|
||||
}
|
||||
crate::sheet::SheetKind::Book {
|
||||
name,
|
||||
composer_id,
|
||||
sheet_ids: _,
|
||||
} => {
|
||||
is_book = true;
|
||||
sheet_name = name.to_string();
|
||||
if let Ok(composer) =
|
||||
sheet_dao::get_composer_by_id(¶ms.database, *composer_id).await
|
||||
{
|
||||
sheet_composer = composer.name;
|
||||
}
|
||||
// TODO: load sheets of book
|
||||
}
|
||||
};
|
||||
|
||||
let composer_entry_completion = EntryCompletion::new();
|
||||
let data = [
|
||||
"France".to_string(),
|
||||
"Italy".to_string(),
|
||||
"Sweden".to_string(),
|
||||
"Switzerland".to_string(),
|
||||
];
|
||||
let store = gtk::ListStore::new(&[glib::Type::STRING]);
|
||||
for d in data.iter() {
|
||||
store.set(&store.append(), &[(0, &d)]);
|
||||
}

        composer_entry_completion.set_model(Some(&store));
        // Use the first (and only) column available to set the autocompletion text
        composer_entry_completion.set_text_column(0);
        // how many keystrokes to wait before attempting to autocomplete?
        composer_entry_completion.set_minimum_key_length(1);
        // whether the completions should be presented in a popup window
        composer_entry_completion.set_popup_completion(true);

        let model = SheetEditDialogModel {
            database: params.database,
            hidden: false,
            sheet: Some(sheet),
            name_entry_buffer: EntryBuffer::new(Some(sheet_name)),
            composer_entry_buffer: EntryBuffer::new(Some(sheet_composer)),
            composer_entry_completion,
            is_book,
            book_sheets: Vec::new(),
            alert_empty_fields: Alert::builder().transient_for(&root).launch(AlertSettings {
                text: String::from("Missing input"),
                secondary_text: Some(String::from("Please make sure all fields are filled")),
                is_modal: true,
                destructive_accept: false,
                confirm_label: Some(String::from("Ok")),
                cancel_label: None,
                option_label: None,
            }),
        };
        let widgets = view_output!();

        AsyncComponentParts { model, widgets }
    }

    // TODO: init_loading_widgets

    async fn update(
        &mut self,
        msg: Self::Input,
        _sender: AsyncComponentSender<Self>,
        _root: &Self::Root,
    ) {
        match msg {
            SheetEditDialogInput::Accept => {
                if let Some(_sheet) = &self.sheet {
                    let sheet_name_string = self.name_entry_buffer.text();
                    let sheet_name = sheet_name_string.trim();

                    let sheet_composer_string = self.composer_entry_buffer.text();
                    let sheet_composer = sheet_composer_string.trim();

                    if sheet_name.is_empty() || sheet_composer.is_empty() {
                        self.alert_empty_fields.emit(AlertMsg::Show);
                        return;
                    }

                    // match sheet.kind.borrow_mut() {
                    //     crate::sheet::SheetKind::Sheet {
                    //         name,
                    //         composer_id,
                    //         first_page,
                    //         book_id,
                    //     } => {
                    //         todo!("Do something!!!");
                    //         // name = "hello world";
                    //         // name = &mut self.sheet_name.clone();
                    //         // composer_id = 0;
                    //     }
                    //     crate::sheet::SheetKind::Orphan => {
                    //         todo!("Create Sheet");
                    //     }
                    //     crate::sheet::SheetKind::Book {
                    //         name,
                    //         composer_id,
                    //         sheet_ids,
                    //     } => todo!(),
                    // };
                    // sender
                    //     .output(SheetEditDialogOutput::SheetEdited(sheet))
                    //     .unwrap();
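
                    // NOTE: the validated name/composer are currently discarded; the
                    // commented-out match above sketches how each SheetKind variant would be
                    // updated before emitting SheetEditDialogOutput::SheetEdited.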
                }
                self.hidden = true;
            }
            SheetEditDialogInput::Cancel => {
                self.hidden = true;
                self.sheet = None;
            }
        }
    }
}
@ -4,7 +4,11 @@ use relm4::factory::FactoryVecDeque;
use relm4::RelmListBoxExt;
use relm4::{gtk, ComponentParts, ComponentSender, SimpleComponent};

use super::sheet_model::{OnQueryUpdate, SheetModel, SheetModelType};
use crate::sheet::Sheet;

use super::sheet_model::{OnQueryUpdate, SheetModel};

use rand::seq::SliceRandom;

pub struct SheetListingModel {
    sheets: FactoryVecDeque<SheetModel>,
@ -14,18 +18,23 @@ pub struct SheetListingModel {
pub enum SheetListingInput {
    Query(String),
    ListBoxRowClicked(i32),
    Sort,
    Shuffle,
    ReloadSheets(Vec<Sheet>),
    None,
}

#[derive(Debug)]
pub struct SheetModelSelected {
    pub sheet_model_type: SheetModelType,
pub enum SheetListingOutput {
    SheetModelSelected(Sheet),
    ContentsChanged,
}

#[relm4::component(pub)]
impl SimpleComponent for SheetListingModel {
    type Init = Vec<SheetModelType>;
    type Init = Vec<Sheet>;
    type Input = SheetListingInput;
    type Output = SheetModelSelected;
    type Output = SheetListingOutput;

    view! {
        #[root]
@ -46,10 +55,12 @@ impl SimpleComponent for SheetListingModel {

    fn init(
        init: Self::Init,
        root: &Self::Root,
        root: Self::Root,
        sender: ComponentSender<Self>,
    ) -> ComponentParts<Self> {
        let mut sheets = FactoryVecDeque::new(gtk::ListBox::default(), sender.input_sender());
        let mut sheets = FactoryVecDeque::builder()
            .launch(gtk::ListBox::default())
            .forward(sender.input_sender(), |_| SheetListingInput::None);
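        // With relm4 0.8 the factory is created via builder()/launch()/forward() instead of
        // FactoryVecDeque::new(); since SheetModel no longer emits outputs, forwarded
        // messages are simply mapped to SheetListingInput::None.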
        for sheet_model_type in init {
            sheets.guard().push_back(sheet_model_type);
        }
@ -65,13 +76,109 @@ impl SimpleComponent for SheetListingModel {
                self.sheets.broadcast(OnQueryUpdate { query });
            }
            SheetListingInput::ListBoxRowClicked(index) => {
                let x = self.sheets.get(index as usize).unwrap();
                let sheet_model = self.sheets.get(index as usize).unwrap();
                sender
                    .output(SheetModelSelected {
                        sheet_model_type: x.sheet_model_type.clone(),
                    })
                    .output(SheetListingOutput::SheetModelSelected(
                        sheet_model.sheet.clone(),
                    ))
                    .unwrap();
            }
            SheetListingInput::Sort => {
                sort_sheets(&mut self.sheets);
                sender.output(SheetListingOutput::ContentsChanged).unwrap();
            }
            SheetListingInput::Shuffle => {
                shuffle_sheets(&mut self.sheets);
                sender.output(SheetListingOutput::ContentsChanged).unwrap();
            }
            SheetListingInput::ReloadSheets(sheets) => {
                self.sheets.guard().clear();
                for sheet_model_type in sheets {
                    self.sheets.guard().push_back(sheet_model_type);
                }
                sender.output(SheetListingOutput::ContentsChanged).unwrap();
            }
            SheetListingInput::None => {}
        }
    }
}
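
// The helpers below reorder the sheet factory in place: the existing row widgets are
// swapped inside the FactoryVecDeque guard instead of being cleared and re-pushed.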
fn shuffle_sheets(sheets: &mut FactoryVecDeque<SheetModel>) {
    let mut new_order: Vec<usize> = (0..sheets.len()).collect();
    new_order.shuffle(&mut rand::thread_rng());
    order_sheets(sheets, &mut new_order);
}
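
// Sorts by Sheet's Ord implementation, reversed; assuming Sheet still orders by
// last_opened (as the removed SheetModelType impl did), the most recently opened
// sheets come first.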
fn sort_sheets(sheets: &mut FactoryVecDeque<SheetModel>) {
    let mut order = Vec::new();
    {
        let guard = sheets.guard();
        let mut numerated_sheets: Vec<_> = guard.iter().enumerate().collect();
        numerated_sheets.sort_by(|a, b| a.1.sheet.cmp(&b.1.sheet).reverse());
        for (i, _) in numerated_sheets {
            order.push(i);
        }
    }
    order_sheets(sheets, &mut order);
}
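
// Applies `order` as a permutation: `order[i]` is the current index of the element that
// should end up at position `i`, and `wish_positions` initially maps each current index
// to its target slot; both arrays are kept in sync while the swaps are performed.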
fn order_sheets(sheets: &mut FactoryVecDeque<SheetModel>, order: &mut Vec<usize>) {
    assert!(sheets.len() == order.len());

    let mut wish_positions = vec![0; sheets.len()];
    for (i, i2) in order.iter().enumerate() {
        wish_positions[*i2] = i;
    }

    for i in 0..sheets.len() {
        let new_i = order[i];
        let old_i = i;

        if old_i != new_i {
            order.swap(old_i, wish_positions[old_i]);
            wish_positions.swap(old_i, new_i);
            sheets.guard().swap(old_i, new_i);
        }
    }
}

#[cfg(test)]
mod tests {
    // Note this useful idiom: importing names from outer (for mod tests) scope.
    use super::*;
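
    // Mirrors the swap bookkeeping of `order_sheets` on a plain Vec<usize> so the
    // reordering logic can be verified without constructing factory widgets.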
    #[test]
    fn test_sort() {
        let original: Vec<usize> = (0..100).collect();
        let mut to_sort = original.clone();
        to_sort.shuffle(&mut rand::thread_rng());

        println!("To sort: {:?}", to_sort);

        let mut order_builder: Vec<_> = to_sort.clone().into_iter().enumerate().collect();
        order_builder.sort_by(|a, b| a.1.cmp(&b.1));
        let mut order: Vec<_> = order_builder.into_iter().map(|(i, _)| i).collect();

        println!("Initial order: {:?}", order);

        let mut wish_positions = vec![0; to_sort.len()];
        for (i, i2) in order.iter().enumerate() {
            wish_positions[*i2] = i;
        }

        for i in 0..to_sort.len() {
            let new_i = order[i];
            let old_i = i;

            println!("Swap {} and {}", old_i, new_i);
            if old_i != new_i {
                order.swap(old_i, wish_positions[old_i]);
                wish_positions.swap(old_i, new_i);
                to_sort.swap(old_i, new_i);
            }
            println!("order: {:?} - to_sort: {:?}", order, to_sort);
        }

        assert_eq!(original, to_sort);
    }
}
@ -1,55 +1,14 @@
use std::{cmp::Ordering, path::Path};

use gtk::prelude::*;
use relm4::prelude::*;

use crate::sheet::{OrphanFile, Sheet};

use super::sheet_listing::SheetListingInput;

#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SheetModelType {
    Sheet { sheet: Sheet },
    Orphan { orphan: OrphanFile },
}

impl SheetModelType {
    pub fn get_path(&self) -> &Path {
        match self {
            SheetModelType::Sheet { sheet } => sheet.path.as_path(),
            SheetModelType::Orphan { orphan } => orphan.path.as_path(),
        }
    }
}
impl Ord for SheetModelType {
    fn cmp(&self, other: &Self) -> Ordering {
        let self_last_opened = match self {
            SheetModelType::Sheet { sheet } => sheet.last_opened,
            SheetModelType::Orphan { orphan } => orphan.last_opened,
        };
        let other_last_opened = match other {
            SheetModelType::Sheet { sheet } => sheet.last_opened,
            SheetModelType::Orphan { orphan } => orphan.last_opened,
        };
        self_last_opened.cmp(&other_last_opened)
    }
}

impl PartialOrd for SheetModelType {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
use crate::sheet::Sheet;

pub struct SheetModel {
    pub label: String,
    pub sheet_model_type: SheetModelType,
    pub sheet: Sheet,
    visible: bool,
}

#[derive(Debug)]
pub struct RowActivated;

#[derive(Debug, Clone)]
pub struct OnQueryUpdate {
    pub query: String,
@ -57,12 +16,11 @@ pub struct OnQueryUpdate {

#[relm4::factory(pub)]
impl FactoryComponent for SheetModel {
    type Init = SheetModelType;
    type Init = Sheet;
    type ParentWidget = gtk::ListBox;
    type CommandOutput = ();
    type ParentInput = SheetListingInput;
    type Input = OnQueryUpdate;
    type Output = RowActivated;
    type Output = ();

    view! {
        #[root]
@ -80,21 +38,16 @@ impl FactoryComponent for SheetModel {
        }
    }

    fn init_model(value: Self::Init, _index: &DynamicIndex, _sender: FactorySender<Self>) -> Self {
        let label = match &value {
            SheetModelType::Sheet { sheet } => sheet.name.to_string(),
            SheetModelType::Orphan { orphan } => orphan
                .path
                .file_name()
                .unwrap()
                .to_str()
                .unwrap()
                .to_string(),
    fn init_model(sheet: Self::Init, _index: &DynamicIndex, _sender: FactorySender<Self>) -> Self {
        let label = match &sheet.kind {
            crate::sheet::SheetKind::Sheet { name, .. } => name,
            crate::sheet::SheetKind::Orphan {} => sheet.pdf.get_name(),
            crate::sheet::SheetKind::Book { name, .. } => name,
        };

        SheetModel {
            label,
            sheet_model_type: value,
            label: label.to_string(),
            sheet,
            visible: true,
        }
    }