Compare commits

6 Commits

6 changed files with 752 additions and 515 deletions

Cargo.lock (generated): 1033 changed lines

File diff suppressed because it is too large.


@@ -1,6 +1,9 @@
 # Sheet Organizer
 
 A simple tool for organizing and opening digital sheet music on a touch display as part of a digital music stand.
 
+## Dependencies
+This tool offers editing pdf using [Xournal++](https://github.com/xournalpp/xournalpp).
+
 ## Configuration
 You can configure sheet-organizer using an file `config.toml` inside one of your `$XDG_CONFIG_DIRECTORIES` (e.g. `~/.config/sheet-organizer/config.toml`).

flake.lock (generated): 23 changed lines

@@ -1,17 +1,12 @@
 {
   "nodes": {
     "crane": {
-      "inputs": {
-        "nixpkgs": [
-          "nixpkgs"
-        ]
-      },
       "locked": {
-        "lastModified": 1717469187,
-        "narHash": "sha256-UVvFGiWFGPfVXG7Xr6HPKChx9hhtzkGaGAS/Ph1Khjg=",
+        "lastModified": 1736101677,
+        "narHash": "sha256-iKOPq86AOWCohuzxwFy/MtC8PcSVGnrxBOvxpjpzrAY=",
         "owner": "ipetkov",
         "repo": "crane",
-        "rev": "7e86136dc729cdf237aa59a5a02687bc0d1144b6",
+        "rev": "61ba163d85e5adeddc7b3a69bb174034965965b2",
         "type": "github"
       },
       "original": {
@@ -25,11 +20,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1710146030,
-        "narHash": "sha256-SZ5L6eA7HJ/nmkzGG7/ISclqe6oZdOZTNoesiInkXPQ=",
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "b1d9ab70662946ef0850d488da1c9019f3a9752a",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
         "type": "github"
       },
       "original": {
@@ -40,11 +35,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1716715802,
-        "narHash": "sha256-usk0vE7VlxPX8jOavrtpOqphdfqEQpf9lgedlY/r66c=",
+        "lastModified": 1736241350,
+        "narHash": "sha256-CHd7yhaDigUuJyDeX0SADbTM9FXfiWaeNyY34FL1wQU=",
         "owner": "NixOS",
         "repo": "nixpkgs",
-        "rev": "e2dd4e18cc1c7314e24154331bae07df76eb582f",
+        "rev": "8c9fd3e564728e90829ee7dbac6edc972971cd0f",
         "type": "github"
       },
       "original": {

flake.nix: 193 changed lines

@@ -3,124 +3,117 @@
   inputs = {
     nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
-    crane = {
-      url = "github:ipetkov/crane";
-      inputs.nixpkgs.follows = "nixpkgs";
-    };
     flake-utils.url = "github:numtide/flake-utils";
+    crane.url = "github:ipetkov/crane";
   };
   outputs =
     {
       self,
       nixpkgs,
-      crane,
       flake-utils,
+      crane,
       ...
     }:
-    flake-utils.lib.eachDefaultSystem (
-      system:
-      let
-        pkgs = nixpkgs.legacyPackages.${system};
-        craneLib = crane.mkLib pkgs;
-        dbMigrationsFilter = path: _type: builtins.match ".*sql$" path != null;
-        dbMigrationsOrCargoFilter =
-          path: type: (dbMigrationsFilter path type) || (craneLib.filterCargoSources path type);
-        dbMigrations = pkgs.lib.cleanSourceWith {
-          src = craneLib.path ./db-migrations; # The original, unfiltered source
-          filter = dbMigrationsFilter;
-        };
-        # Common arguments can be set here to avoid repeating them later
-        # Note: changes here will rebuild all dependency crates
-        commonArgs = rec {
-          strictDeps = true; # When this is not set, all dependency crates will be compiled again
-          src = pkgs.lib.cleanSourceWith {
-            src = craneLib.path ./.; # The original, unfiltered source
-            filter = dbMigrationsOrCargoFilter;
-          };
-          # Add icons.toml to $src when compiling dependencies (needed by relm4-icons)
-          extraDummyScript = ''
-            cp --no-preserve=mode,ownership ${./icons.toml} $out/icons.toml
-          '';
-          nativeBuildInputs = with pkgs; [ pkg-config ];
-          buildInputs =
-            with pkgs;
-            [
-              gtk4
-              xournalpp # not needed for building
-            ]
-            ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
-              # Additional darwin specific inputs can be set here
-              pkgs.libiconv
-            ];
-        };
-        # Build *just* the cargo dependencies, so we can reuse
-        # all of that work (e.g. via cachix) when running in CI
-        cargoArtifacts = craneLib.buildDepsOnly (commonArgs);
-        # Run clippy (and deny all warnings) on the crate source,
-        # reusing the dependency artifacts (e.g. from build scripts or
-        # proc-macros) from above.
-        #
-        # Note that this is done as a separate derivation so it
-        # does not impact building just the crate by itself.
-        myCrateClippy = craneLib.cargoClippy (
-          commonArgs
-          // {
-            # Again we apply some extra arguments only to this derivation
-            # and not every where else. In this case we add some clippy flags
-            inherit cargoArtifacts;
-            cargoClippyExtraArgs = "--all-targets -- --deny warnings";
-          }
-        );
-        # Build the actual crate itself, reusing the dependency
-        # artifacts from above.
-        myCrate = craneLib.buildPackage (
-          commonArgs
-          // {
-            inherit cargoArtifacts;
-          }
-          // {
-            postInstall = ''
-              mkdir -p $out/share/applications
-              cp ${./sheet-organizer.desktop} $out/share/applications/sheet-organizer.desktop
-              mkdir -p $out/share/icons
-              cp ${./sheet-organizer.png} $out/share/icons/sheet-organizer.png
-            '';
-          }
-        );
-        # Also run the crate tests under cargo-tarpaulin so that we can keep
-        # track of code coverage
-        myCrateCoverage = craneLib.cargoTarpaulin (commonArgs // { inherit cargoArtifacts; });
-      in
-      {
-        packages.default = myCrate;
-        checks = {
-          inherit
-            # Build the crate as part of `nix flake check` for convenience
-            myCrate
-            myCrateClippy
-            myCrateCoverage
-            ;
-        };
-        hydraJobs = {
-          "sheet-organizer" = myCrate;
-        };
-      }
-    );
+    let
+      packageOutputs = flake-utils.lib.eachDefaultSystem (
+        system:
+        let
+          pkgs = nixpkgs.legacyPackages.${system};
+          craneLib = crane.mkLib pkgs;
+          dbMigrationsFilter = path: _type: builtins.match ".*sql$" path != null;
+          dbMigrationsOrCargoFilter =
+            path: type: (dbMigrationsFilter path type) || (craneLib.filterCargoSources path type);
+          dbMigrations = pkgs.lib.cleanSourceWith {
+            src = craneLib.path ./db-migrations; # The original, unfiltered source
+            filter = dbMigrationsFilter;
+          };
+          # Common arguments can be set here to avoid repeating them later
+          # Note: changes here will rebuild all dependency crates
+          commonArgs = rec {
+            strictDeps = true; # When this is not set, all dependency crates will be compiled again
+            src = pkgs.lib.cleanSourceWith {
+              src = craneLib.path ./.; # The original, unfiltered source
+              filter = dbMigrationsOrCargoFilter;
+            };
+            # Add icons.toml to $src when compiling dependencies (needed by relm4-icons)
+            extraDummyScript = ''
+              cp --no-preserve=mode,ownership ${./icons.toml} $out/icons.toml
+            '';
+            nativeBuildInputs = with pkgs; [ pkg-config ];
+            buildInputs =
+              with pkgs;
+              [
+                gtk4
+              ]
+              ++ pkgs.lib.optionals pkgs.stdenv.isDarwin [
+                # Additional darwin specific inputs can be set here
+                pkgs.libiconv
+              ];
+          };
+          # Build *just* the cargo dependencies, so we can reuse
+          # all of that work (e.g. via cachix) when running in CI
+          cargoArtifacts = craneLib.buildDepsOnly (commonArgs);
+          # Run clippy (and deny all warnings) on the crate source,
+          # reusing the dependency artifacts (e.g. from build scripts or
+          # proc-macros) from above.
+          #
+          # Note that this is done as a separate derivation so it
+          # does not impact building just the crate by itself.
+          myCrateClippy = craneLib.cargoClippy (
+            commonArgs
+            // {
+              # Again we apply some extra arguments only to this derivation
+              # and not every where else. In this case we add some clippy flags
+              inherit cargoArtifacts;
+              cargoClippyExtraArgs = "--all-targets -- --deny warnings";
+            }
+          );
+          # Build the actual crate itself, reusing the dependency
+          # artifacts from above.
+          myCrate = craneLib.buildPackage (
+            commonArgs
+            // {
+              inherit cargoArtifacts;
+            }
+            // {
+              postInstall = ''
+                mkdir -p $out/share/applications
+                cp ${./sheet-organizer.desktop} $out/share/applications/sheet-organizer.desktop
+                mkdir -p $out/share/icons
+                cp ${./sheet-organizer.png} $out/share/icons/sheet-organizer.png
+              '';
+            }
+          );
+          # Also run the crate tests under cargo-tarpaulin so that we can keep
+          # track of code coverage
+          myCrateCoverage = craneLib.cargoTarpaulin (commonArgs // { inherit cargoArtifacts; });
+        in
+        {
+          packages.default = myCrate;
+          checks = {
+            inherit
+              # Build the crate as part of `nix flake check` for convenience
+              myCrate
+              myCrateClippy
+              myCrateCoverage
+              ;
+          };
+        }
+      );
+    in
+    packageOutputs;
 }


@@ -81,6 +81,19 @@ pub async fn get_composer_by_id(database: &Database, id: i64) -> sqlx::Result<Co
         .await
 }
 
+pub async fn remove_duplicate_sheets(database: &Database) -> sqlx::Result<()> {
+    for kind in SheetKindDiscriminants::iter() {
+        let table = kind.get_database_table_name();
+        sqlx::query(&format!(
+            "DELETE FROM {} WHERE id NOT IN (SELECT MIN(id) FROM {} GROUP BY file_hash)",
+            table, table
+        ))
+        .execute(&database.connection)
+        .await?;
+    }
+    Ok(())
+}
+
 pub async fn fetch_all_sheets(database: &Database) -> sqlx::Result<Vec<Sheet>> {
     let mut sheets: Vec<Sheet> = Vec::new();
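A note on the deduplication query added above: for each sheet table it keeps only the row with the smallest id per file_hash and deletes every other row sharing that hash. As a rough sketch, the format! call expands to a statement of this shape (the table name sheets is purely a placeholder here; the real names come from get_database_table_name()):

    DELETE FROM sheets
    WHERE id NOT IN (
        SELECT MIN(id) FROM sheets GROUP BY file_hash
    );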


@@ -13,6 +13,8 @@ pub async fn load_and_validate_sheets(
     database: &Database,
     directory: impl AsRef<Path>,
 ) -> Vec<Sheet> {
+    sheet_dao::remove_duplicate_sheets(database).await.unwrap();
+
     let sheets = sheet_dao::fetch_all_sheets(database).await.unwrap();
     debug!("Validating sheets from database...");