Improve library sync, add more config values

2026-01-24 19:00:22 +01:00
parent 1c26770db0
commit de94315bd9
6 changed files with 56 additions and 31 deletions

View File

@@ -1,4 +1,5 @@
 # Server Configuration
+SERVER_HOSTNAME=127.0.0.1
 SERVER_PORT=8080
 GIN_MODE=debug

View File

@@ -8,6 +8,7 @@ import (
 type Config struct {
 	Server struct {
+		Hostname string
 		Port     string
 		Mode     string // gin.Mode: debug, release, test
 	}
@@ -30,7 +31,8 @@ func Load() {
 	cfg := &Config{}
 
 	// Server configuration
-	cfg.Server.Port = getEnv("SERVER_PORT", ":8080")
+	cfg.Server.Hostname = getEnv("SERVER_HOSTNAME", "127.0.0.1")
+	cfg.Server.Port = getEnv("SERVER_PORT", "8080")
 	cfg.Server.Mode = getEnv("GIN_MODE", "debug")
 
 	// JWT configuration
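Note (not part of this diff): the new defaults go through a getEnv(key, fallback) helper defined elsewhere in config.go. A minimal sketch of what such a lookup-with-default helper usually looks like, under that assumption:

package config

import "os"

// Assumed sketch of the helper used by Load above; the actual
// implementation is not shown in this commit.
func getEnv(key, fallback string) string {
	// An unset or empty environment variable falls back to the default.
	if value := os.Getenv(key); value != "" {
		return value
	}
	return fallback
}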

View File

@@ -41,7 +41,7 @@ func main() {
 	routes.SetupRoutes(r)
 
 	log.Printf("Server starting on port %s...", config.AppConfig.Server.Port)
-	if err := r.Run(config.AppConfig.Server.Port); err != nil {
+	if err := r.Run(config.AppConfig.Server.Hostname + ":" + config.AppConfig.Server.Port); err != nil {
 		log.Fatal("Failed to start server:", err)
 	}
 }
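Note (not part of this diff): gin's r.Run(":8080") listens on all interfaces, while r.Run("127.0.0.1:8080") binds to loopback only, so with the new defaults the server is reachable only from the local machine unless SERVER_HOSTNAME is overridden (e.g. set to 0.0.0.0). A standalone sketch of how the address composes:

package main

import (
	"fmt"
	"net"
)

func main() {
	hostname := "127.0.0.1" // SERVER_HOSTNAME default
	port := "8080"          // SERVER_PORT default
	// net.JoinHostPort also handles IPv6 literals correctly;
	// the committed code uses plain string concatenation instead.
	fmt.Println(net.JoinHostPort(hostname, port)) // 127.0.0.1:8080
}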

View File

@@ -13,7 +13,7 @@ type Sheet struct {
 	Description  string    `json:"description"`
 	FilePath     string    `json:"file_path" gorm:"not null"`
 	FileSize     int64     `json:"file_size"`
-	FileHash     uint64    `json:"file_hash"`
+	FileHash     string    `json:"file_hash"`
 	ComposerUuid uuid.UUID `json:"composer_uuid"`
 	Composer     Composer  `json:"composer" gorm:"foreignKey:ComposerUuid"`
 	CreatedAt    time.Time `json:"created_at"`

View File

@@ -9,10 +9,14 @@ import (
"sheetless-server/handlers" "sheetless-server/handlers"
"sheetless-server/models" "sheetless-server/models"
"sheetless-server/utils" "sheetless-server/utils"
"strings"
"time" "time"
) )
func SyncSheets() error { func SyncSheets() error {
log.Println("Running library sync")
syncStartTime := time.Now()
// Get all sheets // Get all sheets
var sheets []models.Sheet var sheets []models.Sheet
if err := database.DB.Find(&sheets).Error; err != nil { if err := database.DB.Find(&sheets).Error; err != nil {
@@ -21,33 +25,31 @@ func SyncSheets() error {
 	// Maps
 	pathsInDb := make(map[string]*models.Sheet)
-	hashToSheets := make(map[uint64][]*models.Sheet)
+	hashToSheets := make(map[string][]*models.Sheet)
 	for i := range sheets {
 		sheet := &sheets[i]
 		pathsInDb[sheet.FilePath] = sheet
 		hashToSheets[sheet.FileHash] = append(hashToSheets[sheet.FileHash], sheet)
 	}
 
-	// Walk uploads dir
-	files, err := os.ReadDir(config.AppConfig.SheetsDirectory)
-	if err != nil {
-		return err
-	}
+	numFilesWithNewHash := 0
+	numRenamedFiles := 0
+	numNewFiles := 0
 
-	for _, file := range files {
-		if file.IsDir() {
-			continue
-		}
-		filePath := filepath.Join(config.AppConfig.SheetsDirectory, file.Name())
+	// Walk sheets directory recursively for PDF files
+	err := filepath.Walk(config.AppConfig.SheetsDirectory, func(filePath string, info os.FileInfo, walkErr error) error {
+		if walkErr != nil {
+			return walkErr
+		}
+
+		// Skip directories and non-PDF files
+		if info.IsDir() || filepath.Ext(filePath) != ".pdf" {
+			return nil
+		}
 
 		hash, err := utils.FileHash(filePath)
 		if err != nil {
 			log.Printf("Error hashing file %s: %v", filePath, err)
-			continue
-		}
-
-		info, err := file.Info()
-		if err != nil {
-			log.Printf("Error getting file info %s: %v", filePath, err)
-			continue
+			return nil
 		}
 
 		existingSheet, exists := pathsInDb[filePath]
@@ -58,6 +60,8 @@ func SyncSheets() error {
 				existingSheet.UpdatedAt = time.Now()
 				if err := database.DB.Save(existingSheet).Error; err != nil {
 					log.Printf("Error updating sheet hash for %s: %v", filePath, err)
+				} else {
+					numFilesWithNewHash++
 				}
 			}
 		} else {
@@ -70,6 +74,8 @@ func SyncSheets() error {
 				s.UpdatedAt = time.Now()
 				if err := database.DB.Save(s).Error; err != nil {
 					log.Printf("Error updating sheet path for %s: %v", filePath, err)
+				} else {
+					numRenamedFiles++
 				}
 				break
 			}
@@ -79,11 +85,11 @@ func SyncSheets() error {
 			uuid, err := handlers.GenerateNonexistentSheetUuid()
 			if err != nil {
 				log.Printf("Error generating uuid: %v", err)
-				continue
+				return nil
 			}
 			newSheet := models.Sheet{
 				Uuid:     *uuid,
-				Title:    file.Name(), // use filename as title
+				Title:    strings.TrimSuffix(filepath.Base(filePath), ".pdf"), // use filename as title
 				FilePath: filePath,
 				FileSize: info.Size(),
 				FileHash: hash,
@@ -92,10 +98,19 @@ func SyncSheets() error {
 			}
 			if err := database.DB.Create(&newSheet).Error; err != nil {
 				log.Printf("Error creating new sheet for %s: %v", filePath, err)
+			} else {
+				numNewFiles++
 			}
 		}
-	}
 
-	return nil
+		return nil
+	})
+
+	if numFilesWithNewHash != 0 || numRenamedFiles != 0 || numNewFiles != 0 {
+		log.Printf("Library sync successfully run.\nChanged hashes: %d, renamed files: %d, new files: %d", numFilesWithNewHash, numRenamedFiles, numNewFiles)
+	}
+	log.Printf("Sync took %s", time.Since(syncStartTime))
+
+	return err
 }
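Note (not part of this diff): the earlier continue statements become return nil because the loop body now runs inside a filepath.Walk callback. Returning nil skips only the current entry, while returning an error aborts the whole walk, and that error is what Walk (and now SyncSheets) returns. A self-contained sketch of this control flow:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	err := filepath.Walk(".", func(path string, info os.FileInfo, walkErr error) error {
		if walkErr != nil {
			return walkErr // abort the walk on filesystem errors
		}
		if info.IsDir() || filepath.Ext(path) != ".pdf" {
			return nil // skip this entry; the walk continues
		}
		fmt.Println("found PDF:", path)
		return nil
	})
	if err != nil {
		fmt.Fprintln(os.Stderr, "walk failed:", err)
	}
}

One caveat carried over into the new filter: filepath.Ext is case-sensitive, so a file named Score.PDF would be skipped; strings.EqualFold(filepath.Ext(path), ".pdf") would match regardless of case.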

View File

@@ -1,32 +1,39 @@
 package utils
 
 import (
+	"encoding/binary"
 	"hash/fnv"
 	"io"
 	"mime/multipart"
 	"os"
 )
 
-func FileHashFromUpload(file multipart.File) (uint64, error) {
+func FileHashFromUpload(file multipart.File) (string, error) {
 	h := fnv.New64a()
 	if _, err := io.Copy(h, file); err != nil {
-		return 0, err
+		return "", err
 	}
-	return h.Sum64(), nil
+	return u64ToString(h.Sum64()), nil
 }
 
-func FileHash(path string) (uint64, error) {
+func FileHash(path string) (string, error) {
 	f, err := os.Open(path)
 	if err != nil {
-		return 0, err
+		return "", err
 	}
 	defer f.Close()
 
 	h := fnv.New64a()
 	if _, err := io.Copy(h, f); err != nil {
-		return 0, err
+		return "", err
 	}
-	return h.Sum64(), nil
+	return u64ToString(h.Sum64()), nil
+}
+
+func u64ToString(x uint64) string {
+	var b [8]byte
+	binary.LittleEndian.PutUint64(b[:], x)
+	return string(b[:])
 }
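Note (not part of this diff): u64ToString packs the raw little-endian bytes of the 64-bit FNV-1a sum into a Go string, so file_hash values can contain NUL bytes and invalid UTF-8, which encoding/json coerces to the replacement rune when a Sheet is serialized. A hex-encoded variant (a sketch of an alternative design choice, not what this commit does) keeps the value printable:

package utils

import "fmt"

// u64ToHex renders the 64-bit hash as a fixed-width, 16-character lowercase
// hex string. Alternative sketch only; the committed u64ToString stores the
// raw bytes instead.
func u64ToHex(x uint64) string {
	return fmt.Sprintf("%016x", x)
}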