Improve library sync, add more config values

This commit is contained in:
2026-01-24 19:00:22 +01:00
parent 1c26770db0
commit de94315bd9
6 changed files with 56 additions and 31 deletions

View File

@@ -1,4 +1,5 @@
# Server Configuration
SERVER_HOSTNAME=127.0.0.1
SERVER_PORT=8080
GIN_MODE=debug

View File

@@ -8,8 +8,9 @@ import (
type Config struct {
Server struct {
Port string
Mode string // gin.Mode: debug, release, test
Hostname string
Port string
Mode string // gin.Mode: debug, release, test
}
JWT struct {
Secret string
@@ -30,7 +31,8 @@ func Load() {
cfg := &Config{}
// Server configuration
cfg.Server.Port = getEnv("SERVER_PORT", ":8080")
cfg.Server.Hostname = getEnv("SERVER_HOSTNAME", "127.0.0.1")
cfg.Server.Port = getEnv("SERVER_PORT", "8080")
cfg.Server.Mode = getEnv("GIN_MODE", "debug")
// JWT configuration

View File

@@ -41,7 +41,7 @@ func main() {
routes.SetupRoutes(r)
log.Printf("Server starting on port %s...", config.AppConfig.Server.Port)
if err := r.Run(config.AppConfig.Server.Port); err != nil {
if err := r.Run(config.AppConfig.Server.Hostname + ":" + config.AppConfig.Server.Port); err != nil {
log.Fatal("Failed to start server:", err)
}
}

View File

@@ -13,7 +13,7 @@ type Sheet struct {
Description string `json:"description"`
FilePath string `json:"file_path" gorm:"not null"`
FileSize int64 `json:"file_size"`
FileHash uint64 `json:"file_hash"`
FileHash string `json:"file_hash"`
ComposerUuid uuid.UUID `json:"composer_uuid"`
Composer Composer `json:"composer" gorm:"foreignKey:ComposerUuid"`
CreatedAt time.Time `json:"created_at"`

View File

@@ -9,10 +9,14 @@ import (
"sheetless-server/handlers"
"sheetless-server/models"
"sheetless-server/utils"
"strings"
"time"
)
func SyncSheets() error {
log.Println("Running library sync")
syncStartTime := time.Now()
// Get all sheets
var sheets []models.Sheet
if err := database.DB.Find(&sheets).Error; err != nil {
@@ -21,33 +25,31 @@ func SyncSheets() error {
// Maps
pathsInDb := make(map[string]*models.Sheet)
hashToSheets := make(map[uint64][]*models.Sheet)
hashToSheets := make(map[string][]*models.Sheet)
for i := range sheets {
sheet := &sheets[i]
pathsInDb[sheet.FilePath] = sheet
hashToSheets[sheet.FileHash] = append(hashToSheets[sheet.FileHash], sheet)
}
// Walk uploads dir
files, err := os.ReadDir(config.AppConfig.SheetsDirectory)
if err != nil {
return err
}
numFilesWithNewHash := 0
numRenamedFiles := 0
numNewFiles := 0
for _, file := range files {
if file.IsDir() {
continue
// Walk sheets directory recursively for PDF files
err := filepath.Walk(config.AppConfig.SheetsDirectory, func(filePath string, info os.FileInfo, walkErr error) error {
if walkErr != nil {
return walkErr
}
// Skip directories and non-PDF files
if info.IsDir() || filepath.Ext(filePath) != ".pdf" {
return nil
}
filePath := filepath.Join(config.AppConfig.SheetsDirectory, file.Name())
hash, err := utils.FileHash(filePath)
if err != nil {
log.Printf("Error hashing file %s: %v", filePath, err)
continue
}
info, err := file.Info()
if err != nil {
log.Printf("Error getting file info %s: %v", filePath, err)
continue
return nil
}
existingSheet, exists := pathsInDb[filePath]
@@ -58,6 +60,8 @@ func SyncSheets() error {
existingSheet.UpdatedAt = time.Now()
if err := database.DB.Save(existingSheet).Error; err != nil {
log.Printf("Error updating sheet hash for %s: %v", filePath, err)
} else {
numFilesWithNewHash++
}
}
} else {
@@ -70,6 +74,8 @@ func SyncSheets() error {
s.UpdatedAt = time.Now()
if err := database.DB.Save(s).Error; err != nil {
log.Printf("Error updating sheet path for %s: %v", filePath, err)
} else {
numRenamedFiles++
}
break
}
@@ -79,11 +85,11 @@ func SyncSheets() error {
uuid, err := handlers.GenerateNonexistentSheetUuid()
if err != nil {
log.Printf("Error generating uuid: %v", err)
continue
return nil
}
newSheet := models.Sheet{
Uuid: *uuid,
Title: file.Name(), // use filename as title
Title: strings.TrimSuffix(filepath.Base(filePath), ".pdf"), // use filename as title
FilePath: filePath,
FileSize: info.Size(),
FileHash: hash,
@@ -92,10 +98,19 @@ func SyncSheets() error {
}
if err := database.DB.Create(&newSheet).Error; err != nil {
log.Printf("Error creating new sheet for %s: %v", filePath, err)
} else {
numNewFiles++
}
}
}
return nil
})
if numFilesWithNewHash != 0 || numRenamedFiles != 0 || numNewFiles != 0 {
log.Printf("Library sync successfully run.\nChanged hashes: %d, renamed files: %d, new files: %d", numFilesWithNewHash, numRenamedFiles, numNewFiles)
}
return nil
log.Printf("Sync took %s", time.Since(syncStartTime))
return err
}

View File

@@ -1,32 +1,39 @@
package utils
import (
	"encoding/binary"
	"encoding/hex"
	"hash/fnv"
	"io"
	"mime/multipart"
	"os"
)
func FileHashFromUpload(file multipart.File) (uint64, error) {
func FileHashFromUpload(file multipart.File) (string, error) {
h := fnv.New64a()
if _, err := io.Copy(h, file); err != nil {
return 0, err
return "", err
}
return h.Sum64(), nil
return u64ToString(h.Sum64()), nil
}
func FileHash(path string) (uint64, error) {
func FileHash(path string) (string, error) {
f, err := os.Open(path)
if err != nil {
return 0, err
return "", err
}
defer f.Close()
h := fnv.New64a()
if _, err := io.Copy(h, f); err != nil {
return 0, err
return "", err
}
return h.Sum64(), nil
return u64ToString(h.Sum64()), nil
}
// u64ToString encodes a 64-bit hash as a fixed-width, lowercase
// hexadecimal string (16 characters, big-endian byte order).
//
// Hex is used instead of the raw 8 bytes because the value is stored in
// Sheet.FileHash (a string field serialized via `json:"file_hash"`):
// raw bytes are rarely valid UTF-8, so encoding/json would replace them
// with U+FFFD escapes and corrupt the hash in API responses. Hex keeps
// the value printable, comparable, and safe to round-trip through JSON
// and the database.
func u64ToString(x uint64) string {
	var b [8]byte
	// Big-endian so the string's lexicographic order matches numeric order.
	binary.BigEndian.PutUint64(b[:], x)
	return hex.EncodeToString(b[:])
}