Add library sync functionality

This commit is contained in:
2026-01-23 22:33:01 +01:00
parent 5141bfe673
commit 0a18e19e88
3 changed files with 119 additions and 0 deletions

1
.gitignore vendored
View File

@@ -1,3 +1,4 @@
 /.direnv/
 /sheetless-server
+/src/sheetless-server
 /sheetless.db

View File

@@ -4,6 +4,8 @@ import (
 	"log"
 	"sheetless-server/database"
 	"sheetless-server/routes"
+	"sheetless-server/sync"
+	"time"

 	"github.com/gin-gonic/gin"
 )
@@ -11,6 +13,20 @@ import (
 func main() {
 	database.InitDatabase()
// Start sync runner
go func() {
ticker := time.NewTicker(1 * time.Minute)
defer ticker.Stop()
for {
select {
case <-ticker.C:
if err := sync.SyncSheets(); err != nil {
log.Printf("Sync error: %v", err)
}
}
}
}()
 	r := gin.Default()
 	routes.SetupRoutes(r)

102
src/sync/sync.go Normal file
View File

@@ -0,0 +1,102 @@
package sync
import (
"log"
"os"
"path/filepath"
"sheetless-server/database"
"sheetless-server/handlers"
"sheetless-server/models"
"sheetless-server/utils"
"time"
)
const uploadDir = "./uploads"
// SyncSheets reconciles the files in uploadDir with the sheet rows in the
// database. For each regular file found it handles three cases:
//
//  1. Path known but content hash changed  -> update the stored hash.
//  2. Path unknown but hash matches a sheet whose recorded file no longer
//     exists on disk                       -> treat as a rename/move and
//     update that sheet's path.
//  3. Path and hash both unknown           -> create a new sheet row,
//     using the filename as the title.
//
// Per-file errors (hashing, stat, DB writes) are logged and skipped so one
// bad file does not abort the whole pass; only the initial DB query and the
// directory listing abort with a returned error.
//
// NOTE(review): sheets whose files have been deleted from disk are never
// removed or flagged here — confirm that cleanup happens elsewhere.
func SyncSheets() error {
	// Load every sheet row up front so the directory walk can match against
	// in-memory indexes instead of querying per file.
	var sheets []models.Sheet
	if err := database.DB.Find(&sheets).Error; err != nil {
		return err
	}
	// Index the loaded sheets two ways: by stored file path (exact-location
	// match) and by content hash (rename/move detection). Pointers into the
	// sheets slice, so mutations below act on the loaded rows directly.
	pathsInDb := make(map[string]*models.Sheet)
	hashToSheets := make(map[uint64][]*models.Sheet)
	for i := range sheets {
		sheet := &sheets[i]
		pathsInDb[sheet.FilePath] = sheet
		hashToSheets[sheet.FileHash] = append(hashToSheets[sheet.FileHash], sheet)
	}
	// Walk the uploads directory (top level only; subdirectories are skipped,
	// not recursed into).
	files, err := os.ReadDir(uploadDir)
	if err != nil {
		return err
	}
	for _, file := range files {
		if file.IsDir() {
			continue
		}
		filePath := filepath.Join(uploadDir, file.Name())
		// Content hash is the identity used for rename detection below.
		hash, err := utils.FileHash(filePath)
		if err != nil {
			log.Printf("Error hashing file %s: %v", filePath, err)
			continue
		}
		// Size is only needed when creating a new row (Case 3), but stat
		// failures are treated as "skip this file" in all cases.
		info, err := file.Info()
		if err != nil {
			log.Printf("Error getting file info %s: %v", filePath, err)
			continue
		}
		existingSheet, exists := pathsInDb[filePath]
		if exists {
			if existingSheet.FileHash != hash {
				// Case 1: File has been altered -> update hash
				existingSheet.FileHash = hash
				existingSheet.UpdatedAt = time.Now()
				if err := database.DB.Save(existingSheet).Error; err != nil {
					log.Printf("Error updating sheet hash for %s: %v", filePath, err)
				}
			}
		} else {
			sheetsWithHash, hasHash := hashToSheets[hash]
			if hasHash {
				// Claim the first same-hash sheet whose recorded file is
				// missing on disk; other stat errors are treated as "file
				// still there" because only IsNotExist qualifies.
				for _, s := range sheetsWithHash {
					if _, err := os.Stat(s.FilePath); os.IsNotExist(err) {
						// Case 2: File has been renamed or moved -> update path
						s.FilePath = filePath
						s.UpdatedAt = time.Now()
						if err := database.DB.Save(s).Error; err != nil {
							log.Printf("Error updating sheet path for %s: %v", filePath, err)
						}
						break
					}
				}
				// NOTE(review): if every same-hash sheet still exists on
				// disk (i.e. this file is a copy of a tracked file), the
				// file is silently ignored — confirm this dedup is intended.
			} else {
				// Case 3: New sheets file -> Add to database
				uuid, err := handlers.GenerateNonexistentSheetUuid()
				if err != nil {
					log.Printf("Error generating uuid: %v", err)
					continue
				}
				newSheet := models.Sheet{
					Uuid:      *uuid,
					Title:     file.Name(), // use filename as title
					FilePath:  filePath,
					FileSize:  info.Size(),
					FileHash:  hash,
					CreatedAt: time.Now(),
					UpdatedAt: time.Now(),
				}
				// NOTE(review): hashToSheets is not updated after a create
				// (or after a Case 2 rename), so two identical brand-new
				// files in one pass each get their own row — confirm that
				// is acceptable.
				if err := database.DB.Create(&newSheet).Error; err != nil {
					log.Printf("Error creating new sheet for %s: %v", filePath, err)
				}
			}
		}
	}
	return nil
}