badger/internal/library/general.go

package library

import (
	"errors"
	"os"
	"strconv"
	"strings"
	"sync"
	"sync/atomic"

	"velvettear/badger/internal/config"
	"velvettear/badger/internal/database"
	"velvettear/badger/internal/database/models"
	"velvettear/badger/internal/log"
	"velvettear/badger/internal/metadata"
	"velvettear/badger/internal/tools"

	"gorm.io/gorm"
)

// unexported function(s)
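
// findFiles scans the given directory for files matching the configured
// formats and returns their paths. An empty directory argument yields an
// empty result; a scan error is fatal.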
func findFiles(directory string) (files []string) {
	if len(directory) == 0 {
		return files
	}
	timestamp := tools.LogTimestamp()
	formats := config.Formats()
	what := strings.Join(formats, "', '")
	log.Info("scanning directory '" + directory + "' for '" + what + "' files...")
	files, err := tools.ScanDirectory(directory, formats...)
	if err != nil {
		log.Fatal("encountered an error scanning directory '"+directory+"' for '"+what+"' files", err.Error())
	}
	log.InfoTimed("found "+strconv.Itoa(len(files))+" '"+what+"' files in directory '"+directory+"'", timestamp)
	return files
}
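
// filterChangedFiles compares the found files against the tracks already stored
// in the database. It returns the paths of files that are unknown or have changed
// since they were last indexed, plus the ids of tracks whose files no longer exist.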
func filterChangedFiles(files []string) ([]string, []int) {
	timestamp := tools.LogTimestamp()
	var deletedTrackIDs []int
	if len(files) == 0 {
		return files, deletedTrackIDs
	}
	log.Info("filtering unknown or changed files...")
	var connection *gorm.DB
	if config.DatabaseInMemory() {
		connection = database.Memory()
	} else {
		connection = database.File()
	}
	var tracks []models.Track
	result := connection.Select("id", "path", "checksum", "modified", "size").Find(&tracks)
	if result.RowsAffected <= 0 {
		// no tracks stored yet - every found file is considered unknown
		return files, deletedTrackIDs
	}
	var changedFiles []string
	checkedPaths := make(map[string]bool)
	// first pass: check all known tracks for deleted or changed files
	for _, track := range tracks {
		if checkedPaths[track.Path] {
			continue
		}
		checkedPaths[track.Path] = true
		changed, err := track.HasChanged()
		if err != nil {
			if errors.Is(err, os.ErrNotExist) {
				deletedTrackIDs = append(deletedTrackIDs, track.ID)
				continue
			}
			log.Error("encountered an error checking if file '" + track.Path + "' has changed: " + err.Error())
			continue
		}
		if !changed {
			continue
		}
		changedFiles = append(changedFiles, track.Path)
	}
	// second pass: every file not seen in the first pass is new and unknown
	for _, file := range files {
		if checkedPaths[file] {
			continue
		}
		checkedPaths[file] = true
		changedFiles = append(changedFiles, file)
	}
	log.DebugTimed("filtered "+strconv.Itoa(len(changedFiles))+" unknown or changed files", timestamp)
	return changedFiles, deletedTrackIDs
}
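
// parseFiles extracts the metadata of the given files, limiting the number of
// parallel parsers to the configured concurrency, and sends every complete
// result to storeMetadata via the ipc channel. A final wrapper with 'last' set
// signals the consumer that no more metadata will follow.
//
// parseFiles and storeMetadata are meant to run as a producer/consumer pair,
// roughly like this (illustrative sketch only, the actual caller may differ):
//
//	ipc := make(chan wrapper)
//	wait := new(sync.WaitGroup)
//	wait.Add(2)
//	go parseFiles(files, ipc, wait)
//	go storeMetadata(ipc, wait)
//	wait.Wait()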
func parseFiles(files []string, ipc chan wrapper, wait *sync.WaitGroup) {
	count := len(files)
	if count == 0 {
		return
	}
	timestamp := tools.LogTimestamp()
	parseWait := new(sync.WaitGroup)
	parseWait.Add(count)
	// the buffered channel limits the number of concurrently running parsers
	waitChannel := make(chan struct{}, config.Concurrency())
	var done int64
	log.Info("parsing metadata of "+strconv.Itoa(count)+" files...", "concurrency: "+strconv.Itoa(config.Concurrency()))
	for _, file := range files {
		waitChannel <- struct{}{}
		go func(file string) {
			defer parseWait.Done()
			defer func() { <-waitChannel }()
			metadata := metadata.FromFile(file)
			if !metadata.Complete {
				return
			}
			ipc <- wrapper{metadata: metadata}
			atomic.AddInt64(&done, 1)
		}(file)
	}
	parseWait.Wait()
	close(waitChannel)
	// signal storeMetadata that all files have been handled
	ipc <- wrapper{last: true}
	log.InfoTimed("successfully parsed metadata of "+strconv.FormatInt(done, 10)+" files", timestamp)
	wait.Done()
}
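
// storeMetadata receives parsed metadata from the ipc channel and stores it in
// the database until it receives the closing wrapper with 'last' set.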
func storeMetadata(ipc chan wrapper, wait *sync.WaitGroup) {
	timestamp := tools.LogTimestamp()
	log.Debug("starting loop to store parsed metadata...")
	stored := 0
	for {
		wrapper := <-ipc
		// a wrapper with 'last' set is the sentinel sent by parseFiles once all files have been parsed
		if wrapper.last {
			break
		}
		err := wrapper.metadata.Store()
		if err != nil {
			log.Error("encountered an error storing metadata of file '" + wrapper.metadata.Path + "' in database: " + err.Error())
			continue
		}
		stored++
	}
	log.InfoTimed("successfully stored metadata of "+strconv.Itoa(stored)+" files in database", timestamp)
	wait.Done()
}
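
// removeDeletedTracks deletes the tracks with the given ids from the database
// and reports whether any rows were actually removed.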
func removeDeletedTracks(trackIDs []int) bool {
	if len(trackIDs) == 0 {
		return false
	}
	timestamp := tools.LogTimestamp()
	result := database.Connection().Debug().Delete(&models.Track{}, trackIDs)
	if result.Error != nil {
		log.Error("encountered an error removing deleted tracks from database: " + result.Error.Error())
		return false
	}
	affectedRows := result.RowsAffected
	log.InfoTimed("removed "+strconv.FormatInt(affectedRows, 10)+" deleted tracks from database", timestamp)
	return affectedRows > 0
}

// struct(s)
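
// wrapper bundles a parsed metadata record with the flag used to signal the end
// of the producer/consumer exchange between parseFiles and storeMetadata.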
type wrapper struct {
	metadata metadata.Metadata
	last     bool
}