package database

import (
	"io"
	"os"
	"path"
	"strconv"
	"time"

	"velvettear/badger/internal/config"
	"velvettear/badger/internal/database/models"
	"velvettear/badger/internal/log"
	"velvettear/badger/internal/tools"

	defaultLog "log"

	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
	"gorm.io/gorm/logger"
)

const inMemoryUrl = "file:memdb?mode=memory&cache=shared"

var memoryConnection *gorm.DB
var fileConnection *gorm.DB

// exported function(s)

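// Initialize opens the file based database and - if the in-memory mode is
// enabled - the shared in-memory database, applies the SQLite pragmas and
// copies the file contents into memory.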
func Initialize() {
	var connection *gorm.DB
	connection, error := connect(config.DatabaseFile())
	if error != nil {
		log.Fatal("encountered an error connecting to file database '"+config.DatabaseFile()+"'", error.Error())
	}
	fileConnection = connection
	connection, error = connect(inMemoryUrl)
	if error != nil {
		log.Fatal("encountered an error connecting to the in memory database", error.Error())
	}
	memoryConnection = connection
	setSQLiteBusyTimeout()
	setSQLiteJournalMode()
	copyFileToInMemory()
}

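// Connection returns the connection to use for queries: the in-memory
// connection if the in-memory mode is enabled, the file connection otherwise.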
func Connection() *gorm.DB {
	if config.DatabaseInMemory() {
		return memoryConnection
	}
	return fileConnection
}

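// Memory returns the connection to the in-memory database (nil if the
// in-memory mode is disabled).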
func Memory() *gorm.DB {
	return memoryConnection
}

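// File returns the connection to the file based database.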
func File() *gorm.DB {
	return fileConnection
}

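// Persist writes the in-memory database back to the database file via
// 'VACUUM INTO', moving an existing file out of the way as a backup first.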
func Persist() {
	if !config.DatabaseInMemory() {
		return
	}
	timestamp := tools.LogTimestamp()
	database := config.DatabaseFile()
	if tools.Exists(database) {
		backup := path.Base(database) + "-backup" + path.Ext(database)
		if tools.Exists(backup) {
			error := tools.Delete(backup)
			if error != nil {
				log.Error("encountered an error deleting the backup database '"+backup+"'", error.Error())
			}
		}
		// move the current database file to the backup path checked above
		error := tools.MoveFile(database, backup)
		if error != nil {
			log.Error("encountered an error renaming/moving the database '"+database+"' to '"+backup+"'", error.Error())
		}
	}
	memoryConnection.Exec("VACUUM INTO '" + database + "'")
	log.DebugTimed("persisted in memory database to '"+database+"'", timestamp)
}

// unexported function(s)

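// createLogger builds the gorm logger: output is discarded unless debug
// logging is enabled, otherwise warnings are printed colorfully to stdout.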
func createLogger() logger.Interface {
	if !config.Debug() {
		return logger.New(defaultLog.New(io.Discard, "", defaultLog.LstdFlags), logger.Config{})
	}
	return logger.New(
		defaultLog.New(os.Stdout, "\r\n", defaultLog.LstdFlags),
		logger.Config{
			SlowThreshold:             time.Second,
			LogLevel:                  logger.Warn,
			IgnoreRecordNotFoundError: true,
			Colorful:                  true,
		},
	)
}

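// connect opens a gorm/SQLite connection for the given path and migrates the
// schema; for the in-memory url it returns a nil connection when the
// in-memory mode is disabled, for file paths it creates the file if it does
// not exist yet.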
func connect(path string) (*gorm.DB, error) {
	var connection *gorm.DB
	if path == inMemoryUrl && !config.DatabaseInMemory() {
		return connection, nil
	}
	if path != inMemoryUrl && !tools.Exists(path) {
		tools.CreateFile(path)
	}
	connection, error := gorm.Open(sqlite.Open(path), &gorm.Config{
		Logger: createLogger(),
	})
	if error != nil {
		return connection, error
	}
	migrateSchema(connection)
	return connection, nil
}

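// migrateSchema auto-migrates the track, artist and album models on the given
// connection.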
func migrateSchema(connection *gorm.DB) {
	connection.AutoMigrate(&models.Track{})
	connection.AutoMigrate(&models.Artist{})
	connection.AutoMigrate(&models.Album{})
}

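// copyFileToInMemory copies all artists, albums and tracks from the file
// based database into the in-memory database, either in a single pass or in
// chunks of the configured size.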
func copyFileToInMemory() {
	// nothing to copy if the in-memory mode is disabled (memoryConnection would be nil)
	if !config.DatabaseInMemory() {
		return
	}
	timestamp := tools.LogTimestamp()
	database := config.DatabaseFile()
	chunkSize := config.DatabaseChunkSize()
	if chunkSize == 0 {
		log.Info("copying content of database '" + config.DatabaseFile() + "' to memory...")
		// copy artists
		tmpTimestamp := tools.LogTimestamp()
		var artists []models.Artist
		result := fileConnection.Find(&artists)
		if result.RowsAffected > 0 {
			result := memoryConnection.Create(artists)
			log.DebugTimed("copied "+strconv.FormatInt(result.RowsAffected, 10)+" artists from '"+database+"' to memory", tmpTimestamp)
		}
		// copy albums
		tmpTimestamp = tools.LogTimestamp()
		var albums []models.Album
		result = fileConnection.Find(&albums)
		if result.RowsAffected > 0 {
			result := memoryConnection.Create(albums)
			log.DebugTimed("copied "+strconv.FormatInt(result.RowsAffected, 10)+" albums from '"+database+"' to memory", tmpTimestamp)
		}
		// copy tracks
		tmpTimestamp = tools.LogTimestamp()
		var tracks []models.Track
		result = fileConnection.Find(&tracks)
		if result.RowsAffected > 0 {
			result := memoryConnection.Create(tracks)
			log.DebugTimed("copied "+strconv.FormatInt(result.RowsAffected, 10)+" tracks from '"+database+"' to memory", tmpTimestamp)
		}
		log.InfoTimed("copied content of database '"+config.DatabaseFile()+"' to memory", timestamp)
		return
	}
	log.Info("copying content of database '" + config.DatabaseFile() + "' to memory in chunks of " + strconv.Itoa(chunkSize) + "...")
	// copy artists
	tmpTimestamp := tools.LogTimestamp()
	var artists []models.Artist
	result := fileConnection.FindInBatches(&artists, chunkSize, func(batch *gorm.DB, index int) error {
		if batch.RowsAffected == 0 {
			return nil
		}
		memoryConnection.Create(artists)
		return nil
	})
	log.DebugTimed("copied "+strconv.FormatInt(result.RowsAffected, 10)+" artists from '"+database+"' to memory", tmpTimestamp)
	// copy albums
	tmpTimestamp = tools.LogTimestamp()
	var albums []models.Album
	result = fileConnection.FindInBatches(&albums, chunkSize, func(batch *gorm.DB, index int) error {
		if batch.RowsAffected == 0 {
			return nil
		}
		memoryConnection.Create(albums)
		return nil
	})
	log.DebugTimed("copied "+strconv.FormatInt(result.RowsAffected, 10)+" albums from '"+database+"' to memory", tmpTimestamp)
	// copy tracks
	tmpTimestamp = tools.LogTimestamp()
	var tracks []models.Track
	result = fileConnection.FindInBatches(&tracks, chunkSize, func(batch *gorm.DB, index int) error {
		if batch.RowsAffected == 0 {
			return nil
		}
		memoryConnection.Create(tracks)
		return nil
	})
	log.DebugTimed("copied "+strconv.FormatInt(result.RowsAffected, 10)+" tracks from '"+database+"' to memory", tmpTimestamp)
	log.InfoTimed("copied content of database '"+config.DatabaseFile()+"' to memory", timestamp)
}

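// setSQLiteBusyTimeout applies the configured 'busy_timeout' pragma (in
// milliseconds) to the active connection if it differs from the current value.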
func setSQLiteBusyTimeout() {
	timestamp := tools.LogTimestamp()
	busyTimeout := config.DatabaseBusyTimeout()
	var defaultTimeout int
	Connection().Raw("PRAGMA busy_timeout").Scan(&defaultTimeout)
	if defaultTimeout == busyTimeout {
		log.InfoTimed("did not set 'busy_timeout', current value is already "+strconv.Itoa(busyTimeout)+"ms", timestamp)
		return
	}
	Connection().Exec("PRAGMA busy_timeout = " + strconv.Itoa(busyTimeout))
	var newTimeout int
	Connection().Raw("PRAGMA busy_timeout").Scan(&newTimeout)
	if newTimeout != busyTimeout {
		log.WarningTimed("could not set 'busy_timeout' from "+strconv.Itoa(defaultTimeout)+"ms to "+strconv.Itoa(busyTimeout)+"ms", timestamp)
		return
	}
	log.InfoTimed("set 'busy_timeout' from "+strconv.Itoa(defaultTimeout)+"ms to "+strconv.Itoa(newTimeout)+"ms", timestamp)
}

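// setSQLiteJournalMode applies the configured 'journal_mode' pragma to the
// active connection, restricted to the modes SQLite supports for the selected
// storage (memory/off for in-memory databases).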
func setSQLiteJournalMode() {
	timestamp := tools.LogTimestamp()
	journalMode := config.DatabaseJournalMode()
	if tools.IsEmpty(journalMode) {
		log.Warning("not setting 'journal_mode' to an empty value")
		return
	}
	var availableJournalModes []string
	if config.DatabaseInMemory() {
		availableJournalModes = append(
			availableJournalModes,
			SQLITE_JOURNALMODE_MEMORY,
			SQLITE_JOURNALMODE_OFF,
		)
	} else {
		availableJournalModes = append(
			availableJournalModes,
			SQLITE_JOURNALMODE_DELETE,
			SQLITE_JOURNALMODE_TRUNCATE,
			SQLITE_JOURNALMODE_PERSIST,
			SQLITE_JOURNALMODE_MEMORY,
			SQLITE_JOURNALMODE_WAL,
			SQLITE_JOURNALMODE_OFF,
		)
	}
	var defaultJournalMode string
	Connection().Raw("PRAGMA journal_mode").Scan(&defaultJournalMode)
	if !tools.ContainsString(availableJournalModes, journalMode) {
		log.WarningTimed("could not set 'journal_mode' to '"+journalMode+"', mode is unavailable", timestamp)
		return
	}
	var newJournalMode string
	Connection().Exec("PRAGMA journal_mode = '" + journalMode + "'")
	Connection().Raw("PRAGMA journal_mode").Scan(&newJournalMode)
	if newJournalMode != journalMode {
		log.WarningTimed("could not set 'journal_mode' from '"+defaultJournalMode+"' to '"+journalMode+"'", timestamp)
		return
	}
	log.InfoTimed("set 'journal_mode' from '"+defaultJournalMode+"' to '"+newJournalMode+"'", timestamp)
}