Sync: Import/index after downloading #225

Signed-off-by: Michael Mayer <michael@liquidbytes.net>

parent 3c8e746ca4
commit 02810ffa94

22 changed files with 380 additions and 173 deletions
File 1 of 22

@@ -2,6 +2,7 @@ package commands

 import (
 	"fmt"
+	"time"

 	"github.com/photoprism/photoprism/internal/config"
 	"github.com/urfave/cli"

@@ -19,6 +20,8 @@ func configAction(ctx *cli.Context) error {
 	conf := config.NewConfig(ctx)

 	fmt.Printf("NAME VALUE\n")
+	fmt.Printf("admin-password %s\n", conf.AdminPassword())
+	fmt.Printf("webdav-password %s\n", conf.WebDAVPassword())
 	fmt.Printf("name %s\n", conf.Name())
 	fmt.Printf("url %s\n", conf.Url())
 	fmt.Printf("title %s\n", conf.Title())

@@ -33,8 +36,7 @@ func configAction(ctx *cli.Context) error {
 	fmt.Printf("public %t\n", conf.Public())
 	fmt.Printf("experimental %t\n", conf.Experimental())
 	fmt.Printf("workers %d\n", conf.Workers())
-	fmt.Printf("admin-password %s\n", conf.AdminPassword())
-	fmt.Printf("webdav-password %s\n", conf.WebDAVPassword())
+	fmt.Printf("wakeup-interval %d\n", conf.WakeupInterval() / time.Second)
 	fmt.Printf("log-level %s\n", conf.LogLevel())
 	fmt.Printf("log-filename %s\n", conf.LogFilename())
 	fmt.Printf("pid-filename %s\n", conf.PIDFilename())
File 2 of 22

@@ -224,6 +224,15 @@ func (c *Config) Workers() int {
 	return 1
 }

+// WakeupInterval returns the background worker wakeup interval.
+func (c *Config) WakeupInterval() time.Duration {
+	if c.config.WakeupInterval <= 0 {
+		return 5 * time.Minute
+	}
+
+	return time.Duration(c.config.WakeupInterval) * time.Second
+}
+
 // ThumbQuality returns the thumbnail jpeg quality setting (25-100).
 func (c *Config) ThumbQuality() int {
 	if c.config.ThumbQuality > 100 {
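For reference, a minimal, self-contained sketch of the interval logic introduced above. The wakeupInterval function below is a stand-in written for illustration, not the actual config package; only the seconds-to-duration conversion and the five-minute fallback mirror the diff.

package main

import (
	"fmt"
	"time"
)

// wakeupInterval mirrors Config.WakeupInterval(): the setting is an integer
// number of seconds (--wakeup-interval / PHOTOPRISM_WAKEUP_INTERVAL), and any
// value <= 0 falls back to five minutes.
func wakeupInterval(seconds int) time.Duration {
	if seconds <= 0 {
		return 5 * time.Minute
	}

	return time.Duration(seconds) * time.Second
}

func main() {
	fmt.Println(wakeupInterval(0))  // 5m0s (default)
	fmt.Println(wakeupInterval(90)) // 1m30s
}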
File 3 of 22

@@ -6,6 +6,18 @@ import (

 // GlobalFlags lists all CLI flags
 var GlobalFlags = []cli.Flag{
+	cli.StringFlag{
+		Name: "admin-password",
+		Usage: "admin password",
+		Value: "photoprism",
+		EnvVar: "PHOTOPRISM_ADMIN_PASSWORD",
+	},
+	cli.StringFlag{
+		Name: "webdav-password",
+		Usage: "WebDAV password (none to disable)",
+		Value: "",
+		EnvVar: "PHOTOPRISM_WEBDAV_PASSWORD",
+	},
 	cli.BoolFlag{
 		Name: "debug",
 		Usage: "run in debug mode",

@@ -31,6 +43,11 @@ var GlobalFlags = []cli.Flag{
 		Usage: "number of workers for indexing",
 		EnvVar: "PHOTOPRISM_WORKERS",
 	},
+	cli.IntFlag{
+		Name: "wakeup-interval",
+		Usage: "background worker wakeup interval in seconds",
+		EnvVar: "PHOTOPRISM_WAKEUP_INTERVAL",
+	},
 	cli.StringFlag{
 		Name: "url",
 		Usage: "canonical site URL",

@@ -67,18 +84,6 @@ var GlobalFlags = []cli.Flag{
 		Value: "@browseyourlife",
 		EnvVar: "PHOTOPRISM_TWITTER",
 	},
-	cli.StringFlag{
-		Name: "admin-password",
-		Usage: "admin password",
-		Value: "photoprism",
-		EnvVar: "PHOTOPRISM_ADMIN_PASSWORD",
-	},
-	cli.StringFlag{
-		Name: "webdav-password",
-		Usage: "WebDAV password (none to disable)",
-		Value: "",
-		EnvVar: "PHOTOPRISM_WEBDAV_PASSWORD",
-	},
 	cli.StringFlag{
 		Name: "log-level, l",
 		Usage: "trace, debug, info, warning, error, fatal or panic",
File 4 of 22

@@ -28,6 +28,8 @@ const (
 //
 // See https://github.com/photoprism/photoprism/issues/50#issuecomment-433856358
 type Params struct {
+	AdminPassword string `yaml:"admin-password" flag:"admin-password"`
+	WebDAVPassword string `yaml:"webdav-password" flag:"webdav-password"`
 	Name string
 	Url string `yaml:"url" flag:"url"`
 	Title string `yaml:"title" flag:"title"`

@@ -42,8 +44,7 @@ type Params struct {
 	Public bool `yaml:"public" flag:"public"`
 	Experimental bool `yaml:"experimental" flag:"experimental"`
 	Workers int `yaml:"workers" flag:"workers"`
-	AdminPassword string `yaml:"admin-password" flag:"admin-password"`
-	WebDAVPassword string `yaml:"webdav-password" flag:"webdav-password"`
+	WakeupInterval int `yaml:"wakeup-interval" flag:"wakeup-interval"`
 	LogLevel string `yaml:"log-level" flag:"log-level"`
 	ConfigFile string
 	ConfigPath string `yaml:"config-path" flag:"config-path"`
File 5 of 22

@@ -79,6 +79,7 @@ func (m *Account) Save(form form.Account, db *gorm.DB) error {
 		m.AccSync = false
 	}

+	// Set defaults
 	if m.SharePath == "" {
 		m.SharePath = "/"
 	}

@@ -87,6 +88,11 @@ func (m *Account) Save(form form.Account, db *gorm.DB) error {
 		m.SyncPath = "/"
 	}

+	// Refresh after performing changes
+	if m.AccSync && m.SyncStatus == AccountSyncStatusSynced {
+		m.SyncStatus = AccountSyncStatusRefresh
+	}
+
 	return db.Save(m).Error
 }

File 6 of 22

@@ -43,7 +43,7 @@ func (c *Convert) Start(path string) error {
 	wg.Add(numWorkers)
 	for i := 0; i < numWorkers; i++ {
 		go func() {
-			convertWorker(jobs)
+			ConvertWorker(jobs)
 			wg.Done()
 		}()
 	}
File 7 of 22

@@ -7,7 +7,7 @@ type ConvertJob struct {
 	convert *Convert
 }

-func convertWorker(jobs <-chan ConvertJob) {
+func ConvertWorker(jobs <-chan ConvertJob) {
 	for job := range jobs {
 		if _, err := job.convert.ToJpeg(job.image); err != nil {
 			fileName := job.image.RelativeName(job.convert.conf.OriginalsPath())
File 8 of 22

@@ -71,7 +71,7 @@ func (imp *Import) Start(opt ImportOptions) {
 	wg.Add(numWorkers)
 	for i := 0; i < numWorkers; i++ {
 		go func() {
-			importWorker(jobs)
+			ImportWorker(jobs)
 			wg.Done()
 		}()
 	}

@@ -131,7 +131,7 @@ func (imp *Import) Start(opt ImportOptions) {

 		var files MediaFiles

-		for _, f := range related.files {
+		for _, f := range related.Files {
 			if done[f.FileName()] {
 				continue
 			}

@@ -142,14 +142,14 @@ func (imp *Import) Start(opt ImportOptions) {

 			done[mf.FileName()] = true

-		related.files = files
+		related.Files = files

 		jobs <- ImportJob{
-			fileName: fileName,
-			related: related,
-			indexOpt: indexOpt,
-			importOpt: opt,
-			imp: imp,
+			FileName: fileName,
+			Related: related,
+			IndexOpt: indexOpt,
+			ImportOpt: opt,
+			Imp: imp,
 		}

 		return nil
File 9 of 22

@@ -9,43 +9,43 @@ import (
 )

 type ImportJob struct {
-	fileName string
-	related RelatedFiles
-	indexOpt IndexOptions
-	importOpt ImportOptions
-	imp *Import
+	FileName string
+	Related RelatedFiles
+	IndexOpt IndexOptions
+	ImportOpt ImportOptions
+	Imp *Import
 }

-func importWorker(jobs <-chan ImportJob) {
+func ImportWorker(jobs <-chan ImportJob) {
 	for job := range jobs {
 		var destinationMainFilename string
-		related := job.related
-		imp := job.imp
-		opt := job.importOpt
-		indexOpt := job.indexOpt
-		importPath := job.importOpt.Path
+		related := job.Related
+		imp := job.Imp
+		opt := job.ImportOpt
+		indexOpt := job.IndexOpt
+		importPath := job.ImportOpt.Path

-		if related.main == nil {
-			log.Warnf("import: no main file found for %s", job.fileName)
+		if related.Main == nil {
+			log.Warnf("import: no main file found for %s", job.FileName)
 			continue
 		}

-		originalName := related.main.RelativeName(importPath)
+		originalName := related.Main.RelativeName(importPath)

 		event.Publish("import.file", event.Data{
 			"fileName": originalName,
-			"baseName": filepath.Base(related.main.FileName()),
+			"baseName": filepath.Base(related.Main.FileName()),
 		})

-		for _, f := range related.files {
+		for _, f := range related.Files {
 			relativeFilename := f.RelativeName(importPath)

-			if destinationFilename, err := imp.DestinationFilename(related.main, f); err == nil {
+			if destinationFilename, err := imp.DestinationFilename(related.Main, f); err == nil {
 				if err := os.MkdirAll(path.Dir(destinationFilename), os.ModePerm); err != nil {
 					log.Errorf("import: could not create directories (%s)", err.Error())
 				}

-				if related.main.HasSameName(f) {
+				if related.Main.HasSameName(f) {
 					destinationMainFilename = destinationFilename
 					log.Infof("import: moving main %s file \"%s\" to \"%s\"", f.Type(), relativeFilename, destinationFilename)
 				} else {

@@ -104,15 +104,15 @@ func importWorker(jobs <-chan ImportJob) {
 		done := make(map[string]bool)
 		ind := imp.index

-		if related.main != nil {
-			res := ind.MediaFile(related.main, indexOpt, originalName)
-			log.Infof("import: %s main %s file \"%s\"", res, related.main.Type(), related.main.RelativeName(ind.originalsPath()))
-			done[related.main.FileName()] = true
+		if related.Main != nil {
+			res := ind.MediaFile(related.Main, indexOpt, originalName)
+			log.Infof("import: %s main %s file \"%s\"", res, related.Main.Type(), related.Main.RelativeName(ind.originalsPath()))
+			done[related.Main.FileName()] = true
 		} else {
 			log.Warnf("import: no main file for %s (conversion to jpeg failed?)", destinationMainFilename)
 		}

-		for _, f := range related.files {
+		for _, f := range related.Files {
 			if f == nil {
 				continue
 			}
File 10 of 22

@@ -14,6 +14,7 @@ import (
 	"github.com/photoprism/photoprism/internal/event"
 	"github.com/photoprism/photoprism/internal/mutex"
 	"github.com/photoprism/photoprism/internal/nsfw"
+	"github.com/photoprism/photoprism/internal/query"
 	"github.com/photoprism/photoprism/pkg/fs"
 )

@@ -23,6 +24,7 @@ type Index struct {
 	tensorFlow *classify.TensorFlow
 	nsfwDetector *nsfw.Detector
 	db *gorm.DB
+	q *query.Query
 }

 // NewIndex returns a new indexer and expects its dependencies as arguments.

@@ -32,6 +34,7 @@ func NewIndex(conf *config.Config, tensorFlow *classify.TensorFlow, nsfwDetector
 		tensorFlow: tensorFlow,
 		nsfwDetector: nsfwDetector,
 		db: conf.Db(),
+		q: query.New(conf.Db()),
 	}

 	return i

@@ -81,7 +84,7 @@ func (ind *Index) Start(options IndexOptions) map[string]bool {
 	wg.Add(numWorkers)
 	for i := 0; i < numWorkers; i++ {
 		go func() {
-			indexWorker(jobs) // HLc
+			IndexWorker(jobs) // HLc
 			wg.Done()
 		}()
 	}

@@ -127,7 +130,7 @@ func (ind *Index) Start(options IndexOptions) map[string]bool {

 		var files MediaFiles

-		for _, f := range related.files {
+		for _, f := range related.Files {
 			if done[f.FileName()] {
 				continue
 			}

@@ -138,13 +141,13 @@ func (ind *Index) Start(options IndexOptions) map[string]bool {

 			done[mf.FileName()] = true

-		related.files = files
+		related.Files = files

 		jobs <- IndexJob{
-			filename: mf.FileName(),
-			related: related,
-			opt: options,
-			ind: ind,
+			FileName: mf.FileName(),
+			Related: related,
+			IndexOpt: options,
+			Ind: ind,
 		}

 		return nil
File 11 of 22

@@ -1,6 +1,7 @@
 package photoprism

 import (
+	"errors"
 	"fmt"
 	"path/filepath"
 	"sort"

@@ -16,18 +17,38 @@ import (
 )

 const (
-	indexResultUpdated IndexResult = "updated"
-	indexResultAdded IndexResult = "added"
-	indexResultSkipped IndexResult = "skipped"
-	indexResultFailed IndexResult = "failed"
+	IndexUpdated IndexStatus = "updated"
+	IndexAdded IndexStatus = "added"
+	IndexSkipped IndexStatus = "skipped"
+	IndexFailed IndexStatus = "failed"
 )

-type IndexResult string
+type IndexStatus string

-func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName string) IndexResult {
+type IndexResult struct {
+	Status IndexStatus
+	Error error
+	FileID uint
+	FileUUID string
+	PhotoID uint
+	PhotoUUID string
+}
+
+func (r IndexResult) String() string {
+	return string(r.Status)
+}
+
+func (r IndexResult) Success() bool {
+	return r.Error == nil && r.FileID > 0
+}
+
+func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName string) (result IndexResult) {
 	if m == nil {
-		log.Error("index: media file is nil - you might have found a bug")
-		return indexResultFailed
+		err := errors.New("index: media file is nil - you might have found a bug")
+		log.Error(err)
+		result.Error = err
+		result.Status = IndexFailed
+		return result
 	}

 	start := time.Now()

@@ -85,7 +106,8 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName string) I
 	photoExists = photoQuery.Error == nil

 	if !fileChanged && photoExists && o.SkipUnchanged() {
-		return indexResultSkipped
+		result.Status = IndexSkipped
+		return result
 	}

 	if photoExists {

@@ -309,14 +331,18 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName string) I

 		if err := ind.db.Unscoped().Save(&photo).Error; err != nil {
 			log.Errorf("index: %s", err)
-			return indexResultFailed
+			result.Status = IndexFailed
+			result.Error = err
+			return result
 		}
 	} else {
 		photo.PhotoFavorite = false

 		if err := ind.db.Create(&photo).Error; err != nil {
 			log.Errorf("index: %s", err)
-			return indexResultFailed
+			result.Status = IndexFailed
+			result.Error = err
+			return result
 		}

 		event.Publish("count.photos", event.Data{

@@ -332,31 +358,53 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName string) I
 	}

 	file.PhotoID = photo.ID
+	result.PhotoID = photo.ID
+
 	file.PhotoUUID = photo.PhotoUUID
+	result.PhotoUUID = photo.PhotoUUID

 	if file.FilePrimary && (fileChanged || o.UpdateKeywords) {
 		photo.IndexKeywords(ind.db)
 	}

+	result.Status = IndexUpdated
+
 	if fileQuery.Error == nil {
 		file.UpdatedIn = int64(time.Since(start))

 		if err := ind.db.Unscoped().Save(&file).Error; err != nil {
 			log.Errorf("index: %s", err)
-			return indexResultFailed
+			result.Status = IndexFailed
+			result.Error = err
+			return result
 		}
-
-		return indexResultUpdated
-	}
-
-	file.CreatedIn = int64(time.Since(start))
-
-	if err := ind.db.Create(&file).Error; err != nil {
-		log.Errorf("index: %s", err)
-		return indexResultFailed
-	}
-
-	return indexResultAdded
+	} else {
+		file.CreatedIn = int64(time.Since(start))
+
+		if err := ind.db.Create(&file).Error; err != nil {
+			log.Errorf("index: %s", err)
+			result.Status = IndexFailed
+			result.Error = err
+			return result
+		}
+
+		result.Status = IndexAdded
+	}
+
+	result.FileID = file.ID
+	result.FileUUID = file.FileUUID
+
+	downloadedAs := fileName
+
+	if originalName != "" {
+		downloadedAs = originalName
+	}
+
+	if err := ind.q.SetDownloadFileID(downloadedAs, file.ID); err != nil {
+		log.Errorf("index: %s", err)
+	}
+
+	return result
 }

 // isNSFW returns true if media file might be offensive and detection is enabled.
File 12 of 22

@@ -1,29 +1,29 @@
 package photoprism

 type IndexJob struct {
-	filename string
-	related RelatedFiles
-	opt IndexOptions
-	ind *Index
+	FileName string
+	Related RelatedFiles
+	IndexOpt IndexOptions
+	Ind *Index
 }

-func indexWorker(jobs <-chan IndexJob) {
+func IndexWorker(jobs <-chan IndexJob) {
 	for job := range jobs {
 		done := make(map[string]bool)
-		related := job.related
-		opt := job.opt
-		ind := job.ind
+		related := job.Related
+		opt := job.IndexOpt
+		ind := job.Ind

-		if related.main != nil {
-			res := ind.MediaFile(related.main, opt, "")
-			done[related.main.FileName()] = true
+		if related.Main != nil {
+			res := ind.MediaFile(related.Main, opt, "")
+			done[related.Main.FileName()] = true

-			log.Infof("index: %s main %s file \"%s\"", res, related.main.Type(), related.main.RelativeName(ind.originalsPath()))
+			log.Infof("index: %s main %s file \"%s\"", res, related.Main.Type(), related.Main.RelativeName(ind.originalsPath()))
 		} else {
-			log.Warnf("index: no main file for %s (conversion to jpeg failed?)", job.filename)
+			log.Warnf("index: no main file for %s (conversion to jpeg failed?)", job.FileName)
 		}

-		for _, f := range related.files {
+		for _, f := range related.Files {
 			if done[f.FileName()] {
 				continue
 			}
File 13 of 22

@@ -290,22 +290,22 @@ func (m *MediaFile) RelatedFiles() (result RelatedFiles, err error) {
 			continue
 		}

-		if result.main == nil && resultFile.IsJpeg() {
-			result.main = resultFile
+		if result.Main == nil && resultFile.IsJpeg() {
+			result.Main = resultFile
 		} else if resultFile.IsRaw() {
-			result.main = resultFile
+			result.Main = resultFile
 		} else if resultFile.IsHEIF() {
-			result.main = resultFile
-		} else if resultFile.IsJpeg() && len(result.main.FileName()) > len(resultFile.FileName()) {
-			result.main = resultFile
+			result.Main = resultFile
+		} else if resultFile.IsJpeg() && len(result.Main.FileName()) > len(resultFile.FileName()) {
+			result.Main = resultFile
 		} else if resultFile.IsImageOther() {
-			result.main = resultFile
+			result.Main = resultFile
 		}

-		result.files = append(result.files, resultFile)
+		result.Files = append(result.Files, resultFile)
 	}

-	sort.Sort(result.files)
+	sort.Sort(result.Files)

 	return result, nil
 }

@@ -372,22 +372,7 @@ func (m MediaFile) Directory() string {

 // Base returns the filename base without any extensions and path.
 func (m MediaFile) Base() string {
-	basename := filepath.Base(m.FileName())
-
-	if end := strings.Index(basename, "."); end != -1 {
-		// ignore everything behind the first dot in the file name
-		basename = basename[:end]
-	}
-
-	if end := strings.Index(basename, " ("); end != -1 {
-		// copies created by Chrome & Windows, example: IMG_1234 (2)
-		basename = basename[:end]
-	} else if end := strings.Index(basename, " copy"); end != -1 {
-		// copies created by OS X, example: IMG_1234 copy 2
-		basename = basename[:end]
-	}
-
-	return basename
+	return fs.Base(m.FileName())
 }

 // AbsBase returns the directory and base filename without any extensions.
File 14 of 22

@@ -261,9 +261,9 @@ func TestMediaFile_RelatedFiles(t *testing.T) {

 		assert.Nil(t, err)

-		assert.Len(t, related.files, 3)
+		assert.Len(t, related.Files, 3)

-		for _, result := range related.files {
+		for _, result := range related.Files {
 			t.Logf("FileName: %s", result.FileName())

 			filename := result.FileName()

@@ -287,9 +287,9 @@ func TestMediaFile_RelatedFiles(t *testing.T) {

 		assert.Nil(t, err)

-		assert.Len(t, related.files, 3)
+		assert.Len(t, related.Files, 3)

-		for _, result := range related.files {
+		for _, result := range related.Files {
 			t.Logf("FileName: %s", result.FileName())

 			filename := result.FileName()

@@ -314,12 +314,12 @@ func TestMediaFile_RelatedFiles_Ordering(t *testing.T) {

 	assert.Nil(t, err)

-	assert.Len(t, related.files, 5)
+	assert.Len(t, related.Files, 5)

-	assert.Equal(t, conf.ExamplesPath()+"/IMG_4120.AAE", related.files[0].FileName())
-	assert.Equal(t, conf.ExamplesPath()+"/IMG_4120.JPG", related.files[1].FileName())
+	assert.Equal(t, conf.ExamplesPath()+"/IMG_4120.AAE", related.Files[0].FileName())
+	assert.Equal(t, conf.ExamplesPath()+"/IMG_4120.JPG", related.Files[1].FileName())

-	for _, result := range related.files {
+	for _, result := range related.Files {
 		filename := result.FileName()
 		t.Logf("FileName: %s", filename)
 	}
File 15 of 22

@@ -1,6 +1,6 @@
 package photoprism

 type RelatedFiles struct {
-	files MediaFiles
-	main *MediaFile
+	Files MediaFiles
+	Main *MediaFile
 }
File 16 of 22

@@ -41,7 +41,7 @@ func (rs *Resample) Start(force bool) error {
 	wg.Add(numWorkers)
 	for i := 0; i < numWorkers; i++ {
 		go func() {
-			resampleWorker(jobs)
+			ResampleWorker(jobs)
 			wg.Done()
 		}()
 	}
File 17 of 22

@@ -6,7 +6,7 @@ type ResampleJob struct {
 	force bool
 }

-func resampleWorker(jobs <-chan ResampleJob) {
+func ResampleWorker(jobs <-chan ResampleJob) {
 	for job := range jobs {
 		mf := job.mediaFile

File 18 of 22

@@ -1,6 +1,9 @@
 package query

 import (
+	"errors"
+	"os"
+
 	"github.com/photoprism/photoprism/internal/entity"
 )

@@ -16,8 +19,8 @@ func (q *Query) FileSyncs(accountId uint, status string) (result []entity.FileSy
 		s = s.Where("status = ?", status)
 	}

-	s = s.Order("created_at ASC")
-	s = s.Limit(100).Offset(0)
+	s = s.Order("remote_name ASC")
+	s = s.Limit(1000).Offset(0)

 	s = s.Preload("File")

@@ -27,3 +30,21 @@

 	return result, nil
 }
+
+// SetDownloadFileID updates the local file id for remote downloads.
+func (q *Query) SetDownloadFileID(filename string, fileId uint) error {
+	if len(filename) == 0 {
+		return errors.New("sync: can't update, filename empty")
+	}
+
+	// TODO: Might break on Windows
+	if filename[0] != os.PathSeparator {
+		filename = string(os.PathSeparator) + filename
+	}
+
+	result := q.db.Model(entity.FileSync{}).
+		Where("remote_name = ? AND status = ? AND file_id = 0", filename, entity.FileSyncDownloaded).
+		Update("file_id", fileId)
+
+	return result.Error
+}
File 19 of 22

@@ -14,16 +14,23 @@ import (
 	"github.com/photoprism/photoprism/internal/remote"
 	"github.com/photoprism/photoprism/internal/remote/webdav"
 	"github.com/photoprism/photoprism/internal/service"
+	"github.com/photoprism/photoprism/pkg/fs"
 )

 // Sync represents a sync worker.
 type Sync struct {
 	conf *config.Config
+	q *query.Query
 }

+type Downloads map[string][]entity.FileSync
+
 // NewSync returns a new service sync worker.
 func NewSync(conf *config.Config) *Sync {
-	return &Sync{conf: conf}
+	return &Sync{
+		conf: conf,
+		q: query.New(conf.Db()),
+	}
 }

 // DownloadPath returns a temporary download path.

@@ -45,10 +52,7 @@ func (s *Sync) Start() (err error) {
 	}

-	db := s.conf.Db()
-	q := query.New(db)
-
-	runImport := false
-	runIndex := false
+	q := s.q

 	accounts, err := q.Accounts(f)

@@ -80,18 +84,14 @@ func (s *Sync) Start() (err error) {
 					a.SyncDate.Time = time.Now()
 					a.SyncDate.Valid = true
 				}
+
+				event.Publish("sync.refreshed", event.Data{"account": a})
 			}
 		case entity.AccountSyncStatusDownload:
 			if complete, err := s.download(a); err != nil {
 				a.AccErrors++
 				a.AccError = err.Error()
 			} else if complete {
-				if a.SyncFilenames {
-					runIndex = true
-				} else {
-					runImport = true
-				}
-
 				if a.SyncUpload {
 					a.SyncStatus = entity.AccountSyncStatusUpload
 				} else {

@@ -128,16 +128,6 @@ func (s *Sync) Start() (err error) {
 		}
 	}

-	if runImport {
-		opt := photoprism.ImportOptionsMove(s.DownloadPath())
-		service.Import().Start(opt)
-	}
-
-	if runIndex {
-		opt := photoprism.IndexOptionsNone()
-		service.Index().Start(opt)
-	}
-
 	return err
 }

@@ -192,22 +182,56 @@ func (s *Sync) getRemoteFiles(a entity.Account) (complete bool, err error) {
 	return true, nil
 }

+func (s *Sync) relatedDownloads(a entity.Account) (result Downloads, err error) {
+	result = make(Downloads)
+
+	files, err := s.q.FileSyncs(a.ID, entity.FileSyncNew)
+
+	if err != nil {
+		return result, err
+	}
+
+	for i, file := range files {
+		k := fs.AbsBase(file.RemoteName)
+
+		result[k] = append(result[k], file)
+
+		if i > 990 {
+			return result, nil
+		}
+	}
+
+	return result, nil
+}
+
 func (s *Sync) download(a entity.Account) (complete bool, err error) {
 	db := s.conf.Db()
-	q := query.New(db)

-	files, err := q.FileSyncs(a.ID, entity.FileSyncNew)
+	// Set up index worker
+	indexJobs := make(chan photoprism.IndexJob)
+	go photoprism.IndexWorker(indexJobs)
+	defer close(indexJobs)
+
+	// Set up import worker
+	importJobs := make(chan photoprism.ImportJob)
+	go photoprism.ImportWorker(importJobs)
+	defer close(importJobs)
+
+	relatedFiles, err := s.relatedDownloads(a)

 	if err != nil {
 		log.Errorf("sync: %s", err.Error())
 		return false, err
 	}

-	if len(files) == 0 {
+	if len(relatedFiles) == 0 {
 		log.Infof("sync: download complete for %s", a.AccName)
 		event.Publish("sync.downloaded", event.Data{"account": a})
 		return true, nil
 	}

 	log.Infof("sync: downloading from %s", a.AccName)

 	client := webdav.New(a.AccURL, a.AccUser, a.AccPass)

 	var baseDir string

@@ -218,33 +242,85 @@ func (s *Sync) download(a entity.Account) (complete bool, err error) {
 		baseDir = fmt.Sprintf("%s/%d", s.DownloadPath(), a.ID)
 	}

-	for _, file := range files {
-		if mutex.Sync.Canceled() {
-			return false, nil
-		}
-
-		if file.Errors > a.RetryLimit {
-			log.Warnf("sync: downloading %s failed more than %d times", file.RemoteName, a.RetryLimit)
-			continue
-		}
-
-		localName := baseDir + file.RemoteName
-
-		if err := client.Download(file.RemoteName, localName, false); err != nil {
-			log.Errorf("sync: %s", err.Error())
-			file.Errors++
-			file.Error = err.Error()
-		} else {
-			log.Infof("sync: downloaded %s from %s", file.RemoteName, a.AccName)
-			file.Status = entity.FileSyncDownloaded
-		}
-
-		if mutex.Sync.Canceled() {
-			return false, nil
-		}
-
-		if err := db.Save(&file).Error; err != nil {
-			log.Errorf("sync: %s", err.Error())
-		}
-	}
+	done := make(map[string]bool)
+
+	for _, files := range relatedFiles {
+		for _, file := range files {
+			if mutex.Sync.Canceled() {
+				return false, nil
+			}
+
+			if file.Errors > a.RetryLimit {
+				log.Warnf("sync: downloading %s failed more than %d times", file.RemoteName, a.RetryLimit)
+				continue
+			}
+
+			localName := baseDir + file.RemoteName
+
+			if err := client.Download(file.RemoteName, localName, false); err != nil {
+				log.Errorf("sync: %s", err.Error())
+				file.Errors++
+				file.Error = err.Error()
+			} else {
+				log.Infof("sync: downloaded %s from %s", file.RemoteName, a.AccName)
+				file.Status = entity.FileSyncDownloaded
+			}
+
+			if mutex.Sync.Canceled() {
+				return false, nil
+			}
+
+			if err := db.Save(&file).Error; err != nil {
+				log.Errorf("sync: %s", err.Error())
+			}
+		}
+
+		for _, file := range files {
+			mf, err := photoprism.NewMediaFile(baseDir + file.RemoteName)
+
+			if err != nil || !mf.IsPhoto() {
+				continue
+			}
+
+			related, err := mf.RelatedFiles()
+
+			if err != nil {
+				log.Warnf("sync: %s", err.Error())
+				continue
+			}
+
+			var rf photoprism.MediaFiles
+
+			for _, f := range related.Files {
+				if done[f.FileName()] {
+					continue
+				}
+
+				rf = append(rf, f)
+				done[f.FileName()] = true
+			}
+
+			done[mf.FileName()] = true
+			related.Files = rf
+
+			if a.SyncFilenames {
+				log.Infof("sync: indexing %s and related files", file.RemoteName)
+				indexJobs <- photoprism.IndexJob{
+					FileName: mf.FileName(),
+					Related: related,
+					IndexOpt: photoprism.IndexOptionsAll(),
+					Ind: service.Index(),
+				}
+			} else {
+				log.Infof("sync: importing %s and related files", file.RemoteName)
+				importJobs <- photoprism.ImportJob{
+					FileName: mf.FileName(),
+					Related: related,
+					IndexOpt: photoprism.IndexOptionsAll(),
+					ImportOpt: photoprism.ImportOptionsMove(baseDir),
+					Imp: service.Import(),
+				}
+			}
+		}
+	}

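The download step above now hands files to photoprism.IndexWorker / photoprism.ImportWorker through job channels instead of running a separate import or index pass afterwards. A minimal, self-contained sketch of that channel-plus-worker pattern, using toy types rather than the real photoprism ones:

package main

import (
	"fmt"
	"sync"
)

// job and worker are toy stand-ins for photoprism.IndexJob and
// photoprism.IndexWorker; they only illustrate the pattern.
type job struct{ fileName string }

func worker(jobs <-chan job, wg *sync.WaitGroup) {
	defer wg.Done()
	for j := range jobs {
		fmt.Println("indexing", j.fileName)
	}
}

func main() {
	jobs := make(chan job)

	var wg sync.WaitGroup
	wg.Add(1)
	go worker(jobs, &wg)

	// The producer (here standing in for the download loop) feeds jobs as
	// files become available...
	for _, f := range []string{"IMG_0001.jpg", "IMG_0002.jpg"} {
		jobs <- job{fileName: f}
	}

	// ...and closing the channel lets the worker drain its queue and exit.
	close(jobs)
	wg.Wait()
}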
File 20 of 22

@@ -13,13 +13,13 @@ var stop = make(chan bool, 1)
 // Start runs the service workers every 10 minutes.
 func Start(conf *config.Config) {
-	ticker := time.NewTicker(10 * time.Minute)
+	ticker := time.NewTicker(conf.WakeupInterval())

 	go func() {
 		for {
 			select {
 			case <-stop:
-				log.Info("shutting down service workers")
+				log.Info("shutting down workers")
 				ticker.Stop()
 				mutex.Share.Cancel()
 				mutex.Sync.Cancel()
File 21 of 22: pkg/fs/base.go (new file, 32 lines)

@@ -0,0 +1,32 @@
+package fs
+
+import (
+	"os"
+	"path/filepath"
+	"strings"
+)
+
+// Base returns the filename base without any extensions and path.
+func Base(fileName string) string {
+	basename := filepath.Base(fileName)
+
+	if end := strings.Index(basename, "."); end != -1 {
+		// ignore everything behind the first dot in the file name
+		basename = basename[:end]
+	}
+
+	if end := strings.Index(basename, " ("); end != -1 {
+		// copies created by Chrome & Windows, example: IMG_1234 (2)
+		basename = basename[:end]
+	} else if end := strings.Index(basename, " copy"); end != -1 {
+		// copies created by OS X, example: IMG_1234 copy 2
+		basename = basename[:end]
+	}
+
+	return basename
+}
+
+// AbsBase returns the directory and base filename without any extensions.
+func AbsBase(fileName string) string {
+	return filepath.Dir(fileName) + string(os.PathSeparator) + Base(fileName)
+}
File 22 of 22: pkg/fs/base_test.go (new file, 19 lines)

@@ -0,0 +1,19 @@
+package fs
+
+import (
+	"testing"
+
+	"github.com/stretchr/testify/assert"
+)
+
+func TestBase(t *testing.T) {
+	result := Base("/testdata/test.jpg")
+
+	assert.Equal(t, "test", result)
+}
+
+func TestBaseAbs(t *testing.T) {
+	result := AbsBase("/testdata/test.jpg")
+
+	assert.Equal(t, "/testdata/test", result)
+}
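Assuming the pkg/fs package added above, a short usage example; the paths below are made up, the import path assumes the repository layout at the time of this commit, and the expected output is for a Unix path separator:

package main

import (
	"fmt"

	"github.com/photoprism/photoprism/pkg/fs"
)

func main() {
	// Base strips the path, every extension, and duplicate suffixes such as
	// the " (2)" that Chrome and Windows append to copies.
	fmt.Println(fs.Base("/originals/2019/IMG_1234 (2).jpg")) // IMG_1234

	// AbsBase keeps the directory but drops the extensions.
	fmt.Println(fs.AbsBase("/originals/2019/IMG_1234.RAW.jpg")) // /originals/2019/IMG_1234
}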