Performance: Refactor database schema and UI components #995 #1438 #1811

This commit is contained in:
Michael Mayer 2022-03-30 20:36:25 +02:00
parent 68094e9b79
commit 9eda12ac20
100 changed files with 1625 additions and 743 deletions

View file

@ -357,6 +357,14 @@ export default class Config {
return this.values.settings;
}
searchBatchSize() {
if (!this.values || !this.values.settings || !this.values.settings.search.batchSize) {
return 80;
}
return this.values.settings.search.batchSize;
}
rtl() {
if (!this.values || !this.values.settings || !this.values.settings.ui.language) {
return false;

View file

@ -888,7 +888,7 @@ export class Photo extends RestModel {
}
static batchSize() {
return 60;
return config.searchBatchSize();
}
static getCollectionResource() {

View file

@ -138,7 +138,7 @@ export class Subject extends RestModel {
}
static batchSize() {
return 60;
return config.searchBatchSize();
}
static getCollectionResource() {

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-album-photos" :infinite-scroll-disabled="scrollDisabled"
:infinite-scroll-distance="1200" :infinite-scroll-listen-for-event="'scrollRefresh'">
:infinite-scroll-distance="1600" :infinite-scroll-listen-for-event="'scrollRefresh'">
<p-album-toolbar :album="model" :settings="settings" :filter="filter" :filter-change="updateQuery"
:refresh="refresh"></p-album-toolbar>
@ -50,6 +50,7 @@ import {Photo, TypeLive, TypeRaw, TypeVideo} from "model/photo";
import Album from "model/album";
import Event from "pubsub-js";
import Thumb from "model/thumb";
import Api from "common/api";
export default {
name: 'PPageAlbumPhotos',
@ -67,6 +68,7 @@ export default {
const view = this.viewType();
const filter = {country: country, camera: camera, order: order, q: q};
const settings = {view: view};
const batchSize = Photo.batchSize();
return {
subscriptions: [],
@ -77,7 +79,7 @@ export default {
uid: uid,
results: [],
scrollDisabled: true,
batchSize: Photo.batchSize(),
batchSize: batchSize,
offset: 0,
page: 0,
selection: this.$clipboard.selection,
@ -89,6 +91,8 @@ export default {
viewer: {
results: [],
loading: false,
complete: true,
batchSize: batchSize > 160 ? 480 : batchSize * 3
},
};
},
@ -189,56 +193,58 @@ export default {
} else if (showMerged) {
this.$viewer.show(Thumb.fromFiles([selected]), 0);
} else {
this.viewerResults().then((results) => {
const thumbsIndex = results.findIndex(result => result.UID === selected.UID);
if (thumbsIndex < 0) {
this.$viewer.show(Thumb.fromPhotos(this.results), index);
} else {
this.$viewer.show(Thumb.fromPhotos(results), thumbsIndex);
if (this.viewer.results && this.viewer.results.length > 0) {
// Reuse existing viewer result if possible.
const i = this.viewer.results.findIndex(p => p.uid === selected.UID);
if (i > -1 && (
(this.complete && this.viewer.results.length === this.results.length) ||
(this.viewer.complete && this.viewer.results.length > this.results.length) ||
(this.viewer.results.length - i < this.viewer.batchSize))
) {
this.$viewer.show(this.viewer.results, i);
return;
}
}
// Fetch photos from server API.
this.viewer.loading = true;
const params = this.searchParams();
params.count = this.complete ? params.offset : params.offset + this.viewer.batchSize;
params.offset = 0;
// Fetch viewer results from API.
return Api.get("photos/view", {params}).then((response) => {
let count = response && response.data ? response.data.length : 0;
if (count > 0) {
// Process response.
if (response.headers && response.headers["x-count"]) {
count = parseInt(response.headers["x-count"]);
}
this.viewer.results = Thumb.wrap(response.data);
this.viewer.complete = (count < this.batchSize);
const i = this.viewer.results.findIndex(p => p.uid === selected.UID);
// Show photos.
this.$viewer.show(this.viewer.results, i);
} else {
// Don't open viewer if nothing was found.
this.viewer.results = [];
this.viewer.complete = false;
this.$notify.warn(this.$gettext("No pictures found"));
}
}).catch(() => {
// Reset results in case of an error.
this.viewer.results = [];
this.viewer.complete = false;
}).finally(() => {
// Unblock.
this.viewer.loading = false;
});
}
return true;
},
viewerResults() {
if (this.complete || this.loading || this.viewer.loading) {
return Promise.resolve(this.results);
}
if (this.viewer.results.length >= this.results.length) {
return Promise.resolve(this.viewer.results);
}
this.viewer.loading = true;
const params = {
count: Photo.limit(),
offset: 0,
album: this.uid,
filter: this.model.Filter ? this.model.Filter : "",
merged: true,
};
Object.assign(params, this.lastFilter);
if (this.staticFilter) {
Object.assign(params, this.staticFilter);
}
return Photo.search(params).then(resp => {
// Success.
this.viewer.loading = false;
this.viewer.results = resp.models;
return Promise.resolve(this.viewer.results);
}, () => {
// Error.
this.viewer.loading = false;
this.viewer.results = [];
return Promise.resolve(this.results);
});
},
loadMore() {
if (this.scrollDisabled) return;
@ -294,10 +300,6 @@ export default {
this.dirty = false;
this.loading = false;
this.listen = true;
if (offset === 0) {
this.viewerResults();
}
});
},
updateQuery() {
@ -374,6 +376,7 @@ export default {
this.offset = this.batchSize;
this.results = response.models;
this.viewer.results = [];
this.viewer.complete = false;
this.complete = (response.count < this.batchSize);
this.scrollDisabled = this.complete;
@ -398,8 +401,6 @@ export default {
this.dirty = false;
this.loading = false;
this.listen = true;
this.viewerResults();
});
},
findAlbum() {

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-albums" style="user-select: none"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1200"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1600"
:infinite-scroll-listen-for-event="'scrollRefresh'">
<v-form ref="form" class="p-albums-search" lazy-validation dense @submit.prevent="updateQuery">

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-labels" style="user-select: none"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1200"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1600"
:infinite-scroll-listen-for-event="'scrollRefresh'">
<v-form ref="form" class="p-labels-search" lazy-validation dense @submit.stop.prevent>

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-errors" :infinite-scroll-disabled="scrollDisabled"
:infinite-scroll-distance="1200" :infinite-scroll-listen-for-event="'scrollRefresh'">
:infinite-scroll-distance="1600" :infinite-scroll-listen-for-event="'scrollRefresh'">
<v-toolbar flat :dense="$vuetify.breakpoint.smAndDown" class="page-toolbar" color="secondary">
<v-text-field :value="filter.q"
solo hide-details clearable overflow single-line validate-on-blur

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-subjects" style="user-select: none"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1200"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1600"
:infinite-scroll-listen-for-event="'scrollRefresh'">
<v-form ref="form" class="p-people-search" lazy-validation dense @submit.prevent="updateQuery">

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-photos" style="user-select: none"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1200"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="settings.prefetchDist"
:infinite-scroll-listen-for-event="'scrollRefresh'">
<p-photo-toolbar :settings="settings" :filter="filter" :filter-change="updateQuery" :dirty="dirty"
@ -45,6 +45,7 @@
import {Photo, TypeLive, TypeRaw, TypeVideo} from "model/photo";
import Thumb from "model/thumb";
import Event from "pubsub-js";
import Api from "common/api";
export default {
name: 'PPagePhotos',
@ -78,13 +79,23 @@ export default {
const settings = this.$config.settings();
if (settings && settings.features.private) {
filter.public = "true";
let prefetchDist = 1600;
if (settings) {
if (settings.features.private) {
filter.public = "true";
}
if (settings.features.review && (!this.staticFilter || !("quality" in this.staticFilter))) {
filter.quality = "3";
}
if (settings.search.prefetchDist > 0) {
prefetchDist = settings.search.prefetchDist;
}
}
if (settings && settings.features.review && (!this.staticFilter || !("quality" in this.staticFilter))) {
filter.quality = "3";
}
const batchSize = Photo.batchSize();
return {
subscriptions: [],
@ -93,11 +104,14 @@ export default {
complete: false,
results: [],
scrollDisabled: true,
batchSize: Photo.batchSize(),
batchSize: batchSize,
offset: 0,
page: 0,
selection: this.$clipboard.selection,
settings: {view: view},
settings: {
view,
prefetchDist
},
filter: filter,
lastFilter: {},
routeName: routeName,
@ -105,11 +119,13 @@ export default {
viewer: {
results: [],
loading: false,
complete: true,
batchSize: batchSize > 160 ? 480 : batchSize * 3
},
};
},
computed: {
selectMode: function() {
selectMode: function () {
return this.selection.length > 0;
},
context: function () {
@ -230,53 +246,56 @@ export default {
} else if (showMerged) {
this.$viewer.show(Thumb.fromFiles([selected]), 0);
} else {
this.viewerResults().then((results) => {
const thumbsIndex = results.findIndex(result => result.UID === selected.UID);
if (thumbsIndex < 0) {
this.$viewer.show(Thumb.fromPhotos(this.results), index);
} else {
this.$viewer.show(Thumb.fromPhotos(results), thumbsIndex);
if (this.viewer.results && this.viewer.results.length > 0) {
// Reuse existing viewer result if possible.
const i = this.viewer.results.findIndex(p => p.uid === selected.UID);
if (i > -1 && (
(this.complete && this.viewer.results.length === this.results.length) ||
(this.viewer.complete && this.viewer.results.length > this.results.length) ||
(this.viewer.results.length - i < this.viewer.batchSize))
) {
this.$viewer.show(this.viewer.results, i);
return;
}
}
// Fetch photos from server API.
this.viewer.loading = true;
const params = this.searchParams();
params.count = this.complete ? params.offset : params.offset + this.viewer.batchSize;
params.offset = 0;
// Fetch viewer results from API.
return Api.get("photos/view", {params}).then((response) => {
let count = response && response.data ? response.data.length : 0;
if (count > 0) {
// Process response.
if (response.headers && response.headers["x-count"]) {
count = parseInt(response.headers["x-count"]);
}
this.viewer.results = Thumb.wrap(response.data);
this.viewer.complete = (count < this.batchSize);
const i = this.viewer.results.findIndex(p => p.uid === selected.UID);
// Show photos.
this.$viewer.show(this.viewer.results, i);
} else {
// Don't open viewer if nothing was found.
this.viewer.results = [];
this.viewer.complete = false;
this.$notify.warn(this.$gettext("No pictures found"));
}
}).catch(() => {
// Reset results in case of an error.
this.viewer.results = [];
this.viewer.complete = false;
}).finally(() => {
// Unblock.
this.viewer.loading = false;
});
}
},
viewerResults() {
if (this.complete || this.loading || this.viewer.loading) {
return Promise.resolve(this.results);
}
if (this.viewer.results.length > (this.results.length + this.batchSize)) {
return Promise.resolve(this.viewer.results);
}
this.viewer.loading = true;
const params = {
count: this.batchSize * (this.page + 6),
offset: 0,
merged: true,
};
Object.assign(params, this.lastFilter);
if (this.staticFilter) {
Object.assign(params, this.staticFilter);
}
return Photo.search(params).then((resp) => {
// Success.
this.viewer.loading = false;
this.viewer.results = resp.models;
return Promise.resolve(this.viewer.results);
}, () => {
// Error.
this.viewer.loading = false;
this.viewer.results = [];
return Promise.resolve(this.results);
}
);
},
loadMore() {
if (this.scrollDisabled) return;
@ -330,10 +349,6 @@ export default {
this.dirty = false;
this.loading = false;
this.listen = true;
if (offset === 0) {
this.viewerResults();
}
});
},
updateQuery() {
@ -410,6 +425,7 @@ export default {
this.offset = this.batchSize;
this.results = response.models;
this.viewer.results = [];
this.viewer.complete = false;
this.complete = (response.count < this.batchSize);
this.scrollDisabled = this.complete;
@ -434,8 +450,6 @@ export default {
this.dirty = false;
this.loading = false;
this.listen = true;
this.viewerResults();
});
},
onImportCompleted() {

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-albums" style="user-select: none"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1200"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1600"
:infinite-scroll-listen-for-event="'scrollRefresh'">
<v-toolbar flat color="secondary" :dense="$vuetify.breakpoint.smAndDown">
<v-toolbar-title>

View file

@ -1,6 +1,6 @@
<template>
<div v-infinite-scroll="loadMore" class="p-page p-page-album-photos" style="user-select: none"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1200"
:infinite-scroll-disabled="scrollDisabled" :infinite-scroll-distance="1600"
:infinite-scroll-listen-for-event="'scrollRefresh'">
<v-form ref="form" lazy-validation
@ -93,6 +93,7 @@ import Event from "pubsub-js";
import Thumb from "model/thumb";
import Notify from "common/notify";
import download from "common/download";
import Api from "common/api";
export default {
name: 'PPageAlbumPhotos',
@ -110,6 +111,7 @@ export default {
const view = this.viewType();
const filter = {country: country, camera: camera, order: order, q: q};
const settings = {view: view};
const batchSize = Photo.batchSize();
return {
subscriptions: [],
@ -120,7 +122,7 @@ export default {
uid: uid,
results: [],
scrollDisabled: true,
batchSize: Photo.batchSize(),
batchSize: batchSize,
offset: 0,
page: 0,
selection: this.$clipboard.selection,
@ -133,6 +135,8 @@ export default {
viewer: {
results: [],
loading: false,
complete: true,
batchSize: batchSize > 160 ? 480 : batchSize * 3
},
};
},
@ -245,56 +249,58 @@ export default {
} else if (showMerged) {
this.$viewer.show(Thumb.fromFiles([selected]), 0);
} else {
this.viewerResults().then((results) => {
const thumbsIndex = results.findIndex(result => result.UID === selected.UID);
if (thumbsIndex < 0) {
this.$viewer.show(Thumb.fromPhotos(this.results), index);
} else {
this.$viewer.show(Thumb.fromPhotos(results), thumbsIndex);
if (this.viewer.results && this.viewer.results.length > 0) {
// Reuse existing viewer result if possible.
const i = this.viewer.results.findIndex(p => p.uid === selected.UID);
if (i > -1 && (
(this.complete && this.viewer.results.length === this.results.length) ||
(this.viewer.complete && this.viewer.results.length > this.results.length) ||
(this.viewer.results.length - i < this.viewer.batchSize))
) {
this.$viewer.show(this.viewer.results, i);
return;
}
}
// Fetch photos from server API.
this.viewer.loading = true;
const params = this.searchParams();
params.count = this.complete ? params.offset : params.offset + this.viewer.batchSize;
params.offset = 0;
// Fetch viewer results from API.
return Api.get("photos/view", {params}).then((response) => {
let count = response && response.data ? response.data.length : 0;
if (count > 0) {
// Process response.
if (response.headers && response.headers["x-count"]) {
count = parseInt(response.headers["x-count"]);
}
this.viewer.results = Thumb.wrap(response.data);
this.viewer.complete = (count < this.batchSize);
const i = this.viewer.results.findIndex(p => p.uid === selected.UID);
// Show photos.
this.$viewer.show(this.viewer.results, i);
} else {
// Don't open viewer if nothing was found.
this.viewer.results = [];
this.viewer.complete = false;
this.$notify.warn(this.$gettext("No pictures found"));
}
}).catch(() => {
// Reset results in case of an error.
this.viewer.results = [];
this.viewer.complete = false;
}).finally(() => {
// Unblock.
this.viewer.loading = false;
});
}
return true;
},
viewerResults() {
if (this.complete || this.loading || this.viewer.loading) {
return Promise.resolve(this.results);
}
if (this.viewer.results.length >= this.results.length) {
return Promise.resolve(this.viewer.results);
}
this.viewer.loading = true;
const params = {
count: Photo.limit(),
offset: 0,
album: this.uid,
filter: this.model.Filter ? this.model.Filter : "",
merged: true,
};
Object.assign(params, this.lastFilter);
if (this.staticFilter) {
Object.assign(params, this.staticFilter);
}
return Photo.search(params).then(resp => {
// Success.
this.viewer.loading = false;
this.viewer.results = resp.models;
return Promise.resolve(this.viewer.results);
}, () => {
// Error.
this.viewer.loading = false;
this.viewer.results = [];
return Promise.resolve(this.results);
});
},
loadMore() {
if (this.scrollDisabled) return;
@ -350,10 +356,6 @@ export default {
this.dirty = false;
this.loading = false;
this.listen = true;
if (offset === 0) {
this.viewerResults();
}
});
},
updateQuery() {
@ -430,6 +432,7 @@ export default {
this.offset = this.batchSize;
this.results = response.models;
this.viewer.results = [];
this.viewer.complete = false;
this.complete = (response.count < this.batchSize);
this.scrollDisabled = this.complete;
@ -454,8 +457,6 @@ export default {
this.dirty = false;
this.loading = false;
this.listen = true;
this.viewerResults();
});
},
findAlbum() {

View file

@ -293,8 +293,9 @@ const clientConfig = {
theme: "default",
language: "en",
},
templates: {
default: "index.tmpl",
search: {
batchSize: 60,
prefetchDist: 999,
},
maps: {
animate: 0,
@ -342,6 +343,9 @@ const clientConfig = {
download: {
name: "file",
},
templates: {
default: "index.tmpl",
},
},
disable: {
backups: false,

View file

@ -3,8 +3,6 @@ package api
import (
"net/http"
"github.com/photoprism/photoprism/pkg/sanitize"
"github.com/gin-gonic/gin"
"github.com/gin-gonic/gin/binding"
@ -12,12 +10,16 @@ import (
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/internal/search"
"github.com/photoprism/photoprism/internal/service"
"github.com/photoprism/photoprism/pkg/sanitize"
"github.com/photoprism/photoprism/pkg/txt"
)
// SearchGeo finds photos and returns results as JSON, so they can be displayed on a map or in a viewer.
//
// GET /api/v1/geo
//
// See form.SearchGeo for supported search params and data types.
func SearchGeo(router *gin.RouterGroup) {
handler := func(c *gin.Context) {
s := Auth(SessionID(c), acl.ResourcePhotos, acl.ActionSearch)
@ -58,6 +60,9 @@ func SearchGeo(router *gin.RouterGroup) {
return
}
// Add response headers.
AddTokenHeaders(c)
var resp []byte
// Render JSON response.
@ -74,11 +79,10 @@ func SearchGeo(router *gin.RouterGroup) {
return
}
AddTokenHeaders(c)
c.Data(http.StatusOK, "application/json", resp)
}
// Register route handlers.
router.GET("/geo", handler)
router.GET("/geo/:format", handler)
}

View file

@ -5,50 +5,41 @@ import (
"github.com/gin-gonic/gin"
"github.com/gin-gonic/gin/binding"
"github.com/photoprism/photoprism/internal/acl"
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/internal/i18n"
"github.com/photoprism/photoprism/internal/search"
"github.com/photoprism/photoprism/internal/service"
)
// SearchPhotos searches the pictures index and returns the result as JSON.
//
// GET /api/v1/photos
//
// Query:
// q: string Query string
// label: string Label
// cat: string Category
// country: string Country code
// camera: int UpdateCamera ID
// order: string Sort order
// count: int Max result count (required)
// offset: int Result offset
// before: date Find photos taken before (format: "2006-01-02")
// after: date Find photos taken after (format: "2006-01-02")
// favorite: bool Find favorites only
// See form.SearchPhotos for supported search params and data types.
func SearchPhotos(router *gin.RouterGroup) {
router.GET("/photos", func(c *gin.Context) {
// searchPhotos checking authorization and parses the search request.
searchForm := func(c *gin.Context) (f form.SearchPhotos, err error) {
s := Auth(SessionID(c), acl.ResourcePhotos, acl.ActionSearch)
if s.Invalid() {
AbortUnauthorized(c)
return
return f, i18n.Error(i18n.ErrUnauthorized)
}
var f form.SearchPhotos
err := c.MustBindWith(&f, binding.Form)
err = c.MustBindWith(&f, binding.Form)
if err != nil {
AbortBadRequest(c)
return
return f, err
}
// Guests may only see public content in shared albums.
if s.Guest() {
if f.Album == "" || !s.HasShare(f.Album) {
AbortUnauthorized(c)
return
return f, i18n.Error(i18n.ErrUnauthorized)
}
f.UID = ""
@ -60,6 +51,18 @@ func SearchPhotos(router *gin.RouterGroup) {
f.Review = false
}
return f, nil
}
// defaultHandler a standard JSON result with all fields.
defaultHandler := func(c *gin.Context) {
f, err := searchForm(c)
// Abort if authorization or form are invalid.
if err != nil {
return
}
result, count, err := search.Photos(f)
if err != nil {
@ -68,11 +71,45 @@ func SearchPhotos(router *gin.RouterGroup) {
return
}
// Add response headers.
AddCountHeader(c, count)
AddLimitHeader(c, f.Count)
AddOffsetHeader(c, f.Offset)
AddTokenHeaders(c)
// Render as JSON.
c.JSON(http.StatusOK, result)
})
}
// viewHandler returns a photo viewer formatted result.
viewHandler := func(c *gin.Context) {
f, err := searchForm(c)
// Abort if authorization or form are invalid.
if err != nil {
return
}
conf := service.Config()
result, count, err := search.PhotosViewerResults(f, conf.ContentUri(), conf.ApiUri(), conf.PreviewToken(), conf.DownloadToken())
if err != nil {
log.Warnf("search: %s", err)
AbortBadRequest(c)
return
}
// Add response headers.
AddCountHeader(c, count)
AddLimitHeader(c, f.Count)
AddOffsetHeader(c, f.Offset)
AddTokenHeaders(c)
// Render as JSON.
c.JSON(http.StatusOK, result)
}
// Register route handlers.
router.GET("/photos", defaultHandler)
router.GET("/photos/view", viewHandler)
}

View file

@ -19,6 +19,19 @@ func TestSearchPhotos(t *testing.T) {
assert.Equal(t, http.StatusOK, r.Code)
})
t.Run("ViewerJSON", func(t *testing.T) {
app, router, _ := NewApiTest()
SearchPhotos(router)
r := PerformRequest(app, "GET", "/api/v1/photos?count=10&format=view")
body := r.Body.String()
t.Logf("response body: %s", body)
count := gjson.Get(body, "#")
assert.LessOrEqual(t, int64(2), count.Int())
assert.Equal(t, http.StatusOK, r.Code)
})
t.Run("InvalidRequest", func(t *testing.T) {
app, router, _ := NewApiTest()
SearchPhotos(router)

View file

@ -30,7 +30,7 @@ var BackupCommand = cli.Command{
Name: "backup",
Description: backupDescription,
Usage: "Creates an index SQL dump and optionally album YAML files organized by type",
ArgsUsage: "[FILENAME | -]",
ArgsUsage: "[filename.sql | -]",
Flags: backupFlags,
Action: backupAction,
}

View file

@ -2,6 +2,7 @@ package commands
import (
"context"
"strings"
"time"
"github.com/sirupsen/logrus"
@ -12,8 +13,9 @@ import (
// MigrateCommand registers the "migrate" CLI command.
var MigrateCommand = cli.Command{
Name: "migrate",
Usage: "Updates the index database schema",
Name: "migrate",
Usage: "Updates the index database schema",
ArgsUsage: "[migrations...]",
Flags: []cli.Flag{
cli.BoolFlag{
Name: "failed, f",
@ -53,9 +55,17 @@ func migrateAction(ctx *cli.Context) error {
log.Infoln("migrate: running previously failed migrations")
}
var ids []string
// Check argument for specific migrations to be run.
if migrations := strings.TrimSpace(ctx.Args().First()); migrations != "" {
ids = strings.Fields(migrations)
}
log.Infoln("migrating database schema...")
conf.MigrateDb(runFailed)
// Run migrations.
conf.MigrateDb(runFailed, ids)
elapsed := time.Since(start)

View file

@ -175,7 +175,7 @@ func resetIndexDb(conf *config.Config) {
tables.Drop(conf.Db())
log.Infoln("restoring default schema")
entity.MigrateDb(true, false)
entity.MigrateDb(true, false, nil)
if conf.AdminPassword() != "" {
log.Infoln("restoring initial admin password")

View file

@ -181,6 +181,7 @@ func (c *Config) PublicConfig() ClientConfig {
result := ClientConfig{
Settings: Settings{
UI: settings.UI,
Search: settings.Search,
Maps: settings.Maps,
Features: settings.Features,
Share: settings.Share,
@ -251,6 +252,7 @@ func (c *Config) GuestConfig() ClientConfig {
result := ClientConfig{
Settings: Settings{
UI: settings.UI,
Search: settings.Search,
Maps: settings.Maps,
Features: settings.Features,
Share: settings.Share,
@ -427,9 +429,7 @@ func (c *Config) UserConfig() ClientConfig {
c.Db().
Table("files").
Select("COUNT(*) AS files").
Where("file_missing = 0").
Where("deleted_at IS NULL").
Select("COUNT(media_id) AS files").
Take(&result.Count)
c.Db().

View file

@ -244,14 +244,14 @@ func (c *Config) SetDbOptions() {
// InitDb initializes the database without running previously failed migrations.
func (c *Config) InitDb() {
	// nil ids means no specific migrations are selected, so all pending
	// schema migrations run. Removed duplicate one-argument call left over
	// from before MigrateDb gained the ids parameter.
	c.MigrateDb(false, nil)
}
// MigrateDb initializes the database and migrates the schema if needed.
func (c *Config) MigrateDb(runFailed bool) {
func (c *Config) MigrateDb(runFailed bool, ids []string) {
c.SetDbOptions()
entity.SetDbProvider(c)
entity.MigrateDb(true, runFailed)
entity.MigrateDb(true, runFailed, ids)
entity.Admin.InitPassword(c.AdminPassword())

View file

@ -336,7 +336,7 @@ var GlobalFlags = []cli.Flag{
EnvVar: "PHOTOPRISM_HTTP_COMPRESSION",
},
cli.StringFlag{
Name: "database-driver",
Name: "database-driver, db",
Usage: "database `DRIVER` (sqlite, mysql)",
Value: "sqlite",
EnvVar: "PHOTOPRISM_DATABASE_DRIVER",
@ -347,24 +347,24 @@ var GlobalFlags = []cli.Flag{
EnvVar: "PHOTOPRISM_DATABASE_DSN",
},
cli.StringFlag{
Name: "database-server, db",
Name: "database-server, db-server",
Usage: "database `HOST` incl. port e.g. \"mariadb:3306\" (or socket path)",
EnvVar: "PHOTOPRISM_DATABASE_SERVER",
},
cli.StringFlag{
Name: "database-name",
Name: "database-name, db-name",
Value: "photoprism",
Usage: "database schema `NAME`",
EnvVar: "PHOTOPRISM_DATABASE_NAME",
},
cli.StringFlag{
Name: "database-user",
Name: "database-user, db-user",
Value: "photoprism",
Usage: "database user `NAME`",
EnvVar: "PHOTOPRISM_DATABASE_USER",
},
cli.StringFlag{
Name: "database-password",
Name: "database-password, db-pass",
Usage: "database user `PASSWORD`",
EnvVar: "PHOTOPRISM_DATABASE_PASSWORD",
},

View file

@ -20,6 +20,12 @@ type UISettings struct {
Language string `json:"language" yaml:"Language"`
}
// SearchSettings represents search UI preferences.
type SearchSettings struct {
BatchSize int `json:"batchSize" yaml:"BatchSize"` // results loaded per search request; 0 lets the client fall back to its built-in default
PrefetchDist int `json:"prefetchDist" yaml:"PrefetchDist"` // infinite-scroll prefetch distance; only values > 0 override the client default
}
// TemplateSettings represents template settings for the UI and messaging.
type TemplateSettings struct {
Default string `json:"default" yaml:"Default"`
@ -88,7 +94,7 @@ type DownloadSettings struct {
// Settings represents user settings for Web UI, indexing, and import.
type Settings struct {
UI UISettings `json:"ui" yaml:"UI"`
Templates TemplateSettings `json:"templates" yaml:"Templates"`
Search SearchSettings `json:"search" yaml:"Search"`
Maps MapsSettings `json:"maps" yaml:"Maps"`
Features FeatureSettings `json:"features" yaml:"Features"`
Import ImportSettings `json:"import" yaml:"Import"`
@ -96,6 +102,7 @@ type Settings struct {
Stack StackSettings `json:"stack" yaml:"Stack"`
Share ShareSettings `json:"share" yaml:"Share"`
Download DownloadSettings `json:"download" yaml:"Download"`
Templates TemplateSettings `json:"templates" yaml:"Templates"`
}
// NewSettings creates a new Settings instance.
@ -107,8 +114,9 @@ func NewSettings(c *Config) *Settings {
Theme: c.DefaultTheme(),
Language: c.DefaultLocale(),
},
Templates: TemplateSettings{
Default: "index.tmpl",
Search: SearchSettings{
BatchSize: 0,
PrefetchDist: 0,
},
Maps: MapsSettings{
Animate: 0,
@ -155,6 +163,9 @@ func NewSettings(c *Config) *Settings {
Download: DownloadSettings{
Name: entity.DownloadNameDefault,
},
Templates: TemplateSettings{
Default: "index.tmpl",
},
}
}

View file

@ -3,8 +3,9 @@ UI:
Zoom: false
Theme: onyx
Language: de
Templates:
Default: index.tmpl
Search:
BatchSize: 0
PrefetchDist: 0
Maps:
Animate: 0
Style: streets
@ -44,3 +45,5 @@ Share:
Title: ""
Download:
Name: file
Templates:
Default: index.tmpl

View file

@ -25,21 +25,28 @@ const (
type Accounts []Account
// Account represents a remote service account for uploading, downloading or syncing media files.
//
// Field Descriptions:
// - AccTimeout configures the timeout for requests, options: "", high, medium, low, none.
// - AccErrors holds the number of connection errors since the last reset.
// - AccShare enables manual upload, see SharePath, ShareSize, and ShareExpires.
// - AccSync enables automatic file synchronization, see SyncDownload and SyncUpload.
// - RetryLimit specifies the number of retry attempts, a negative value disables the limit.
type Account struct {
ID uint `gorm:"primary_key"`
AccName string `gorm:"type:VARCHAR(160);"`
AccOwner string `gorm:"type:VARCHAR(160);"`
AccURL string `gorm:"type:VARBINARY(512);"`
AccURL string `gorm:"type:VARCHAR(255);"`
AccType string `gorm:"type:VARBINARY(255);"`
AccKey string `gorm:"type:VARBINARY(255);"`
AccUser string `gorm:"type:VARBINARY(255);"`
AccPass string `gorm:"type:VARBINARY(255);"`
AccTimeout string `gorm:"type:VARBINARY(16);"` // Request timeout: default, high, medium, low, none
AccTimeout string `gorm:"type:VARBINARY(16);"`
AccError string `gorm:"type:VARBINARY(512);"`
AccErrors int // Number of general account errors, there are counters for individual files too.
AccShare bool // Manual upload enabled, see SharePath, ShareSize, and ShareExpires.
AccSync bool // Background sync enabled, see SyncDownload and SyncUpload.
RetryLimit int // Number of remote request retry attempts.
AccErrors int
AccShare bool
AccSync bool
RetryLimit int
SharePath string `gorm:"type:VARBINARY(500);"`
ShareSize string `gorm:"type:VARBINARY(16);"`
ShareExpires int
@ -56,6 +63,11 @@ type Account struct {
DeletedAt *time.Time `deepcopier:"skip" sql:"index"`
}
// TableName specifies the database table in which Account entities are stored.
func (m Account) TableName() string {
	return "accounts"
}
// CreateAccount creates a new account entity in the database.
func CreateAccount(form form.Account) (model *Account, err error) {
model = &Account{

View file

@ -22,7 +22,7 @@ type Address struct {
AddressCity string `gorm:"size:128;" json:"City" yaml:"City,omitempty"`
AddressState string `gorm:"size:128;" json:"State" yaml:"State,omitempty"`
AddressCountry string `gorm:"type:VARBINARY(2);default:'zz'" json:"Country" yaml:"Country,omitempty"`
AddressNotes string `gorm:"type:TEXT;" json:"Notes" yaml:"Notes,omitempty"`
AddressNotes string `gorm:"type:VARCHAR(1024);" json:"Notes" yaml:"Notes,omitempty"`
CreatedAt time.Time `json:"CreatedAt" yaml:"-"`
UpdatedAt time.Time `json:"UpdatedAt" yaml:"-"`
DeletedAt *time.Time `sql:"index" json:"DeletedAt,omitempty" yaml:"-"`

View file

@ -39,10 +39,10 @@ type Album struct {
AlbumTitle string `gorm:"type:VARCHAR(160);index;" json:"Title" yaml:"Title"`
AlbumLocation string `gorm:"type:VARCHAR(160);" json:"Location" yaml:"Location,omitempty"`
AlbumCategory string `gorm:"type:VARCHAR(100);index;" json:"Category" yaml:"Category,omitempty"`
AlbumCaption string `gorm:"type:TEXT;" json:"Caption" yaml:"Caption,omitempty"`
AlbumDescription string `gorm:"type:TEXT;" json:"Description" yaml:"Description,omitempty"`
AlbumNotes string `gorm:"type:TEXT;" json:"Notes" yaml:"Notes,omitempty"`
AlbumFilter string `gorm:"type:VARBINARY(767);" json:"Filter" yaml:"Filter,omitempty"`
AlbumCaption string `gorm:"type:VARCHAR(1024);" json:"Caption" yaml:"Caption,omitempty"`
AlbumDescription string `gorm:"type:VARCHAR(2048);" json:"Description" yaml:"Description,omitempty"`
AlbumNotes string `gorm:"type:VARCHAR(1024);" json:"Notes" yaml:"Notes,omitempty"`
AlbumFilter string `gorm:"type:VARBINARY(2048);" json:"Filter" yaml:"Filter,omitempty"`
AlbumOrder string `gorm:"type:VARBINARY(32);" json:"Order" yaml:"Order,omitempty"`
AlbumTemplate string `gorm:"type:VARBINARY(255);" json:"Template" yaml:"Template,omitempty"`
AlbumState string `gorm:"type:VARCHAR(100);index;" json:"State" yaml:"State,omitempty"`

View file

@ -23,13 +23,18 @@ type Camera struct {
CameraMake string `gorm:"type:VARCHAR(160);" json:"Make" yaml:"Make,omitempty"`
CameraModel string `gorm:"type:VARCHAR(160);" json:"Model" yaml:"Model,omitempty"`
CameraType string `gorm:"type:VARCHAR(100);" json:"Type,omitempty" yaml:"Type,omitempty"`
CameraDescription string `gorm:"type:TEXT;" json:"Description,omitempty" yaml:"Description,omitempty"`
CameraNotes string `gorm:"type:TEXT;" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CameraDescription string `gorm:"type:VARCHAR(2048);" json:"Description,omitempty" yaml:"Description,omitempty"`
CameraNotes string `gorm:"type:VARCHAR(1024);" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CreatedAt time.Time `json:"-" yaml:"-"`
UpdatedAt time.Time `json:"-" yaml:"-"`
DeletedAt *time.Time `sql:"index" json:"-" yaml:"-"`
}
// TableName returns the entity database table name.
func (Camera) TableName() string {
	const tableName = "cameras"
	return tableName
}
var UnknownCamera = Camera{
CameraSlug: UnknownID,
CameraName: "Unknown",

View file

@ -22,13 +22,18 @@ type Country struct {
ID string `gorm:"type:VARBINARY(2);primary_key" json:"ID" yaml:"ID"`
CountrySlug string `gorm:"type:VARBINARY(160);unique_index;" json:"Slug" yaml:"-"`
CountryName string `gorm:"type:VARCHAR(160);" json:"Name" yaml:"Name,omitempty"`
CountryDescription string `gorm:"type:TEXT;" json:"Description,omitempty" yaml:"Description,omitempty"`
CountryNotes string `gorm:"type:TEXT;" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CountryDescription string `gorm:"type:VARCHAR(2048);" json:"Description,omitempty" yaml:"Description,omitempty"`
CountryNotes string `gorm:"type:VARCHAR(1024);" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CountryPhoto *Photo `json:"-" yaml:"-"`
CountryPhotoID uint `json:"-" yaml:"-"`
New bool `gorm:"-" json:"-" yaml:"-"`
}
// TableName returns the entity database table name.
func (Country) TableName() string {
	const tableName = "countries"
	return tableName
}
// UnknownCountry is defined here to use it as a default
var UnknownCountry = Country{
ID: UnknownID,

View file

@ -19,7 +19,7 @@ func CreateDefaultFixtures() {
func ResetTestFixtures() {
start := time.Now()
Entities.Migrate(Db(), false)
Entities.Migrate(Db(), false, nil)
Entities.WaitForMigration(Db())
Entities.Truncate(Db())

View file

@ -8,14 +8,14 @@ import (
)
// MigrateDb creates database tables and inserts default fixtures as needed.
func MigrateDb(dropDeprecated, runFailed bool) {
func MigrateDb(dropDeprecated, runFailed bool, ids []string) {
start := time.Now()
if dropDeprecated {
if dropDeprecated && len(ids) == 0 {
DeprecatedTables.Drop(Db())
}
Entities.Migrate(Db(), runFailed)
Entities.Migrate(Db(), runFailed, ids)
Entities.WaitForMigration(Db())
CreateDefaultFixtures()
@ -56,6 +56,7 @@ func InitTestDb(driver, dsn string) *Gorm {
// Insert test fixtures.
SetDbProvider(db)
ResetTestFixtures()
File{}.RegenerateIndex()
return db
}

View file

@ -42,14 +42,14 @@ func TestMySQL8(t *testing.T) {
Entities.Drop(db)
// First migration.
Entities.Migrate(db, false)
Entities.Migrate(db, false, nil)
Entities.WaitForMigration(db)
// Second migration.
Entities.Migrate(db, false)
Entities.Migrate(db, false, nil)
Entities.WaitForMigration(db)
// Third migration with force flag.
Entities.Migrate(db, true)
Entities.Migrate(db, true, []string{"20211121-094727"})
Entities.WaitForMigration(db)
}

View file

@ -84,21 +84,23 @@ func (list Tables) Truncate(db *gorm.DB) {
}
// Migrate migrates all database tables of registered entities.
func (list Tables) Migrate(db *gorm.DB, runFailed bool) {
for name, entity := range list {
if err := db.AutoMigrate(entity).Error; err != nil {
log.Debugf("entity: %s (waiting 1s)", err.Error())
time.Sleep(time.Second)
func (list Tables) Migrate(db *gorm.DB, runFailed bool, ids []string) {
if len(ids) == 0 {
for name, entity := range list {
if err := db.AutoMigrate(entity).Error; err != nil {
log.Errorf("entity: failed migrating %s", sanitize.Log(name))
panic(err)
log.Debugf("entity: %s (waiting 1s)", err.Error())
time.Sleep(time.Second)
if err := db.AutoMigrate(entity).Error; err != nil {
log.Errorf("entity: failed migrating %s", sanitize.Log(name))
panic(err)
}
}
}
}
if err := migrate.Auto(db, runFailed); err != nil {
if err := migrate.Auto(db, runFailed, ids); err != nil {
log.Error(err)
}
}

View file

@ -10,28 +10,30 @@ import (
var photoDetailsMutex = sync.Mutex{}
// ClipDetail is the size of a Details database column in runes.
const ClipDetail = 250
// Details stores additional metadata fields for each photo to improve search performance.
type Details struct {
PhotoID uint `gorm:"primary_key;auto_increment:false" yaml:"-"`
Keywords string `gorm:"type:TEXT;" json:"Keywords" yaml:"Keywords"`
Keywords string `gorm:"type:VARCHAR(2048);" json:"Keywords" yaml:"Keywords"`
KeywordsSrc string `gorm:"type:VARBINARY(8);" json:"KeywordsSrc" yaml:"KeywordsSrc,omitempty"`
Notes string `gorm:"type:TEXT;" json:"Notes" yaml:"Notes,omitempty"`
Notes string `gorm:"type:VARCHAR(2048);" json:"Notes" yaml:"Notes,omitempty"`
NotesSrc string `gorm:"type:VARBINARY(8);" json:"NotesSrc" yaml:"NotesSrc,omitempty"`
Subject string `gorm:"type:VARCHAR(250);" json:"Subject" yaml:"Subject,omitempty"`
Subject string `gorm:"type:VARCHAR(1024);" json:"Subject" yaml:"Subject,omitempty"`
SubjectSrc string `gorm:"type:VARBINARY(8);" json:"SubjectSrc" yaml:"SubjectSrc,omitempty"`
Artist string `gorm:"type:VARCHAR(250);" json:"Artist" yaml:"Artist,omitempty"`
Artist string `gorm:"type:VARCHAR(1024);" json:"Artist" yaml:"Artist,omitempty"`
ArtistSrc string `gorm:"type:VARBINARY(8);" json:"ArtistSrc" yaml:"ArtistSrc,omitempty"`
Copyright string `gorm:"type:VARCHAR(250);" json:"Copyright" yaml:"Copyright,omitempty"`
Copyright string `gorm:"type:VARCHAR(1024);" json:"Copyright" yaml:"Copyright,omitempty"`
CopyrightSrc string `gorm:"type:VARBINARY(8);" json:"CopyrightSrc" yaml:"CopyrightSrc,omitempty"`
License string `gorm:"type:VARCHAR(250);" json:"License" yaml:"License,omitempty"`
License string `gorm:"type:VARCHAR(1024);" json:"License" yaml:"License,omitempty"`
LicenseSrc string `gorm:"type:VARBINARY(8);" json:"LicenseSrc" yaml:"LicenseSrc,omitempty"`
CreatedAt time.Time `yaml:"-"`
UpdatedAt time.Time `yaml:"-"`
}
// TableName returns the entity database table name.
func (Details) TableName() string {
	const tableName = "details"
	return tableName
}
// NewDetails creates new photo details.
func NewDetails(photo Photo) Details {
return Details{PhotoID: photo.ID}
@ -139,7 +141,7 @@ func (m *Details) HasLicense() bool {
// SetKeywords updates the photo details field.
func (m *Details) SetKeywords(data, src string) {
val := txt.Clip(data, txt.ClipDescription)
val := txt.Clip(data, txt.ClipText)
if val == "" {
return
@ -161,25 +163,9 @@ func (m *Details) SetKeywords(data, src string) {
m.KeywordsSrc = src
}
// SetSubject updates the photo details field.
//
// The value is clipped to ClipDetail (250 runes) before use; empty input is
// ignored. The update is also skipped when an existing subject came from a
// source with higher priority (SrcPriority) than src, so lower-priority
// metadata never overwrites higher-priority metadata.
func (m *Details) SetSubject(data, src string) {
	val := txt.Clip(data, ClipDetail)

	if val == "" {
		return
	}

	// Keep the current subject if it was set by a higher-priority source.
	if (SrcPriority[src] < SrcPriority[m.SubjectSrc]) && m.HasSubject() {
		return
	}

	m.Subject = val
	m.SubjectSrc = src
}
// SetNotes updates the photo details field.
func (m *Details) SetNotes(data, src string) {
val := txt.Clip(data, txt.ClipDescription)
val := txt.Clip(data, txt.ClipText)
if val == "" {
return
@ -193,9 +179,25 @@ func (m *Details) SetNotes(data, src string) {
m.NotesSrc = src
}
// SetSubject updates the photo details field.
func (m *Details) SetSubject(data, src string) {
	// Clip to the short-text limit and ignore empty values.
	subject := txt.Clip(data, txt.ClipShortText)

	if subject == "" {
		return
	}

	// Never replace a subject that was set by a higher-priority source.
	if m.HasSubject() && SrcPriority[src] < SrcPriority[m.SubjectSrc] {
		return
	}

	m.Subject = subject
	m.SubjectSrc = src
}
// SetArtist updates the photo details field.
func (m *Details) SetArtist(data, src string) {
val := txt.Clip(data, ClipDetail)
val := txt.Clip(data, txt.ClipShortText)
if val == "" {
return
@ -211,7 +213,7 @@ func (m *Details) SetArtist(data, src string) {
// SetCopyright updates the photo details field.
func (m *Details) SetCopyright(data, src string) {
val := txt.Clip(data, ClipDetail)
val := txt.Clip(data, txt.ClipShortText)
if val == "" {
return
@ -227,7 +229,7 @@ func (m *Details) SetCopyright(data, src string) {
// SetLicense updates the photo details field.
func (m *Details) SetLicense(data, src string) {
val := txt.Clip(data, ClipDetail)
val := txt.Clip(data, txt.ClipShortText)
if val == "" {
return

View file

@ -14,6 +14,7 @@ import (
"github.com/ulule/deepcopier"
"github.com/photoprism/photoprism/internal/face"
"github.com/photoprism/photoprism/pkg/colors"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/rnd"
@ -29,16 +30,22 @@ const (
DownloadNameDefault = DownloadNameFile
)
// Files represents a file result set.
type Files []File
var primaryFileMutex = sync.Mutex{}
// Index updates should not run simultaneously.
var fileIndexMutex = sync.Mutex{}
var filePrimaryMutex = sync.Mutex{}
// File represents an image or sidecar file that belongs to a photo.
type File struct {
ID uint `gorm:"primary_key" json:"-" yaml:"-"`
Photo *Photo `json:"-" yaml:"-"`
PhotoID uint `gorm:"index;" json:"-" yaml:"-"`
PhotoID uint `gorm:"index:idx_files_photo_id;" json:"-" yaml:"-"`
PhotoUID string `gorm:"type:VARBINARY(42);index;" json:"PhotoUID" yaml:"PhotoUID"`
PhotoTakenAt time.Time `gorm:"type:DATETIME;index;" json:"TakenAt" yaml:"TakenAt"`
TimeIndex *string `gorm:"type:VARBINARY(48);" json:"TimeIndex" yaml:"TimeIndex"`
MediaID *string `gorm:"type:VARBINARY(32);" json:"MediaID" yaml:"MediaID"`
InstanceID string `gorm:"type:VARBINARY(42);index;" json:"InstanceID,omitempty" yaml:"InstanceID,omitempty"`
FileUID string `gorm:"type:VARBINARY(42);unique_index;" json:"UID" yaml:"UID"`
FileName string `gorm:"type:VARBINARY(755);unique_index:idx_files_name_root;" json:"Name" yaml:"Name"`
@ -49,7 +56,7 @@ type File struct {
FileCodec string `gorm:"type:VARBINARY(32)" json:"Codec" yaml:"Codec,omitempty"`
FileType string `gorm:"type:VARBINARY(32)" json:"Type" yaml:"Type,omitempty"`
FileMime string `gorm:"type:VARBINARY(64)" json:"Mime" yaml:"Mime,omitempty"`
FilePrimary bool `json:"Primary" yaml:"Primary,omitempty"`
FilePrimary bool `gorm:"index:idx_files_photo_id;" json:"Primary" yaml:"Primary,omitempty"`
FileSidecar bool `json:"Sidecar" yaml:"Sidecar,omitempty"`
FileMissing bool `json:"Missing" yaml:"Missing,omitempty"`
FilePortrait bool `json:"Portrait" yaml:"Portrait,omitempty"`
@ -58,10 +65,10 @@ type File struct {
FileWidth int `json:"Width" yaml:"Width,omitempty"`
FileHeight int `json:"Height" yaml:"Height,omitempty"`
FileOrientation int `json:"Orientation" yaml:"Orientation,omitempty"`
FileProjection string `gorm:"type:VARBINARY(40);" json:"Projection,omitempty" yaml:"Projection,omitempty"`
FileProjection string `gorm:"type:VARBINARY(64);" json:"Projection,omitempty" yaml:"Projection,omitempty"`
FileAspectRatio float32 `gorm:"type:FLOAT;" json:"AspectRatio" yaml:"AspectRatio,omitempty"`
FileHDR bool `gorm:"column:file_hdr;" json:"IsHDR" yaml:"IsHDR,omitempty"`
FileColorProfile string `gorm:"type:VARBINARY(40);" json:"ColorProfile,omitempty" yaml:"ColorProfile,omitempty"`
FileColorProfile string `gorm:"type:VARBINARY(64);" json:"ColorProfile,omitempty" yaml:"ColorProfile,omitempty"`
FileMainColor string `gorm:"type:VARBINARY(16);index;" json:"MainColor" yaml:"MainColor,omitempty"`
FileColors string `gorm:"type:VARBINARY(9);" json:"Colors" yaml:"Colors,omitempty"`
FileLuminance string `gorm:"type:VARBINARY(9);" json:"Luminance" yaml:"Luminance,omitempty"`
@ -84,6 +91,59 @@ func (File) TableName() string {
return "files"
}
// RegenerateIndex updates the search index columns.
func (m File) RegenerateIndex() {
fileIndexMutex.Lock()
defer fileIndexMutex.Unlock()
start := time.Now()
filesTable := File{}.TableName()
photosTable := Photo{}.TableName()
var updateWhere *gorm.SqlExpr
if m.PhotoID > 0 {
updateWhere = gorm.Expr("photo_id = ?", m.PhotoID)
} else if m.PhotoUID != "" {
updateWhere = gorm.Expr("photo_uid = ?", m.PhotoUID)
} else if m.ID > 0 {
updateWhere = gorm.Expr("id = ?", m.ID)
} else {
updateWhere = gorm.Expr("photo_id IS NOT NULL")
}
switch DbDialect() {
case MySQL:
Log("files", "regenerate photo_taken_at",
Db().Exec("UPDATE ? f JOIN ? p ON p.id = f.photo_id SET f.photo_taken_at = p.taken_at_local WHERE ?",
gorm.Expr(filesTable), gorm.Expr(photosTable), updateWhere).Error)
Log("files", "regenerate media_id",
Db().Exec("UPDATE ? SET media_id = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN CONCAT(HEX(100000000000 - photo_id), '-', 1 + file_sidecar - file_primary, '-', file_uid) END WHERE ?",
gorm.Expr(filesTable), updateWhere).Error)
Log("files", "regenerate time_index",
Db().Exec("UPDATE ? SET time_index = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN CONCAT(100000000000000 - CAST(photo_taken_at AS UNSIGNED), '-', media_id) END WHERE ?",
gorm.Expr(filesTable), updateWhere).Error)
case SQLite3:
Log("files", "regenerate photo_taken_at",
Db().Exec("UPDATE ? SET photo_taken_at = (SELECT p.taken_at_local FROM ? p WHERE p.id = photo_id) WHERE ?",
gorm.Expr(filesTable), gorm.Expr(photosTable), updateWhere).Error)
Log("files", "regenerate media_id",
Db().Exec("UPDATE ? SET media_id = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN (HEX(100000000000 - photo_id) || '-' || (1 + file_sidecar - file_primary) || '-' || file_uid) ELSE NULL END WHERE ?",
gorm.Expr(filesTable), updateWhere).Error)
Log("files", "regenerate time_index",
Db().Exec("UPDATE ? SET time_index = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN ((100000000000000 - CAST(photo_taken_at AS UNSIGNED)) || '-' || media_id) ELSE NULL END WHERE ?",
gorm.Expr(filesTable), updateWhere).Error)
default:
log.Warnf("sql: unsupported dialect %s", DbDialect())
}
log.Debugf("files: updated search index [%s]", time.Since(start))
}
type FileInfos struct {
FileWidth int
FileHeight int
@ -338,15 +398,22 @@ func (m *File) Create() error {
}
// ResolvePrimary ensures there is only one primary file for a photo.
func (m *File) ResolvePrimary() error {
primaryFileMutex.Lock()
defer primaryFileMutex.Unlock()
func (m *File) ResolvePrimary() (err error) {
filePrimaryMutex.Lock()
defer filePrimaryMutex.Unlock()
if m.FilePrimary {
return UnscopedDb().Exec("UPDATE `files` SET file_primary = (id = ?) WHERE photo_id = ?", m.ID, m.PhotoID).Error
if !m.FilePrimary {
return nil
}
return nil
err = UnscopedDb().
Exec("UPDATE files SET file_primary = (id = ?) WHERE photo_id = ?", m.ID, m.PhotoID).Error
if err == nil {
m.RegenerateIndex()
}
return err
}
// Save stores the file in the database.
@ -477,12 +544,12 @@ func (m *File) Panorama() bool {
// Projection returns the panorama projection name if any.
func (m *File) Projection() string {
return SanitizeTypeString(m.FileProjection)
return SanitizeStringTypeLower(m.FileProjection)
}
// SetProjection sets the panorama projection name.
func (m *File) SetProjection(name string) {
m.FileProjection = SanitizeTypeString(name)
m.FileProjection = SanitizeStringTypeLower(name)
}
// IsHDR returns true if it is a high dynamic range file.
@ -504,7 +571,7 @@ func (m *File) ResetHDR() {
// ColorProfile returns the ICC color profile name if any.
func (m *File) ColorProfile() string {
return SanitizeTypeCaseSensitive(m.FileColorProfile)
return SanitizeStringType(m.FileColorProfile)
}
// HasColorProfile tests if the file has a matching color profile.
@ -514,8 +581,8 @@ func (m *File) HasColorProfile(profile colors.Profile) bool {
// SetColorProfile sets the ICC color profile name such as "Display P3".
func (m *File) SetColorProfile(name string) {
if name = SanitizeTypeCaseSensitive(name); name != "" {
m.FileColorProfile = SanitizeTypeCaseSensitive(name)
if name = SanitizeStringType(name); name != "" {
m.FileColorProfile = SanitizeStringType(name)
}
}

View file

@ -331,9 +331,9 @@ func TestFile_SetProjection(t *testing.T) {
})
t.Run("Sanitize", func(t *testing.T) {
m := &File{}
m.SetProjection(" 幸福 Hanzi are logograms developed for the writing of Chinese! ")
assert.Equal(t, "hanzi are logograms developed for the wr", m.FileProjection)
assert.Equal(t, TrimTypeString, len(m.FileProjection))
m.SetProjection(" 幸福 Hanzi are logograms developed for the writing of Chinese! Expressions in an index may not ...!")
assert.Equal(t, "hanzi are logograms developed for the writing of chinese! expres", m.FileProjection)
assert.Equal(t, ClipStringType, len(m.FileProjection))
})
}

View file

@ -27,7 +27,7 @@ type Folder struct {
FolderType string `gorm:"type:VARBINARY(16);" json:"Type" yaml:"Type,omitempty"`
FolderTitle string `gorm:"type:VARCHAR(200);" json:"Title" yaml:"Title,omitempty"`
FolderCategory string `gorm:"type:VARCHAR(100);index;" json:"Category" yaml:"Category,omitempty"`
FolderDescription string `gorm:"type:TEXT;" json:"Description,omitempty" yaml:"Description,omitempty"`
FolderDescription string `gorm:"type:VARCHAR(2048);" json:"Description,omitempty" yaml:"Description,omitempty"`
FolderOrder string `gorm:"type:VARBINARY(32);" json:"Order" yaml:"Order,omitempty"`
FolderCountry string `gorm:"type:VARBINARY(2);index:idx_folders_country_year_month;default:'zz'" json:"Country" yaml:"Country,omitempty"`
FolderYear int `gorm:"index:idx_folders_country_year_month;" json:"Year" yaml:"Year,omitempty"`
@ -44,6 +44,11 @@ type Folder struct {
DeletedAt *time.Time `sql:"index" json:"-"`
}
// TableName returns the entity database table name.
func (Folder) TableName() string {
	const tableName = "folders"
	return tableName
}
// BeforeCreate creates a random UID if needed before inserting a new row to the database.
func (m *Folder) BeforeCreate(scope *gorm.Scope) error {
if rnd.IsUID(m.FolderUID, 'd') {

View file

@ -27,8 +27,8 @@ type Label struct {
LabelName string `gorm:"type:VARCHAR(160);" json:"Name" yaml:"Name"`
LabelPriority int `json:"Priority" yaml:"Priority,omitempty"`
LabelFavorite bool `json:"Favorite" yaml:"Favorite,omitempty"`
LabelDescription string `gorm:"type:TEXT;" json:"Description" yaml:"Description,omitempty"`
LabelNotes string `gorm:"type:TEXT;" json:"Notes" yaml:"Notes,omitempty"`
LabelDescription string `gorm:"type:VARCHAR(2048);" json:"Description" yaml:"Description,omitempty"`
LabelNotes string `gorm:"type:VARCHAR(1024);" json:"Notes" yaml:"Notes,omitempty"`
LabelCategories []*Label `gorm:"many2many:categories;association_jointable_foreignkey:category_id" json:"-" yaml:"-"`
PhotoCount int `gorm:"default:1" json:"PhotoCount" yaml:"-"`
Thumb string `gorm:"type:VARBINARY(128);index;default:''" json:"Thumb" yaml:"Thumb,omitempty"`

View file

@ -23,13 +23,18 @@ type Lens struct {
LensMake string `gorm:"type:VARCHAR(160);" json:"Make" yaml:"Make,omitempty"`
LensModel string `gorm:"type:VARCHAR(160);" json:"Model" yaml:"Model,omitempty"`
LensType string `gorm:"type:VARCHAR(100);" json:"Type" yaml:"Type,omitempty"`
LensDescription string `gorm:"type:TEXT;" json:"Description,omitempty" yaml:"Description,omitempty"`
LensNotes string `gorm:"type:TEXT;" json:"Notes,omitempty" yaml:"Notes,omitempty"`
LensDescription string `gorm:"type:VARCHAR(2048);" json:"Description,omitempty" yaml:"Description,omitempty"`
LensNotes string `gorm:"type:VARCHAR(1024);" json:"Notes,omitempty" yaml:"Notes,omitempty"`
CreatedAt time.Time `json:"-" yaml:"-"`
UpdatedAt time.Time `json:"-" yaml:"-"`
DeletedAt *time.Time `sql:"index" json:"-" yaml:"-"`
}
// TableName returns the entity database table name.
func (Lens) TableName() string {
	const tableName = "lenses"
	return tableName
}
var UnknownLens = Lens{
LensSlug: UnknownID,
LensName: "Unknown",
@ -42,11 +47,6 @@ func CreateUnknownLens() {
UnknownLens = *FirstOrCreateLens(&UnknownLens)
}
// TableName returns the entity database table name.
func (Lens) TableName() string {
return "lenses"
}
// NewLens creates a new lens in database
func NewLens(modelName string, makeName string) *Lens {
modelName = strings.TrimSpace(modelName)

View file

@ -720,7 +720,7 @@ func CreateMarkerIfNotExists(m *Marker) (*Marker, error) {
if m.MarkerUID != "" {
return m, nil
} else if Db().Where(`file_uid = ? AND marker_type = ? AND thumb = ?`, m.FileUID, m.MarkerType, m.Thumb).
} else if Db().Where("file_uid = ? AND marker_type = ? AND thumb = ?", m.FileUID, m.MarkerType, m.Thumb).
First(&result).Error == nil {
return &result, nil
} else if err := m.Create(); err != nil {

View file

@ -151,7 +151,7 @@ func FindMarkers(fileUID string) (Markers, error) {
m := Markers{}
err := Db().
Where(`file_uid = ?`, fileUID).
Where("file_uid = ?", fileUID).
Order("x").
Offset(0).Limit(1000).
Find(&m).Error

View file

@ -48,15 +48,15 @@ func MapKey(takenAt time.Time, cellId string) string {
type Photo struct {
ID uint `gorm:"primary_key" yaml:"-"`
UUID string `gorm:"type:VARBINARY(42);index;" json:"DocumentID,omitempty" yaml:"DocumentID,omitempty"`
TakenAt time.Time `gorm:"type:datetime;index:idx_photos_taken_uid;" json:"TakenAt" yaml:"TakenAt"`
TakenAtLocal time.Time `gorm:"type:datetime;" yaml:"-"`
TakenAt time.Time `gorm:"type:DATETIME;index:idx_photos_taken_uid;" json:"TakenAt" yaml:"TakenAt"`
TakenAtLocal time.Time `gorm:"type:DATETIME;" yaml:"-"`
TakenSrc string `gorm:"type:VARBINARY(8);" json:"TakenSrc" yaml:"TakenSrc,omitempty"`
PhotoUID string `gorm:"type:VARBINARY(42);unique_index;index:idx_photos_taken_uid;" json:"UID" yaml:"UID"`
PhotoType string `gorm:"type:VARBINARY(8);default:'image';" json:"Type" yaml:"Type"`
TypeSrc string `gorm:"type:VARBINARY(8);" json:"TypeSrc" yaml:"TypeSrc,omitempty"`
PhotoTitle string `gorm:"type:VARCHAR(200);" json:"Title" yaml:"Title"`
TitleSrc string `gorm:"type:VARBINARY(8);" json:"TitleSrc" yaml:"TitleSrc,omitempty"`
PhotoDescription string `gorm:"type:TEXT;" json:"Description" yaml:"Description,omitempty"`
PhotoDescription string `gorm:"type:VARCHAR(4096);" json:"Description" yaml:"Description,omitempty"`
DescriptionSrc string `gorm:"type:VARBINARY(8);" json:"DescriptionSrc" yaml:"DescriptionSrc,omitempty"`
PhotoPath string `gorm:"type:VARBINARY(500);index:idx_photos_path_name;" json:"Path" yaml:"-"`
PhotoName string `gorm:"type:VARBINARY(255);index:idx_photos_path_name;" json:"Name" yaml:"-"`
@ -462,7 +462,7 @@ func (m *Photo) IndexKeywords() error {
func (m *Photo) PreloadFiles() {
q := Db().
Table("files").
Select(`files.*`).
Select("files.*").
Where("files.photo_id = ? AND files.deleted_at IS NULL", m.ID).
Order("files.file_name DESC")
@ -597,7 +597,7 @@ func (m *Photo) AddLabels(labels classify.Labels) {
// SetDescription changes the photo description if not empty and from the same source.
func (m *Photo) SetDescription(desc, source string) {
newDesc := txt.Clip(desc, txt.ClipDescription)
newDesc := txt.Clip(desc, txt.ClipLongText)
if newDesc == "" {
return
@ -860,7 +860,7 @@ func (m *Photo) PrimaryFile() (*File, error) {
}
// SetPrimary sets a new primary file.
func (m *Photo) SetPrimary(fileUID string) error {
func (m *Photo) SetPrimary(fileUID string) (err error) {
if m.PhotoUID == "" {
return fmt.Errorf("photo uid is empty")
}
@ -869,7 +869,7 @@ func (m *Photo) SetPrimary(fileUID string) error {
if fileUID != "" {
// Do nothing.
} else if err := Db().Model(File{}).
} else if err = Db().Model(File{}).
Where("photo_uid = ? AND file_type = 'jpg' AND file_missing = 0 AND file_error = ''", m.PhotoUID).
Order("file_width DESC, file_hdr DESC").Limit(1).
Pluck("file_uid", &files).Error; err != nil {
@ -884,15 +884,21 @@ func (m *Photo) SetPrimary(fileUID string) error {
return fmt.Errorf("file uid is empty")
}
Db().Model(File{}).Where("photo_uid = ? AND file_uid <> ?", m.PhotoUID, fileUID).UpdateColumn("file_primary", 0)
if err := Db().Model(File{}).Where("photo_uid = ? AND file_uid = ?", m.PhotoUID, fileUID).UpdateColumn("file_primary", 1).Error; err != nil {
if err = Db().Model(File{}).
Where("photo_uid = ? AND file_uid <> ?", m.PhotoUID, fileUID).
UpdateColumn("file_primary", 0).Error; err != nil {
return err
} else if err = Db().Model(File{}).Where("photo_uid = ? AND file_uid = ?", m.PhotoUID, fileUID).
UpdateColumn("file_primary", 1).Error; err != nil {
return err
} else if m.PhotoQuality < 0 {
m.PhotoQuality = 0
return m.UpdateQuality()
err = m.UpdateQuality()
}
// Regenerate file search index.
File{PhotoID: m.ID, PhotoUID: m.PhotoUID}.RegenerateIndex()
return nil
}

View file

@ -118,4 +118,11 @@ func (m *Photo) UpdateDateFields() {
m.PhotoMonth = int(m.TakenAtLocal.Month())
m.PhotoDay = m.TakenAtLocal.Day()
}
// Update photo_taken_at column in related files.
Log("photo", "update date fields",
UnscopedDb().Model(File{}).
Where("photo_id = ? AND photo_taken_at <> ?", m.ID, m.TakenAtLocal).
Updates(File{PhotoTakenAt: m.TakenAtLocal}).Error,
)
}

View file

@ -13,7 +13,9 @@ var photoMergeMutex = sync.Mutex{}
func (m *Photo) ResolvePrimary() error {
var file File
if err := Db().Where("file_primary = 1 AND photo_id = ?", m.ID).Order("file_width DESC, file_hdr DESC").First(&file).Error; err == nil && file.ID > 0 {
if err := Db().Where("file_primary = 1 AND photo_id = ?", m.ID).
Order("file_width DESC, file_hdr DESC").
First(&file).Error; err == nil && file.ID > 0 {
return file.ResolvePrimary()
}
@ -101,18 +103,18 @@ func (m *Photo) Merge(mergeMeta, mergeUuid bool) (original Photo, merged Photos,
deleted := TimeStamp()
logResult(UnscopedDb().Exec("UPDATE `files` SET photo_id = ?, photo_uid = ?, file_primary = 0 WHERE photo_id = ?", original.ID, original.PhotoUID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE `photos` SET photo_quality = -1, deleted_at = ? WHERE id = ?", TimeStamp(), merge.ID))
logResult(UnscopedDb().Exec("UPDATE files SET photo_id = ?, photo_uid = ?, file_primary = 0 WHERE photo_id = ?", original.ID, original.PhotoUID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE photos SET photo_quality = -1, deleted_at = ? WHERE id = ?", TimeStamp(), merge.ID))
switch DbDialect() {
case MySQL:
logResult(UnscopedDb().Exec("UPDATE IGNORE `photos_keywords` SET `photo_id` = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE IGNORE `photos_labels` SET `photo_id` = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE IGNORE `photos_albums` SET `photo_uid` = ? WHERE photo_uid = ?", original.PhotoUID, merge.PhotoUID))
logResult(UnscopedDb().Exec("UPDATE IGNORE photos_keywords SET photo_id = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE IGNORE photos_labels SET photo_id = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE IGNORE photos_albums SET photo_uid = ? WHERE photo_uid = ?", original.PhotoUID, merge.PhotoUID))
case SQLite3:
logResult(UnscopedDb().Exec("UPDATE OR IGNORE `photos_keywords` SET `photo_id` = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE OR IGNORE `photos_labels` SET `photo_id` = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE OR IGNORE `photos_albums` SET `photo_uid` = ? WHERE photo_uid = ?", original.PhotoUID, merge.PhotoUID))
logResult(UnscopedDb().Exec("UPDATE OR IGNORE photos_keywords SET photo_id = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE OR IGNORE photos_labels SET photo_id = ? WHERE photo_id = ?", original.ID, merge.ID))
logResult(UnscopedDb().Exec("UPDATE OR IGNORE photos_albums SET photo_uid = ? WHERE photo_uid = ?", original.PhotoUID, merge.PhotoUID))
default:
log.Warnf("sql: unsupported dialect %s", DbDialect())
}
@ -129,5 +131,7 @@ func (m *Photo) Merge(mergeMeta, mergeUuid bool) (original Photo, merged Photos,
m.PhotoQuality = -1
}
File{PhotoID: original.ID, PhotoUID: original.PhotoUID}.RegenerateIndex()
return original, merged, err
}

View file

@ -27,8 +27,8 @@ type Subject struct {
SubjSlug string `gorm:"type:VARBINARY(160);index;default:'';" json:"Slug" yaml:"-"`
SubjName string `gorm:"type:VARCHAR(160);unique_index;default:'';" json:"Name" yaml:"Name"`
SubjAlias string `gorm:"type:VARCHAR(160);default:'';" json:"Alias" yaml:"Alias"`
SubjBio string `gorm:"type:TEXT;" json:"Bio" yaml:"Bio,omitempty"`
SubjNotes string `gorm:"type:TEXT;" json:"Notes,omitempty" yaml:"Notes,omitempty"`
SubjBio string `gorm:"type:VARCHAR(2048);" json:"Bio" yaml:"Bio,omitempty"`
SubjNotes string `gorm:"type:VARCHAR(1024);" json:"Notes,omitempty" yaml:"Notes,omitempty"`
SubjFavorite bool `gorm:"default:false;" json:"Favorite" yaml:"Favorite,omitempty"`
SubjHidden bool `gorm:"default:false;" json:"Hidden" yaml:"Hidden,omitempty"`
SubjPrivate bool `gorm:"default:false;" json:"Private" yaml:"Private,omitempty"`

View file

@ -6,7 +6,7 @@ import (
)
const (
TrimTypeString = 40
ClipStringType = 64
)
// Values is a shortcut for map[string]interface{}
@ -55,8 +55,8 @@ func ToASCII(s string) string {
return string(result)
}
// Trim shortens a string to the given number of characters, and removes all leading and trailing white space.
func Trim(s string, maxLen int) string {
// Clip shortens a string to the given number of characters, and removes all leading and trailing white space.
func Clip(s string, maxLen int) string {
s = strings.TrimSpace(s)
l := len(s)
@ -67,14 +67,14 @@ func Trim(s string, maxLen int) string {
}
}
// SanitizeTypeString converts a type string to lowercase, omits invalid runes, and shortens it if needed.
func SanitizeTypeString(s string) string {
return Trim(ToASCII(strings.ToLower(s)), TrimTypeString)
// SanitizeStringType omits invalid runes, clips the result to
// ClipStringType (64) characters, and returns it. Case is preserved;
// see SanitizeStringTypeLower for the lowercase variant.
func SanitizeStringType(s string) string {
	return Clip(ToASCII(s), ClipStringType)
}
// SanitizeTypeCaseSensitive omits invalid runes, ensures a maximum length of 32 characters, and returns the result.
func SanitizeTypeCaseSensitive(s string) string {
return Trim(ToASCII(s), TrimTypeString)
// SanitizeStringTypeLower converts a type string to lowercase, omits invalid runes, and shortens it if needed.
func SanitizeStringTypeLower(s string) string {
	lower := strings.ToLower(s)
	return SanitizeStringType(lower)
}
// TypeString returns an entity type string for logging.

View file

@ -12,36 +12,42 @@ func TestToASCII(t *testing.T) {
assert.Equal(t, " = Happiness.", result)
}
func TestTrim(t *testing.T) {
func TestClip(t *testing.T) {
t.Run("Foo", func(t *testing.T) {
result := Trim("Foo", 16)
result := Clip("Foo", 16)
assert.Equal(t, "Foo", result)
assert.Equal(t, 3, len(result))
})
t.Run("TrimFoo", func(t *testing.T) {
result := Trim(" Foo ", 16)
result := Clip(" Foo ", 16)
assert.Equal(t, "Foo", result)
assert.Equal(t, 3, len(result))
})
t.Run("TooLong", func(t *testing.T) {
result := Trim(" 幸福 Hanzi are logograms developed for the writing of Chinese! ", 16)
result := Clip(" 幸福 Hanzi are logograms developed for the writing of Chinese! ", 16)
assert.Equal(t, "幸福 Hanzi are", result)
assert.Equal(t, 16, len(result))
})
t.Run("ToASCII", func(t *testing.T) {
result := Trim(ToASCII(strings.ToLower(" 幸福 Hanzi are logograms developed for the writing of Chinese! ")), TrimTypeString)
assert.Equal(t, "hanzi are logograms developed for the wr", result)
assert.Equal(t, 40, len(result))
result := Clip(ToASCII(strings.ToLower(" 幸福 Hanzi are logograms developed for the writing of Chinese! Expressions in an index may not ...!")), ClipStringType)
assert.Equal(t, "hanzi are logograms developed for the writing of chinese! expres", result)
assert.Equal(t, 64, len(result))
})
t.Run("Empty", func(t *testing.T) {
result := Trim("", 999)
result := Clip("", 999)
assert.Equal(t, "", result)
assert.Equal(t, 0, len(result))
})
}
func TestSanitizeTypeString(t *testing.T) {
result := SanitizeTypeString(" 幸福 Hanzi are logograms developed for the writing of Chinese! ")
assert.Equal(t, "hanzi are logograms developed for the wr", result)
assert.Equal(t, TrimTypeString, len(result))
func TestSanitizeStringType(t *testing.T) {
	// Non-ASCII runes are dropped and the result is clipped, preserving case.
	got := SanitizeStringType(" 幸福 Hanzi are logograms developed for the writing of Chinese! Expressions in an index may not ...!")
	assert.Equal(t, ClipStringType, len(got))
	assert.Equal(t, "Hanzi are logograms developed for the writing of Chinese! Expres", got)
}
func TestSanitizeStringTypeLower(t *testing.T) {
result := SanitizeStringTypeLower(" 幸福 Hanzi are logograms developed for the writing of Chinese! Expressions in an index may not ...!")
assert.Equal(t, "hanzi are logograms developed for the writing of chinese! expres", result)
assert.Equal(t, ClipStringType, len(result))
}

View file

@ -44,33 +44,33 @@ type SearchPhotos struct {
Mono bool `form:"mono"`
Portrait bool `form:"portrait"`
Geo bool `form:"geo"`
Keywords string `form:"keywords"`
Label string `form:"label"`
Category string `form:"category"` // Moments
Country string `form:"country"` // Moments
State string `form:"state"` // Moments
Year string `form:"year"` // Moments
Month string `form:"month"` // Moments
Day string `form:"day"` // Moments
Face string `form:"face"` // UIDs
Subject string `form:"subject"` // UIDs
Person string `form:"person"` // Alias for Subject
Subjects string `form:"subjects"` // Text
People string `form:"people"` // Alias for Subjects
Album string `form:"album"` // UIDs
Albums string `form:"albums"` // Text
Color string `form:"color"`
Faces string `form:"faces"` // Find or exclude faces if detected.
Quality int `form:"quality"`
Review bool `form:"review"`
Camera string `form:"camera"`
Lens string `form:"lens"`
Before time.Time `form:"before" time_format:"2006-01-02"`
After time.Time `form:"after" time_format:"2006-01-02"`
Count int `form:"count" binding:"required" serialize:"-"`
Offset int `form:"offset" serialize:"-"`
Order string `form:"order" serialize:"-"`
Merged bool `form:"merged" serialize:"-"`
Keywords string `form:"keywords"` // Filter by keyword(s)
Label string `form:"label"` // Label name
Category string `form:"category"` // Moments
Country string `form:"country"` // Moments
State string `form:"state"` // Moments
Year string `form:"year"` // Moments
Month string `form:"month"` // Moments
Day string `form:"day"` // Moments
Face string `form:"face"` // UIDs
Subject string `form:"subject"` // UIDs
Person string `form:"person"` // Alias for Subject
Subjects string `form:"subjects"` // People names
People string `form:"people"` // Alias for Subjects
Album string `form:"album"` // Album UIDs or name
Albums string `form:"albums"` // Multi search with and/or
Color string `form:"color"` // Main color
Faces string `form:"faces"` // Find or exclude faces if detected.
Quality int `form:"quality"` // Photo quality score
Review bool `form:"review"` // Find photos in review
Camera string `form:"camera"` // Camera UID or name
Lens string `form:"lens"` // Lens UID or name
Before time.Time `form:"before" time_format:"2006-01-02"` // Finds images taken before date
After time.Time `form:"after" time_format:"2006-01-02"` // Finds images taken after date
Count int `form:"count" binding:"required" serialize:"-"` // Result FILE limit
Offset int `form:"offset" serialize:"-"` // Result FILE offset
Order string `form:"order" serialize:"-"` // Sort order
Merged bool `form:"merged" serialize:"-"` // Merge FILES in response
}
func (f *SearchPhotos) GetQuery() string {

View file

@ -18,7 +18,7 @@ import (
func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif []byte, err error) {
defer func() {
if e := recover(); e != nil {
err = fmt.Errorf("metadata: %s in %s (raw exif panic)\nstack: %s", e, sanitize.Log(filepath.Base(fileName)), debug.Stack())
err = fmt.Errorf("%s in %s (raw exif panic)\nstack: %s", e, sanitize.Log(filepath.Base(fileName)), debug.Stack())
}
}()
@ -35,13 +35,13 @@ func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif
sl, err := jpegMp.ParseFile(fileName)
if err != nil {
log.Infof("metadata: %s in %s (parse jpeg)", err, logName)
log.Infof("metadata: %s while parsing jpeg file %s", err, logName)
} else {
_, rawExif, err = sl.Exif()
if err != nil {
if !bruteForce || strings.HasPrefix(err.Error(), "no exif header") {
return rawExif, fmt.Errorf("metadata: found no exif header in %s (parse jpeg)", logName)
return rawExif, fmt.Errorf("found no exif header")
} else if strings.HasPrefix(err.Error(), "no exif data") {
log.Debugf("metadata: failed parsing %s, starting brute-force search (parse jpeg)", logName)
} else {
@ -57,13 +57,13 @@ func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif
cs, err := pngMp.ParseFile(fileName)
if err != nil {
return rawExif, fmt.Errorf("metadata: %s in %s (parse png)", err, logName)
return rawExif, fmt.Errorf("%s while parsing png file", err)
} else {
_, rawExif, err = cs.Exif()
if err != nil {
if err.Error() == "file does not have EXIF" || strings.HasPrefix(err.Error(), "no exif data") {
return rawExif, fmt.Errorf("metadata: found no exif header in %s (parse png)", logName)
return rawExif, fmt.Errorf("found no exif header")
} else {
log.Infof("metadata: %s in %s (parse png)", err, logName)
}
@ -77,13 +77,13 @@ func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif
cs, err := heicMp.ParseFile(fileName)
if err != nil {
return rawExif, fmt.Errorf("metadata: %s in %s (parse heic)", err, logName)
return rawExif, fmt.Errorf("%s while parsing heic file", err)
} else {
_, rawExif, err = cs.Exif()
if err != nil {
if err.Error() == "file does not have EXIF" || strings.HasPrefix(err.Error(), "no exif data") {
return rawExif, fmt.Errorf("metadata: found no exif header in %s (parse heic)", logName)
return rawExif, fmt.Errorf("found no exif header")
} else {
log.Infof("metadata: %s in %s (parse heic)", err, logName)
}
@ -97,13 +97,13 @@ func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif
cs, err := tiffMp.ParseFile(fileName)
if err != nil {
return rawExif, fmt.Errorf("metadata: %s in %s (parse tiff)", err, logName)
return rawExif, fmt.Errorf("%s while parsing tiff file", err)
} else {
_, rawExif, err = cs.Exif()
if err != nil {
if err.Error() == "file does not have EXIF" || strings.HasPrefix(err.Error(), "no exif data") {
return rawExif, fmt.Errorf("metadata: found no exif header in %s (parse tiff)", logName)
return rawExif, fmt.Errorf("found no exif header")
} else {
log.Infof("metadata: %s in %s (parse tiff)", err, logName)
}
@ -112,7 +112,7 @@ func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif
}
}
} else {
log.Infof("metadata: no file format parser for %s, performing brute-force search", logName)
log.Debugf("metadata: no native file format support for %s, performing brute-force exif search", logName)
bruteForce = true
}
@ -121,7 +121,7 @@ func RawExif(fileName string, fileType fs.FileFormat, bruteForce bool) (rawExif
rawExif, err = exif.SearchFileAndExtractExif(fileName)
if err != nil {
return rawExif, fmt.Errorf("metadata: found no exif header in %s (search and extract)", logName)
return rawExif, fmt.Errorf("found no exif data")
}
}

View file

@ -108,7 +108,7 @@ func TestExif(t *testing.T) {
t.Fatal("err should NOT be nil")
}
assert.Equal(t, "metadata: found no exif header in tweethog.png (parse png)", err.Error())
assert.Equal(t, "found no exif header", err.Error())
})
t.Run("iphone_7.heic", func(t *testing.T) {
@ -223,7 +223,7 @@ func TestExif(t *testing.T) {
t.Fatal("err should NOT be nil")
}
assert.Equal(t, "metadata: found no exif header in no-exif-data.jpg (parse jpeg)", err.Error())
assert.Equal(t, "found no exif header", err.Error())
})
t.Run("no-exif-data.jpg/BruteForce", func(t *testing.T) {
@ -233,7 +233,7 @@ func TestExif(t *testing.T) {
t.Fatal("err should NOT be nil")
}
assert.Equal(t, "metadata: found no exif header in no-exif-data.jpg (search and extract)", err.Error())
assert.Equal(t, "found no exif data", err.Error())
})
t.Run("screenshot.png", func(t *testing.T) {
@ -278,13 +278,13 @@ func TestExif(t *testing.T) {
t.Run("gopher-preview.jpg", func(t *testing.T) {
_, err := Exif("testdata/gopher-preview.jpg", fs.FormatJpeg, false)
assert.EqualError(t, err, "metadata: found no exif header in gopher-preview.jpg (parse jpeg)")
assert.EqualError(t, err, "found no exif header")
})
t.Run("gopher-preview.jpg/BruteForce", func(t *testing.T) {
_, err := Exif("testdata/gopher-preview.jpg", fs.FormatJpeg, true)
assert.EqualError(t, err, "metadata: found no exif header in gopher-preview.jpg (search and extract)")
assert.EqualError(t, err, "found no exif data")
})
t.Run("huawei-gps-error.jpg", func(t *testing.T) {

View file

@ -7,7 +7,7 @@ import (
)
// Auto automatically migrates the database provided.
func Auto(db *gorm.DB, runFailed bool) error {
func Auto(db *gorm.DB, runFailed bool, ids []string) error {
if db == nil {
return fmt.Errorf("migrate: database connection required")
}
@ -23,7 +23,7 @@ func Auto(db *gorm.DB, runFailed bool) error {
}
if migrations, ok := Dialects[name]; ok && len(migrations) > 0 {
migrations.Start(db, runFailed)
migrations.Start(db, runFailed, ids)
return nil
} else {
return fmt.Errorf("migrate: no migrations found for %s", name)

View file

@ -5,21 +5,76 @@ var DialectMySQL = Migrations{
{
ID: "20211121-094727",
Dialect: "mysql",
Statements: []string{"DROP INDEX uix_places_place_label ON `places`;"},
Statements: []string{"DROP INDEX IF EXISTS uix_places_place_label ON places;"},
},
{
ID: "20211124-120008",
Dialect: "mysql",
Statements: []string{"DROP INDEX idx_places_place_label ON `places`;", "DROP INDEX uix_places_label ON `places`;"},
Statements: []string{"DROP INDEX IF EXISTS idx_places_place_label ON places;", "DROP INDEX IF EXISTS uix_places_label ON places;"},
},
{
ID: "20220103-115400",
ID: "20220329-030000",
Dialect: "mysql",
Statements: []string{"ALTER TABLE files MODIFY file_projection VARBINARY(40) NULL;", "ALTER TABLE files MODIFY file_color_profile VARBINARY(40) NULL;"},
Statements: []string{"ALTER TABLE files MODIFY file_projection VARBINARY(64) NULL;", "ALTER TABLE files MODIFY file_color_profile VARBINARY(64) NULL;"},
},
{
ID: "20220118-172400",
ID: "20220329-040000",
Dialect: "mysql",
Statements: []string{"ALTER TABLE albums MODIFY album_filter VARBINARY(767) DEFAULT '';", "CREATE INDEX IF NOT EXISTS idx_albums_album_filter ON albums (album_filter);"},
Statements: []string{"DROP INDEX IF EXISTS idx_albums_album_filter ON albums;", "ALTER TABLE albums MODIFY album_filter VARBINARY(2048) DEFAULT '';", "CREATE OR REPLACE INDEX idx_albums_album_filter ON albums (album_filter(512));"},
},
{
ID: "20220329-050000",
Dialect: "mysql",
Statements: []string{"UPDATE photos SET photo_description = SUBSTR(photo_description, 0, 4096) WHERE 1;", "ALTER TABLE photos MODIFY photo_description VARCHAR(4096);"},
},
{
ID: "20220329-060000",
Dialect: "mysql",
Statements: []string{"ALTER TABLE accounts MODIFY acc_url VARCHAR(255);", "ALTER TABLE addresses MODIFY address_notes VARCHAR(1024);", "ALTER TABLE albums MODIFY album_caption VARCHAR(1024);", "ALTER TABLE albums MODIFY album_description VARCHAR(2048);", "ALTER TABLE albums MODIFY album_notes VARCHAR(1024);", "ALTER TABLE cameras MODIFY camera_description VARCHAR(2048);", "ALTER TABLE cameras MODIFY camera_notes VARCHAR(1024);", "ALTER TABLE countries MODIFY country_description VARCHAR(2048);", "ALTER TABLE countries MODIFY country_notes VARCHAR(1024);", "UPDATE details SET keywords = SUBSTR(keywords, 0, 2048), notes = SUBSTR(notes, 0, 2048) WHERE 1;", "ALTER TABLE details MODIFY keywords VARCHAR(2048);", "ALTER TABLE details MODIFY notes VARCHAR(2048);", "ALTER TABLE details MODIFY subject VARCHAR(1024);", "ALTER TABLE details MODIFY artist VARCHAR(1024);", "ALTER TABLE details MODIFY copyright VARCHAR(1024);", "ALTER TABLE details MODIFY license VARCHAR(1024);", "UPDATE folders SET folder_description = SUBSTR(folder_description, 0, 2048) WHERE 1;", "ALTER TABLE folders MODIFY folder_description VARCHAR(2048);", "ALTER TABLE labels MODIFY label_description VARCHAR(2048);", "ALTER TABLE labels MODIFY label_notes VARCHAR(1024);", "ALTER TABLE lenses MODIFY lens_description VARCHAR(2048);", "ALTER TABLE lenses MODIFY lens_notes VARCHAR(1024);", "ALTER TABLE subjects MODIFY subj_bio VARCHAR(2048);", "ALTER TABLE subjects MODIFY subj_notes VARCHAR(1024);"},
},
{
ID: "20220329-061000",
Dialect: "mysql",
Statements: []string{"CREATE OR REPLACE INDEX idx_files_photo_id ON files (photo_id, file_primary);"},
},
{
ID: "20220329-070000",
Dialect: "mysql",
Statements: []string{"ALTER TABLE files MODIFY COLUMN IF EXISTS photo_taken_at DATETIME AFTER photo_uid;", "ALTER TABLE files ADD COLUMN IF NOT EXISTS photo_taken_at DATETIME AFTER photo_uid;"},
},
{
ID: "20220329-071000",
Dialect: "mysql",
Statements: []string{"UPDATE files f JOIN photos p ON p.id = f.photo_id SET f.photo_taken_at = p.taken_at_local;"},
},
{
ID: "20220329-080000",
Dialect: "mysql",
Statements: []string{"ALTER TABLE files MODIFY IF EXISTS media_id VARBINARY(32) AFTER photo_taken_at;", "ALTER TABLE files ADD IF NOT EXISTS media_id VARBINARY(32) AFTER photo_taken_at;"},
},
{
ID: "20220329-081000",
Dialect: "mysql",
Statements: []string{"CREATE OR REPLACE UNIQUE INDEX idx_files_search_media ON files (media_id);"},
},
{
ID: "20220329-082000",
Dialect: "mysql",
Statements: []string{"UPDATE files SET media_id = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN CONCAT(HEX(100000000000 - photo_id), '-', 1 + file_sidecar - file_primary, '-', file_uid) END;"},
},
{
ID: "20220329-090000",
Dialect: "mysql",
Statements: []string{"ALTER TABLE files MODIFY IF EXISTS time_index VARBINARY(48) AFTER photo_taken_at;", "ALTER TABLE files ADD IF NOT EXISTS time_index VARBINARY(48) AFTER photo_taken_at;"},
},
{
ID: "20220329-091000",
Dialect: "mysql",
Statements: []string{"CREATE OR REPLACE UNIQUE INDEX idx_files_search_timeline ON files (time_index);"},
},
{
ID: "20220329-092000",
Dialect: "mysql",
Statements: []string{"UPDATE files SET time_index = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN CONCAT(100000000000000 - CAST(photo_taken_at AS UNSIGNED), '-', media_id) END;"},
},
}

View file

@ -12,4 +12,44 @@ var DialectSQLite3 = Migrations{
Dialect: "sqlite3",
Statements: []string{"DROP INDEX IF EXISTS uix_places_place_label;", "DROP INDEX IF EXISTS uix_places_label;"},
},
{
ID: "20220329-040000",
Dialect: "sqlite3",
Statements: []string{"DROP INDEX IF EXISTS idx_albums_album_filter;"},
},
{
ID: "20220329-050000",
Dialect: "sqlite3",
Statements: []string{"CREATE INDEX idx_albums_album_filter ON albums (album_filter);"},
},
{
ID: "20220329-061000",
Dialect: "sqlite3",
Statements: []string{"DROP INDEX IF EXISTS idx_files_photo_id;", "CREATE INDEX IF NOT EXISTS idx_files_photo_id ON files (photo_id, file_primary);"},
},
{
ID: "20220329-071000",
Dialect: "sqlite3",
Statements: []string{"UPDATE files SET photo_taken_at = (SELECT photos.taken_at_local FROM photos WHERE photos.id = files.photo_id) WHERE 1;"},
},
{
ID: "20220329-081000",
Dialect: "sqlite3",
Statements: []string{"CREATE UNIQUE INDEX IF NOT EXISTS idx_files_search_media ON files (media_id);"},
},
{
ID: "20220329-082000",
Dialect: "sqlite3",
Statements: []string{"UPDATE files SET media_id = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN (HEX(100000000000 - photo_id) || '-' || 1 + file_sidecar - file_primary || '-' || file_uid) END;"},
},
{
ID: "20220329-091000",
Dialect: "sqlite3",
Statements: []string{"CREATE UNIQUE INDEX IF NOT EXISTS idx_files_search_timeline ON files (time_index);"},
},
{
ID: "20220329-092000",
Dialect: "sqlite3",
Statements: []string{"UPDATE files SET time_index = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN (100000000000000 - CAST(photo_taken_at AS UNSIGNED) || '-' || media_id) END;"},
},
}

View file

@ -41,10 +41,25 @@ func (m *Migration) Finish(db *gorm.DB) error {
// Execute runs the migration.
func (m *Migration) Execute(db *gorm.DB) error {
for _, s := range m.Statements {
for _, s := range m.Statements { // ADD
if err := db.Exec(s).Error; err != nil {
if strings.HasPrefix(s, "DROP ") && strings.Contains(err.Error(), "DROP") {
log.Tracef("migrate: %s (drop statement)", err)
// Normalize query and error for comparison.
q := strings.ToUpper(s)
e := strings.ToUpper(err.Error())
// Log the errors triggered by ALTER and DROP statements
// and otherwise ignore them, since some databases do not
// support "IF EXISTS".
if strings.HasPrefix(q, "ALTER TABLE ") &&
strings.Contains(s, " ADD ") &&
strings.Contains(e, "DUPLICATE") {
log.Tracef("migrate: %s (ignored, column already exists)", err)
} else if strings.HasPrefix(q, "DROP INDEX ") &&
strings.Contains(e, "DROP") {
log.Tracef("migrate: %s (ignored, probably didn't exist anymore)", err)
} else if strings.HasPrefix(q, "DROP TABLE ") &&
strings.Contains(e, "DROP") {
log.Tracef("migrate: %s (ignored, probably didn't exist anymore)", err)
} else {
return err
}

View file

@ -4,6 +4,8 @@ import (
"database/sql"
"time"
"github.com/photoprism/photoprism/pkg/list"
"github.com/dustin/go-humanize/english"
"github.com/jinzhu/gorm"
)
@ -47,12 +49,12 @@ func Existing(db *gorm.DB) MigrationMap {
}
// Start runs all migrations that haven't been executed yet.
func (m *Migrations) Start(db *gorm.DB, runFailed bool) {
func (m *Migrations) Start(db *gorm.DB, runFailed bool, ids []string) {
// Find previously executed migrations.
executed := Existing(db)
if prev := len(executed); prev == 0 {
log.Infof("migrate: found no previous migrations")
log.Infof("migrate: no previously executed migrations")
} else {
log.Debugf("migrate: found %s", english.Plural(len(executed), "previous migration", "previous migrations"))
}
@ -61,10 +63,16 @@ func (m *Migrations) Start(db *gorm.DB, runFailed bool) {
start := time.Now()
migration.StartedAt = start.UTC().Round(time.Second)
// Excluded?
if list.Excludes(ids, migration.ID) {
log.Debugf("migrate: %s skipped", migration.ID)
continue
}
// Already executed?
if done, ok := executed[migration.ID]; ok {
// Try to run failed migrations again?
if !runFailed || done.Error == "" {
if (!runFailed || done.Error == "") && !list.Contains(ids, migration.ID) {
log.Debugf("migrate: %s skipped", migration.ID)
continue
}

View file

@ -1 +1 @@
DROP INDEX uix_places_place_label ON `places`;
DROP INDEX IF EXISTS uix_places_place_label ON places;

View file

@ -1,2 +1,2 @@
DROP INDEX idx_places_place_label ON `places`;
DROP INDEX uix_places_label ON `places`;
DROP INDEX IF EXISTS idx_places_place_label ON places;
DROP INDEX IF EXISTS uix_places_label ON places;

View file

@ -1,2 +0,0 @@
ALTER TABLE files MODIFY file_projection VARBINARY(40) NULL;
ALTER TABLE files MODIFY file_color_profile VARBINARY(40) NULL;

View file

@ -1,2 +0,0 @@
ALTER TABLE albums MODIFY album_filter VARBINARY(767) DEFAULT '';
CREATE INDEX IF NOT EXISTS idx_albums_album_filter ON albums (album_filter);

View file

@ -0,0 +1,2 @@
ALTER TABLE files MODIFY file_projection VARBINARY(64) NULL;
ALTER TABLE files MODIFY file_color_profile VARBINARY(64) NULL;

View file

@ -0,0 +1,3 @@
DROP INDEX IF EXISTS idx_albums_album_filter ON albums;
ALTER TABLE albums MODIFY album_filter VARBINARY(2048) DEFAULT '';
CREATE OR REPLACE INDEX idx_albums_album_filter ON albums (album_filter(512));

View file

@ -0,0 +1,2 @@
UPDATE photos SET photo_description = SUBSTR(photo_description, 0, 4096) WHERE 1;
ALTER TABLE photos MODIFY photo_description VARCHAR(4096);

View file

@ -0,0 +1,24 @@
ALTER TABLE accounts MODIFY acc_url VARCHAR(255);
ALTER TABLE addresses MODIFY address_notes VARCHAR(1024);
ALTER TABLE albums MODIFY album_caption VARCHAR(1024);
ALTER TABLE albums MODIFY album_description VARCHAR(2048);
ALTER TABLE albums MODIFY album_notes VARCHAR(1024);
ALTER TABLE cameras MODIFY camera_description VARCHAR(2048);
ALTER TABLE cameras MODIFY camera_notes VARCHAR(1024);
ALTER TABLE countries MODIFY country_description VARCHAR(2048);
ALTER TABLE countries MODIFY country_notes VARCHAR(1024);
UPDATE details SET keywords = SUBSTR(keywords, 0, 2048), notes = SUBSTR(notes, 0, 2048) WHERE 1;
ALTER TABLE details MODIFY keywords VARCHAR(2048);
ALTER TABLE details MODIFY notes VARCHAR(2048);
ALTER TABLE details MODIFY subject VARCHAR(1024);
ALTER TABLE details MODIFY artist VARCHAR(1024);
ALTER TABLE details MODIFY copyright VARCHAR(1024);
ALTER TABLE details MODIFY license VARCHAR(1024);
UPDATE folders SET folder_description = SUBSTR(folder_description, 0, 2048) WHERE 1;
ALTER TABLE folders MODIFY folder_description VARCHAR(2048);
ALTER TABLE labels MODIFY label_description VARCHAR(2048);
ALTER TABLE labels MODIFY label_notes VARCHAR(1024);
ALTER TABLE lenses MODIFY lens_description VARCHAR(2048);
ALTER TABLE lenses MODIFY lens_notes VARCHAR(1024);
ALTER TABLE subjects MODIFY subj_bio VARCHAR(2048);
ALTER TABLE subjects MODIFY subj_notes VARCHAR(1024);

View file

@ -0,0 +1 @@
CREATE OR REPLACE INDEX idx_files_photo_id ON files (photo_id, file_primary);

View file

@ -0,0 +1,2 @@
ALTER TABLE files MODIFY COLUMN IF EXISTS photo_taken_at DATETIME AFTER photo_uid;
ALTER TABLE files ADD COLUMN IF NOT EXISTS photo_taken_at DATETIME AFTER photo_uid;

View file

@ -0,0 +1 @@
UPDATE files f JOIN photos p ON p.id = f.photo_id SET f.photo_taken_at = p.taken_at_local;

View file

@ -0,0 +1,2 @@
ALTER TABLE files MODIFY IF EXISTS media_id VARBINARY(32) AFTER photo_taken_at;
ALTER TABLE files ADD IF NOT EXISTS media_id VARBINARY(32) AFTER photo_taken_at;

View file

@ -0,0 +1 @@
CREATE OR REPLACE UNIQUE INDEX idx_files_search_media ON files (media_id);

View file

@ -0,0 +1 @@
UPDATE files SET media_id = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN CONCAT(HEX(100000000000 - photo_id), '-', 1 + file_sidecar - file_primary, '-', file_uid) END;

View file

@ -0,0 +1,2 @@
ALTER TABLE files MODIFY IF EXISTS time_index VARBINARY(48) AFTER photo_taken_at;
ALTER TABLE files ADD IF NOT EXISTS time_index VARBINARY(48) AFTER photo_taken_at;

View file

@ -0,0 +1 @@
CREATE OR REPLACE UNIQUE INDEX idx_files_search_timeline ON files (time_index);

View file

@ -0,0 +1 @@
UPDATE files SET time_index = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN CONCAT(100000000000000 - CAST(photo_taken_at AS UNSIGNED), '-', media_id) END;

View file

@ -0,0 +1 @@
DROP INDEX IF EXISTS idx_albums_album_filter;

View file

@ -0,0 +1 @@
CREATE INDEX idx_albums_album_filter ON albums (album_filter);

View file

@ -0,0 +1,2 @@
DROP INDEX IF EXISTS idx_files_photo_id;
CREATE INDEX IF NOT EXISTS idx_files_photo_id ON files (photo_id, file_primary);

View file

@ -0,0 +1 @@
UPDATE files SET photo_taken_at = (SELECT photos.taken_at_local FROM photos WHERE photos.id = files.photo_id) WHERE 1;

View file

@ -0,0 +1 @@
CREATE UNIQUE INDEX IF NOT EXISTS idx_files_search_media ON files (media_id);

View file

@ -0,0 +1 @@
UPDATE files SET media_id = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN (HEX(100000000000 - photo_id) || '-' || 1 + file_sidecar - file_primary || '-' || file_uid) END;

View file

@ -0,0 +1 @@
CREATE UNIQUE INDEX IF NOT EXISTS idx_files_search_timeline ON files (time_index);

View file

@ -0,0 +1 @@
UPDATE files SET time_index = CASE WHEN file_missing = 0 AND deleted_at IS NULL THEN (100000000000000 - CAST(photo_taken_at AS UNSIGNED) || '-' || media_id) END;

View file

@ -221,6 +221,7 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName, photoUID
if photoExists && (file.PhotoID != photo.ID || file.PhotoUID != photo.PhotoUID) {
file.PhotoID = photo.ID
file.PhotoUID = photo.PhotoUID
file.PhotoTakenAt = photo.TakenAtLocal
}
// Skip unchanged files.
@ -733,7 +734,7 @@ func (ind *Index) MediaFile(m *MediaFile, o IndexOptions, originalName, photoUID
details.Keywords = strings.Join(txt.UniqueWords(w), ", ")
if details.Keywords != "" {
log.Tracef("index: using keywords %s for %s", details.Keywords, logName)
log.Tracef("index: %s has keywords %s", logName, details.Keywords)
} else {
log.Tracef("index: found no keywords for %s", logName)
}

View file

@ -52,9 +52,7 @@ func (c *Counts) Refresh() {
Take(c)
Db().Table("files").
Select("COUNT(*) AS files").
Where("file_missing = 0").
Where("deleted_at IS NULL").
Select("COUNT(media_id) AS files").
Take(c)
Db().Table("countries").

View file

@ -102,7 +102,7 @@ func RenameFile(srcRoot, srcName, destRoot, destName string) error {
}
// SetPhotoPrimary sets a new primary image file for a photo.
func SetPhotoPrimary(photoUID, fileUID string) error {
func SetPhotoPrimary(photoUID, fileUID string) (err error) {
if photoUID == "" {
return fmt.Errorf("photo uid is missing")
}
@ -123,8 +123,19 @@ func SetPhotoPrimary(photoUID, fileUID string) error {
return fmt.Errorf("file uid is missing")
}
Db().Model(entity.File{}).Where("photo_uid = ? AND file_uid <> ?", photoUID, fileUID).UpdateColumn("file_primary", 0)
return Db().Model(entity.File{}).Where("photo_uid = ? AND file_uid = ?", photoUID, fileUID).UpdateColumn("file_primary", 1).Error
if err = Db().Model(entity.File{}).
Where("photo_uid = ? AND file_uid <> ?", photoUID, fileUID).
UpdateColumn("file_primary", 0).Error; err != nil {
return err
} else if err = Db().
Model(entity.File{}).Where("photo_uid = ? AND file_uid = ?", photoUID, fileUID).
UpdateColumn("file_primary", 1).Error; err != nil {
return err
} else {
entity.File{PhotoUID: photoUID}.RegenerateIndex()
}
return nil
}
// SetFileError updates the file error column.

View file

@ -11,6 +11,7 @@ import (
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/pluscode"
"github.com/photoprism/photoprism/pkg/rnd"
@ -18,10 +19,14 @@ import (
"github.com/photoprism/photoprism/pkg/txt"
)
// GeoCols contains the geo query column names.
var GeoCols = SelectString(GeoResult{}, []string{"*"})
// Geo searches for photos based on Form values and returns GeoResults ([]GeoResult).
func Geo(f form.SearchGeo) (results GeoResults, err error) {
start := time.Now()
// Parse query string into fields.
if err := f.ParseQueryString(); err != nil {
return GeoResults{}, err
}
@ -47,12 +52,8 @@ func Geo(f form.SearchGeo) (results GeoResults, err error) {
// s.LogMode(true)
s = s.Table("photos").
Select(`photos.id, photos.photo_uid, photos.photo_type, photos.photo_lat, photos.photo_lng,
photos.photo_title, photos.photo_description, photos.photo_favorite, photos.taken_at, photos.taken_at_local,
files.file_hash, files.file_width, files.file_height`).
Joins(`JOIN files ON files.photo_id = photos.id AND
files.file_missing = 0 AND files.file_primary AND files.deleted_at IS NULL`).
s = s.Table("photos").Select(GeoCols).
Joins(`JOIN files ON files.photo_id = photos.id AND files.file_primary = 1 AND files.media_id IS NOT NULL`).
Where("photos.deleted_at IS NULL").
Where("photos.photo_lat <> 0")
@ -110,8 +111,7 @@ func Geo(f form.SearchGeo) (results GeoResults, err error) {
for _, l := range labels {
labelIds = append(labelIds, l.ID)
Db().Where("category_id = ?", l.ID).Find(&categories)
Log("find categories", Db().Where("category_id = ?", l.ID).Find(&categories).Error)
log.Debugf("search: label %s includes %d categories", txt.LogParamLower(l.LabelName), len(categories))
for _, category := range categories {

View file

@ -11,19 +11,19 @@ import (
// GeoResult represents a photo geo search result.
type GeoResult struct {
ID string `json:"-"`
PhotoUID string `json:"UID"`
PhotoType string `json:"Type,omitempty"`
PhotoLat float32 `json:"Lat"`
PhotoLng float32 `json:"Lng"`
PhotoTitle string `json:"Title"`
PhotoDescription string `json:"Description,omitempty"`
PhotoFavorite bool `json:"Favorite,omitempty"`
FileHash string `json:"Hash"`
FileWidth int `json:"Width"`
FileHeight int `json:"Height"`
TakenAt time.Time `json:"TakenAt"`
TakenAtLocal time.Time `json:"TakenAtLocal"`
ID string `json:"-" select:"photos.id"`
PhotoUID string `json:"UID" select:"photos.photo_uid"`
PhotoType string `json:"Type,omitempty" select:"photos.photo_type"`
PhotoLat float32 `json:"Lat" select:"photos.photo_lat"`
PhotoLng float32 `json:"Lng" select:"photos.photo_lng"`
PhotoTitle string `json:"Title" select:"photos.photo_title"`
PhotoDescription string `json:"Description,omitempty" select:"photos.photo_description"`
PhotoFavorite bool `json:"Favorite,omitempty" select:"photos.photo_favorite"`
FileHash string `json:"Hash" select:"files.file_hash"`
FileWidth int `json:"Width" select:"files.file_width"`
FileHeight int `json:"Height" select:"files.file_height"`
TakenAt time.Time `json:"TakenAt" select:"photos.taken_at"`
TakenAtLocal time.Time `json:"TakenAtLocal" select:"photos.taken_at_local"`
}
// Lat returns the position latitude.

View file

@ -1,43 +0,0 @@
package search
import (
"encoding/json"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/thumb"
"github.com/photoprism/photoprism/internal/viewer"
)
// NewViewerResult creates a new photo viewer result.
func NewViewerResult(p GeoResult, contentUri, apiUri, previewToken, downloadToken string) viewer.Result {
return viewer.Result{
UID: p.PhotoUID,
Title: p.PhotoTitle,
Taken: p.TakenAtLocal,
Description: p.PhotoDescription,
Favorite: p.PhotoFavorite,
Playable: p.PhotoType == entity.TypeVideo || p.PhotoType == entity.TypeLive,
DownloadUrl: viewer.DownloadUrl(p.FileHash, apiUri, downloadToken),
OriginalW: p.FileWidth,
OriginalH: p.FileHeight,
Fit720: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit720], contentUri, previewToken),
Fit1280: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit1280], contentUri, previewToken),
Fit1920: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit1920], contentUri, previewToken),
Fit2048: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit2048], contentUri, previewToken),
Fit2560: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit2560], contentUri, previewToken),
Fit3840: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit3840], contentUri, previewToken),
Fit4096: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit4096], contentUri, previewToken),
Fit7680: viewer.NewThumb(p.FileWidth, p.FileHeight, p.FileHash, thumb.Sizes[thumb.Fit7680], contentUri, previewToken),
}
}
// ViewerJSON returns the results as photo viewer JSON.
func (photos GeoResults) ViewerJSON(contentUri, apiUri, previewToken, downloadToken string) ([]byte, error) {
results := make(viewer.Results, 0, len(photos))
for _, p := range photos {
results = append(results, NewViewerResult(p, contentUri, apiUri, previewToken, downloadToken))
}
return json.Marshal(results)
}

View file

@ -1,64 +0,0 @@
package search
import (
"testing"
"time"
"github.com/photoprism/photoprism/internal/entity"
)
func TestGeoResults_ViewerJSON(t *testing.T) {
taken := time.Date(2000, 1, 1, 1, 1, 1, 1, time.UTC).UTC().Round(time.Second)
items := GeoResults{
GeoResult{
ID: "1",
PhotoLat: 7.775,
PhotoLng: 8.775,
PhotoUID: "p1",
PhotoTitle: "Title 1",
PhotoDescription: "Description 1",
PhotoFavorite: false,
PhotoType: entity.TypeVideo,
FileHash: "d2b4a5d18276f96f1b5a1bf17fd82d6fab3807f2",
FileWidth: 1920,
FileHeight: 1080,
TakenAtLocal: taken,
},
GeoResult{
ID: "2",
PhotoLat: 1.775,
PhotoLng: -5.775,
PhotoUID: "p2",
PhotoTitle: "Title 2",
PhotoDescription: "Description 2",
PhotoFavorite: true,
PhotoType: entity.TypeImage,
FileHash: "da639e836dfa9179e66c619499b0a5e592f72fc1",
FileWidth: 3024,
FileHeight: 3024,
TakenAtLocal: taken,
},
GeoResult{
ID: "3",
PhotoLat: -1.775,
PhotoLng: 100.775,
PhotoUID: "p3",
PhotoTitle: "Title 3",
PhotoDescription: "Description 3",
PhotoFavorite: false,
PhotoType: entity.TypeRaw,
FileHash: "412fe4c157a82b636efebc5bc4bc4a15c321aad1",
FileWidth: 5000,
FileHeight: 10000,
TakenAtLocal: taken,
},
}
b, err := items.ViewerJSON("/content", "/api/v1", "preview-token", "download-token")
if err != nil {
t.Fatal(err)
}
t.Logf("result: %s", b)
}

View file

@ -6,20 +6,41 @@ import (
"strings"
"time"
"github.com/photoprism/photoprism/internal/viewer"
"github.com/dustin/go-humanize/english"
"github.com/jinzhu/gorm"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/form"
"github.com/photoprism/photoprism/pkg/fs"
"github.com/photoprism/photoprism/pkg/rnd"
"github.com/photoprism/photoprism/pkg/txt"
)
// Photos searches for photos based on a Form and returns PhotoResults ([]Photo).
var PhotosColsAll = SelectString(Photo{}, []string{"*"})
var PhotosColsView = SelectString(Photo{}, SelectCols(GeoResult{}, []string{"*"}))
// Photos finds photos based on the search form provided and returns them as PhotoResults.
// It delegates to searchPhotos with the full column selection (PhotosColsAll).
func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
	return searchPhotos(f, PhotosColsAll)
}
// PhotosViewerResults finds photos based on the search form provided and returns them as viewer.Results.
func PhotosViewerResults(f form.SearchPhotos, contentUri, apiUri, previewToken, downloadToken string) (viewer.Results, int, error) {
	// Restrict the query to the columns needed by the photo viewer.
	results, count, err := searchPhotos(f, PhotosColsView)

	if err != nil {
		return viewer.Results{}, count, err
	}

	return results.ViewerResults(contentUri, apiUri, previewToken, downloadToken), count, nil
}
// photos searches for photos based on a Form and returns PhotoResults ([]Photo).
func searchPhotos(f form.SearchPhotos, resultCols string) (results PhotoResults, count int, err error) {
start := time.Now()
// Parse query string into fields.
if err := f.ParseQueryString(); err != nil {
return PhotoResults{}, 0, err
}
@ -27,53 +48,12 @@ func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
s := UnscopedDb()
// s = s.LogMode(true)
// Select columns.
cols := []string{
"photos.*",
"files.file_uid",
"files.id AS file_id",
"files.photo_id AS composite_id",
"files.instance_id",
"files.file_primary",
"files.file_sidecar",
"files.file_portrait",
"files.file_video",
"files.file_missing",
"files.file_name",
"files.file_root",
"files.file_hash",
"files.file_codec",
"files.file_type",
"files.file_mime",
"files.file_width",
"files.file_height",
"files.file_aspect_ratio",
"files.file_orientation",
"files.file_main_color",
"files.file_colors",
"files.file_luminance",
"files.file_chroma",
"files.file_projection",
"files.file_diff",
"files.file_duration",
"files.file_size",
"cameras.camera_make",
"cameras.camera_model",
"lenses.lens_make",
"lenses.lens_model",
"places.place_label",
"places.place_city",
"places.place_state",
"places.place_country",
}
// Database tables.
s = s.Table("files").Select(strings.Join(cols, ", ")).
Joins("JOIN photos ON photos.id = files.photo_id").
s = s.Table("files").Select(resultCols).
Joins("JOIN photos ON files.photo_id = photos.id AND files.media_id IS NOT NULL").
Joins("LEFT JOIN cameras ON photos.camera_id = cameras.id").
Joins("LEFT JOIN lenses ON photos.lens_id = lenses.id").
Joins("LEFT JOIN places ON photos.place_id = places.id").
Where("files.deleted_at IS NULL AND files.file_missing = 0")
Joins("LEFT JOIN places ON photos.place_id = places.id")
// Offset and count.
if f.Count > 0 && f.Count <= MaxResults {
@ -85,24 +65,24 @@ func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
// Sort order.
switch f.Order {
case entity.SortOrderEdited:
s = s.Where("photos.edited_at IS NOT NULL").Order("photos.edited_at DESC, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Where("photos.edited_at IS NOT NULL").Order("photos.edited_at DESC, files.media_id")
case entity.SortOrderRelevance:
if f.Label != "" {
s = s.Order("photos.photo_quality DESC, photos_labels.uncertainty ASC, photos.taken_at DESC, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("photos.photo_quality DESC, photos_labels.uncertainty ASC, files.time_index")
} else {
s = s.Order("photos.photo_quality DESC, photos.taken_at DESC, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("photos.photo_quality DESC, files.time_index")
}
case entity.SortOrderNewest:
s = s.Order("photos.taken_at DESC, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("files.time_index")
case entity.SortOrderOldest:
s = s.Order("photos.taken_at, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("files.photo_taken_at, files.media_id")
case entity.SortOrderSimilar:
s = s.Where("files.file_diff > 0")
s = s.Order("photos.photo_color, photos.cell_id, files.file_diff, photos.taken_at DESC, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("photos.photo_color, photos.cell_id, files.file_diff, files.time_index")
case entity.SortOrderName:
s = s.Order("photos.photo_path, photos.photo_name, files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("photos.photo_path, photos.photo_name, files.time_index")
case entity.SortOrderDefault, entity.SortOrderImported, entity.SortOrderAdded:
s = s.Order("files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("files.media_id")
default:
return PhotoResults{}, 0, fmt.Errorf("invalid sort order")
}
@ -128,7 +108,7 @@ func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
// Take shortcut?
if f.Album == "" && f.Query == "" {
s = s.Order("files.photo_id DESC, files.file_primary DESC, files.id")
s = s.Order("files.media_id")
if result := s.Scan(&results); result.Error != nil {
return results, 0, result.Error
@ -157,9 +137,8 @@ func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
for _, l := range labels {
labelIds = append(labelIds, l.ID)
Db().Where("category_id = ?", l.ID).Find(&categories)
log.Infof("search: label %s includes %d categories", txt.LogParamLower(l.LabelName), len(categories))
Log("find categories", Db().Where("category_id = ?", l.ID).Find(&categories).Error)
log.Debugf("search: label %s includes %d categories", txt.LogParamLower(l.LabelName), len(categories))
for _, category := range categories {
labelIds = append(labelIds, category.LabelID)
@ -501,7 +480,7 @@ func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
f.Dist = 5000
}
// Filter by approx distance to coordinates:
// Filter by approx distance to co-ordinates:
if f.Lat != 0 {
latMin := f.Lat - Radius*float32(f.Dist)
latMax := f.Lat + Radius*float32(f.Dist)
@ -523,7 +502,7 @@ func Photos(f form.SearchPhotos) (results PhotoResults, count int, err error) {
// Find stacks only?
if f.Stack {
s = s.Where("files.photo_id IN (SELECT a.photo_id FROM files a JOIN files b ON a.id != b.id AND a.photo_id = b.photo_id AND a.file_type = b.file_type WHERE a.file_type='jpg')")
s = s.Where("photos.id IN (SELECT a.photo_id FROM files a JOIN files b ON a.id != b.id AND a.photo_id = b.photo_id AND a.file_type = b.file_type WHERE a.file_type='jpg')")
}
// Filter by album?

View file

@ -1,9 +1,10 @@
package search
import (
"testing"
"github.com/photoprism/photoprism/internal/form"
"github.com/stretchr/testify/assert"
"testing"
)
func TestPhotosFilterName(t *testing.T) {

View file

@ -6,105 +6,125 @@ import (
"time"
"github.com/gosimple/slug"
"github.com/photoprism/photoprism/internal/entity"
"github.com/ulule/deepcopier"
"github.com/photoprism/photoprism/internal/entity"
)
// Photo represents a photo search result.
type Photo struct {
ID uint `json:"-"`
CompositeID string `json:"ID"`
UUID string `json:"DocumentID,omitempty"`
PhotoUID string `json:"UID"`
PhotoType string `json:"Type"`
TypeSrc string `json:"TypeSrc"`
TakenAt time.Time `json:"TakenAt"`
TakenAtLocal time.Time `json:"TakenAtLocal"`
TakenSrc string `json:"TakenSrc"`
TimeZone string `json:"TimeZone"`
PhotoPath string `json:"Path"`
PhotoName string `json:"Name"`
OriginalName string `json:"OriginalName"`
PhotoTitle string `json:"Title"`
PhotoDescription string `json:"Description"`
PhotoYear int `json:"Year"`
PhotoMonth int `json:"Month"`
PhotoDay int `json:"Day"`
PhotoCountry string `json:"Country"`
PhotoStack int8 `json:"Stack"`
PhotoFavorite bool `json:"Favorite"`
PhotoPrivate bool `json:"Private"`
PhotoIso int `json:"Iso"`
PhotoFocalLength int `json:"FocalLength"`
PhotoFNumber float32 `json:"FNumber"`
PhotoExposure string `json:"Exposure"`
PhotoFaces int `json:"Faces,omitempty"`
PhotoQuality int `json:"Quality"`
PhotoResolution int `json:"Resolution"`
PhotoColor uint8 `json:"Color"`
PhotoScan bool `json:"Scan"`
PhotoPanorama bool `json:"Panorama"`
CameraID uint `json:"CameraID"` // Camera
CameraSerial string `json:"CameraSerial,omitempty"`
CameraSrc string `json:"CameraSrc,omitempty"`
CameraModel string `json:"CameraModel"`
CameraMake string `json:"CameraMake"`
LensID uint `json:"LensID"` // Lens
LensModel string `json:"LensModel"`
LensMake string `json:"LensMake"`
PhotoAltitude int `json:"Altitude,omitempty"`
PhotoLat float32 `json:"Lat"`
PhotoLng float32 `json:"Lng"`
CellID string `json:"CellID"` // Cell
CellAccuracy int `json:"CellAccuracy,omitempty"`
PlaceID string `json:"PlaceID"`
PlaceSrc string `json:"PlaceSrc"`
PlaceLabel string `json:"PlaceLabel"`
PlaceCity string `json:"PlaceCity"`
PlaceState string `json:"PlaceState"`
PlaceCountry string `json:"PlaceCountry"`
InstanceID string `json:"InstanceID"`
FileID uint `json:"-"` // File
FileUID string `json:"FileUID"`
FileRoot string `json:"FileRoot"`
FileName string `json:"FileName"`
FileHash string `json:"Hash"`
FileWidth int `json:"Width"`
FileHeight int `json:"Height"`
FilePortrait bool `json:"Portrait"`
FilePrimary bool `json:"-"`
FileSidecar bool `json:"-"`
FileMissing bool `json:"-"`
FileVideo bool `json:"-"`
FileDuration time.Duration `json:"-"`
FileCodec string `json:"-"`
FileType string `json:"-"`
FileMime string `json:"-"`
FileSize int64 `json:"-"`
FileOrientation int `json:"-"`
FileProjection string `json:"-"`
FileAspectRatio float32 `json:"-"`
FileColors string `json:"-"`
FileChroma uint8 `json:"-"`
FileLuminance string `json:"-"`
FileDiff uint32 `json:"-"`
Merged bool `json:"Merged"`
CreatedAt time.Time `json:"CreatedAt"`
UpdatedAt time.Time `json:"UpdatedAt"`
EditedAt time.Time `json:"EditedAt,omitempty"`
CheckedAt time.Time `json:"CheckedAt,omitempty"`
DeletedAt time.Time `json:"DeletedAt,omitempty"`
ID uint `json:"-" select:"photos.id"`
CompositeID string `json:"ID" select:"files.photo_id AS composite_id"`
UUID string `json:"DocumentID,omitempty" select:"photos.uuid"`
PhotoUID string `json:"UID" select:"photos.photo_uid"`
PhotoType string `json:"Type" select:"photos.photo_type"`
TypeSrc string `json:"TypeSrc" select:"photos.taken_src"`
TakenAt time.Time `json:"TakenAt" select:"photos.taken_at"`
TakenAtLocal time.Time `json:"TakenAtLocal" select:"photos.taken_at_local"`
TakenSrc string `json:"TakenSrc" select:"photos.taken_src"`
TimeZone string `json:"TimeZone" select:"photos.time_zone"`
PhotoPath string `json:"Path" select:"photos.photo_path"`
PhotoName string `json:"Name" select:"photos.photo_name"`
OriginalName string `json:"OriginalName" select:"photos.original_name"`
PhotoTitle string `json:"Title" select:"photos.photo_title"`
PhotoDescription string `json:"Description" select:"photos.photo_description"`
PhotoYear int `json:"Year" select:"photos.photo_year"`
PhotoMonth int `json:"Month" select:"photos.photo_month"`
PhotoDay int `json:"Day" select:"photos.photo_day"`
PhotoCountry string `json:"Country" select:"photos.photo_country"`
PhotoStack int8 `json:"Stack" select:"photos.photo_stack"`
PhotoFavorite bool `json:"Favorite" select:"photos.photo_favorite"`
PhotoPrivate bool `json:"Private" select:"photos.photo_private"`
PhotoIso int `json:"Iso" select:"photos.photo_iso"`
PhotoFocalLength int `json:"FocalLength" select:"photos.photo_focal_length"`
PhotoFNumber float32 `json:"FNumber" select:"photos.photo_f_number"`
PhotoExposure string `json:"Exposure" select:"photos.photo_exposure"`
PhotoFaces int `json:"Faces,omitempty" select:"photos.photo_faces"`
PhotoQuality int `json:"Quality" select:"photos.photo_quality"`
PhotoResolution int `json:"Resolution" select:"photos.photo_resolution"`
PhotoColor uint8 `json:"Color" select:"photos.photo_color"`
PhotoScan bool `json:"Scan" select:"photos.photo_scan"`
PhotoPanorama bool `json:"Panorama" select:"photos.photo_panorama"`
CameraID uint `json:"CameraID" select:"photos.camera_id"` // Camera
CameraSrc string `json:"CameraSrc,omitempty" select:"photos.camera_src"`
CameraSerial string `json:"CameraSerial,omitempty" select:"photos.camera_serial"`
CameraModel string `json:"CameraModel,omitempty" select:"cameras.camera_model"`
CameraMake string `json:"CameraMake,omitempty" select:"cameras.camera_make"`
LensID uint `json:"LensID" select:"photos.lens_id"` // Lens
LensModel string `json:"LensModel,omitempty" select:"lenses.lens_make"`
LensMake string `json:"LensMake,omitempty" select:"lenses.lens_model"`
PhotoAltitude int `json:"Altitude,omitempty" select:"photos.photo_altitude"`
PhotoLat float32 `json:"Lat" select:"photos.photo_lat"`
PhotoLng float32 `json:"Lng" select:"photos.photo_lng"`
CellID string `json:"CellID" select:"photos.cell_id"` // Cell
CellAccuracy int `json:"CellAccuracy,omitempty" select:"photos.cell_accuracy"`
PlaceID string `json:"PlaceID" select:"photos.place_id"`
PlaceSrc string `json:"PlaceSrc" select:"photos.place_src"`
PlaceLabel string `json:"PlaceLabel" select:"places.place_label"`
PlaceCity string `json:"PlaceCity" select:"places.place_city"`
PlaceState string `json:"PlaceState" select:"places.place_state"`
PlaceCountry string `json:"PlaceCountry" select:"places.place_country"`
InstanceID string `json:"InstanceID" select:"files.instance_id"`
FileID uint `json:"-" select:"files.id AS file_id"` // File
FileUID string `json:"FileUID" select:"files.file_uid"`
FileRoot string `json:"FileRoot" select:"files.file_root"`
FileName string `json:"FileName" select:"files.file_name"`
FileHash string `json:"Hash" select:"files.file_hash"`
FileWidth int `json:"Width" select:"files.file_width"`
FileHeight int `json:"Height" select:"files.file_height"`
FilePortrait bool `json:"Portrait" select:"files.file_portrait"`
FilePrimary bool `json:"-" select:"files.file_primary"`
FileSidecar bool `json:"-" select:"files.file_sidecar"`
FileMissing bool `json:"-" select:"files.file_missing"`
FileVideo bool `json:"-" select:"files.file_video"`
FileDuration time.Duration `json:"-" select:"files.file_duration"`
FileCodec string `json:"-" select:"files.file_codec"`
FileType string `json:"-" select:"files.file_type"`
FileMime string `json:"-" select:"files.file_mime"`
FileSize int64 `json:"-" select:"files.file_size"`
FileOrientation int `json:"-" select:"files.file_orientation"`
FileProjection string `json:"-" select:"files.file_projection"`
FileAspectRatio float32 `json:"-" select:"files.file_aspect_ratio"`
FileColors string `json:"-" select:"files.file_colors"`
FileChroma uint8 `json:"-" select:"files.file_chroma"`
FileLuminance string `json:"-" select:"files.file_luminance"`
FileDiff uint32 `json:"-" select:"files.file_diff"`
Merged bool `json:"Merged" select:"-"`
CreatedAt time.Time `json:"CreatedAt" select:"photos.created_at"`
UpdatedAt time.Time `json:"UpdatedAt" select:"photos.updated_at"`
EditedAt time.Time `json:"EditedAt,omitempty" select:"photos.edited_at"`
CheckedAt time.Time `json:"CheckedAt,omitempty" select:"photos.checked_at"`
DeletedAt time.Time `json:"DeletedAt,omitempty" select:"photos.deleted_at"`
Files []entity.File `json:"Files"`
}
// ShareBase returns a meaningful file name for sharing.
func (photo *Photo) ShareBase(seq int) string {
	// Fall back to the photo UID when no title is available.
	name := photo.PhotoUID

	if photo.PhotoTitle != "" {
		name = strings.Title(slug.MakeLang(photo.PhotoTitle, "en"))
	}

	taken := photo.TakenAtLocal.Format("20060102-150405")

	// Append the sequence number only when it is positive.
	if seq > 0 {
		return fmt.Sprintf("%s-%s (%d).%s", taken, name, seq, photo.FileType)
	}

	return fmt.Sprintf("%s-%s.%s", taken, name, photo.FileType)
}
type PhotoResults []Photo
// UIDs returns a slice of photo UIDs.
func (m PhotoResults) UIDs() []string {
result := make([]string, len(m))
func (photos PhotoResults) UIDs() []string {
result := make([]string, len(photos))
for i, el := range m {
for i, el := range photos {
result[i] = el.PhotoUID
}
@ -112,25 +132,25 @@ func (m PhotoResults) UIDs() []string {
}
// Merge consecutive file results that belong to the same photo.
func (m PhotoResults) Merge() (photos PhotoResults, count int, err error) {
count = len(m)
photos = make(PhotoResults, 0, count)
func (photos PhotoResults) Merge() (merged PhotoResults, count int, err error) {
count = len(photos)
merged = make(PhotoResults, 0, count)
var i int
var photoId uint
for _, photo := range m {
for _, photo := range photos {
file := entity.File{}
if err = deepcopier.Copy(&file).From(photo); err != nil {
return photos, count, err
return merged, count, err
}
file.ID = photo.FileID
if photoId == photo.ID && i > 0 {
photos[i-1].Files = append(photos[i-1].Files, file)
photos[i-1].Merged = true
merged[i-1].Files = append(merged[i-1].Files, file)
merged[i-1].Merged = true
continue
}
@ -139,27 +159,8 @@ func (m PhotoResults) Merge() (photos PhotoResults, count int, err error) {
photo.CompositeID = fmt.Sprintf("%d-%d", photoId, file.ID)
photo.Files = append(photo.Files, file)
photos = append(photos, photo)
merged = append(merged, photo)
}
return photos, count, nil
}
// ShareBase returns a meaningful file name for sharing.
func (m *Photo) ShareBase(seq int) string {
var name string
if m.PhotoTitle != "" {
name = strings.Title(slug.MakeLang(m.PhotoTitle, "en"))
} else {
name = m.PhotoUID
}
taken := m.TakenAtLocal.Format("20060102-150405")
if seq > 0 {
return fmt.Sprintf("%s-%s (%d).%s", taken, name, seq, m.FileType)
}
return fmt.Sprintf("%s-%s.%s", taken, name, m.FileType)
return merged, count, nil
}

View file

@ -41,9 +41,6 @@ const MaxResults = 25000
// Radius is about 1 km.
const Radius = 0.009
// Cols represents a list of database columns.
type Cols []string
// Query searches given an originals path and a db instance.
type Query struct {
db *gorm.DB
@ -63,3 +60,10 @@ func Db() *gorm.DB {
func UnscopedDb() *gorm.DB {
return entity.Db().Unscoped()
}
// Log logs the error if any and keeps quiet otherwise.
func Log(action string, err error) {
	if err == nil {
		return
	}

	log.Errorf("search: %s (%s)", err, action)
}

47
internal/search/select.go Normal file
View file

@ -0,0 +1,47 @@
package search
import (
"reflect"
"strings"
"github.com/photoprism/photoprism/pkg/list"
)
// Cols represents a list of database columns.
type Cols []string

// SelectString returns the selected columns of a search result struct
// as a single comma separated string.
func SelectString(f interface{}, tags []string) string {
	cols := SelectCols(f, tags)
	return strings.Join(cols, ", ")
}
// SelectCols returns the database columns for a search result struct,
// read from each field's "select" tag. A field is included when its tag
// names a column and either no tags are requested or the tag options
// overlap the requested tags (where "*" requests everything).
func SelectCols(f interface{}, tags []string) Cols {
	v := reflect.ValueOf(f)

	// Dereference pointers so both T and *T work.
	if v.Kind() == reflect.Ptr {
		v = v.Elem()
	}

	// Only structs carry field tags.
	if v.Kind() != reflect.Struct {
		return Cols{}
	}

	numFields := v.NumField()
	cols := make(Cols, 0, numFields)

	for i := 0; i < numFields; i++ {
		tag := strings.TrimSpace(v.Type().Field(i).Tag.Get("select"))

		// Skip untagged and explicitly excluded ("-") fields.
		if tag == "" || tag == "-" {
			continue
		}

		// The first comma separated part is the column name.
		parts := strings.Split(tag, ",")

		if parts[0] == "" {
			continue
		}

		if len(tags) == 0 || list.ContainsAny(parts, tags) {
			cols = append(cols, parts[0])
		}
	}

	return cols
}

View file

@ -0,0 +1,41 @@
package search
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestSelectString checks the comma separated column string generated
// from the "select" struct tags of search result types.
func TestSelectString(t *testing.T) {
	t.Run("PhotoWildcard", func(t *testing.T) {
		// SelectString returns the comma separated column names
		// for all tagged fields of Photo.
		// NOTE(review): asserting the exact string length is brittle —
		// it breaks whenever a column is added or renamed.
		result := SelectString(Photo{}, []string{"*"})
		assert.Len(t, result, 1599)
	})
	t.Run("PhotoGeoResult", func(t *testing.T) {
		// SelectString returns only the Photo columns that also
		// appear in GeoResult's "select" tags.
		result := SelectString(Photo{}, SelectCols(GeoResult{}, []string{"*"}))
		t.Logf("PhotoGeoResult: %d cols, %#v", len(result), result)
		assert.Len(t, result, 245)
	})
}
// TestSelectCols checks the column slices generated from the
// "select" struct tags of search result types.
func TestSelectCols(t *testing.T) {
	t.Run("PhotoWildcard", func(t *testing.T) {
		// SelectCols returns a slice with the selected column names.
		// NOTE(review): asserting the exact column count is brittle —
		// it breaks whenever a tagged field is added or removed.
		result := SelectCols(Photo{}, []string{"*"})
		assert.Len(t, result, 81)
	})
	t.Run("PhotoGeoResult", func(t *testing.T) {
		// Only Photo columns shared with GeoResult are selected.
		result := SelectCols(Photo{}, SelectCols(GeoResult{}, []string{"*"}))
		t.Logf("PhotoGeoResult: %d cols, %#v", len(result), result)
		assert.Len(t, result, 13)
	})
}

82
internal/search/viewer.go Normal file
View file

@ -0,0 +1,82 @@
package search
import (
"encoding/json"
"github.com/photoprism/photoprism/internal/entity"
"github.com/photoprism/photoprism/internal/thumb"
"github.com/photoprism/photoprism/internal/viewer"
)
// ViewerResult returns a new photo viewer result.
func (photo Photo) ViewerResult(contentUri, apiUri, previewToken, downloadToken string) viewer.Result {
	// Cache the file attributes that every generated thumb reuses.
	width := photo.FileWidth
	height := photo.FileHeight
	hash := photo.FileHash

	// Videos and live photos can be played in the viewer.
	playable := photo.PhotoType == entity.TypeVideo || photo.PhotoType == entity.TypeLive

	return viewer.Result{
		UID:         photo.PhotoUID,
		Title:       photo.PhotoTitle,
		Taken:       photo.TakenAtLocal,
		Description: photo.PhotoDescription,
		Favorite:    photo.PhotoFavorite,
		Playable:    playable,
		DownloadUrl: viewer.DownloadUrl(hash, apiUri, downloadToken),
		OriginalW:   width,
		OriginalH:   height,
		Fit720:      viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit720], contentUri, previewToken),
		Fit1280:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit1280], contentUri, previewToken),
		Fit1920:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit1920], contentUri, previewToken),
		Fit2048:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit2048], contentUri, previewToken),
		Fit2560:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit2560], contentUri, previewToken),
		Fit3840:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit3840], contentUri, previewToken),
		Fit4096:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit4096], contentUri, previewToken),
		Fit7680:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit7680], contentUri, previewToken),
	}
}
// ViewerJSON returns the results as photo viewer JSON.
func (photos PhotoResults) ViewerJSON(contentUri, apiUri, previewToken, downloadToken string) ([]byte, error) {
	results := photos.ViewerResults(contentUri, apiUri, previewToken, downloadToken)
	return json.Marshal(results)
}
// ViewerResults returns the results photo viewer formatted.
func (photos PhotoResults) ViewerResults(contentUri, apiUri, previewToken, downloadToken string) (results viewer.Results) {
	// Preallocate one viewer result per photo.
	results = make(viewer.Results, len(photos))

	for i := range photos {
		results[i] = photos[i].ViewerResult(contentUri, apiUri, previewToken, downloadToken)
	}

	return results
}
// ViewerResult creates a new photo viewer result.
func (photo GeoResult) ViewerResult(contentUri, apiUri, previewToken, downloadToken string) viewer.Result {
	// Cache the file attributes that every generated thumb reuses.
	width := photo.FileWidth
	height := photo.FileHeight
	hash := photo.FileHash

	// Videos and live photos can be played in the viewer.
	playable := photo.PhotoType == entity.TypeVideo || photo.PhotoType == entity.TypeLive

	return viewer.Result{
		UID:         photo.PhotoUID,
		Title:       photo.PhotoTitle,
		Taken:       photo.TakenAtLocal,
		Description: photo.PhotoDescription,
		Favorite:    photo.PhotoFavorite,
		Playable:    playable,
		DownloadUrl: viewer.DownloadUrl(hash, apiUri, downloadToken),
		OriginalW:   width,
		OriginalH:   height,
		Fit720:      viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit720], contentUri, previewToken),
		Fit1280:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit1280], contentUri, previewToken),
		Fit1920:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit1920], contentUri, previewToken),
		Fit2048:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit2048], contentUri, previewToken),
		Fit2560:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit2560], contentUri, previewToken),
		Fit3840:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit3840], contentUri, previewToken),
		Fit4096:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit4096], contentUri, previewToken),
		Fit7680:     viewer.NewThumb(width, height, hash, thumb.Sizes[thumb.Fit7680], contentUri, previewToken),
	}
}
// ViewerResults returns the geo results in the photo viewer format.
// It mirrors PhotoResults.ViewerResults so both result types expose
// a consistent API and ViewerJSON stays a thin serialization wrapper.
func (photos GeoResults) ViewerResults(contentUri, apiUri, previewToken, downloadToken string) (results viewer.Results) {
	results = make(viewer.Results, 0, len(photos))

	for _, p := range photos {
		results = append(results, p.ViewerResult(contentUri, apiUri, previewToken, downloadToken))
	}

	return results
}

// ViewerJSON returns the results as photo viewer JSON.
func (photos GeoResults) ViewerJSON(contentUri, apiUri, previewToken, downloadToken string) ([]byte, error) {
	return json.Marshal(photos.ViewerResults(contentUri, apiUri, previewToken, downloadToken))
}

View file

@ -0,0 +1,194 @@
package search
import (
"testing"
"time"
"github.com/photoprism/photoprism/internal/entity"
)
// TestPhotoResults_ViewerJSON verifies that a list of photo search
// results serializes to the JSON format consumed by the photo viewer.
func TestPhotoResults_ViewerJSON(t *testing.T) {
	// Only the fields relevant to the fixture are set explicitly; every
	// other struct field keeps its zero value (idiomatic Go), which keeps
	// the fixture short and robust against unrelated schema changes.
	result1 := Photo{
		ID:         111111,
		PhotoUID:   "123",
		PhotoTitle: "Photo1",
	}

	result2 := Photo{
		ID:         22222,
		PhotoUID:   "456",
		PhotoTitle: "Photo2",
	}

	results := PhotoResults{result1, result2}

	// Smoke test: serialization must succeed; the output is only logged.
	b, err := results.ViewerJSON("/content", "/api/v1", "preview-token", "download-token")

	if err != nil {
		t.Fatal(err)
	}

	t.Logf("result: %s", b)
}
// TestGeoResults_ViewerJSON verifies that geo search results serialize
// to the JSON format consumed by the photo viewer.
func TestGeoResults_ViewerJSON(t *testing.T) {
	// Round to whole seconds so the serialized timestamps are stable.
	taken := time.Date(2000, 1, 1, 1, 1, 1, 1, time.UTC).UTC().Round(time.Second)
	// Three fixtures covering a video, a regular image, and a raw file.
	items := GeoResults{
		GeoResult{
			ID:               "1",
			PhotoLat:         7.775,
			PhotoLng:         8.775,
			PhotoUID:         "p1",
			PhotoTitle:       "Title 1",
			PhotoDescription: "Description 1",
			PhotoFavorite:    false,
			PhotoType:        entity.TypeVideo,
			FileHash:         "d2b4a5d18276f96f1b5a1bf17fd82d6fab3807f2",
			FileWidth:        1920,
			FileHeight:       1080,
			TakenAtLocal:     taken,
		},
		GeoResult{
			ID:               "2",
			PhotoLat:         1.775,
			PhotoLng:         -5.775,
			PhotoUID:         "p2",
			PhotoTitle:       "Title 2",
			PhotoDescription: "Description 2",
			PhotoFavorite:    true,
			PhotoType:        entity.TypeImage,
			FileHash:         "da639e836dfa9179e66c619499b0a5e592f72fc1",
			FileWidth:        3024,
			FileHeight:       3024,
			TakenAtLocal:     taken,
		},
		GeoResult{
			ID:               "3",
			PhotoLat:         -1.775,
			PhotoLng:         100.775,
			PhotoUID:         "p3",
			PhotoTitle:       "Title 3",
			PhotoDescription: "Description 3",
			PhotoFavorite:    false,
			PhotoType:        entity.TypeRaw,
			FileHash:         "412fe4c157a82b636efebc5bc4bc4a15c321aad1",
			FileWidth:        5000,
			FileHeight:       10000,
			TakenAtLocal:     taken,
		},
	}
	// Smoke test: serialization must succeed; the output is only logged.
	b, err := items.ViewerJSON("/content", "/api/v1", "preview-token", "download-token")
	if err != nil {
		t.Fatal(err)
	}
	t.Logf("result: %s", b)
}

40
pkg/list/contains.go Normal file
View file

@ -0,0 +1,40 @@
package list
// Contains tests if a string is contained in the list.
// An empty list or empty string never matches, while the wildcard "*"
// matches any non-empty list.
func Contains(list []string, s string) bool {
	if len(list) == 0 || s == "" {
		return false
	}

	if s == "*" {
		return true
	}

	// Exact, case-sensitive comparison against each entry.
	for _, entry := range list {
		if entry == s {
			return true
		}
	}

	return false
}
// ContainsAny tests if two lists have at least one common entry.
// Either list being empty never matches; a wildcard "*" in s matches
// any non-empty l.
func ContainsAny(l, s []string) bool {
	if len(l) == 0 || len(s) == 0 {
		return false
	}

	if s[0] == "*" {
		return true
	}

	// Compare every pair of entries; "*" in s matches anything.
	for _, a := range l {
		for _, b := range s {
			if b == a || b == "*" {
				return true
			}
		}
	}

	// Nothing found.
	return false
}

75
pkg/list/contains_test.go Normal file
View file

@ -0,0 +1,75 @@
package list
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestContains checks exact, case-sensitive list membership,
// including empty and wildcard inputs.
func TestContains(t *testing.T) {
	t.Run("True", func(t *testing.T) {
		assert.True(t, Contains([]string{"foo", "bar"}, "bar"))
		assert.True(t, Contains([]string{"foo", "bAr"}, "bAr"))
		// Matching is exact, so trailing spaces are significant.
		assert.True(t, Contains([]string{"foo", "bar ", "foo ", "baz"}, "foo"))
		assert.True(t, Contains([]string{"foo", "bar ", "foo ", "baz"}, "foo "))
		assert.True(t, Contains([]string{"foo", "bar ", "foo ", "baz"}, "bar "))
	})
	t.Run("False", func(t *testing.T) {
		// The empty string never matches, and comparison is case-sensitive.
		assert.False(t, Contains([]string{"foo", "bar"}, ""))
		assert.False(t, Contains([]string{"foo", "bar"}, "bAr"))
		assert.False(t, Contains([]string{"foo", "bar"}, "baz"))
	})
	t.Run("Empty", func(t *testing.T) {
		// Nil/empty lists and empty search strings never match.
		assert.False(t, Contains(nil, ""))
		assert.False(t, Contains(nil, "foo"))
		assert.False(t, Contains([]string{}, ""))
		assert.False(t, Contains([]string{}, "foo"))
		assert.False(t, Contains([]string{""}, ""))
		assert.False(t, Contains([]string{"foo", "bar"}, ""))
	})
	t.Run("Wildcard", func(t *testing.T) {
		// "*" matches any non-empty list, but only as the search string;
		// a "*" entry inside the list has no special meaning, and
		// "* " (with a space) is not treated as a wildcard.
		assert.False(t, Contains(nil, "*"))
		assert.False(t, Contains(nil, "* "))
		assert.False(t, Contains([]string{}, "*"))
		assert.False(t, Contains([]string{"foo", "*"}, "baz"))
		assert.True(t, Contains([]string{"foo", "*"}, "foo"))
		assert.True(t, Contains([]string{""}, "*"))
		assert.True(t, Contains([]string{"foo", "bar"}, "*"))
	})
}
// TestContainsAny checks whether two lists share at least one entry,
// including empty and wildcard inputs.
func TestContainsAny(t *testing.T) {
	t.Run("True", func(t *testing.T) {
		assert.True(t, ContainsAny(List{"foo", "bar"}, List{"bar"}))
		assert.True(t, ContainsAny([]string{"foo", "bAr"}, List{"bAr"}))
		// Matching is exact, so trailing spaces are significant.
		assert.True(t, ContainsAny([]string{"foo", "bar ", "foo ", "baz"}, List{"foo"}))
		assert.True(t, ContainsAny([]string{"foo", "bar ", "foo ", "baz"}, List{"foo "}))
		assert.True(t, ContainsAny([]string{"foo", "bar ", "foo ", "baz"}, List{"bar "}))
	})
	t.Run("False", func(t *testing.T) {
		assert.False(t, ContainsAny([]string{"foo", "bar"}, List{""}))
		assert.False(t, ContainsAny([]string{"foo", "bar"}, List{"bAr"}))
		assert.False(t, ContainsAny([]string{"foo", "bar"}, List{"baz"}))
	})
	t.Run("Empty", func(t *testing.T) {
		// Either list being nil or empty never matches.
		assert.False(t, ContainsAny(nil, nil))
		assert.False(t, ContainsAny(nil, List{"foo"}))
		assert.False(t, ContainsAny([]string{}, []string{}))
		assert.False(t, ContainsAny([]string{}, []string{"foo"}))
		assert.False(t, ContainsAny(List{}, List{}))
		assert.False(t, ContainsAny(List{}, List{"foo"}))
		assert.False(t, ContainsAny([]string{""}, List{}))
		assert.False(t, ContainsAny([]string{}, List{""}))
		// Two empty-string entries compare equal, so this is a match.
		assert.True(t, ContainsAny([]string{""}, List{""}))
		assert.False(t, ContainsAny([]string{"foo", "bar"}, List{""}))
	})
	t.Run("Wildcard", func(t *testing.T) {
		// "*" in the second list matches any non-empty first list;
		// "* " is not a wildcard, and a "*" entry in the FIRST list
		// has no special meaning.
		assert.False(t, ContainsAny(nil, List{"*"}))
		assert.False(t, ContainsAny(nil, List{"* "}))
		assert.False(t, ContainsAny([]string{}, List{"*"}))
		assert.False(t, ContainsAny([]string{"foo", "*"}, List{"baz"}))
		assert.True(t, ContainsAny([]string{"foo", "*"}, List{"foo"}))
		assert.True(t, ContainsAny([]string{""}, List{"*"}))
		assert.True(t, ContainsAny([]string{"foo", "bar"}, List{"*"}))
	})
}

19
pkg/list/excludes.go Normal file
View file

@ -0,0 +1,19 @@
package list
// Excludes tests if a string is not contained in the list.
// Note the asymmetry with Contains: for a nil/empty list or an empty
// string BOTH Excludes and Contains return false.
func Excludes(list []string, s string) bool {
	if s == "" || len(list) == 0 {
		return false
	}

	return !Contains(list, s)
}

// ExcludesAny tests if two lists exclude each other.
// As above, empty inputs make both ExcludesAny and ContainsAny return false.
func ExcludesAny(l, s []string) bool {
	if len(s) == 0 || len(l) == 0 {
		return false
	}

	return !ContainsAny(l, s)
}

75
pkg/list/excludes_test.go Normal file
View file

@ -0,0 +1,75 @@
package list
import (
"testing"
"github.com/stretchr/testify/assert"
)
// TestExcludes exercises Excludes with matching, non-matching, empty,
// and wildcard inputs, grouped by expected outcome.
func TestExcludes(t *testing.T) {
	type testCase struct {
		list []string
		s    string
	}

	t.Run("True", func(t *testing.T) {
		// Strings that do not appear in the list are excluded.
		for _, tc := range []testCase{
			{[]string{"foo", "bar"}, "baz"},
			{[]string{"foo", "bar"}, "zzz"},
			{[]string{"foo", "bar"}, " "},
			{[]string{"foo", "bar"}, "645656"},
			{[]string{"foo", "bar ", "foo ", "baz"}, "bar"},
			{[]string{"foo", "bar", "foo ", "baz"}, "bar "},
		} {
			assert.True(t, Excludes(tc.list, tc.s))
		}
	})
	t.Run("False", func(t *testing.T) {
		// Exact matches are not excluded.
		for _, tc := range []testCase{
			{[]string{"foo", "bar"}, "foo"},
			{[]string{"foo", "bar"}, "bar"},
		} {
			assert.False(t, Excludes(tc.list, tc.s))
		}
	})
	t.Run("Empty", func(t *testing.T) {
		// An empty list or an empty search string never excludes.
		for _, tc := range []testCase{
			{nil, ""},
			{nil, "foo"},
			{[]string{}, ""},
			{[]string{}, "foo"},
			{[]string{""}, ""},
			{[]string{"foo", "bar"}, ""},
		} {
			assert.False(t, Excludes(tc.list, tc.s))
		}
	})
	t.Run("Wildcard", func(t *testing.T) {
		// "*" as needle or haystack entry behaves per Contains semantics.
		assert.False(t, Excludes(nil, "*"))
		assert.False(t, Excludes(nil, "* "))
		assert.False(t, Excludes([]string{}, "*"))
		assert.True(t, Excludes([]string{"foo", "*"}, "baz"))
		assert.False(t, Excludes([]string{"foo", "*"}, "foo"))
		assert.False(t, Excludes([]string{""}, "*"))
		assert.False(t, Excludes([]string{"foo", "bar"}, "*"))
	})
}
// TestExcludesAny exercises ExcludesAny with overlapping, disjoint,
// empty, and wildcard list pairs.
//
// Fix: the first two subtest names were inverted relative to the
// asserted outcomes ("True" held only assert.False and vice versa,
// apparently copied from the ContainsAny tests). They are renamed so
// that "True"/"False" group by the expected return value, matching
// the convention used by TestExcludes.
func TestExcludesAny(t *testing.T) {
	t.Run("False", func(t *testing.T) {
		// Lists that share at least one element do not exclude each other.
		assert.False(t, ExcludesAny(List{"foo", "bar"}, List{"bar"}))
		assert.False(t, ExcludesAny([]string{"foo", "bAr"}, List{"bAr"}))
		assert.False(t, ExcludesAny([]string{"foo", "bar ", "foo ", "baz"}, List{"foo"}))
		assert.False(t, ExcludesAny([]string{"foo", "bar ", "foo ", "baz"}, List{"foo "}))
		assert.False(t, ExcludesAny([]string{"foo", "bar ", "foo ", "baz"}, List{"bar "}))
	})
	t.Run("True", func(t *testing.T) {
		// Fully disjoint, non-empty lists exclude each other.
		assert.True(t, ExcludesAny([]string{"foo", "bar"}, List{""}))
		assert.True(t, ExcludesAny([]string{"foo", "bar"}, List{"bAr"}))
		assert.True(t, ExcludesAny([]string{"foo", "bar"}, List{"baz"}))
	})
	t.Run("Empty", func(t *testing.T) {
		// An empty list on either side never excludes.
		assert.False(t, ExcludesAny(nil, nil))
		assert.False(t, ExcludesAny(nil, List{"foo"}))
		assert.False(t, ExcludesAny([]string{}, []string{}))
		assert.False(t, ExcludesAny([]string{}, []string{"foo"}))
		assert.False(t, ExcludesAny(List{}, List{}))
		assert.False(t, ExcludesAny(List{}, List{"foo"}))
		assert.False(t, ExcludesAny([]string{""}, List{}))
		assert.False(t, ExcludesAny([]string{}, List{""}))
		assert.False(t, ExcludesAny([]string{""}, List{""}))
		assert.True(t, ExcludesAny([]string{"foo", "bar"}, List{""}))
	})
	t.Run("Wildcard", func(t *testing.T) {
		// "*" entries behave per ContainsAny semantics.
		assert.False(t, ExcludesAny(nil, List{"*"}))
		assert.False(t, ExcludesAny(nil, List{"* "}))
		assert.False(t, ExcludesAny([]string{}, List{"*"}))
		assert.True(t, ExcludesAny([]string{"foo", "*"}, List{"baz"}))
		assert.False(t, ExcludesAny([]string{"foo", "*"}, List{"foo"}))
		assert.False(t, ExcludesAny([]string{""}, List{"*"}))
		assert.False(t, ExcludesAny([]string{"foo", "bar"}, List{"*"}))
	})
}

30
pkg/list/list.go Normal file
View file

@ -0,0 +1,30 @@
/*
Package list provides string slice abstraction
Copyright (c) 2018 - 2022 Michael Mayer <hello@photoprism.app>
This program is free software: you can redistribute it and/or modify
it under Version 3 of the GNU Affero General Public License (the "AGPL"):
<https://docs.photoprism.app/license/agpl>
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
The AGPL is supplemented by our Trademark and Brand Guidelines,
which describe how our Brand Assets may be used:
<https://photoprism.app/trademark>
Feel free to send an e-mail to hello@photoprism.app if you have questions,
want to support our work, or just want to say hello.
Additional information can be found in our Developer Guide:
<https://docs.photoprism.app/developer-guide/>
*/
package list
// List represents a list of strings, e.g. tags or search terms.
// It is an alias for []string so plain string slices convert freely.
type List []string

View file

@ -5,21 +5,18 @@ import (
)
const (
Ellipsis = "…"
ClipCountryCode = 2
ClipKeyword = 40
ClipUsername = 64
ClipSlug = 80
ClipCategory = 100
ClipPlace = 128
ClipDefault = 160
ClipName = 160
ClipTitle = 200
ClipVarchar = 255
ClipPath = 500
ClipLabel = 500
ClipQuery = 1000
ClipDescription = 16000
Ellipsis = "…"
ClipKeyword = 40
ClipUsername = 64
ClipSlug = 80
ClipCategory = 100
ClipDefault = 160
ClipName = 160
ClipTitle = 200
ClipPath = 500
ClipShortText = 1024
ClipText = 2048
ClipLongText = 4096
)
// Clip shortens a string to the given number of runes, and removes all leading and trailing white space.