From e03c254d1b3886ea32e9a7f5bf187dfa545a6276 Mon Sep 17 00:00:00 2001 From: Michael Mayer Date: Wed, 29 Apr 2020 11:07:15 +0200 Subject: [PATCH] Backend: Increase threshold for NSFW detection Signed-off-by: Michael Mayer --- internal/nsfw/nsfw.go | 22 ++++++++++++---------- internal/nsfw/nsfw_test.go | 15 +++++++++++---- internal/photoprism/index_mediafile.go | 3 ++- 3 files changed, 25 insertions(+), 15 deletions(-) diff --git a/internal/nsfw/nsfw.go b/internal/nsfw/nsfw.go index 58627ba12..8146d706e 100644 --- a/internal/nsfw/nsfw.go +++ b/internal/nsfw/nsfw.go @@ -1,5 +1,5 @@ /* -This package detects porn images. +This package detects images that may not be safe for work. Additional information can be found in our Developer Guide: @@ -12,6 +12,12 @@ import ( "github.com/photoprism/photoprism/internal/event" ) +const ( + ThresholdSafe = 0.75 + ThresholdMedium = 0.85 + ThresholdHigh = 0.9 +) + var log = event.Log type Labels struct { @@ -22,22 +28,18 @@ type Labels struct { Sexy float32 } +// IsSafe returns true if the image is probably safe for work. func (l *Labels) IsSafe() bool { - return !l.NSFW() + return !l.NSFW(ThresholdSafe) } -func (l *Labels) NSFW() bool { +// NSFW returns true if the image may not be safe for work. 
+func (l *Labels) NSFW(threshold float32) bool { if l.Neutral > 0.25 { return false } - if l.Porn > 0.75 { - return true - } - if l.Sexy > 0.75 { - return true - } - if l.Hentai > 0.75 { + if l.Porn > threshold || l.Sexy > threshold || l.Hentai > threshold { return true } diff --git a/internal/nsfw/nsfw_test.go b/internal/nsfw/nsfw_test.go index 56a6c646b..420751ca3 100644 --- a/internal/nsfw/nsfw_test.go +++ b/internal/nsfw/nsfw_test.go @@ -104,10 +104,17 @@ func TestIsSafe(t *testing.T) { func TestNSFW(t *testing.T) { porn := Labels{0, 0, 0.11, 0.88, 0} sexy := Labels{0, 0, 0.2, 0.59, 0.98} - hentai := Labels{0, 0.98, 0.2, 0, 0} + hentai := Labels{0, 0.80, 0.2, 0, 0} - assert.Equal(t, true, porn.NSFW()) - assert.Equal(t, true, sexy.NSFW()) - assert.Equal(t, true, hentai.NSFW()) + assert.Equal(t, true, porn.NSFW(ThresholdSafe)) + assert.Equal(t, true, sexy.NSFW(ThresholdSafe)) + assert.Equal(t, true, hentai.NSFW(ThresholdSafe)) + assert.Equal(t, true, porn.NSFW(ThresholdMedium)) + assert.Equal(t, true, sexy.NSFW(ThresholdMedium)) + assert.Equal(t, false, hentai.NSFW(ThresholdMedium)) + + assert.Equal(t, false, porn.NSFW(ThresholdHigh)) + assert.Equal(t, true, sexy.NSFW(ThresholdHigh)) + assert.Equal(t, false, hentai.NSFW(ThresholdHigh)) } diff --git a/internal/photoprism/index_mediafile.go b/internal/photoprism/index_mediafile.go index 53ab6a8d7..fb5776748 100644 --- a/internal/photoprism/index_mediafile.go +++ b/internal/photoprism/index_mediafile.go @@ -13,6 +13,7 @@ import ( "github.com/photoprism/photoprism/internal/entity" "github.com/photoprism/photoprism/internal/event" "github.com/photoprism/photoprism/internal/meta" + "github.com/photoprism/photoprism/internal/nsfw" "github.com/photoprism/photoprism/pkg/fs" "github.com/photoprism/photoprism/pkg/txt" ) @@ -430,7 +431,7 @@ func (ind *Index) NSFW(jpeg *MediaFile) bool { log.Error(err) return false } else { - if nsfwLabels.NSFW() { + if nsfwLabels.NSFW(nsfw.ThresholdHigh) { log.Warnf("index: \"%s\" might 
contain offensive content", jpeg.FileName()) return true }