Backend: Increase threshold for NSFW detection

Signed-off-by: Michael Mayer <michael@liquidbytes.net>
Michael Mayer 2020-04-29 11:07:15 +02:00
parent 8a3973aaa2
commit e03c254d1b
3 changed files with 25 additions and 15 deletions

@@ -1,5 +1,5 @@
 /*
-This package detects porn images.
+This package detects images that may not be safe for work.

 Additional information can be found in our Developer Guide:
@@ -12,6 +12,12 @@ import (
     "github.com/photoprism/photoprism/internal/event"
 )

+const (
+    ThresholdSafe   = 0.75
+    ThresholdMedium = 0.85
+    ThresholdHigh   = 0.9
+)
+
 var log = event.Log

 type Labels struct {
@@ -22,22 +28,18 @@ type Labels struct {
     Sexy    float32
 }

+// IsSafe returns true if the image is probably safe for work.
 func (l *Labels) IsSafe() bool {
-    return !l.NSFW()
+    return !l.NSFW(ThresholdSafe)
 }

-func (l *Labels) NSFW() bool {
+// NSFW returns true if the image may not be safe for work.
+func (l *Labels) NSFW(threshold float32) bool {
     if l.Neutral > 0.25 {
         return false
     }

-    if l.Porn > 0.75 {
-        return true
-    }
-
-    if l.Sexy > 0.75 {
-        return true
-    }
-
-    if l.Hentai > 0.75 {
+    if l.Porn > threshold || l.Sexy > threshold || l.Hentai > threshold {
         return true
     }
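For context, here is a minimal, self-contained sketch of the behaviour introduced above. The full Labels field order (Drawing, Hentai, Neutral, Porn, Sexy) is an assumption inferred from the struct and the test fixtures in the next file; this mirrors the thresholded check for illustration and is not the package source itself.

package main

import "fmt"

// Labels approximates nsfw.Labels (assumed field order:
// Drawing, Hentai, Neutral, Porn, Sexy).
type Labels struct {
    Drawing float32
    Hentai  float32
    Neutral float32
    Porn    float32
    Sexy    float32
}

// Detection thresholds as introduced by this commit.
const (
    ThresholdSafe   = 0.75
    ThresholdMedium = 0.85
    ThresholdHigh   = 0.9
)

// NSFW reports whether any suggestive score exceeds the given threshold,
// unless the image is clearly neutral.
func (l *Labels) NSFW(threshold float32) bool {
    if l.Neutral > 0.25 {
        return false
    }
    return l.Porn > threshold || l.Sexy > threshold || l.Hentai > threshold
}

// IsSafe is the inverse of NSFW at the most sensitive threshold.
func (l *Labels) IsSafe() bool {
    return !l.NSFW(ThresholdSafe)
}

func main() {
    l := Labels{Hentai: 0.80, Neutral: 0.2}
    fmt.Println(l.NSFW(ThresholdSafe))   // true:  0.80 > 0.75
    fmt.Println(l.NSFW(ThresholdMedium)) // false: 0.80 <= 0.85
    fmt.Println(l.IsSafe())              // false
}

Callers choose how strict the check should be: the tests exercise all three constants, and the indexer below passes ThresholdHigh so that only clearly flagged files trigger a warning.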

@@ -104,10 +104,17 @@ func TestIsSafe(t *testing.T) {
 func TestNSFW(t *testing.T) {
     porn := Labels{0, 0, 0.11, 0.88, 0}
     sexy := Labels{0, 0, 0.2, 0.59, 0.98}
-    hentai := Labels{0, 0.98, 0.2, 0, 0}
+    hentai := Labels{0, 0.80, 0.2, 0, 0}

-    assert.Equal(t, true, porn.NSFW())
-    assert.Equal(t, true, sexy.NSFW())
-    assert.Equal(t, true, hentai.NSFW())
+    assert.Equal(t, true, porn.NSFW(ThresholdSafe))
+    assert.Equal(t, true, sexy.NSFW(ThresholdSafe))
+    assert.Equal(t, true, hentai.NSFW(ThresholdSafe))
+
+    assert.Equal(t, true, porn.NSFW(ThresholdMedium))
+    assert.Equal(t, true, sexy.NSFW(ThresholdMedium))
+    assert.Equal(t, false, hentai.NSFW(ThresholdMedium))
+
+    assert.Equal(t, false, porn.NSFW(ThresholdHigh))
+    assert.Equal(t, true, sexy.NSFW(ThresholdHigh))
+    assert.Equal(t, false, hentai.NSFW(ThresholdHigh))
 }
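For readers decoding the positional literals above, the fixtures map to named fields roughly as follows (same field-order assumption as the sketch after the first file; this fragment is illustration only, not part of the test):

// Illustration only, reusing the Labels type and thresholds sketched earlier.
porn := Labels{Neutral: 0.11, Porn: 0.88}            // above Safe and Medium, below High (0.9)
sexy := Labels{Neutral: 0.2, Porn: 0.59, Sexy: 0.98} // above all three thresholds
hentai := Labels{Hentai: 0.80, Neutral: 0.2}         // above Safe (0.75) only

Lowering the hentai fixture from 0.98 to 0.80 is what makes it cross only the lowest threshold, which the new assertions rely on.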

@@ -13,6 +13,7 @@ import (
     "github.com/photoprism/photoprism/internal/entity"
     "github.com/photoprism/photoprism/internal/event"
     "github.com/photoprism/photoprism/internal/meta"
+    "github.com/photoprism/photoprism/internal/nsfw"
     "github.com/photoprism/photoprism/pkg/fs"
     "github.com/photoprism/photoprism/pkg/txt"
 )
@@ -430,7 +431,7 @@ func (ind *Index) NSFW(jpeg *MediaFile) bool {
         log.Error(err)
         return false
     } else {
-        if nsfwLabels.NSFW() {
+        if nsfwLabels.NSFW(nsfw.ThresholdHigh) {
            log.Warnf("index: \"%s\" might contain offensive content", jpeg.FileName())
            return true
        }
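The practical effect at this call site: with nsfw.ThresholdHigh, the indexer warns only when a score exceeds 0.9, whereas the old hard-coded check fired at 0.75. A rough before/after sketch with a hypothetical detector result (this fragment only compiles inside the photoprism module, since internal/nsfw is an internal package):

// Hypothetical detector result with Porn = 0.88, as in the test fixtures.
labels := nsfw.Labels{Neutral: 0.11, Porn: 0.88}

_ = labels.NSFW(0.75)               // old behaviour: flagged (0.88 > 0.75)
_ = labels.NSFW(nsfw.ThresholdHigh) // new behaviour: not flagged (0.88 <= 0.9)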