File storage rewrite (#2676)

* Restructure data layer part 2 (#2599)
* Refactor and separate image model
* Refactor image query builder
* Handle relationships in image query builder
* Remove relationship management methods
* Refactor gallery model/query builder
* Add scenes to gallery model
* Convert scene model
* Refactor scene models
* Remove unused methods
* Add unit tests for gallery
* Add image tests
* Add scene tests
* Convert unnecessary scene value pointers to values
* Convert unnecessary pointer values to values
* Refactor scene partial
* Add scene partial tests
* Refactor ImagePartial
* Add image partial tests
* Refactor gallery partial update
* Add partial gallery update tests
* Use zero/null package for null values
* Add files and scan system
* Add sqlite implementation for files/folders
* Add unit tests for files/folders
* Image refactors
* Update image data layer
* Refactor gallery model and creation
* Refactor scene model
* Refactor scenes
* Don't set title from filename
* Allow galleries to freely add/remove images
* Add multiple scene file support to graphql and UI
* Add multiple file support for images in graphql/UI
* Add multiple file support for galleries in graphql/UI
* Remove use of some deprecated fields
* Remove scene path usage
* Remove gallery path usage
* Remove path from image
* Move funscript to video file
* Refactor caption detection
* Migrate existing data
* Add post commit/rollback hook system
* Lint. Comment out import/export tests
* Add WithDatabase read only wrapper
* Prepend tasks to list
* Add 32 pre-migration
* Add warnings in release and migration notes
This commit is contained in:
WithoutPants 2022-07-13 16:30:54 +10:00
parent 30877c75fb
commit 5495d72849
359 changed files with 43690 additions and 16000 deletions

4
go.mod
View File

@ -19,7 +19,7 @@ require (
github.com/jinzhu/copier v0.0.0-20190924061706-b57f9002281a
github.com/jmoiron/sqlx v1.3.1
github.com/json-iterator/go v1.1.12
github.com/mattn/go-sqlite3 v1.14.6
github.com/mattn/go-sqlite3 v1.14.7
github.com/natefinch/pie v0.0.0-20170715172608-9a0d72014007
github.com/pkg/browser v0.0.0-20210911075715-681adbf594b8
github.com/remeh/sizedwaitgroup v1.0.0
@ -47,6 +47,7 @@ require (
require (
github.com/asticode/go-astisub v0.20.0
github.com/doug-martin/goqu/v9 v9.18.0
github.com/go-chi/httplog v0.2.1
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4
github.com/hashicorp/golang-lru v0.5.4
@ -56,6 +57,7 @@ require (
github.com/spf13/cast v1.4.1
github.com/vearutop/statigz v1.1.6
github.com/vektah/gqlparser/v2 v2.4.1
gopkg.in/guregu/null.v4 v4.0.0
)
require (

15
go.sum
View File

@ -65,6 +65,8 @@ github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBp
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/ClickHouse/clickhouse-go v1.4.3/go.mod h1:EaI/sW7Azgz9UATzd5ZdZHRUhHgv5+JMS9NSr2smCJI=
github.com/DATA-DOG/go-sqlmock v1.5.0 h1:Shsta01QNfFxHCfpW6YH2STWB0MudeXXEWMr20OEh60=
github.com/DATA-DOG/go-sqlmock v1.5.0/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/Microsoft/go-winio v0.4.16/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@ -206,6 +208,8 @@ github.com/docker/docker v17.12.0-ce-rc1.0.20210128214336-420b1d36250f+incompati
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815/go.mod h1:WwZ+bS3ebgob9U8Nd0kOddGdZWjyMGR8Wziv+TBNwSE=
github.com/doug-martin/goqu/v9 v9.18.0 h1:/6bcuEtAe6nsSMVK/M+fOiXUNfyFF3yYtE07DBPFMYY=
github.com/doug-martin/goqu/v9 v9.18.0/go.mod h1:nf0Wc2/hV3gYK9LiyqIrzBEVGlI8qW3GuDCEobC4wBQ=
github.com/dustin/go-humanize v0.0.0-20180421182945-02af3965c54e/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/edsrzf/mmap-go v0.0.0-20170320065105-0bce6a688712/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
@ -248,8 +252,9 @@ github.com/go-kit/kit v0.9.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-sql-driver/mysql v1.4.0/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-sql-driver/mysql v1.6.0 h1:BCTh4TKNUYmOmMUcQ3IipzF5prigylS7XXjEkfCHuOE=
github.com/go-sql-driver/mysql v1.6.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4 h1:qZNfIGkIANxGv/OqtnntR4DfOY2+BgwR60cAcu/i3SE=
github.com/go-toast/toast v0.0.0-20190211030409-01e6764cf0a4/go.mod h1:kW3HQ4UdaAyrUCSSDR4xUzBKW6O2iA4uHhk7AtyYp10=
@ -535,8 +540,9 @@ github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/lib/pq v1.8.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.0 h1:Zx5DJFEYQXio93kgXnQ09fXNiUKsqv4OUEu2UtGcB1E=
github.com/lib/pq v1.10.0/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/lib/pq v1.10.1 h1:6VXZrLU0jHBYyAqrSPa+MgPfnSvTPuMgK+k0o5kVFWo=
github.com/lib/pq v1.10.1/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o=
github.com/logrusorgru/aurora/v3 v3.0.0/go.mod h1:vsR12bk5grlLvLXAYrBsb5Oc/N+LxAlxggSjiwMnCUc=
github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY=
github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0=
@ -570,8 +576,9 @@ github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOA
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-sqlite3 v1.9.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-sqlite3 v1.14.6 h1:dNPt6NO46WmLVt2DLNpwczCmdV5boIZ6g/tlDrlRUbg=
github.com/mattn/go-sqlite3 v1.14.6/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/mattn/go-sqlite3 v1.14.7 h1:fxWBnXkxfM6sRiuH3bqJ4CfzZojMOLVc0UTsTglEghA=
github.com/mattn/go-sqlite3 v1.14.7/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
@ -1300,6 +1307,8 @@ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8X
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/guregu/null.v4 v4.0.0 h1:1Wm3S1WEA2I26Kq+6vcW+w0gcDo44YKYD7YIEJNHDjg=
gopkg.in/guregu/null.v4 v4.0.0/go.mod h1:YoQhUrADuG3i9WqesrCmpNRwm1ypAgSHYqoOcTu/JrI=
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
gopkg.in/inf.v0 v0.9.1/go.mod h1:cWUDdTG/fYaXco+Dcufb5Vnc6Gp2YChqWtbxRZE0mXw=
gopkg.in/ini.v1 v1.66.2/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=

View File

@ -22,10 +22,18 @@ autobind:
- github.com/stashapp/stash/pkg/scraper/stashbox
models:
# autobind on config causes generation issues
# Scalars
Timestamp:
model: github.com/stashapp/stash/pkg/models.Timestamp
Int64:
model: github.com/stashapp/stash/pkg/models.Int64
# define to force resolvers
Image:
model: github.com/stashapp/stash/pkg/models.Image
fields:
title:
resolver: true
# autobind on config causes generation issues
StashConfig:
model: github.com/stashapp/stash/internal/manager/config.StashConfig
StashConfigInput:
@ -83,6 +91,8 @@ models:
ScanMetaDataFilterInput:
model: github.com/stashapp/stash/internal/manager.ScanMetaDataFilterInput
# renamed types
BulkUpdateIdMode:
model: github.com/stashapp/stash/pkg/models.RelationshipUpdateMode
DLNAStatus:
model: github.com/stashapp/stash/internal/dlna.Status
DLNAIP:
@ -102,6 +112,8 @@ models:
ScraperSource:
model: github.com/stashapp/stash/pkg/scraper.Source
# rebind inputs to types
StashIDInput:
model: github.com/stashapp/stash/pkg/models.StashID
IdentifySourceInput:
model: github.com/stashapp/stash/internal/identify.Source
IdentifyFieldOptionsInput:

View File

@ -0,0 +1,40 @@
fragment FolderData on Folder {
id
path
}
fragment VideoFileData on VideoFile {
path
size
duration
video_codec
audio_codec
width
height
frame_rate
bit_rate
fingerprints {
type
value
}
}
fragment ImageFileData on ImageFile {
path
size
width
height
fingerprints {
type
value
}
}
fragment GalleryFileData on GalleryFile {
path
size
fingerprints {
type
value
}
}

View File

@ -1,19 +1,21 @@
fragment SlimGalleryData on Gallery {
id
checksum
path
title
date
url
details
rating
organized
files {
...GalleryFileData
}
folder {
...FolderData
}
image_count
cover {
file {
size
width
height
files {
...ImageFileData
}
paths {
@ -37,8 +39,6 @@ fragment SlimGalleryData on Gallery {
image_path
}
scenes {
id
title
path
...SlimSceneData
}
}

View File

@ -1,7 +1,5 @@
fragment GalleryData on Gallery {
id
checksum
path
created_at
updated_at
title
@ -10,6 +8,14 @@ fragment GalleryData on Gallery {
details
rating
organized
files {
...GalleryFileData
}
folder {
...FolderData
}
images {
...SlimImageData
}

View File

@ -1,16 +1,12 @@
fragment SlimImageData on Image {
id
checksum
title
rating
organized
o_counter
path
file {
size
width
height
files {
...ImageFileData
}
paths {
@ -20,8 +16,13 @@ fragment SlimImageData on Image {
galleries {
id
path
title
files {
path
}
folder {
path
}
}
studio {

View File

@ -1,18 +1,14 @@
fragment ImageData on Image {
id
checksum
title
rating
organized
o_counter
path
created_at
updated_at
file {
size
width
height
files {
...ImageFileData
}
paths {

View File

@ -1,7 +1,5 @@
fragment SlimSceneData on Scene {
id
checksum
oshash
title
details
url
@ -9,8 +7,6 @@ fragment SlimSceneData on Scene {
rating
o_counter
organized
path
phash
interactive
interactive_speed
captions {
@ -18,15 +14,8 @@ fragment SlimSceneData on Scene {
caption_type
}
file {
size
duration
video_codec
audio_codec
width
height
framerate
bitrate
files {
...VideoFileData
}
paths {

View File

@ -1,7 +1,5 @@
fragment SceneData on Scene {
id
checksum
oshash
title
details
url
@ -9,8 +7,6 @@ fragment SceneData on Scene {
rating
o_counter
organized
path
phash
interactive
interactive_speed
captions {
@ -20,15 +16,8 @@ fragment SceneData on Scene {
created_at
updated_at
file {
size
duration
video_codec
audio_codec
width
height
framerate
bitrate
files {
...VideoFileData
}
paths {

View File

@ -0,0 +1,97 @@
type Fingerprint {
type: String!
value: String!
}
type Folder {
id: ID!
path: String!
parent_folder_id: ID
zip_file_id: ID
mod_time: Time!
created_at: Time!
updated_at: Time!
}
interface BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}
type VideoFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
format: String!
width: Int!
height: Int!
duration: Float!
video_codec: String!
audio_codec: String!
frame_rate: Float!
bit_rate: Int!
created_at: Time!
updated_at: Time!
}
type ImageFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
width: Int!
height: Int!
created_at: Time!
updated_at: Time!
}
type GalleryFile implements BaseFile {
id: ID!
path: String!
basename: String!
parent_folder_id: ID!
zip_file_id: ID
mod_time: Time!
size: Int64!
fingerprints: [Fingerprint!]!
created_at: Time!
updated_at: Time!
}

View File

@ -1,8 +1,8 @@
"""Gallery type"""
type Gallery {
id: ID!
checksum: String!
path: String
checksum: String! @deprecated(reason: "Use files.fingerprints")
path: String @deprecated(reason: "Use files.path")
title: String
url: String
date: String
@ -11,7 +11,10 @@ type Gallery {
organized: Boolean!
created_at: Time!
updated_at: Time!
file_mod_time: Time
file_mod_time: Time @deprecated(reason: "Use files.mod_time")
files: [GalleryFile!]!
folder: Folder
scenes: [Scene!]!
studio: Studio
@ -24,12 +27,6 @@ type Gallery {
cover: Image
}
type GalleryFilesType {
index: Int!
name: String
path: String
}
input GalleryCreateInput {
title: String!
url: String

View File

@ -1,16 +1,18 @@
type Image {
id: ID!
checksum: String
checksum: String @deprecated(reason: "Use files.fingerprints")
title: String
rating: Int
o_counter: Int
organized: Boolean!
path: String!
path: String! @deprecated(reason: "Use files.path")
created_at: Time!
updated_at: Time!
file_mod_time: Time
file_mod_time: Time @deprecated(reason: "Use files.mod_time")
file: ImageFileType! # Resolver
file: ImageFileType! @deprecated(reason: "Use files.mod_time")
files: [ImageFile!]!
paths: ImagePathsType! # Resolver
galleries: [Gallery!]!
@ -20,9 +22,10 @@ type Image {
}
type ImageFileType {
size: Int
width: Int
height: Int
mod_time: Time!
size: Int!
width: Int!
height: Int!
}
type ImagePathsType {

View File

@ -9,4 +9,6 @@ scalar Timestamp
# generic JSON object
scalar Map
scalar Any
scalar Any
scalar Int64

View File

@ -27,15 +27,15 @@ type SceneMovie {
scene_index: Int
}
type SceneCaption {
type VideoCaption {
language_code: String!
caption_type: String!
}
type Scene {
id: ID!
checksum: String
oshash: String
checksum: String @deprecated(reason: "Use files.fingerprints")
oshash: String @deprecated(reason: "Use files.fingerprints")
title: String
details: String
url: String
@ -43,16 +43,17 @@ type Scene {
rating: Int
organized: Boolean!
o_counter: Int
path: String!
phash: String
path: String! @deprecated(reason: "Use files.path")
phash: String @deprecated(reason: "Use files.fingerprints")
interactive: Boolean!
interactive_speed: Int
captions: [SceneCaption!]
captions: [VideoCaption!]
created_at: Time!
updated_at: Time!
file_mod_time: Time
file: SceneFileType! # Resolver
file: SceneFileType! @deprecated(reason: "Use files")
files: [VideoFile!]!
paths: ScenePathsType! # Resolver
scene_markers: [SceneMarker!]!

View File

@ -3,6 +3,7 @@ package api
import (
"context"
"database/sql"
"fmt"
"strconv"
"github.com/99designs/gqlgen/graphql"
@ -89,6 +90,14 @@ func (t changesetTranslator) nullString(value *string, field string) *sql.NullSt
return ret
}
func (t changesetTranslator) optionalString(value *string, field string) models.OptionalString {
if !t.hasField(field) {
return models.OptionalString{}
}
return models.NewOptionalStringPtr(value)
}
func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQLiteDate {
if !t.hasField(field) {
return nil
@ -104,6 +113,21 @@ func (t changesetTranslator) sqliteDate(value *string, field string) *models.SQL
return ret
}
func (t changesetTranslator) optionalDate(value *string, field string) models.OptionalDate {
if !t.hasField(field) {
return models.OptionalDate{}
}
if value == nil {
return models.OptionalDate{
Set: true,
Null: true,
}
}
return models.NewOptionalDate(models.NewDate(*value))
}
func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64 {
if !t.hasField(field) {
return nil
@ -119,6 +143,14 @@ func (t changesetTranslator) nullInt64(value *int, field string) *sql.NullInt64
return ret
}
func (t changesetTranslator) optionalInt(value *int, field string) models.OptionalInt {
if !t.hasField(field) {
return models.OptionalInt{}
}
return models.NewOptionalIntPtr(value)
}
func (t changesetTranslator) nullInt64FromString(value *string, field string) *sql.NullInt64 {
if !t.hasField(field) {
return nil
@ -134,6 +166,25 @@ func (t changesetTranslator) nullInt64FromString(value *string, field string) *s
return ret
}
func (t changesetTranslator) optionalIntFromString(value *string, field string) (models.OptionalInt, error) {
if !t.hasField(field) {
return models.OptionalInt{}, nil
}
if value == nil {
return models.OptionalInt{
Set: true,
Null: true,
}, nil
}
vv, err := strconv.Atoi(*value)
if err != nil {
return models.OptionalInt{}, fmt.Errorf("converting %v to int: %w", *value, err)
}
return models.NewOptionalInt(vv), nil
}
func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
if !t.hasField(field) {
return nil
@ -148,3 +199,11 @@ func (t changesetTranslator) nullBool(value *bool, field string) *sql.NullBool {
return ret
}
func (t changesetTranslator) optionalBool(value *bool, field string) models.OptionalBool {
if !t.hasField(field) {
return models.OptionalBool{}
}
return models.NewOptionalBoolPtr(value)
}

View File

@ -31,8 +31,11 @@ type hookExecutor interface {
}
type Resolver struct {
txnManager txn.Manager
repository models.Repository
txnManager txn.Manager
repository manager.Repository
sceneService manager.SceneService
imageService manager.ImageService
galleryService manager.GalleryService
hookExecutor hookExecutor
}

View File

@ -2,24 +2,91 @@ package api
import (
"context"
"strconv"
"time"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *galleryResolver) Path(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Path.Valid {
return &obj.Path.String, nil
func (r *galleryResolver) Files(ctx context.Context, obj *models.Gallery) ([]*GalleryFile, error) {
ret := make([]*GalleryFile, len(obj.Files))
for i, f := range obj.Files {
base := f.Base()
ret[i] = &GalleryFile{
ID: strconv.Itoa(int(base.ID)),
Path: base.Path,
Basename: base.Basename,
ParentFolderID: strconv.Itoa(int(base.ParentFolderID)),
ModTime: base.ModTime,
Size: base.Size,
CreatedAt: base.CreatedAt,
UpdatedAt: base.UpdatedAt,
Fingerprints: resolveFingerprints(base),
}
if base.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*base.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return nil, nil
return ret, nil
}
func (r *galleryResolver) Title(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Title.Valid {
return &obj.Title.String, nil
func (r *galleryResolver) Folder(ctx context.Context, obj *models.Gallery) (*Folder, error) {
if obj.FolderID == nil {
return nil, nil
}
var ret *file.Folder
if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Folder.Find(ctx, *obj.FolderID)
if err != nil {
return err
}
return err
}); err != nil {
return nil, err
}
if ret == nil {
return nil, nil
}
rr := &Folder{
ID: ret.ID.String(),
Path: ret.Path,
ModTime: ret.ModTime,
CreatedAt: ret.CreatedAt,
UpdatedAt: ret.UpdatedAt,
}
if ret.ParentFolderID != nil {
pfidStr := ret.ParentFolderID.String()
rr.ParentFolderID = &pfidStr
}
if ret.ZipFileID != nil {
zfidStr := ret.ZipFileID.String()
rr.ZipFileID = &zfidStr
}
return rr, nil
}
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
f := obj.PrimaryFile()
if f != nil {
return &f.Base().ModTime, nil
}
return nil, nil
}
@ -70,35 +137,13 @@ func (r *galleryResolver) Cover(ctx context.Context, obj *models.Gallery) (ret *
}
func (r *galleryResolver) Date(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Date.Valid {
result := utils.GetYMDFromDatabaseDate(obj.Date.String)
if obj.Date != nil {
result := obj.Date.String()
return &result, nil
}
return nil, nil
}
func (r *galleryResolver) URL(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.URL.Valid {
return &obj.URL.String, nil
}
return nil, nil
}
func (r *galleryResolver) Details(ctx context.Context, obj *models.Gallery) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}
func (r *galleryResolver) Rating(ctx context.Context, obj *models.Gallery) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret []*models.Scene, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
@ -112,13 +157,13 @@ func (r *galleryResolver) Scenes(ctx context.Context, obj *models.Gallery) (ret
}
func (r *galleryResolver) Studio(ctx context.Context, obj *models.Gallery) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
if obj.StudioID == nil {
return nil, nil
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64))
ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
return err
}); err != nil {
return nil, err
@ -162,15 +207,3 @@ func (r *galleryResolver) ImageCount(ctx context.Context, obj *models.Gallery) (
return ret, nil
}
func (r *galleryResolver) CreatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *galleryResolver) UpdatedAt(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *galleryResolver) FileModTime(ctx context.Context, obj *models.Gallery) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}

View File

@ -2,35 +2,64 @@ package api
import (
"context"
"strconv"
"time"
"github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
)
func (r *imageResolver) Title(ctx context.Context, obj *models.Image) (*string, error) {
ret := image.GetTitle(obj)
ret := obj.GetTitle()
return &ret, nil
}
func (r *imageResolver) Rating(ctx context.Context, obj *models.Image) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*ImageFileType, error) {
f := obj.PrimaryFile()
width := f.Width
height := f.Height
size := f.Size
return &ImageFileType{
Size: int(size),
Width: width,
Height: height,
}, nil
}
func (r *imageResolver) File(ctx context.Context, obj *models.Image) (*models.ImageFileType, error) {
width := int(obj.Width.Int64)
height := int(obj.Height.Int64)
size := int(obj.Size.Int64)
return &models.ImageFileType{
Size: &size,
Width: &width,
Height: &height,
}, nil
func (r *imageResolver) Files(ctx context.Context, obj *models.Image) ([]*ImageFile, error) {
ret := make([]*ImageFile, len(obj.Files))
for i, f := range obj.Files {
ret[i] = &ImageFile{
ID: strconv.Itoa(int(f.ID)),
Path: f.Path,
Basename: f.Basename,
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
ModTime: f.ModTime,
Size: f.Size,
Width: f.Width,
Height: f.Height,
CreatedAt: f.CreatedAt,
UpdatedAt: f.UpdatedAt,
Fingerprints: resolveFingerprints(f.Base()),
}
if f.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*f.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
f := obj.PrimaryFile()
if f != nil {
return &f.ModTime, nil
}
return nil, nil
}
func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePathsType, error) {
@ -47,7 +76,7 @@ func (r *imageResolver) Paths(ctx context.Context, obj *models.Image) (*ImagePat
func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret []*models.Gallery, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
var err error
ret, err = r.repository.Gallery.FindByImageID(ctx, obj.ID)
ret, err = r.repository.Gallery.FindMany(ctx, obj.GalleryIDs)
return err
}); err != nil {
return nil, err
@ -57,12 +86,12 @@ func (r *imageResolver) Galleries(ctx context.Context, obj *models.Image) (ret [
}
func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
if obj.StudioID == nil {
return nil, nil
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64))
ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
return err
}); err != nil {
return nil, err
@ -73,7 +102,7 @@ func (r *imageResolver) Studio(ctx context.Context, obj *models.Image) (ret *mod
func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*models.Tag, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Tag.FindByImageID(ctx, obj.ID)
ret, err = r.repository.Tag.FindMany(ctx, obj.TagIDs)
return err
}); err != nil {
return nil, err
@ -84,7 +113,7 @@ func (r *imageResolver) Tags(ctx context.Context, obj *models.Image) (ret []*mod
func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret []*models.Performer, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Performer.FindByImageID(ctx, obj.ID)
ret, err = r.repository.Performer.FindMany(ctx, obj.PerformerIDs)
return err
}); err != nil {
return nil, err
@ -92,15 +121,3 @@ func (r *imageResolver) Performers(ctx context.Context, obj *models.Image) (ret
return ret, nil
}
func (r *imageResolver) CreatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *imageResolver) UpdatedAt(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *imageResolver) FileModTime(ctx context.Context, obj *models.Image) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}

View File

@ -2,95 +2,115 @@ package api
import (
"context"
"fmt"
"strconv"
"time"
"github.com/stashapp/stash/internal/api/urlbuilders"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/utils"
)
func (r *sceneResolver) Checksum(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Checksum.Valid {
return &obj.Checksum.String, nil
}
return nil, nil
}
func (r *sceneResolver) Oshash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.OSHash.Valid {
return &obj.OSHash.String, nil
}
return nil, nil
}
func (r *sceneResolver) Title(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Title.Valid {
return &obj.Title.String, nil
}
return nil, nil
}
func (r *sceneResolver) Details(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Details.Valid {
return &obj.Details.String, nil
}
return nil, nil
}
func (r *sceneResolver) URL(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.URL.Valid {
return &obj.URL.String, nil
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
if obj.PrimaryFile() != nil {
return &obj.PrimaryFile().ModTime, nil
}
return nil, nil
}
func (r *sceneResolver) Date(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Date.Valid {
result := utils.GetYMDFromDatabaseDate(obj.Date.String)
if obj.Date != nil {
result := obj.Date.String()
return &result, nil
}
return nil, nil
}
func (r *sceneResolver) Rating(ctx context.Context, obj *models.Scene) (*int, error) {
if obj.Rating.Valid {
rating := int(obj.Rating.Int64)
return &rating, nil
}
return nil, nil
}
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
if obj.InteractiveSpeed.Valid {
interactive_speed := int(obj.InteractiveSpeed.Int64)
return &interactive_speed, nil
}
return nil, nil
}
// File is deprecated
func (r *sceneResolver) File(ctx context.Context, obj *models.Scene) (*models.SceneFileType, error) {
width := int(obj.Width.Int64)
height := int(obj.Height.Int64)
bitrate := int(obj.Bitrate.Int64)
f := obj.PrimaryFile()
if f == nil {
return nil, nil
}
bitrate := int(f.BitRate)
size := strconv.FormatInt(f.Size, 10)
return &models.SceneFileType{
Size: &obj.Size.String,
Duration: handleFloat64(obj.Duration.Float64),
VideoCodec: &obj.VideoCodec.String,
AudioCodec: &obj.AudioCodec.String,
Width: &width,
Height: &height,
Framerate: handleFloat64(obj.Framerate.Float64),
Size: &size,
Duration: handleFloat64(f.Duration),
VideoCodec: &f.VideoCodec,
AudioCodec: &f.AudioCodec,
Width: &f.Width,
Height: &f.Height,
Framerate: handleFloat64(f.FrameRate),
Bitrate: &bitrate,
}, nil
}
func (r *sceneResolver) Files(ctx context.Context, obj *models.Scene) ([]*VideoFile, error) {
ret := make([]*VideoFile, len(obj.Files))
for i, f := range obj.Files {
ret[i] = &VideoFile{
ID: strconv.Itoa(int(f.ID)),
Path: f.Path,
Basename: f.Basename,
ParentFolderID: strconv.Itoa(int(f.ParentFolderID)),
ModTime: f.ModTime,
Format: f.Format,
Size: f.Size,
Duration: handleFloat64Value(f.Duration),
VideoCodec: f.VideoCodec,
AudioCodec: f.AudioCodec,
Width: f.Width,
Height: f.Height,
FrameRate: handleFloat64Value(f.FrameRate),
BitRate: int(f.BitRate),
CreatedAt: f.CreatedAt,
UpdatedAt: f.UpdatedAt,
Fingerprints: resolveFingerprints(f.Base()),
}
if f.ZipFileID != nil {
zipFileID := strconv.Itoa(int(*f.ZipFileID))
ret[i].ZipFileID = &zipFileID
}
}
return ret, nil
}
func resolveFingerprints(f *file.BaseFile) []*Fingerprint {
ret := make([]*Fingerprint, len(f.Fingerprints))
for i, fp := range f.Fingerprints {
ret[i] = &Fingerprint{
Type: fp.Type,
Value: formatFingerprint(fp.Fingerprint),
}
}
return ret
}
func formatFingerprint(fp interface{}) string {
switch v := fp.(type) {
case int64:
return strconv.FormatUint(uint64(v), 16)
default:
return fmt.Sprintf("%v", fp)
}
}
func (r *sceneResolver) Paths(ctx context.Context, obj *models.Scene) (*ScenePathsType, error) {
baseURL, _ := ctx.Value(BaseURLCtxKey).(string)
config := manager.GetInstance().Config
builder := urlbuilders.NewSceneURLBuilder(baseURL, obj.ID)
builder.APIKey = config.GetAPIKey()
screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt.Timestamp)
screenshotPath := builder.GetScreenshotURL(obj.UpdatedAt)
previewPath := builder.GetStreamPreviewURL()
streamPath := builder.GetStreamURL()
webpPath := builder.GetStreamPreviewImageURL()
@ -126,9 +146,14 @@ func (r *sceneResolver) SceneMarkers(ctx context.Context, obj *models.Scene) (re
return ret, nil
}
func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.SceneCaption, err error) {
func (r *sceneResolver) Captions(ctx context.Context, obj *models.Scene) (ret []*models.VideoCaption, err error) {
primaryFile := obj.PrimaryFile()
if primaryFile == nil {
return nil, nil
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Scene.GetCaptions(ctx, obj.ID)
ret, err = r.repository.File.GetCaptions(ctx, primaryFile.Base().ID)
return err
}); err != nil {
return nil, err
@ -149,12 +174,12 @@ func (r *sceneResolver) Galleries(ctx context.Context, obj *models.Scene) (ret [
}
func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *models.Studio, err error) {
if !obj.StudioID.Valid {
if obj.StudioID == nil {
return nil, nil
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Studio.Find(ctx, int(obj.StudioID.Int64))
ret, err = r.repository.Studio.Find(ctx, *obj.StudioID)
return err
}); err != nil {
return nil, err
@ -165,15 +190,9 @@ func (r *sceneResolver) Studio(ctx context.Context, obj *models.Scene) (ret *mod
func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*SceneMovie, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
mqb := r.repository.Movie
sceneMovies, err := qb.GetMovies(ctx, obj.ID)
if err != nil {
return err
}
for _, sm := range sceneMovies {
for _, sm := range obj.Movies {
movie, err := mqb.Find(ctx, sm.MovieID)
if err != nil {
return err
@ -181,12 +200,8 @@ func (r *sceneResolver) Movies(ctx context.Context, obj *models.Scene) (ret []*S
sceneIdx := sm.SceneIndex
sceneMovie := &SceneMovie{
Movie: movie,
}
if sceneIdx.Valid {
idx := int(sceneIdx.Int64)
sceneMovie.SceneIndex = &idx
Movie: movie,
SceneIndex: sceneIdx,
}
ret = append(ret, sceneMovie)
@ -221,37 +236,15 @@ func (r *sceneResolver) Performers(ctx context.Context, obj *models.Scene) (ret
return ret, nil
}
func (r *sceneResolver) StashIds(ctx context.Context, obj *models.Scene) (ret []*models.StashID, err error) {
if err := r.withTxn(ctx, func(ctx context.Context) error {
ret, err = r.repository.Scene.GetStashIDs(ctx, obj.ID)
return err
}); err != nil {
return nil, err
}
return ret, nil
}
func (r *sceneResolver) Phash(ctx context.Context, obj *models.Scene) (*string, error) {
if obj.Phash.Valid {
hexval := utils.PhashToString(obj.Phash.Int64)
phash := obj.Phash()
if phash != 0 {
hexval := utils.PhashToString(phash)
return &hexval, nil
}
return nil, nil
}
func (r *sceneResolver) CreatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.CreatedAt.Timestamp, nil
}
func (r *sceneResolver) UpdatedAt(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.UpdatedAt.Timestamp, nil
}
func (r *sceneResolver) FileModTime(ctx context.Context, obj *models.Scene) (*time.Time, error) {
return &obj.FileModTime.Timestamp, nil
}
func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]*manager.SceneStreamEndpoint, error) {
config := manager.GetInstance().Config
@ -260,3 +253,21 @@ func (r *sceneResolver) SceneStreams(ctx context.Context, obj *models.Scene) ([]
return manager.GetSceneStreamPaths(obj, builder.GetStreamURL(), config.GetMaxStreamingTranscodeSize())
}
func (r *sceneResolver) Interactive(ctx context.Context, obj *models.Scene) (bool, error) {
primaryFile := obj.PrimaryFile()
if primaryFile == nil {
return false, nil
}
return primaryFile.Interactive, nil
}
func (r *sceneResolver) InteractiveSpeed(ctx context.Context, obj *models.Scene) (*int, error) {
primaryFile := obj.PrimaryFile()
if primaryFile == nil {
return nil, nil
}
return primaryFile.InteractiveSpeed, nil
}

View File

@ -2,7 +2,6 @@ package api
import (
"context"
"database/sql"
"errors"
"fmt"
"os"
@ -11,8 +10,6 @@ import (
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
@ -38,69 +35,49 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
return nil, errors.New("title must not be empty")
}
// for manually created galleries, generate checksum from title
checksum := md5.FromString(input.Title)
// Populate a new performer from the input
currentTime := time.Now()
newGallery := models.Gallery{
Title: sql.NullString{
String: input.Title,
Valid: true,
},
Checksum: checksum,
CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
Title: input.Title,
CreatedAt: currentTime,
UpdatedAt: currentTime,
}
if input.URL != nil {
newGallery.URL = sql.NullString{String: *input.URL, Valid: true}
newGallery.URL = *input.URL
}
if input.Details != nil {
newGallery.Details = sql.NullString{String: *input.Details, Valid: true}
}
if input.URL != nil {
newGallery.URL = sql.NullString{String: *input.URL, Valid: true}
}
if input.Date != nil {
newGallery.Date = models.SQLiteDate{String: *input.Date, Valid: true}
}
if input.Rating != nil {
newGallery.Rating = sql.NullInt64{Int64: int64(*input.Rating), Valid: true}
} else {
// rating must be nullable
newGallery.Rating = sql.NullInt64{Valid: false}
newGallery.Details = *input.Details
}
if input.Date != nil {
d := models.NewDate(*input.Date)
newGallery.Date = &d
}
newGallery.Rating = input.Rating
if input.StudioID != nil {
studioID, _ := strconv.ParseInt(*input.StudioID, 10, 64)
newGallery.StudioID = sql.NullInt64{Int64: studioID, Valid: true}
} else {
// studio must be nullable
newGallery.StudioID = sql.NullInt64{Valid: false}
studioID, _ := strconv.Atoi(*input.StudioID)
newGallery.StudioID = &studioID
}
var err error
newGallery.PerformerIDs, err = stringslice.StringSliceToIntSlice(input.PerformerIds)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
newGallery.TagIDs, err = stringslice.StringSliceToIntSlice(input.TagIds)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
newGallery.SceneIDs, err = stringslice.StringSliceToIntSlice(input.SceneIds)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
// Start the transaction and save the gallery
var gallery *models.Gallery
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery
var err error
gallery, err = qb.Create(ctx, newGallery)
if err != nil {
return err
}
// Save the performers
if err := r.updateGalleryPerformers(ctx, qb, gallery.ID, input.PerformerIds); err != nil {
return err
}
// Save the tags
if err := r.updateGalleryTags(ctx, qb, gallery.ID, input.TagIds); err != nil {
return err
}
// Save the scenes
if err := r.updateGalleryScenes(ctx, qb, gallery.ID, input.SceneIds); err != nil {
if err := qb.Create(ctx, &newGallery, nil); err != nil {
return err
}
@ -109,38 +86,14 @@ func (r *mutationResolver) GalleryCreate(ctx context.Context, input GalleryCreat
return nil, err
}
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryCreatePost, input, nil)
return r.getGallery(ctx, gallery.ID)
}
func (r *mutationResolver) updateGalleryPerformers(ctx context.Context, qb gallery.PerformerUpdater, galleryID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return qb.UpdatePerformers(ctx, galleryID, ids)
}
func (r *mutationResolver) updateGalleryTags(ctx context.Context, qb gallery.TagUpdater, galleryID int, tagIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagIDs)
if err != nil {
return err
}
return qb.UpdateTags(ctx, galleryID, ids)
r.hookExecutor.ExecutePostHooks(ctx, newGallery.ID, plugin.GalleryCreatePost, input, nil)
return r.getGallery(ctx, newGallery.ID)
}
type GallerySceneUpdater interface {
UpdateScenes(ctx context.Context, galleryID int, sceneIDs []int) error
}
func (r *mutationResolver) updateGalleryScenes(ctx context.Context, qb GallerySceneUpdater, galleryID int, sceneIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(sceneIDs)
if err != nil {
return err
}
return qb.UpdateScenes(ctx, galleryID, ids)
}
func (r *mutationResolver) GalleryUpdate(ctx context.Context, input models.GalleryUpdateInput) (ret *models.Gallery, err error) {
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
@ -219,11 +172,7 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
return nil, errors.New("not found")
}
updatedTime := time.Now()
updatedGallery := models.GalleryPartial{
ID: galleryID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
updatedGallery := models.NewGalleryPartial()
if input.Title != nil {
// ensure title is not empty
@ -231,71 +180,90 @@ func (r *mutationResolver) galleryUpdate(ctx context.Context, input models.Galle
return nil, errors.New("title must not be empty")
}
// if gallery is not zip-based, then generate the checksum from the title
if !originalGallery.Path.Valid {
checksum := md5.FromString(*input.Title)
updatedGallery.Checksum = &checksum
}
updatedGallery.Title = &sql.NullString{String: *input.Title, Valid: true}
updatedGallery.Title = models.NewOptionalString(*input.Title)
}
updatedGallery.Details = translator.nullString(input.Details, "details")
updatedGallery.URL = translator.nullString(input.URL, "url")
updatedGallery.Date = translator.sqliteDate(input.Date, "date")
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating")
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedGallery.Organized = input.Organized
updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Rating = translator.optionalInt(input.Rating, "rating")
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedGallery.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("scene_ids") {
updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
}
// gallery scene is set from the scene only
gallery, err := qb.UpdatePartial(ctx, updatedGallery)
gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
if err != nil {
return nil, err
}
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateGalleryPerformers(ctx, qb, galleryID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateGalleryTags(ctx, qb, galleryID, input.TagIds); err != nil {
return nil, err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
if err := r.updateGalleryScenes(ctx, qb, galleryID, input.SceneIds); err != nil {
return nil, err
}
}
return gallery, nil
}
func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGalleryUpdateInput) ([]*models.Gallery, error) {
// Populate gallery from the input
updatedTime := time.Now()
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedGallery := models.GalleryPartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
updatedGallery := models.NewGalleryPartial()
updatedGallery.Details = translator.optionalString(input.Details, "details")
updatedGallery.URL = translator.optionalString(input.URL, "url")
updatedGallery.Date = translator.optionalDate(input.Date, "date")
updatedGallery.Rating = translator.optionalInt(input.Rating, "rating")
var err error
updatedGallery.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedGallery.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedGallery.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
updatedGallery.Details = translator.nullString(input.Details, "details")
updatedGallery.URL = translator.nullString(input.URL, "url")
updatedGallery.Date = translator.sqliteDate(input.Date, "date")
updatedGallery.Rating = translator.nullInt64(input.Rating, "rating")
updatedGallery.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedGallery.Organized = input.Organized
if translator.hasField("tag_ids") {
updatedGallery.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("scene_ids") {
updatedGallery.SceneIDs, err = translateUpdateIDs(input.SceneIds.Ids, input.SceneIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting scene ids: %w", err)
}
}
ret := []*models.Gallery{}
@ -305,50 +273,13 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
for _, galleryIDStr := range input.Ids {
galleryID, _ := strconv.Atoi(galleryIDStr)
updatedGallery.ID = galleryID
gallery, err := qb.UpdatePartial(ctx, updatedGallery)
gallery, err := qb.UpdatePartial(ctx, galleryID, updatedGallery)
if err != nil {
return err
}
ret = append(ret, gallery)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := adjustGalleryPerformerIDs(ctx, qb, galleryID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(ctx, galleryID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustGalleryTagIDs(ctx, qb, galleryID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(ctx, galleryID, tagIDs); err != nil {
return err
}
}
// Save the scenes
if translator.hasField("scene_ids") {
sceneIDs, err := adjustGallerySceneIDs(ctx, qb, galleryID, *input.SceneIds)
if err != nil {
return err
}
if err := qb.UpdateScenes(ctx, galleryID, sceneIDs); err != nil {
return err
}
}
}
return nil
@ -372,45 +303,10 @@ func (r *mutationResolver) BulkGalleryUpdate(ctx context.Context, input BulkGall
return newRet, nil
}
type GalleryPerformerGetter interface {
GetPerformerIDs(ctx context.Context, galleryID int) ([]int, error)
}
type GalleryTagGetter interface {
GetTagIDs(ctx context.Context, galleryID int) ([]int, error)
}
type GallerySceneGetter interface {
GetSceneIDs(ctx context.Context, galleryID int) ([]int, error)
}
func adjustGalleryPerformerIDs(ctx context.Context, qb GalleryPerformerGetter, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetPerformerIDs(ctx, galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGalleryTagIDs(ctx context.Context, qb GalleryTagGetter, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetTagIDs(ctx, galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func adjustGallerySceneIDs(ctx context.Context, qb GallerySceneGetter, galleryID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = qb.GetSceneIDs(ctx, galleryID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.GalleryDestroyInput) (bool, error) {
galleryIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
@ -420,7 +316,7 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
var galleries []*models.Gallery
var imgsDestroyed []*models.Image
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths,
}
@ -429,7 +325,6 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Gallery
iqb := r.repository.Image
for _, id := range galleryIDs {
gallery, err := qb.Find(ctx, id)
@ -443,53 +338,8 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
galleries = append(galleries, gallery)
// if this is a zip-based gallery, delete the images as well first
if gallery.Zip {
imgs, err := iqb.FindByGalleryID(ctx, id)
if err != nil {
return err
}
for _, img := range imgs {
if err := image.Destroy(ctx, img, iqb, fileDeleter, deleteGenerated, false); err != nil {
return err
}
imgsDestroyed = append(imgsDestroyed, img)
}
if deleteFile {
if err := fileDeleter.Files([]string{gallery.Path.String}); err != nil {
return err
}
}
} else if deleteFile {
// Delete image if it is only attached to this gallery
imgs, err := iqb.FindByGalleryID(ctx, id)
if err != nil {
return err
}
for _, img := range imgs {
imgGalleries, err := qb.FindByImageID(ctx, img.ID)
if err != nil {
return err
}
if len(imgGalleries) == 1 {
if err := image.Destroy(ctx, img, iqb, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err
}
imgsDestroyed = append(imgsDestroyed, img)
}
}
// we only want to delete a folder-based gallery if it is empty.
// don't do this with the file deleter
}
if err := qb.Destroy(ctx, id); err != nil {
imgsDestroyed, err = r.galleryService.Destroy(ctx, gallery, fileDeleter, deleteGenerated, deleteFile)
if err != nil {
return err
}
}
@ -505,10 +355,11 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries {
// don't delete stash library paths
if utils.IsTrue(input.DeleteFile) && !gallery.Zip && gallery.Path.Valid && !isStashPath(gallery.Path.String) {
path := gallery.Path()
if deleteFile && path != "" && !isStashPath(path) {
// try to remove the folder - it is possible that it is not empty
// so swallow the error if present
_ = os.Remove(gallery.Path.String)
_ = os.Remove(path)
}
}
@ -516,16 +367,16 @@ func (r *mutationResolver) GalleryDestroy(ctx context.Context, input models.Gall
for _, gallery := range galleries {
r.hookExecutor.ExecutePostHooks(ctx, gallery.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
GalleryDestroyInput: input,
Checksum: gallery.Checksum,
Path: gallery.Path.String,
Checksum: gallery.Checksum(),
Path: gallery.Path(),
}, nil)
}
// call image destroy post hook as well
for _, img := range imgsDestroyed {
r.hookExecutor.ExecutePostHooks(ctx, img.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: img.Checksum,
Path: img.Path,
Checksum: img.Checksum(),
Path: img.Path(),
}, nil)
}
@ -565,10 +416,6 @@ func (r *mutationResolver) AddGalleryImages(ctx context.Context, input GalleryAd
return errors.New("gallery not found")
}
if gallery.Zip {
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(ctx, galleryID)
if err != nil {
return err
@ -605,10 +452,6 @@ func (r *mutationResolver) RemoveGalleryImages(ctx context.Context, input Galler
return errors.New("gallery not found")
}
if gallery.Zip {
return errors.New("cannot modify zip gallery images")
}
newIDs, err := qb.GetImageIDs(ctx, galleryID)
if err != nil {
return err

View File

@ -4,7 +4,6 @@ import (
"context"
"fmt"
"strconv"
"time"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
@ -93,70 +92,45 @@ func (r *mutationResolver) imageUpdate(ctx context.Context, input ImageUpdateInp
return nil, err
}
updatedTime := time.Now()
updatedImage := models.ImagePartial{
ID: imageID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
updatedImage := models.NewImagePartial()
updatedImage.Title = translator.optionalString(input.Title, "title")
updatedImage.Rating = translator.optionalInt(input.Rating, "rating")
updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedImage.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("gallery_ids") {
updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
updatedImage.Title = translator.nullString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized
if translator.hasField("performer_ids") {
updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
qb := r.repository.Image
image, err := qb.Update(ctx, updatedImage)
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
if err != nil {
return nil, err
}
if translator.hasField("gallery_ids") {
if err := r.updateImageGalleries(ctx, imageID, input.GalleryIds); err != nil {
return nil, err
}
}
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateImagePerformers(ctx, imageID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateImageTags(ctx, imageID, input.TagIds); err != nil {
return nil, err
}
}
return image, nil
}
func (r *mutationResolver) updateImageGalleries(ctx context.Context, imageID int, galleryIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return r.repository.Image.UpdateGalleries(ctx, imageID, ids)
}
func (r *mutationResolver) updateImagePerformers(ctx context.Context, imageID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return r.repository.Image.UpdatePerformers(ctx, imageID, ids)
}
func (r *mutationResolver) updateImageTags(ctx context.Context, imageID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return r.repository.Image.UpdateTags(ctx, imageID, ids)
}
func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageUpdateInput) (ret []*models.Image, err error) {
imageIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
@ -164,70 +138,52 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
}
// Populate image from the input
updatedTime := time.Now()
updatedImage := models.ImagePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
updatedImage := models.NewImagePartial()
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedImage.Title = translator.nullString(input.Title, "title")
updatedImage.Rating = translator.nullInt64(input.Rating, "rating")
updatedImage.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedImage.Organized = input.Organized
updatedImage.Title = translator.optionalString(input.Title, "title")
updatedImage.Rating = translator.optionalInt(input.Rating, "rating")
updatedImage.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedImage.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("gallery_ids") {
updatedImage.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
if translator.hasField("performer_ids") {
updatedImage.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedImage.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
// Start the transaction and save the image marker
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Image
for _, imageID := range imageIDs {
updatedImage.ID = imageID
image, err := qb.Update(ctx, updatedImage)
image, err := qb.UpdatePartial(ctx, imageID, updatedImage)
if err != nil {
return err
}
ret = append(ret, image)
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := r.adjustImageGalleryIDs(ctx, imageID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(ctx, imageID, galleryIDs); err != nil {
return err
}
}
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := r.adjustImagePerformerIDs(ctx, imageID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(ctx, imageID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := r.adjustImageTagIDs(ctx, imageID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(ctx, imageID, tagIDs); err != nil {
return err
}
}
}
return nil
@ -251,33 +207,6 @@ func (r *mutationResolver) BulkImageUpdate(ctx context.Context, input BulkImageU
return newRet, nil
}
func (r *mutationResolver) adjustImageGalleryIDs(ctx context.Context, imageID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Image.GetGalleryIDs(ctx, imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustImagePerformerIDs(ctx context.Context, imageID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Image.GetPerformerIDs(ctx, imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustImageTagIDs(ctx context.Context, imageID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Image.GetTagIDs(ctx, imageID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageDestroyInput) (ret bool, err error) {
imageID, err := strconv.Atoi(input.ID)
if err != nil {
@ -286,12 +215,10 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
var i *models.Image
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Image
i, err = r.repository.Image.Find(ctx, imageID)
if err != nil {
return err
@ -301,7 +228,7 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
return fmt.Errorf("image with id %d not found", imageID)
}
return image.Destroy(ctx, i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
return r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile))
}); err != nil {
fileDeleter.Rollback()
return false, err
@ -313,8 +240,8 @@ func (r *mutationResolver) ImageDestroy(ctx context.Context, input models.ImageD
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
ImageDestroyInput: input,
Checksum: i.Checksum,
Path: i.Path,
Checksum: i.Checksum(),
Path: i.Path(),
}, nil)
return true, nil
@ -328,14 +255,13 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
var images []*models.Image
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
Paths: manager.GetInstance().Paths,
}
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Image
for _, imageID := range imageIDs {
i, err := qb.Find(ctx, imageID)
if err != nil {
return err
@ -347,7 +273,7 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
images = append(images, i)
if err := image.Destroy(ctx, i, qb, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
if err := r.imageService.Destroy(ctx, i, fileDeleter, utils.IsTrue(input.DeleteGenerated), utils.IsTrue(input.DeleteFile)); err != nil {
return err
}
}
@ -365,8 +291,8 @@ func (r *mutationResolver) ImagesDestroy(ctx context.Context, input models.Image
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, image.ID, plugin.ImageDestroyPost, plugin.ImagesDestroyInput{
ImagesDestroyInput: input,
Checksum: image.Checksum,
Path: image.Path,
Checksum: image.Checksum(),
Path: image.Path(),
}, nil)
}

View File

@ -152,7 +152,7 @@ func (r *mutationResolver) PerformerCreate(ctx context.Context, input PerformerC
// Save the stash_ids
if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, performer.ID, stashIDJoins); err != nil {
return err
}
@ -275,7 +275,7 @@ func (r *mutationResolver) PerformerUpdate(ctx context.Context, input PerformerU
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, performerID, stashIDJoins); err != nil {
return err
}

View File

@ -98,19 +98,55 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
var coverImageData []byte
updatedTime := time.Now()
updatedScene := models.ScenePartial{
ID: sceneID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
updatedScene := models.NewScenePartial()
updatedScene.Title = translator.optionalString(input.Title, "title")
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.URL = translator.optionalString(input.URL, "url")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedScene.Title = translator.nullString(input.Title, "title")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url")
updatedScene.Date = translator.sqliteDate(input.Date, "date")
updatedScene.Rating = translator.nullInt64(input.Rating, "rating")
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedScene.Organized = input.Organized
updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("gallery_ids") {
updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds, models.RelationshipUpdateModeSet)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
// Save the movies
if translator.hasField("movies") {
updatedScene.MovieIDs, err = models.UpdateMovieIDsFromInput(input.Movies)
if err != nil {
return nil, fmt.Errorf("converting movie ids: %w", err)
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
updatedScene.StashIDs = &models.UpdateStashIDs{
StashIDs: input.StashIds,
Mode: models.RelationshipUpdateModeSet,
}
}
if input.CoverImage != nil && *input.CoverImage != "" {
var err error
@ -123,7 +159,7 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
qb := r.repository.Scene
s, err := qb.Update(ctx, updatedScene)
s, err := qb.UpdatePartial(ctx, sceneID, updatedScene)
if err != nil {
return nil, err
}
@ -135,42 +171,6 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
}
}
// Save the performers
if translator.hasField("performer_ids") {
if err := r.updateScenePerformers(ctx, sceneID, input.PerformerIds); err != nil {
return nil, err
}
}
// Save the movies
if translator.hasField("movies") {
if err := r.updateSceneMovies(ctx, sceneID, input.Movies); err != nil {
return nil, err
}
}
// Save the tags
if translator.hasField("tag_ids") {
if err := r.updateSceneTags(ctx, sceneID, input.TagIds); err != nil {
return nil, err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
if err := r.updateSceneGalleries(ctx, sceneID, input.GalleryIds); err != nil {
return nil, err
}
}
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
if err := qb.UpdateStashIDs(ctx, sceneID, stashIDJoins); err != nil {
return nil, err
}
}
// only update the cover image if provided and everything else was successful
if coverImageData != nil {
err = scene.SetScreenshot(manager.GetInstance().Paths, s.GetHash(config.GetInstance().GetVideoFileNamingAlgorithm()), coverImageData)
@ -182,56 +182,6 @@ func (r *mutationResolver) sceneUpdate(ctx context.Context, input models.SceneUp
return s, nil
}
func (r *mutationResolver) updateScenePerformers(ctx context.Context, sceneID int, performerIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(performerIDs)
if err != nil {
return err
}
return r.repository.Scene.UpdatePerformers(ctx, sceneID, ids)
}
func (r *mutationResolver) updateSceneMovies(ctx context.Context, sceneID int, movies []*models.SceneMovieInput) error {
var movieJoins []models.MoviesScenes
for _, movie := range movies {
movieID, err := strconv.Atoi(movie.MovieID)
if err != nil {
return err
}
movieJoin := models.MoviesScenes{
MovieID: movieID,
}
if movie.SceneIndex != nil {
movieJoin.SceneIndex = sql.NullInt64{
Int64: int64(*movie.SceneIndex),
Valid: true,
}
}
movieJoins = append(movieJoins, movieJoin)
}
return r.repository.Scene.UpdateMovies(ctx, sceneID, movieJoins)
}
func (r *mutationResolver) updateSceneTags(ctx context.Context, sceneID int, tagsIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(tagsIDs)
if err != nil {
return err
}
return r.repository.Scene.UpdateTags(ctx, sceneID, ids)
}
func (r *mutationResolver) updateSceneGalleries(ctx context.Context, sceneID int, galleryIDs []string) error {
ids, err := stringslice.StringSliceToIntSlice(galleryIDs)
if err != nil {
return err
}
return r.repository.Scene.UpdateGalleries(ctx, sceneID, ids)
}
func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneUpdateInput) ([]*models.Scene, error) {
sceneIDs, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
@ -239,23 +189,51 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
}
// Populate scene from the input
updatedTime := time.Now()
translator := changesetTranslator{
inputMap: getUpdateInputMap(ctx),
}
updatedScene := models.ScenePartial{
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
updatedScene := models.NewScenePartial()
updatedScene.Title = translator.optionalString(input.Title, "title")
updatedScene.Details = translator.optionalString(input.Details, "details")
updatedScene.URL = translator.optionalString(input.URL, "url")
updatedScene.Date = translator.optionalDate(input.Date, "date")
updatedScene.Rating = translator.optionalInt(input.Rating, "rating")
updatedScene.StudioID, err = translator.optionalIntFromString(input.StudioID, "studio_id")
if err != nil {
return nil, fmt.Errorf("converting studio id: %w", err)
}
updatedScene.Title = translator.nullString(input.Title, "title")
updatedScene.Details = translator.nullString(input.Details, "details")
updatedScene.URL = translator.nullString(input.URL, "url")
updatedScene.Date = translator.sqliteDate(input.Date, "date")
updatedScene.Rating = translator.nullInt64(input.Rating, "rating")
updatedScene.StudioID = translator.nullInt64FromString(input.StudioID, "studio_id")
updatedScene.Organized = input.Organized
updatedScene.Organized = translator.optionalBool(input.Organized, "organized")
if translator.hasField("performer_ids") {
updatedScene.PerformerIDs, err = translateUpdateIDs(input.PerformerIds.Ids, input.PerformerIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting performer ids: %w", err)
}
}
if translator.hasField("tag_ids") {
updatedScene.TagIDs, err = translateUpdateIDs(input.TagIds.Ids, input.TagIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting tag ids: %w", err)
}
}
if translator.hasField("gallery_ids") {
updatedScene.GalleryIDs, err = translateUpdateIDs(input.GalleryIds.Ids, input.GalleryIds.Mode)
if err != nil {
return nil, fmt.Errorf("converting gallery ids: %w", err)
}
}
// Save the movies
if translator.hasField("movies") {
updatedScene.MovieIDs, err = translateSceneMovieIDs(*input.MovieIds)
if err != nil {
return nil, fmt.Errorf("converting movie ids: %w", err)
}
}
ret := []*models.Scene{}
@ -264,62 +242,12 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
qb := r.repository.Scene
for _, sceneID := range sceneIDs {
updatedScene.ID = sceneID
scene, err := qb.Update(ctx, updatedScene)
scene, err := qb.UpdatePartial(ctx, sceneID, updatedScene)
if err != nil {
return err
}
ret = append(ret, scene)
// Save the performers
if translator.hasField("performer_ids") {
performerIDs, err := r.adjustScenePerformerIDs(ctx, sceneID, *input.PerformerIds)
if err != nil {
return err
}
if err := qb.UpdatePerformers(ctx, sceneID, performerIDs); err != nil {
return err
}
}
// Save the tags
if translator.hasField("tag_ids") {
tagIDs, err := adjustTagIDs(ctx, qb, sceneID, *input.TagIds)
if err != nil {
return err
}
if err := qb.UpdateTags(ctx, sceneID, tagIDs); err != nil {
return err
}
}
// Save the galleries
if translator.hasField("gallery_ids") {
galleryIDs, err := r.adjustSceneGalleryIDs(ctx, sceneID, *input.GalleryIds)
if err != nil {
return err
}
if err := qb.UpdateGalleries(ctx, sceneID, galleryIDs); err != nil {
return err
}
}
// Save the movies
if translator.hasField("movie_ids") {
movies, err := r.adjustSceneMovieIDs(ctx, sceneID, *input.MovieIds)
if err != nil {
return err
}
if err := qb.UpdateMovies(ctx, sceneID, movies); err != nil {
return err
}
}
}
return nil
@ -345,7 +273,7 @@ func (r *mutationResolver) BulkSceneUpdate(ctx context.Context, input BulkSceneU
func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
// if we are setting the ids, just return the ids
if updateIDs.Mode == BulkUpdateIDModeSet {
if updateIDs.Mode == models.RelationshipUpdateModeSet {
existingIDs = []int{}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
@ -362,7 +290,7 @@ func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
foundExisting := false
for idx, existingID := range existingIDs {
if existingID == id {
if updateIDs.Mode == BulkUpdateIDModeRemove {
if updateIDs.Mode == models.RelationshipUpdateModeRemove {
// remove from the list
existingIDs = append(existingIDs[:idx], existingIDs[idx+1:]...)
}
@ -372,7 +300,7 @@ func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
}
}
if !foundExisting && updateIDs.Mode != BulkUpdateIDModeRemove {
if !foundExisting && updateIDs.Mode != models.RelationshipUpdateModeRemove {
existingIDs = append(existingIDs, id)
}
}
@ -380,15 +308,6 @@ func adjustIDs(existingIDs []int, updateIDs BulkUpdateIds) []int {
return existingIDs
}
func (r *mutationResolver) adjustScenePerformerIDs(ctx context.Context, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Scene.GetPerformerIDs(ctx, sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
type tagIDsGetter interface {
GetTagIDs(ctx context.Context, id int) ([]int, error)
}
@ -402,57 +321,6 @@ func adjustTagIDs(ctx context.Context, qb tagIDsGetter, sceneID int, ids BulkUpd
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustSceneGalleryIDs(ctx context.Context, sceneID int, ids BulkUpdateIds) (ret []int, err error) {
ret, err = r.repository.Scene.GetGalleryIDs(ctx, sceneID)
if err != nil {
return nil, err
}
return adjustIDs(ret, ids), nil
}
func (r *mutationResolver) adjustSceneMovieIDs(ctx context.Context, sceneID int, updateIDs BulkUpdateIds) ([]models.MoviesScenes, error) {
existingMovies, err := r.repository.Scene.GetMovies(ctx, sceneID)
if err != nil {
return nil, err
}
// if we are setting the ids, just return the ids
if updateIDs.Mode == BulkUpdateIDModeSet {
existingMovies = []models.MoviesScenes{}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
}
return existingMovies, nil
}
for _, idStr := range updateIDs.Ids {
id, _ := strconv.Atoi(idStr)
// look for the id in the list
foundExisting := false
for idx, existingMovie := range existingMovies {
if existingMovie.MovieID == id {
if updateIDs.Mode == BulkUpdateIDModeRemove {
// remove from the list
existingMovies = append(existingMovies[:idx], existingMovies[idx+1:]...)
}
foundExisting = true
break
}
}
if !foundExisting && updateIDs.Mode != BulkUpdateIDModeRemove {
existingMovies = append(existingMovies, models.MoviesScenes{MovieID: id})
}
}
return existingMovies, err
}
func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneDestroyInput) (bool, error) {
sceneID, err := strconv.Atoi(input.ID)
if err != nil {
@ -463,7 +331,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
var s *models.Scene
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
@ -486,7 +354,7 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo)
return scene.Destroy(ctx, s, r.repository.Scene, r.repository.SceneMarker, fileDeleter, deleteGenerated, deleteFile)
return r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile)
}); err != nil {
fileDeleter.Rollback()
return false, err
@ -498,9 +366,9 @@ func (r *mutationResolver) SceneDestroy(ctx context.Context, input models.SceneD
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, s.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
SceneDestroyInput: input,
Checksum: s.Checksum.String,
OSHash: s.OSHash.String,
Path: s.Path,
Checksum: s.Checksum(),
OSHash: s.OSHash(),
Path: s.Path(),
}, nil)
return true, nil
@ -511,7 +379,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
@ -536,7 +404,7 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// kill any running encoders
manager.KillRunningStreams(s, fileNamingAlgo)
if err := scene.Destroy(ctx, s, r.repository.Scene, r.repository.SceneMarker, fileDeleter, deleteGenerated, deleteFile); err != nil {
if err := r.sceneService.Destroy(ctx, s, fileDeleter, deleteGenerated, deleteFile); err != nil {
return err
}
}
@ -554,9 +422,9 @@ func (r *mutationResolver) ScenesDestroy(ctx context.Context, input models.Scene
// call post hook after performing the other actions
r.hookExecutor.ExecutePostHooks(ctx, scene.ID, plugin.SceneDestroyPost, plugin.ScenesDestroyInput{
ScenesDestroyInput: input,
Checksum: scene.Checksum.String,
OSHash: scene.OSHash.String,
Path: scene.Path,
Checksum: scene.Checksum(),
OSHash: scene.OSHash(),
Path: scene.Path(),
}, nil)
}
@ -661,7 +529,7 @@ func (r *mutationResolver) SceneMarkerDestroy(ctx context.Context, id string) (b
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}
@ -707,7 +575,7 @@ func (r *mutationResolver) changeMarker(ctx context.Context, changeType int, cha
fileNamingAlgo := manager.GetInstance().Config.GetVideoFileNamingAlgorithm()
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
Deleter: file.NewDeleter(),
FileNamingAlgo: fileNamingAlgo,
Paths: manager.GetInstance().Paths,
}

View File

@ -90,7 +90,7 @@ func (r *mutationResolver) StudioCreate(ctx context.Context, input StudioCreateI
// Save the stash_ids
if input.StashIds != nil {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, s.ID, stashIDJoins); err != nil {
return err
}
@ -182,7 +182,7 @@ func (r *mutationResolver) StudioUpdate(ctx context.Context, input StudioUpdateI
// Save the stash_ids
if translator.hasField("stash_ids") {
stashIDJoins := models.StashIDsFromInput(input.StashIds)
stashIDJoins := input.StashIds
if err := qb.UpdateStashIDs(ctx, studioID, stashIDJoins); err != nil {
return err
}

View File

@ -5,6 +5,7 @@ import (
"errors"
"testing"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stashapp/stash/pkg/plugin"
@ -15,9 +16,13 @@ import (
// TODO - move this into a common area
func newResolver() *Resolver {
txnMgr := &mocks.TxnManager{}
return &Resolver{
txnManager: &mocks.TxnManager{},
repository: mocks.NewTxnRepository(),
txnManager: txnMgr,
repository: manager.Repository{
TxnManager: txnMgr,
Tag: &mocks.TagReaderWriter{},
},
hookExecutor: &mockHookExecutor{},
}
}

View File

@ -27,7 +27,15 @@ func (r *queryResolver) FindImage(ctx context.Context, id *string, checksum *str
return err
}
} else if checksum != nil {
image, err = qb.FindByChecksum(ctx, *checksum)
var images []*models.Image
images, err = qb.FindByChecksum(ctx, *checksum)
if err != nil {
return err
}
if len(images) > 0 {
image = images[0]
}
}
return err

View File

@ -25,7 +25,11 @@ func (r *queryResolver) FindScene(ctx context.Context, id *string, checksum *str
return err
}
} else if checksum != nil {
scene, err = qb.FindByChecksum(ctx, *checksum)
var scenes []*models.Scene
scenes, err = qb.FindByChecksum(ctx, *checksum)
if len(scenes) > 0 {
scene = scenes[0]
}
}
return err
@ -41,19 +45,24 @@ func (r *queryResolver) FindSceneByHash(ctx context.Context, input SceneHashInpu
if err := r.withTxn(ctx, func(ctx context.Context) error {
qb := r.repository.Scene
var err error
if input.Checksum != nil {
scene, err = qb.FindByChecksum(ctx, *input.Checksum)
scenes, err := qb.FindByChecksum(ctx, *input.Checksum)
if err != nil {
return err
}
if len(scenes) > 0 {
scene = scenes[0]
}
}
if scene == nil && input.Oshash != nil {
scene, err = qb.FindByOSHash(ctx, *input.Oshash)
scenes, err := qb.FindByOSHash(ctx, *input.Oshash)
if err != nil {
return err
}
if len(scenes) > 0 {
scene = scenes[0]
}
}
return nil
@ -77,9 +86,14 @@ func (r *queryResolver) FindScenes(ctx context.Context, sceneFilter *models.Scen
if err == nil {
result.Count = len(scenes)
for _, s := range scenes {
result.TotalDuration += s.Duration.Float64
size, _ := strconv.ParseFloat(s.Size.String, 64)
result.TotalSize += size
f := s.PrimaryFile()
if f == nil {
continue
}
result.TotalDuration += f.Duration
result.TotalSize += float64(f.Size)
}
}
} else {

View File

@ -9,6 +9,7 @@ import (
"github.com/go-chi/chi"
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger"
@ -18,7 +19,7 @@ import (
type ImageFinder interface {
Find(ctx context.Context, id int) (*models.Image, error)
FindByChecksum(ctx context.Context, checksum string) (*models.Image, error)
FindByChecksum(ctx context.Context, checksum string) ([]*models.Image, error)
}
type imageRoutes struct {
@ -43,7 +44,7 @@ func (rs imageRoutes) Routes() chi.Router {
func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
img := r.Context().Value(imageKey).(*models.Image)
filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum, models.DefaultGthumbWidth)
filepath := manager.GetInstance().Paths.Generated.GetThumbnailPath(img.Checksum(), models.DefaultGthumbWidth)
w.Header().Add("Cache-Control", "max-age=604800000")
@ -52,8 +53,16 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
if exists {
http.ServeFile(w, r, filepath)
} else {
// don't return anything if there is no file
f := img.PrimaryFile()
if f == nil {
// TODO - probably want to return a placeholder
http.Error(w, http.StatusText(404), 404)
return
}
encoder := image.NewThumbnailEncoder(manager.GetInstance().FFMPEG)
data, err := encoder.GetThumbnail(img, models.DefaultGthumbWidth)
data, err := encoder.GetThumbnail(f, models.DefaultGthumbWidth)
if err != nil {
// don't log for unsupported image format
if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
@ -72,7 +81,7 @@ func (rs imageRoutes) Thumbnail(w http.ResponseWriter, r *http.Request) {
// write the generated thumbnail to disk if enabled
if manager.GetInstance().Config.IsWriteImageThumbnails() {
logger.Debugf("writing thumbnail to disk: %s", img.Path)
logger.Debugf("writing thumbnail to disk: %s", img.Path())
if err := fsutil.WriteFile(filepath, data); err != nil {
logger.Errorf("error writing thumbnail for image %s: %s", img.Path, err)
}
@ -87,7 +96,13 @@ func (rs imageRoutes) Image(w http.ResponseWriter, r *http.Request) {
i := r.Context().Value(imageKey).(*models.Image)
// if image is in a zip file, we need to serve it specifically
image.Serve(w, r, i.Path)
if len(i.Files) == 0 {
http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
return
}
i.Files[0].Serve(&file.OsFS{}, w, r)
}
// endregion
@ -101,7 +116,10 @@ func (rs imageRoutes) ImageCtx(next http.Handler) http.Handler {
readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
qb := rs.imageFinder
if imageID == 0 {
image, _ = qb.FindByChecksum(ctx, imageIdentifierQueryParam)
images, _ := qb.FindByChecksum(ctx, imageIdentifierQueryParam)
if len(images) > 0 {
image = images[0]
}
} else {
image, _ = qb.Find(ctx, imageID)
}

View File

@ -11,6 +11,8 @@ import (
"github.com/stashapp/stash/internal/manager"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@ -23,9 +25,8 @@ type SceneFinder interface {
manager.SceneCoverGetter
scene.IDFinder
FindByChecksum(ctx context.Context, checksum string) (*models.Scene, error)
FindByOSHash(ctx context.Context, oshash string) (*models.Scene, error)
GetCaptions(ctx context.Context, sceneID int) ([]*models.SceneCaption, error)
FindByChecksum(ctx context.Context, checksum string) ([]*models.Scene, error)
FindByOSHash(ctx context.Context, oshash string) ([]*models.Scene, error)
}
type SceneMarkerFinder interface {
@ -33,9 +34,14 @@ type SceneMarkerFinder interface {
FindBySceneID(ctx context.Context, sceneID int) ([]*models.SceneMarker, error)
}
type CaptionFinder interface {
GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
}
type sceneRoutes struct {
txnManager txn.Manager
sceneFinder SceneFinder
captionFinder CaptionFinder
sceneMarkerFinder SceneMarkerFinder
tagFinder scene.MarkerTagFinder
}
@ -116,7 +122,7 @@ func (rs sceneRoutes) StreamHLS(w http.ResponseWriter, r *http.Request) {
scene := r.Context().Value(sceneKey).(*models.Scene)
ffprobe := manager.GetInstance().FFProbe
videoFile, err := ffprobe.NewVideoFile(scene.Path)
videoFile, err := ffprobe.NewVideoFile(scene.Path())
if err != nil {
logger.Errorf("[stream] error reading video file: %v", err)
return
@ -149,9 +155,11 @@ func (rs sceneRoutes) StreamTS(w http.ResponseWriter, r *http.Request) {
}
func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, streamFormat ffmpeg.StreamFormat) {
logger.Debugf("Streaming as %s", streamFormat.MimeType)
scene := r.Context().Value(sceneKey).(*models.Scene)
f := scene.PrimaryFile()
logger.Debugf("Streaming as %s", streamFormat.MimeType)
// start stream based on query param, if provided
if err := r.ParseForm(); err != nil {
logger.Warnf("[stream] error parsing query form: %v", err)
@ -162,17 +170,20 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, st
requestedSize := r.Form.Get("resolution")
audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec.String)
if f.AudioCodec != "" {
audioCodec = ffmpeg.ProbeAudioCodec(f.AudioCodec)
}
width := f.Width
height := f.Height
options := ffmpeg.TranscodeStreamOptions{
Input: scene.Path,
Input: f.Path,
Codec: streamFormat,
VideoOnly: audioCodec == ffmpeg.MissingUnsupported,
VideoWidth: int(scene.Width.Int64),
VideoHeight: int(scene.Height.Int64),
VideoWidth: width,
VideoHeight: height,
StartTime: ss,
MaxTranscodeSize: config.GetInstance().GetMaxStreamingTranscodeSize().GetMaxResolution(),
@ -186,7 +197,7 @@ func (rs sceneRoutes) streamTranscode(w http.ResponseWriter, r *http.Request, st
lm := manager.GetInstance().ReadLockManager
streamRequestCtx := manager.NewStreamRequestContext(w, r)
lockCtx := lm.ReadLock(streamRequestCtx, scene.Path)
lockCtx := lm.ReadLock(streamRequestCtx, f.Path)
defer lockCtx.Cancel()
stream, err := encoder.GetTranscodeStream(lockCtx, options)
@ -295,7 +306,7 @@ func (rs sceneRoutes) ChapterVtt(w http.ResponseWriter, r *http.Request) {
func (rs sceneRoutes) Funscript(w http.ResponseWriter, r *http.Request) {
s := r.Context().Value(sceneKey).(*models.Scene)
funscript := scene.GetFunscriptPath(s.Path)
funscript := video.GetFunscriptPath(s.Path())
serveFileNoCache(w, r, funscript)
}
@ -311,10 +322,15 @@ func (rs sceneRoutes) Caption(w http.ResponseWriter, r *http.Request, lang strin
if err := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
var err error
captions, err := rs.sceneFinder.GetCaptions(ctx, s.ID)
primaryFile := s.PrimaryFile()
if primaryFile == nil {
return nil
}
captions, err := rs.captionFinder.GetCaptions(ctx, primaryFile.Base().ID)
for _, caption := range captions {
if lang == caption.LanguageCode && ext == caption.CaptionType {
sub, err := scene.ReadSubs(caption.Path(s.Path))
sub, err := video.ReadSubs(caption.Path(s.Path()))
if err == nil {
var b bytes.Buffer
err = sub.WriteToWebVTT(&b)
@ -460,11 +476,17 @@ func (rs sceneRoutes) SceneCtx(next http.Handler) http.Handler {
readTxnErr := txn.WithTxn(r.Context(), rs.txnManager, func(ctx context.Context) error {
qb := rs.sceneFinder
if sceneID == 0 {
var scenes []*models.Scene
// determine checksum/os by the length of the query param
if len(sceneIdentifierQueryParam) == 32 {
scene, _ = qb.FindByChecksum(ctx, sceneIdentifierQueryParam)
scenes, _ = qb.FindByChecksum(ctx, sceneIdentifierQueryParam)
} else {
scene, _ = qb.FindByOSHash(ctx, sceneIdentifierQueryParam)
scenes, _ = qb.FindByOSHash(ctx, sceneIdentifierQueryParam)
}
if len(scenes) > 0 {
scene = scenes[0]
}
} else {
scene, _ = qb.Find(ctx, sceneID)

View File

@ -75,10 +75,16 @@ func Start() error {
txnManager := manager.GetInstance().Repository
pluginCache := manager.GetInstance().PluginCache
sceneService := manager.GetInstance().SceneService
imageService := manager.GetInstance().ImageService
galleryService := manager.GetInstance().GalleryService
resolver := &Resolver{
txnManager: txnManager,
repository: txnManager,
hookExecutor: pluginCache,
txnManager: txnManager,
repository: txnManager,
sceneService: sceneService,
imageService: imageService,
galleryService: galleryService,
hookExecutor: pluginCache,
}
gqlSrv := gqlHandler.New(NewExecutableSchema(Config{Resolvers: resolver}))
@ -125,6 +131,7 @@ func Start() error {
r.Mount("/scene", sceneRoutes{
txnManager: txnManager,
sceneFinder: txnManager.Scene,
captionFinder: txnManager.File,
sceneMarkerFinder: txnManager.SceneMarker,
tagFinder: txnManager.Tag,
}.Routes())

View File

@ -1,6 +1,12 @@
package api
import "math"
import (
"fmt"
"math"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sliceutil/stringslice"
)
// An enum https://golang.org/ref/spec#Iota
const (
@ -17,3 +23,41 @@ func handleFloat64(v float64) *float64 {
return &v
}
func handleFloat64Value(v float64) float64 {
if math.IsInf(v, 0) || math.IsNaN(v) {
return 0
}
return v
}
func translateUpdateIDs(strIDs []string, mode models.RelationshipUpdateMode) (*models.UpdateIDs, error) {
ids, err := stringslice.StringSliceToIntSlice(strIDs)
if err != nil {
return nil, fmt.Errorf("converting ids [%v]: %w", strIDs, err)
}
return &models.UpdateIDs{
IDs: ids,
Mode: mode,
}, nil
}
func translateSceneMovieIDs(input BulkUpdateIds) (*models.UpdateMovieIDs, error) {
ids, err := stringslice.StringSliceToIntSlice(input.Ids)
if err != nil {
return nil, fmt.Errorf("converting ids [%v]: %w", input.Ids, err)
}
ret := &models.UpdateMovieIDs{
Mode: input.Mode,
}
for _, id := range ids {
ret.Movies = append(ret.Movies, models.MoviesScenes{
MovieID: id,
})
}
return ret, nil
}

View File

@ -1,8 +1,9 @@
package urlbuilders
import (
"github.com/stashapp/stash/pkg/models"
"strconv"
"github.com/stashapp/stash/pkg/models"
)
type ImageURLBuilder struct {
@ -15,7 +16,7 @@ func NewImageURLBuilder(baseURL string, image *models.Image) ImageURLBuilder {
return ImageURLBuilder{
BaseURL: baseURL,
ImageID: strconv.Itoa(image.ID),
UpdatedAt: strconv.FormatInt(image.UpdatedAt.Timestamp.Unix(), 10),
UpdatedAt: strconv.FormatInt(image.UpdatedAt.Unix(), 10),
}
}

View File

@ -9,25 +9,30 @@ import (
)
func getGalleryFileTagger(s *models.Gallery, cache *match.Cache) tagger {
var path string
if s.Path() != "" {
path = s.Path()
}
// only trim the extension if gallery is file-based
trimExt := s.Zip
trimExt := s.PrimaryFile() != nil
return tagger{
ID: s.ID,
Type: "gallery",
Name: s.GetTitle(),
Path: s.Path.String,
Path: path,
trimExt: trimExt,
cache: cache,
}
}
// GalleryPerformers tags the provided gallery with performers whose name matches the gallery's path.
func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.PerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getGalleryFileTagger(s, cache)
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddPerformer(ctx, rw, subjectID, otherID)
return gallery.AddPerformer(ctx, rw, s, otherID)
})
}
@ -35,7 +40,7 @@ func GalleryPerformers(ctx context.Context, s *models.Gallery, rw gallery.Perfor
//
// Gallerys will not be tagged if studio is already set.
func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid {
if s.StudioID != nil {
// don't modify
return nil
}
@ -43,15 +48,15 @@ func GalleryStudios(ctx context.Context, s *models.Gallery, rw GalleryFinderUpda
t := getGalleryFileTagger(s, cache)
return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addGalleryStudio(ctx, rw, subjectID, otherID)
return addGalleryStudio(ctx, rw, s, otherID)
})
}
// GalleryTags tags the provided gallery with tags whose name matches the gallery's path.
func GalleryTags(ctx context.Context, s *models.Gallery, rw gallery.TagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
func GalleryTags(ctx context.Context, s *models.Gallery, rw gallery.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getGalleryFileTagger(s, cache)
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return gallery.AddTag(ctx, rw, subjectID, otherID)
return gallery.AddTag(ctx, rw, s, otherID)
})
}

View File

@ -4,6 +4,7 @@ import (
"context"
"testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
@ -44,13 +45,21 @@ func TestGalleryPerformers(t *testing.T) {
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockGalleryReader.On("GetPerformerIDs", testCtx, galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdatePerformers", testCtx, galleryID, []int{performerID}).Return(nil).Once()
mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
gallery := models.Gallery{
ID: galleryID,
Path: models.NullString(test.Path),
ID: galleryID,
Files: []file.File{
&file.BaseFile{
Path: test.Path,
},
},
}
err := GalleryPerformers(testCtx, &gallery, mockGalleryReader, mockPerformerReader, nil)
@ -65,7 +74,7 @@ func TestGalleryStudios(t *testing.T) {
const galleryID = 1
const studioName = "studio name"
const studioID = 2
var studioID = 2
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
@ -84,17 +93,19 @@ func TestGalleryStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
if test.Matches {
mockGalleryReader.On("Find", testCtx, galleryID).Return(&models.Gallery{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockGalleryReader.On("UpdatePartial", testCtx, models.GalleryPartial{
ID: galleryID,
StudioID: &expectedStudioID,
expectedStudioID := studioID
mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
StudioID: models.NewOptionalInt(expectedStudioID),
}).Return(nil, nil).Once()
}
gallery := models.Gallery{
ID: galleryID,
Path: models.NullString(test.Path),
ID: galleryID,
Files: []file.File{
&file.BaseFile{
Path: test.Path,
},
},
}
err := GalleryStudios(testCtx, &gallery, mockGalleryReader, mockStudioReader, nil)
@ -157,13 +168,21 @@ func TestGalleryTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockGalleryReader *mocks.GalleryReaderWriter, test pathTestTable) {
if test.Matches {
mockGalleryReader.On("GetTagIDs", testCtx, galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdateTags", testCtx, galleryID, []int{tagID}).Return(nil).Once()
mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
gallery := models.Gallery{
ID: galleryID,
Path: models.NullString(test.Path),
ID: galleryID,
Files: []file.File{
&file.BaseFile{
Path: test.Path,
},
},
}
err := GalleryTags(testCtx, &gallery, mockGalleryReader, mockTagReader, nil)

View File

@ -13,17 +13,17 @@ func getImageFileTagger(s *models.Image, cache *match.Cache) tagger {
ID: s.ID,
Type: "image",
Name: s.GetTitle(),
Path: s.Path,
Path: s.Path(),
cache: cache,
}
}
// ImagePerformers tags the provided image with performers whose name matches the image's path.
func ImagePerformers(ctx context.Context, s *models.Image, rw image.PerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
func ImagePerformers(ctx context.Context, s *models.Image, rw image.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getImageFileTagger(s, cache)
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return image.AddPerformer(ctx, rw, subjectID, otherID)
return image.AddPerformer(ctx, rw, s, otherID)
})
}
@ -31,7 +31,7 @@ func ImagePerformers(ctx context.Context, s *models.Image, rw image.PerformerUpd
//
// Images will not be tagged if studio is already set.
func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid {
if s.StudioID != nil {
// don't modify
return nil
}
@ -39,15 +39,15 @@ func ImageStudios(ctx context.Context, s *models.Image, rw ImageFinderUpdater, s
t := getImageFileTagger(s, cache)
return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addImageStudio(ctx, rw, subjectID, otherID)
return addImageStudio(ctx, rw, s, otherID)
})
}
// ImageTags tags the provided image with tags whose name matches the image's path.
func ImageTags(ctx context.Context, s *models.Image, rw image.TagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
func ImageTags(ctx context.Context, s *models.Image, rw image.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getImageFileTagger(s, cache)
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return image.AddTag(ctx, rw, subjectID, otherID)
return image.AddTag(ctx, rw, s, otherID)
})
}

View File

@ -3,6 +3,7 @@ package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
@ -11,6 +12,14 @@ import (
const imageExt = "jpg"
func makeImageFile(p string) *file.ImageFile {
return &file.ImageFile{
BaseFile: &file.BaseFile{
Path: p,
},
}
}
func TestImagePerformers(t *testing.T) {
t.Parallel()
@ -41,13 +50,17 @@ func TestImagePerformers(t *testing.T) {
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockImageReader.On("GetPerformerIDs", testCtx, imageID).Return(nil, nil).Once()
mockImageReader.On("UpdatePerformers", testCtx, imageID, []int{performerID}).Return(nil).Once()
mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
image := models.Image{
ID: imageID,
Path: test.Path,
ID: imageID,
Files: []*file.ImageFile{makeImageFile(test.Path)},
}
err := ImagePerformers(testCtx, &image, mockImageReader, mockPerformerReader, nil)
@ -62,7 +75,7 @@ func TestImageStudios(t *testing.T) {
const imageID = 1
const studioName = "studio name"
const studioID = 2
var studioID = 2
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
@ -81,17 +94,15 @@ func TestImageStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
if test.Matches {
mockImageReader.On("Find", testCtx, imageID).Return(&models.Image{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockImageReader.On("Update", testCtx, models.ImagePartial{
ID: imageID,
StudioID: &expectedStudioID,
expectedStudioID := studioID
mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
StudioID: models.NewOptionalInt(expectedStudioID),
}).Return(nil, nil).Once()
}
image := models.Image{
ID: imageID,
Path: test.Path,
ID: imageID,
Files: []*file.ImageFile{makeImageFile(test.Path)},
}
err := ImageStudios(testCtx, &image, mockImageReader, mockStudioReader, nil)
@ -154,13 +165,17 @@ func TestImageTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockImageReader *mocks.ImageReaderWriter, test pathTestTable) {
if test.Matches {
mockImageReader.On("GetTagIDs", testCtx, imageID).Return(nil, nil).Once()
mockImageReader.On("UpdateTags", testCtx, imageID, []int{tagID}).Return(nil).Once()
mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
image := models.Image{
ID: imageID,
Path: test.Path,
ID: imageID,
Files: []*file.ImageFile{makeImageFile(test.Path)},
}
err := ImageTags(testCtx, &image, mockImageReader, mockTagReader, nil)

View File

@ -8,15 +8,19 @@ import (
"database/sql"
"fmt"
"os"
"path/filepath"
"testing"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/txn"
_ "github.com/golang-migrate/migrate/v4/database/sqlite3"
_ "github.com/golang-migrate/migrate/v4/source/file"
// necessary to register custom migrations
_ "github.com/stashapp/stash/pkg/sqlite/migrations"
)
const testName = "Foo's Bar"
@ -28,6 +32,8 @@ const existingStudioGalleryName = testName + ".dontChangeStudio.mp4"
var existingStudioID int
const expectedMatchTitle = "expected match"
var db *sqlite.Database
var r models.Repository
@ -53,7 +59,7 @@ func runTests(m *testing.M) int {
f.Close()
databaseFile := f.Name()
db = &sqlite.Database{}
db = sqlite.NewDatabase()
if err := db.Open(databaseFile); err != nil {
panic(fmt.Sprintf("Could not initialize database: %s", err.Error()))
}
@ -117,187 +123,354 @@ func createTag(ctx context.Context, qb models.TagWriter) error {
return nil
}
func createScenes(ctx context.Context, sqb models.SceneReaderWriter) error {
func createScenes(ctx context.Context, sqb models.SceneReaderWriter, folderStore file.FolderStore, fileStore file.Store) error {
// create the scenes
scenePatterns, falseScenePatterns := generateTestPaths(testName, sceneExt)
for _, fn := range scenePatterns {
err := createScene(ctx, sqb, makeScene(fn, true))
f, err := createSceneFile(ctx, fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = true
if err := createScene(ctx, sqb, makeScene(expectedResult), f); err != nil {
return err
}
}
for _, fn := range falseScenePatterns {
err := createScene(ctx, sqb, makeScene(fn, false))
f, err := createSceneFile(ctx, fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = false
if err := createScene(ctx, sqb, makeScene(expectedResult), f); err != nil {
return err
}
}
// add organized scenes
for _, fn := range scenePatterns {
s := makeScene("organized"+fn, false)
s.Organized = true
err := createScene(ctx, sqb, s)
f, err := createSceneFile(ctx, "organized"+fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = false
s := makeScene(expectedResult)
s.Organized = true
if err := createScene(ctx, sqb, s, f); err != nil {
return err
}
}
// create scene with existing studio io
studioScene := makeScene(existingStudioSceneName, true)
studioScene.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createScene(ctx, sqb, studioScene)
f, err := createSceneFile(ctx, existingStudioSceneName, folderStore, fileStore)
if err != nil {
return err
}
s := &models.Scene{
Title: expectedMatchTitle,
URL: existingStudioSceneName,
StudioID: &existingStudioID,
}
if err := createScene(ctx, sqb, s, f); err != nil {
return err
}
return nil
}
func makeScene(name string, expectedResult bool) *models.Scene {
scene := &models.Scene{
Checksum: sql.NullString{String: md5.FromString(name), Valid: true},
Path: name,
}
func makeScene(expectedResult bool) *models.Scene {
s := &models.Scene{}
// if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult {
scene.Title = sql.NullString{Valid: true, String: name}
s.Title = expectedMatchTitle
}
return scene
return s
}
func createScene(ctx context.Context, sqb models.SceneWriter, scene *models.Scene) error {
_, err := sqb.Create(ctx, *scene)
func createSceneFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.VideoFile, error) {
folderPath := filepath.Dir(name)
basename := filepath.Base(name)
folder, err := getOrCreateFolder(ctx, folderStore, folderPath)
if err != nil {
return nil, err
}
folderID := folder.ID
f := &file.VideoFile{
BaseFile: &file.BaseFile{
Basename: basename,
ParentFolderID: folderID,
},
}
if err := fileStore.Create(ctx, f); err != nil {
return nil, err
}
return f, nil
}
func getOrCreateFolder(ctx context.Context, folderStore file.FolderStore, folderPath string) (*file.Folder, error) {
f, err := folderStore.FindByPath(ctx, folderPath)
if err != nil {
return nil, fmt.Errorf("getting folder by path: %w", err)
}
if f != nil {
return f, nil
}
var parentID file.FolderID
dir := filepath.Dir(folderPath)
if dir != "." {
parent, err := getOrCreateFolder(ctx, folderStore, dir)
if err != nil {
return nil, err
}
parentID = parent.ID
}
f = &file.Folder{
Path: folderPath,
}
if parentID != 0 {
f.ParentFolderID = &parentID
}
if err := folderStore.Create(ctx, f); err != nil {
return nil, fmt.Errorf("creating folder: %w", err)
}
return f, nil
}
func createScene(ctx context.Context, sqb models.SceneWriter, s *models.Scene, f *file.VideoFile) error {
err := sqb.Create(ctx, s, []file.ID{f.ID})
if err != nil {
return fmt.Errorf("Failed to create scene with name '%s': %s", scene.Path, err.Error())
return fmt.Errorf("Failed to create scene with path '%s': %s", f.Path, err.Error())
}
return nil
}
func createImages(ctx context.Context, sqb models.ImageReaderWriter) error {
func createImages(ctx context.Context, w models.ImageReaderWriter, folderStore file.FolderStore, fileStore file.Store) error {
// create the images
imagePatterns, falseImagePatterns := generateTestPaths(testName, imageExt)
for _, fn := range imagePatterns {
err := createImage(ctx, sqb, makeImage(fn, true))
f, err := createImageFile(ctx, fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = true
if err := createImage(ctx, w, makeImage(expectedResult), f); err != nil {
return err
}
}
for _, fn := range falseImagePatterns {
err := createImage(ctx, sqb, makeImage(fn, false))
f, err := createImageFile(ctx, fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = false
if err := createImage(ctx, w, makeImage(expectedResult), f); err != nil {
return err
}
}
// add organized images
for _, fn := range imagePatterns {
s := makeImage("organized"+fn, false)
s.Organized = true
err := createImage(ctx, sqb, s)
f, err := createImageFile(ctx, "organized"+fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = false
s := makeImage(expectedResult)
s.Organized = true
if err := createImage(ctx, w, s, f); err != nil {
return err
}
}
// create image with existing studio io
studioImage := makeImage(existingStudioImageName, true)
studioImage.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createImage(ctx, sqb, studioImage)
f, err := createImageFile(ctx, existingStudioImageName, folderStore, fileStore)
if err != nil {
return err
}
s := &models.Image{
Title: existingStudioImageName,
StudioID: &existingStudioID,
}
if err := createImage(ctx, w, s, f); err != nil {
return err
}
return nil
}
func makeImage(name string, expectedResult bool) *models.Image {
image := &models.Image{
Checksum: md5.FromString(name),
Path: name,
func createImageFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.ImageFile, error) {
folderPath := filepath.Dir(name)
basename := filepath.Base(name)
folder, err := getOrCreateFolder(ctx, folderStore, folderPath)
if err != nil {
return nil, err
}
folderID := folder.ID
f := &file.ImageFile{
BaseFile: &file.BaseFile{
Basename: basename,
ParentFolderID: folderID,
},
}
if err := fileStore.Create(ctx, f); err != nil {
return nil, err
}
return f, nil
}
func makeImage(expectedResult bool) *models.Image {
o := &models.Image{}
// if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult {
image.Title = sql.NullString{Valid: true, String: name}
o.Title = expectedMatchTitle
}
return image
return o
}
func createImage(ctx context.Context, sqb models.ImageWriter, image *models.Image) error {
_, err := sqb.Create(ctx, *image)
func createImage(ctx context.Context, w models.ImageWriter, o *models.Image, f *file.ImageFile) error {
err := w.Create(ctx, &models.ImageCreateInput{
Image: o,
FileIDs: []file.ID{f.ID},
})
if err != nil {
return fmt.Errorf("Failed to create image with name '%s': %s", image.Path, err.Error())
return fmt.Errorf("Failed to create image with path '%s': %s", f.Path, err.Error())
}
return nil
}
func createGalleries(ctx context.Context, sqb models.GalleryReaderWriter) error {
func createGalleries(ctx context.Context, w models.GalleryReaderWriter, folderStore file.FolderStore, fileStore file.Store) error {
// create the galleries
galleryPatterns, falseGalleryPatterns := generateTestPaths(testName, galleryExt)
for _, fn := range galleryPatterns {
err := createGallery(ctx, sqb, makeGallery(fn, true))
f, err := createGalleryFile(ctx, fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = true
if err := createGallery(ctx, w, makeGallery(expectedResult), f); err != nil {
return err
}
}
for _, fn := range falseGalleryPatterns {
err := createGallery(ctx, sqb, makeGallery(fn, false))
f, err := createGalleryFile(ctx, fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = false
if err := createGallery(ctx, w, makeGallery(expectedResult), f); err != nil {
return err
}
}
// add organized galleries
for _, fn := range galleryPatterns {
s := makeGallery("organized"+fn, false)
s.Organized = true
err := createGallery(ctx, sqb, s)
f, err := createGalleryFile(ctx, "organized"+fn, folderStore, fileStore)
if err != nil {
return err
}
const expectedResult = false
s := makeGallery(expectedResult)
s.Organized = true
if err := createGallery(ctx, w, s, f); err != nil {
return err
}
}
// create gallery with existing studio io
studioGallery := makeGallery(existingStudioGalleryName, true)
studioGallery.StudioID = sql.NullInt64{Valid: true, Int64: int64(existingStudioID)}
err := createGallery(ctx, sqb, studioGallery)
f, err := createGalleryFile(ctx, existingStudioGalleryName, folderStore, fileStore)
if err != nil {
return err
}
s := &models.Gallery{
Title: existingStudioGalleryName,
StudioID: &existingStudioID,
}
if err := createGallery(ctx, w, s, f); err != nil {
return err
}
return nil
}
func makeGallery(name string, expectedResult bool) *models.Gallery {
gallery := &models.Gallery{
Checksum: md5.FromString(name),
Path: models.NullString(name),
func createGalleryFile(ctx context.Context, name string, folderStore file.FolderStore, fileStore file.Store) (*file.BaseFile, error) {
folderPath := filepath.Dir(name)
basename := filepath.Base(name)
folder, err := getOrCreateFolder(ctx, folderStore, folderPath)
if err != nil {
return nil, err
}
folderID := folder.ID
f := &file.BaseFile{
Basename: basename,
ParentFolderID: folderID,
}
if err := fileStore.Create(ctx, f); err != nil {
return nil, err
}
return f, nil
}
func makeGallery(expectedResult bool) *models.Gallery {
o := &models.Gallery{}
// if expectedResult is true then we expect it to match, set the title accordingly
if expectedResult {
gallery.Title = sql.NullString{Valid: true, String: name}
o.Title = expectedMatchTitle
}
return gallery
return o
}
func createGallery(ctx context.Context, sqb models.GalleryWriter, gallery *models.Gallery) error {
_, err := sqb.Create(ctx, *gallery)
func createGallery(ctx context.Context, w models.GalleryWriter, o *models.Gallery, f *file.BaseFile) error {
err := w.Create(ctx, o, []file.ID{f.ID})
if err != nil {
return fmt.Errorf("Failed to create gallery with name '%s': %s", gallery.Path.String, err.Error())
return fmt.Errorf("Failed to create gallery with path '%s': %s", f.Path, err.Error())
}
return nil
@ -332,17 +505,17 @@ func populateDB() error {
return err
}
err = createScenes(ctx, r.Scene)
err = createScenes(ctx, r.Scene, r.Folder, r.File)
if err != nil {
return err
}
err = createImages(ctx, r.Image)
err = createImages(ctx, r.Image, r.Folder, r.File)
if err != nil {
return err
}
err = createGalleries(ctx, r.Gallery)
err = createGalleries(ctx, r.Gallery, r.Folder, r.File)
if err != nil {
return err
}
@ -391,10 +564,10 @@ func TestParsePerformerScenes(t *testing.T) {
}
// title is only set on scenes where we expect performer to be set
if scene.Title.String == scene.Path && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path)
} else if scene.Title.String != scene.Path && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path)
if scene.Title == expectedMatchTitle && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, scene.Path())
} else if scene.Title != expectedMatchTitle && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, scene.Path())
}
}
@ -435,21 +608,21 @@ func TestParseStudioScenes(t *testing.T) {
for _, scene := range scenes {
// check for existing studio id scene first
if scene.Path == existingStudioSceneName {
if scene.StudioID.Int64 != int64(existingStudioID) {
if scene.URL == existingStudioSceneName {
if scene.StudioID == nil || *scene.StudioID != existingStudioID {
t.Error("Incorrectly overwrote studio ID for scene with existing studio ID")
}
} else {
// title is only set on scenes where we expect studio to be set
if scene.Title.String == scene.Path {
if !scene.StudioID.Valid {
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path)
} else if scene.StudioID.Int64 != int64(studios[1].ID) {
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID.Int64, scene.Path)
if scene.Title == expectedMatchTitle {
if scene.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, scene.Path())
} else if scene.StudioID != nil && *scene.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", scene.StudioID, scene.Path())
}
} else if scene.Title.String != scene.Path && scene.StudioID.Int64 == int64(studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path)
} else if scene.Title != expectedMatchTitle && scene.StudioID != nil && *scene.StudioID == studios[1].ID {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, scene.Path())
}
}
}
@ -499,10 +672,10 @@ func TestParseTagScenes(t *testing.T) {
}
// title is only set on scenes where we expect tag to be set
if scene.Title.String == scene.Path && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path)
} else if scene.Title.String != scene.Path && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path)
if scene.Title == expectedMatchTitle && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, scene.Path())
} else if (scene.Title != expectedMatchTitle) && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, scene.Path())
}
}
@ -546,10 +719,11 @@ func TestParsePerformerImages(t *testing.T) {
}
// title is only set on images where we expect performer to be set
if image.Title.String == image.Path && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path)
} else if image.Title.String != image.Path && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path)
expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
if expectedMatch && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, image.Path())
} else if !expectedMatch && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, image.Path())
}
}
@ -590,21 +764,21 @@ func TestParseStudioImages(t *testing.T) {
for _, image := range images {
// check for existing studio id image first
if image.Path == existingStudioImageName {
if image.StudioID.Int64 != int64(existingStudioID) {
if image.Title == existingStudioImageName {
if *image.StudioID != existingStudioID {
t.Error("Incorrectly overwrote studio ID for image with existing studio ID")
}
} else {
// title is only set on images where we expect studio to be set
if image.Title.String == image.Path {
if !image.StudioID.Valid {
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path)
} else if image.StudioID.Int64 != int64(studios[1].ID) {
t.Errorf("Incorrect studio id %d set for path '%s'", image.StudioID.Int64, image.Path)
if image.Title == expectedMatchTitle {
if image.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, image.Path())
} else if *image.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", *image.StudioID, image.Path())
}
} else if image.Title.String != image.Path && image.StudioID.Int64 == int64(studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path)
} else if image.Title != expectedMatchTitle && image.StudioID != nil && *image.StudioID == studios[1].ID {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, image.Path())
}
}
}
@ -654,10 +828,11 @@ func TestParseTagImages(t *testing.T) {
}
// title is only set on images where we expect performer to be set
if image.Title.String == image.Path && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path)
} else if image.Title.String != image.Path && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path)
expectedMatch := image.Title == expectedMatchTitle || image.Title == existingStudioImageName
if expectedMatch && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, image.Path())
} else if !expectedMatch && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, image.Path())
}
}
@ -701,10 +876,11 @@ func TestParsePerformerGalleries(t *testing.T) {
}
// title is only set on galleries where we expect performer to be set
if gallery.Title.String == gallery.Path.String && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.Title.String != gallery.Path.String && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path.String)
expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
if expectedMatch && len(performers) == 0 {
t.Errorf("Did not set performer '%s' for path '%s'", testName, gallery.Path())
} else if !expectedMatch && len(performers) > 0 {
t.Errorf("Incorrectly set performer '%s' for path '%s'", testName, gallery.Path())
}
}
@ -745,21 +921,21 @@ func TestParseStudioGalleries(t *testing.T) {
for _, gallery := range galleries {
// check for existing studio id gallery first
if gallery.Path.String == existingStudioGalleryName {
if gallery.StudioID.Int64 != int64(existingStudioID) {
if gallery.Title == existingStudioGalleryName {
if *gallery.StudioID != existingStudioID {
t.Error("Incorrectly overwrote studio ID for gallery with existing studio ID")
}
} else {
// title is only set on galleries where we expect studio to be set
if gallery.Title.String == gallery.Path.String {
if !gallery.StudioID.Valid {
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.StudioID.Int64 != int64(studios[1].ID) {
t.Errorf("Incorrect studio id %d set for path '%s'", gallery.StudioID.Int64, gallery.Path.String)
if gallery.Title == expectedMatchTitle {
if gallery.StudioID == nil {
t.Errorf("Did not set studio '%s' for path '%s'", testName, gallery.Path())
} else if *gallery.StudioID != studios[1].ID {
t.Errorf("Incorrect studio id %d set for path '%s'", *gallery.StudioID, gallery.Path())
}
} else if gallery.Title.String != gallery.Path.String && gallery.StudioID.Int64 == int64(studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.Title != expectedMatchTitle && (gallery.StudioID != nil && *gallery.StudioID == studios[1].ID) {
t.Errorf("Incorrectly set studio '%s' for path '%s'", testName, gallery.Path())
}
}
}
@ -809,10 +985,11 @@ func TestParseTagGalleries(t *testing.T) {
}
// title is only set on galleries where we expect performer to be set
if gallery.Title.String == gallery.Path.String && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path.String)
} else if gallery.Title.String != gallery.Path.String && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path.String)
expectedMatch := gallery.Title == expectedMatchTitle || gallery.Title == existingStudioGalleryName
if expectedMatch && len(tags) == 0 {
t.Errorf("Did not set tag '%s' for path '%s'", testName, gallery.Path())
} else if !expectedMatch && len(tags) > 0 {
t.Errorf("Incorrectly set tag '%s' for path '%s'", testName, gallery.Path())
}
}

View File

@ -12,17 +12,17 @@ import (
type SceneQueryPerformerUpdater interface {
scene.Queryer
scene.PerformerUpdater
scene.PartialUpdater
}
type ImageQueryPerformerUpdater interface {
image.Queryer
image.PerformerUpdater
image.PartialUpdater
}
type GalleryQueryPerformerUpdater interface {
gallery.Queryer
gallery.PerformerUpdater
gallery.PartialUpdater
}
func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger {
@ -38,8 +38,8 @@ func getPerformerTagger(p *models.Performer, cache *match.Cache) tagger {
func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, rw SceneQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache)
return t.tagScenes(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return scene.AddPerformer(ctx, rw, otherID, subjectID)
return t.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return scene.AddPerformer(ctx, rw, o, p.ID)
})
}
@ -47,8 +47,8 @@ func PerformerScenes(ctx context.Context, p *models.Performer, paths []string, r
func PerformerImages(ctx context.Context, p *models.Performer, paths []string, rw ImageQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache)
return t.tagImages(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return image.AddPerformer(ctx, rw, otherID, subjectID)
return t.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return image.AddPerformer(ctx, rw, i, p.ID)
})
}
@ -56,7 +56,7 @@ func PerformerImages(ctx context.Context, p *models.Performer, paths []string, r
func PerformerGalleries(ctx context.Context, p *models.Performer, paths []string, rw GalleryQueryPerformerUpdater, cache *match.Cache) error {
t := getPerformerTagger(p, cache)
return t.tagGalleries(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return gallery.AddPerformer(ctx, rw, otherID, subjectID)
return t.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return gallery.AddPerformer(ctx, rw, o, p.ID)
})
}

View File

@ -3,6 +3,7 @@ package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
@ -47,8 +48,14 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
matchingPaths, falsePaths := generateTestPaths(performerName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{
ID: i + 1,
Path: p,
ID: i + 1,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: p,
},
},
},
})
}
@ -77,8 +84,12 @@ func testPerformerScenes(t *testing.T, performerName, expectedRegex string) {
for i := range matchingPaths {
sceneID := i + 1
mockSceneReader.On("GetPerformerIDs", testCtx, sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdatePerformers", testCtx, sceneID, []int{performerID}).Return(nil).Once()
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := PerformerScenes(testCtx, &performer, nil, mockSceneReader, nil)
@ -122,8 +133,8 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
matchingPaths, falsePaths := generateTestPaths(performerName, imageExt)
for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{
ID: i + 1,
Path: p,
ID: i + 1,
Files: []*file.ImageFile{makeImageFile(p)},
})
}
@ -152,8 +163,12 @@ func testPerformerImages(t *testing.T, performerName, expectedRegex string) {
for i := range matchingPaths {
imageID := i + 1
mockImageReader.On("GetPerformerIDs", testCtx, imageID).Return(nil, nil).Once()
mockImageReader.On("UpdatePerformers", testCtx, imageID, []int{performerID}).Return(nil).Once()
mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := PerformerImages(testCtx, &performer, nil, mockImageReader, nil)
@ -196,9 +211,14 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(performerName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{
ID: i + 1,
Path: models.NullString(p),
ID: i + 1,
Files: []file.File{
&file.BaseFile{
Path: v,
},
},
})
}
@ -226,8 +246,12 @@ func testPerformerGalleries(t *testing.T, performerName, expectedRegex string) {
for i := range matchingPaths {
galleryID := i + 1
mockGalleryReader.On("GetPerformerIDs", testCtx, galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdatePerformers", testCtx, galleryID, []int{performerID}).Return(nil).Once()
mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := PerformerGalleries(testCtx, &performer, nil, mockGalleryReader, nil)

View File

@ -13,17 +13,17 @@ func getSceneFileTagger(s *models.Scene, cache *match.Cache) tagger {
ID: s.ID,
Type: "scene",
Name: s.GetTitle(),
Path: s.Path,
Path: s.Path(),
cache: cache,
}
}
// ScenePerformers tags the provided scene with performers whose name matches the scene's path.
func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PerformerUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PartialUpdater, performerReader match.PerformerAutoTagQueryer, cache *match.Cache) error {
t := getSceneFileTagger(s, cache)
return t.tagPerformers(ctx, performerReader, func(subjectID, otherID int) (bool, error) {
return scene.AddPerformer(ctx, rw, subjectID, otherID)
return scene.AddPerformer(ctx, rw, s, otherID)
})
}
@ -31,7 +31,7 @@ func ScenePerformers(ctx context.Context, s *models.Scene, rw scene.PerformerUpd
//
// Scenes will not be tagged if studio is already set.
func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, studioReader match.StudioAutoTagQueryer, cache *match.Cache) error {
if s.StudioID.Valid {
if s.StudioID != nil {
// don't modify
return nil
}
@ -39,15 +39,15 @@ func SceneStudios(ctx context.Context, s *models.Scene, rw SceneFinderUpdater, s
t := getSceneFileTagger(s, cache)
return t.tagStudios(ctx, studioReader, func(subjectID, otherID int) (bool, error) {
return addSceneStudio(ctx, rw, subjectID, otherID)
return addSceneStudio(ctx, rw, s, otherID)
})
}
// SceneTags tags the provided scene with tags whose name matches the scene's path.
func SceneTags(ctx context.Context, s *models.Scene, rw scene.TagUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
func SceneTags(ctx context.Context, s *models.Scene, rw scene.PartialUpdater, tagReader match.TagAutoTagQueryer, cache *match.Cache) error {
t := getSceneFileTagger(s, cache)
return t.tagTags(ctx, tagReader, func(subjectID, otherID int) (bool, error) {
return scene.AddTag(ctx, rw, subjectID, otherID)
return scene.AddTag(ctx, rw, s, otherID)
})
}

View File

@ -5,6 +5,7 @@ import (
"strings"
"testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
"github.com/stretchr/testify/assert"
@ -176,15 +177,26 @@ func TestScenePerformers(t *testing.T) {
mockPerformerReader.On("Query", testCtx, mock.Anything, mock.Anything).Return(nil, 0, nil)
mockPerformerReader.On("QueryForAutoTag", testCtx, mock.Anything).Return([]*models.Performer{&performer, &reversedPerformer}, nil).Once()
if test.Matches {
mockSceneReader.On("GetPerformerIDs", testCtx, sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdatePerformers", testCtx, sceneID, []int{performerID}).Return(nil).Once()
scene := models.Scene{
ID: sceneID,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: test.Path,
},
},
},
}
scene := models.Scene{
ID: sceneID,
Path: test.Path,
if test.Matches {
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
PerformerIDs: &models.UpdateIDs{
IDs: []int{performerID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := ScenePerformers(testCtx, &scene, mockSceneReader, mockPerformerReader, nil)
assert.Nil(err)
@ -196,9 +208,11 @@ func TestScenePerformers(t *testing.T) {
func TestSceneStudios(t *testing.T) {
t.Parallel()
const sceneID = 1
const studioName = "studio name"
const studioID = 2
var (
sceneID = 1
studioName = "studio name"
studioID = 2
)
studio := models.Studio{
ID: studioID,
Name: models.NullString(studioName),
@ -217,17 +231,21 @@ func TestSceneStudios(t *testing.T) {
doTest := func(mockStudioReader *mocks.StudioReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
if test.Matches {
mockSceneReader.On("Find", testCtx, sceneID).Return(&models.Scene{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockSceneReader.On("Update", testCtx, models.ScenePartial{
ID: sceneID,
StudioID: &expectedStudioID,
expectedStudioID := studioID
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
StudioID: models.NewOptionalInt(expectedStudioID),
}).Return(nil, nil).Once()
}
scene := models.Scene{
ID: sceneID,
Path: test.Path,
ID: sceneID,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: test.Path,
},
},
},
}
err := SceneStudios(testCtx, &scene, mockSceneReader, mockStudioReader, nil)
@ -290,13 +308,23 @@ func TestSceneTags(t *testing.T) {
doTest := func(mockTagReader *mocks.TagReaderWriter, mockSceneReader *mocks.SceneReaderWriter, test pathTestTable) {
if test.Matches {
mockSceneReader.On("GetTagIDs", testCtx, sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdateTags", testCtx, sceneID, []int{tagID}).Return(nil).Once()
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
scene := models.Scene{
ID: sceneID,
Path: test.Path,
ID: sceneID,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: test.Path,
},
},
},
}
err := SceneTags(testCtx, &scene, mockSceneReader, mockTagReader, nil)

View File

@ -2,7 +2,6 @@ package autotag
import (
"context"
"database/sql"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
@ -11,73 +10,52 @@ import (
"github.com/stashapp/stash/pkg/scene"
)
func addSceneStudio(ctx context.Context, sceneWriter SceneFinderUpdater, sceneID, studioID int) (bool, error) {
func addSceneStudio(ctx context.Context, sceneWriter scene.PartialUpdater, o *models.Scene, studioID int) (bool, error) {
// don't set if already set
scene, err := sceneWriter.Find(ctx, sceneID)
if err != nil {
return false, err
}
if scene.StudioID.Valid {
if o.StudioID != nil {
return false, nil
}
// set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
scenePartial := models.ScenePartial{
ID: sceneID,
StudioID: &s,
StudioID: models.NewOptionalInt(studioID),
}
if _, err := sceneWriter.Update(ctx, scenePartial); err != nil {
if _, err := sceneWriter.UpdatePartial(ctx, o.ID, scenePartial); err != nil {
return false, err
}
return true, nil
}
func addImageStudio(ctx context.Context, imageWriter ImageFinderUpdater, imageID, studioID int) (bool, error) {
func addImageStudio(ctx context.Context, imageWriter image.PartialUpdater, i *models.Image, studioID int) (bool, error) {
// don't set if already set
image, err := imageWriter.Find(ctx, imageID)
if err != nil {
return false, err
}
if image.StudioID.Valid {
if i.StudioID != nil {
return false, nil
}
// set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
imagePartial := models.ImagePartial{
ID: imageID,
StudioID: &s,
StudioID: models.NewOptionalInt(studioID),
}
if _, err := imageWriter.Update(ctx, imagePartial); err != nil {
if _, err := imageWriter.UpdatePartial(ctx, i.ID, imagePartial); err != nil {
return false, err
}
return true, nil
}
func addGalleryStudio(ctx context.Context, galleryWriter GalleryFinderUpdater, galleryID, studioID int) (bool, error) {
func addGalleryStudio(ctx context.Context, galleryWriter GalleryFinderUpdater, o *models.Gallery, studioID int) (bool, error) {
// don't set if already set
gallery, err := galleryWriter.Find(ctx, galleryID)
if err != nil {
return false, err
}
if gallery.StudioID.Valid {
if o.StudioID != nil {
return false, nil
}
// set the studio id
s := sql.NullInt64{Int64: int64(studioID), Valid: true}
galleryPartial := models.GalleryPartial{
ID: galleryID,
StudioID: &s,
StudioID: models.NewOptionalInt(studioID),
}
if _, err := galleryWriter.UpdatePartial(ctx, galleryPartial); err != nil {
if _, err := galleryWriter.UpdatePartial(ctx, o.ID, galleryPartial); err != nil {
return false, err
}
return true, nil
@ -104,8 +82,7 @@ func getStudioTagger(p *models.Studio, aliases []string, cache *match.Cache) []t
type SceneFinderUpdater interface {
scene.Queryer
Find(ctx context.Context, id int) (*models.Scene, error)
Update(ctx context.Context, updatedScene models.ScenePartial) (*models.Scene, error)
scene.PartialUpdater
}
// StudioScenes searches for scenes whose path matches the provided studio name and tags the scene with the studio, if studio is not already set on the scene.
@ -113,8 +90,8 @@ func StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases
t := getStudioTagger(p, aliases, cache)
for _, tt := range t {
if err := tt.tagScenes(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return addSceneStudio(ctx, rw, otherID, subjectID)
if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return addSceneStudio(ctx, rw, o, p.ID)
}); err != nil {
return err
}
@ -126,7 +103,7 @@ func StudioScenes(ctx context.Context, p *models.Studio, paths []string, aliases
type ImageFinderUpdater interface {
image.Queryer
Find(ctx context.Context, id int) (*models.Image, error)
Update(ctx context.Context, updatedImage models.ImagePartial) (*models.Image, error)
UpdatePartial(ctx context.Context, id int, partial models.ImagePartial) (*models.Image, error)
}
// StudioImages searches for images whose path matches the provided studio name and tags the image with the studio, if studio is not already set on the image.
@ -134,8 +111,8 @@ func StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases
t := getStudioTagger(p, aliases, cache)
for _, tt := range t {
if err := tt.tagImages(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return addImageStudio(ctx, rw, otherID, subjectID)
if err := tt.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return addImageStudio(ctx, rw, i, p.ID)
}); err != nil {
return err
}
@ -146,8 +123,8 @@ func StudioImages(ctx context.Context, p *models.Studio, paths []string, aliases
type GalleryFinderUpdater interface {
gallery.Queryer
gallery.PartialUpdater
Find(ctx context.Context, id int) (*models.Gallery, error)
UpdatePartial(ctx context.Context, updatedGallery models.GalleryPartial) (*models.Gallery, error)
}
// StudioGalleries searches for galleries whose path matches the provided studio name and tags the gallery with the studio, if studio is not already set on the gallery.
@ -155,8 +132,8 @@ func StudioGalleries(ctx context.Context, p *models.Studio, paths []string, alia
t := getStudioTagger(p, aliases, cache)
for _, tt := range t {
if err := tt.tagGalleries(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return addGalleryStudio(ctx, rw, otherID, subjectID)
if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return addGalleryStudio(ctx, rw, o, p.ID)
}); err != nil {
return err
}

View File

@ -3,6 +3,7 @@ package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
@ -72,7 +73,7 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
mockSceneReader := &mocks.SceneReaderWriter{}
const studioID = 2
var studioID = 2
var aliases []string
@ -87,8 +88,14 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
var scenes []*models.Scene
for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{
ID: i + 1,
Path: p,
ID: i + 1,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: p,
},
},
},
})
}
@ -134,11 +141,9 @@ func testStudioScenes(t *testing.T, tc testStudioCase) {
for i := range matchingPaths {
sceneID := i + 1
mockSceneReader.On("Find", testCtx, sceneID).Return(&models.Scene{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockSceneReader.On("Update", testCtx, models.ScenePartial{
ID: sceneID,
StudioID: &expectedStudioID,
expectedStudioID := studioID
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
StudioID: models.NewOptionalInt(expectedStudioID),
}).Return(nil, nil).Once()
}
@ -166,7 +171,7 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
mockImageReader := &mocks.ImageReaderWriter{}
const studioID = 2
var studioID = 2
var aliases []string
@ -180,8 +185,8 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
matchingPaths, falsePaths := generateTestPaths(testPathName, imageExt)
for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{
ID: i + 1,
Path: p,
ID: i + 1,
Files: []*file.ImageFile{makeImageFile(p)},
})
}
@ -226,11 +231,9 @@ func testStudioImages(t *testing.T, tc testStudioCase) {
for i := range matchingPaths {
imageID := i + 1
mockImageReader.On("Find", testCtx, imageID).Return(&models.Image{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockImageReader.On("Update", testCtx, models.ImagePartial{
ID: imageID,
StudioID: &expectedStudioID,
expectedStudioID := studioID
mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
StudioID: models.NewOptionalInt(expectedStudioID),
}).Return(nil, nil).Once()
}
@ -257,7 +260,7 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
aliasRegex := tc.aliasRegex
mockGalleryReader := &mocks.GalleryReaderWriter{}
const studioID = 2
var studioID = 2
var aliases []string
@ -270,9 +273,14 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(testPathName, galleryExt)
for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{
ID: i + 1,
Path: models.NullString(p),
ID: i + 1,
Files: []file.File{
&file.BaseFile{
Path: v,
},
},
})
}
@ -316,11 +324,9 @@ func testStudioGalleries(t *testing.T, tc testStudioCase) {
for i := range matchingPaths {
galleryID := i + 1
mockGalleryReader.On("Find", testCtx, galleryID).Return(&models.Gallery{}, nil).Once()
expectedStudioID := models.NullInt64(studioID)
mockGalleryReader.On("UpdatePartial", testCtx, models.GalleryPartial{
ID: galleryID,
StudioID: &expectedStudioID,
expectedStudioID := studioID
mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
StudioID: models.NewOptionalInt(expectedStudioID),
}).Return(nil, nil).Once()
}

View File

@ -12,17 +12,17 @@ import (
type SceneQueryTagUpdater interface {
scene.Queryer
scene.TagUpdater
scene.PartialUpdater
}
type ImageQueryTagUpdater interface {
image.Queryer
image.TagUpdater
image.PartialUpdater
}
type GalleryQueryTagUpdater interface {
gallery.Queryer
gallery.TagUpdater
gallery.PartialUpdater
}
func getTagTaggers(p *models.Tag, aliases []string, cache *match.Cache) []tagger {
@ -50,8 +50,8 @@ func TagScenes(ctx context.Context, p *models.Tag, paths []string, aliases []str
t := getTagTaggers(p, aliases, cache)
for _, tt := range t {
if err := tt.tagScenes(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return scene.AddTag(ctx, rw, otherID, subjectID)
if err := tt.tagScenes(ctx, paths, rw, func(o *models.Scene) (bool, error) {
return scene.AddTag(ctx, rw, o, p.ID)
}); err != nil {
return err
}
@ -64,8 +64,8 @@ func TagImages(ctx context.Context, p *models.Tag, paths []string, aliases []str
t := getTagTaggers(p, aliases, cache)
for _, tt := range t {
if err := tt.tagImages(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return image.AddTag(ctx, rw, otherID, subjectID)
if err := tt.tagImages(ctx, paths, rw, func(i *models.Image) (bool, error) {
return image.AddTag(ctx, rw, i, p.ID)
}); err != nil {
return err
}
@ -78,8 +78,8 @@ func TagGalleries(ctx context.Context, p *models.Tag, paths []string, aliases []
t := getTagTaggers(p, aliases, cache)
for _, tt := range t {
if err := tt.tagGalleries(ctx, paths, rw, func(subjectID, otherID int) (bool, error) {
return gallery.AddTag(ctx, rw, otherID, subjectID)
if err := tt.tagGalleries(ctx, paths, rw, func(o *models.Gallery) (bool, error) {
return gallery.AddTag(ctx, rw, o, p.ID)
}); err != nil {
return err
}

View File

@ -3,6 +3,7 @@ package autotag
import (
"testing"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/mocks"
@ -87,8 +88,14 @@ func testTagScenes(t *testing.T, tc testTagCase) {
var scenes []*models.Scene
for i, p := range append(matchingPaths, falsePaths...) {
scenes = append(scenes, &models.Scene{
ID: i + 1,
Path: p,
ID: i + 1,
Files: []*file.VideoFile{
{
BaseFile: &file.BaseFile{
Path: p,
},
},
},
})
}
@ -133,8 +140,12 @@ func testTagScenes(t *testing.T, tc testTagCase) {
for i := range matchingPaths {
sceneID := i + 1
mockSceneReader.On("GetTagIDs", testCtx, sceneID).Return(nil, nil).Once()
mockSceneReader.On("UpdateTags", testCtx, sceneID, []int{tagID}).Return(nil).Once()
mockSceneReader.On("UpdatePartial", testCtx, sceneID, models.ScenePartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := TagScenes(testCtx, &tag, nil, aliases, mockSceneReader, nil)
@ -175,8 +186,8 @@ func testTagImages(t *testing.T, tc testTagCase) {
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
images = append(images, &models.Image{
ID: i + 1,
Path: p,
ID: i + 1,
Files: []*file.ImageFile{makeImageFile(p)},
})
}
@ -221,8 +232,13 @@ func testTagImages(t *testing.T, tc testTagCase) {
for i := range matchingPaths {
imageID := i + 1
mockImageReader.On("GetTagIDs", testCtx, imageID).Return(nil, nil).Once()
mockImageReader.On("UpdateTags", testCtx, imageID, []int{tagID}).Return(nil).Once()
mockImageReader.On("UpdatePartial", testCtx, imageID, models.ImagePartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := TagImages(testCtx, &tag, nil, aliases, mockImageReader, nil)
@ -262,9 +278,14 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
var galleries []*models.Gallery
matchingPaths, falsePaths := generateTestPaths(testPathName, "mp4")
for i, p := range append(matchingPaths, falsePaths...) {
v := p
galleries = append(galleries, &models.Gallery{
ID: i + 1,
Path: models.NullString(p),
ID: i + 1,
Files: []file.File{
&file.BaseFile{
Path: v,
},
},
})
}
@ -308,8 +329,14 @@ func testTagGalleries(t *testing.T, tc testTagCase) {
for i := range matchingPaths {
galleryID := i + 1
mockGalleryReader.On("GetTagIDs", testCtx, galleryID).Return(nil, nil).Once()
mockGalleryReader.On("UpdateTags", testCtx, galleryID, []int{tagID}).Return(nil).Once()
mockGalleryReader.On("UpdatePartial", testCtx, galleryID, models.GalleryPartial{
TagIDs: &models.UpdateIDs{
IDs: []int{tagID},
Mode: models.RelationshipUpdateModeAdd,
},
}).Return(nil, nil).Once()
}
err := TagGalleries(testCtx, &tag, nil, aliases, mockGalleryReader, nil)

View File

@ -21,6 +21,7 @@ import (
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/match"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
@ -35,6 +36,9 @@ type tagger struct {
}
type addLinkFunc func(subjectID, otherID int) (bool, error)
type addImageLinkFunc func(o *models.Image) (bool, error)
type addGalleryLinkFunc func(o *models.Gallery) (bool, error)
type addSceneLinkFunc func(o *models.Scene) (bool, error)
func (t *tagger) addError(otherType, otherName string, err error) error {
return fmt.Errorf("error adding %s '%s' to %s '%s': %s", otherType, otherName, t.Type, t.Name, err.Error())
@ -107,14 +111,14 @@ func (t *tagger) tagTags(ctx context.Context, tagReader match.TagAutoTagQueryer,
return nil
}
func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addLinkFunc) error {
func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scene.Queryer, addFunc addSceneLinkFunc) error {
others, err := match.PathToScenes(ctx, t.Name, paths, sceneReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
added, err := addFunc(p)
if err != nil {
return t.addError("scene", p.GetTitle(), err)
@ -128,14 +132,14 @@ func (t *tagger) tagScenes(ctx context.Context, paths []string, sceneReader scen
return nil
}
func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addLinkFunc) error {
func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader image.Queryer, addFunc addImageLinkFunc) error {
others, err := match.PathToImages(ctx, t.Name, paths, imageReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
added, err := addFunc(p)
if err != nil {
return t.addError("image", p.GetTitle(), err)
@ -149,14 +153,14 @@ func (t *tagger) tagImages(ctx context.Context, paths []string, imageReader imag
return nil
}
func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader gallery.Queryer, addFunc addLinkFunc) error {
func (t *tagger) tagGalleries(ctx context.Context, paths []string, galleryReader gallery.Queryer, addFunc addGalleryLinkFunc) error {
others, err := match.PathToGalleries(ctx, t.Name, paths, galleryReader)
if err != nil {
return err
}
for _, p := range others {
added, err := addFunc(t.ID, p.ID)
added, err := addFunc(p)
if err != nil {
return t.addError("gallery", p.GetTitle(), err)

View File

@ -108,9 +108,18 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
}
mimeType := "video/mp4"
size, _ := strconv.Atoi(scene.Size.String)
var (
size int
bitrate uint
duration int64
)
duration := int64(scene.Duration.Float64)
f := scene.PrimaryFile()
if f != nil {
size = int(f.Size)
bitrate = uint(f.BitRate)
duration = int64(f.Duration)
}
item.Res = append(item.Res, upnpav.Resource{
URL: (&url.URL{
@ -124,8 +133,7 @@ func sceneToContainer(scene *models.Scene, parent string, host string) interface
ProtocolInfo: fmt.Sprintf("http-get:*:%s:%s", mimeType, dlna.ContentFeatures{
SupportRange: true,
}.String()),
Bitrate: uint(scene.Bitrate.Int64),
// TODO - make %d:%02d:%02d string
Bitrate: bitrate,
Duration: formatDurationSexagesimal(time.Duration(duration) * time.Second),
Size: uint64(size),
// Resolution: resolution,
@ -370,7 +378,7 @@ func (me *contentDirectoryService) handleBrowseMetadata(obj object, host string)
// http://upnp.org/specs/av/UPnP-av-ContentDirectory-v1-Service.pdf
// maximum update ID is 2**32, then rolls back to 0
const maxUpdateID int64 = 1 << 32
updateID = fmt.Sprint(scene.UpdatedAt.Timestamp.Unix() % maxUpdateID)
updateID = fmt.Sprint(scene.UpdatedAt.Unix() % maxUpdateID)
} else {
return nil, upnp.Errorf(upnpav.NoSuchObjectErrorCode, "scene not found")
}

View File

@ -2,7 +2,6 @@ package identify
import (
"context"
"database/sql"
"fmt"
"github.com/stashapp/stash/pkg/logger"
@ -129,10 +128,7 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
}
if studioID != nil {
ret.Partial.StudioID = &sql.NullInt64{
Int64: *studioID,
Valid: true,
}
ret.Partial.StudioID = models.NewOptionalInt(*studioID)
}
ignoreMale := false
@ -143,20 +139,38 @@ func (t *SceneIdentifier) getSceneUpdater(ctx context.Context, s *models.Scene,
}
}
ret.PerformerIDs, err = rel.performers(ctx, ignoreMale)
performerIDs, err := rel.performers(ctx, ignoreMale)
if err != nil {
return nil, err
}
if performerIDs != nil {
ret.Partial.PerformerIDs = &models.UpdateIDs{
IDs: performerIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
ret.TagIDs, err = rel.tags(ctx)
tagIDs, err := rel.tags(ctx)
if err != nil {
return nil, err
}
if tagIDs != nil {
ret.Partial.TagIDs = &models.UpdateIDs{
IDs: tagIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
ret.StashIDs, err = rel.stashIDs(ctx)
stashIDs, err := rel.stashIDs(ctx)
if err != nil {
return nil, err
}
if stashIDs != nil {
ret.Partial.StashIDs = &models.UpdateStashIDs{
StashIDs: stashIDs,
Mode: models.RelationshipUpdateModeSet,
}
}
setCoverImage := false
for _, o := range options {
@ -198,8 +212,8 @@ func (t *SceneIdentifier) modifyScene(ctx context.Context, txnManager txn.Manage
as := ""
title := updater.Partial.Title
if title != nil {
as = fmt.Sprintf(" as %s", title.String)
if title.Ptr() != nil {
as = fmt.Sprintf(" as %s", title.Value)
}
logger.Infof("Successfully identified %s%s using %s", s.Path, as, result.source.Name)
@ -233,37 +247,33 @@ func getFieldOptions(options []MetadataOptions) map[string]*FieldOptions {
}
func getScenePartial(scene *models.Scene, scraped *scraper.ScrapedScene, fieldOptions map[string]*FieldOptions, setOrganized bool) models.ScenePartial {
partial := models.ScenePartial{
ID: scene.ID,
}
partial := models.ScenePartial{}
if scraped.Title != nil && scene.Title.String != *scraped.Title {
if shouldSetSingleValueField(fieldOptions["title"], scene.Title.String != "") {
partial.Title = models.NullStringPtr(*scraped.Title)
if scraped.Title != nil && (scene.Title != *scraped.Title) {
if shouldSetSingleValueField(fieldOptions["title"], scene.Title != "") {
partial.Title = models.NewOptionalString(*scraped.Title)
}
}
if scraped.Date != nil && scene.Date.String != *scraped.Date {
if shouldSetSingleValueField(fieldOptions["date"], scene.Date.Valid) {
partial.Date = &models.SQLiteDate{
String: *scraped.Date,
Valid: true,
}
if scraped.Date != nil && (scene.Date == nil || scene.Date.String() != *scraped.Date) {
if shouldSetSingleValueField(fieldOptions["date"], scene.Date != nil) {
d := models.NewDate(*scraped.Date)
partial.Date = models.NewOptionalDate(d)
}
}
if scraped.Details != nil && scene.Details.String != *scraped.Details {
if shouldSetSingleValueField(fieldOptions["details"], scene.Details.String != "") {
partial.Details = models.NullStringPtr(*scraped.Details)
if scraped.Details != nil && (scene.Details != *scraped.Details) {
if shouldSetSingleValueField(fieldOptions["details"], scene.Details != "") {
partial.Details = models.NewOptionalString(*scraped.Details)
}
}
if scraped.URL != nil && scene.URL.String != *scraped.URL {
if shouldSetSingleValueField(fieldOptions["url"], scene.URL.String != "") {
partial.URL = models.NullStringPtr(*scraped.URL)
if scraped.URL != nil && (scene.URL != *scraped.URL) {
if shouldSetSingleValueField(fieldOptions["url"], scene.URL != "") {
partial.URL = models.NewOptionalString(*scraped.URL)
}
}
if setOrganized && !scene.Organized {
// just reuse the boolean since we know it's true
partial.Organized = &setOrganized
partial.Organized = models.NewOptionalBool(setOrganized)
}
return partial

View File

@ -74,12 +74,12 @@ func TestSceneIdentifier_Identify(t *testing.T) {
mockSceneReaderWriter := &mocks.SceneReaderWriter{}
mockSceneReaderWriter.On("Update", testCtx, mock.MatchedBy(func(partial models.ScenePartial) bool {
return partial.ID != errUpdateID
})).Return(nil, nil)
mockSceneReaderWriter.On("Update", testCtx, mock.MatchedBy(func(partial models.ScenePartial) bool {
return partial.ID == errUpdateID
})).Return(nil, errors.New("update error"))
mockSceneReaderWriter.On("UpdatePartial", testCtx, mock.MatchedBy(func(id int) bool {
return id == errUpdateID
}), mock.Anything).Return(nil, errors.New("update error"))
mockSceneReaderWriter.On("UpdatePartial", testCtx, mock.MatchedBy(func(id int) bool {
return id != errUpdateID
}), mock.Anything).Return(nil, nil)
tests := []struct {
name string
@ -245,26 +245,26 @@ func Test_getFieldOptions(t *testing.T) {
func Test_getScenePartial(t *testing.T) {
var (
originalTitle = "originalTitle"
originalDate = "originalDate"
originalDate = "2001-01-01"
originalDetails = "originalDetails"
originalURL = "originalURL"
)
var (
scrapedTitle = "scrapedTitle"
scrapedDate = "scrapedDate"
scrapedDate = "2002-02-02"
scrapedDetails = "scrapedDetails"
scrapedURL = "scrapedURL"
)
originalDateObj := models.NewDate(originalDate)
scrapedDateObj := models.NewDate(scrapedDate)
originalScene := &models.Scene{
Title: models.NullString(originalTitle),
Date: models.SQLiteDate{
String: originalDate,
Valid: true,
},
Details: models.NullString(originalDetails),
URL: models.NullString(originalURL),
Title: originalTitle,
Date: &originalDateObj,
Details: originalDetails,
URL: originalURL,
}
organisedScene := *originalScene
@ -273,13 +273,10 @@ func Test_getScenePartial(t *testing.T) {
emptyScene := &models.Scene{}
postPartial := models.ScenePartial{
Title: models.NullStringPtr(scrapedTitle),
Date: &models.SQLiteDate{
String: scrapedDate,
Valid: true,
},
Details: models.NullStringPtr(scrapedDetails),
URL: models.NullStringPtr(scrapedURL),
Title: models.NewOptionalString(scrapedTitle),
Date: models.NewOptionalDate(scrapedDateObj),
Details: models.NewOptionalString(scrapedDetails),
URL: models.NewOptionalString(scrapedURL),
}
scrapedScene := &scraper.ScrapedScene{
@ -387,7 +384,7 @@ func Test_getScenePartial(t *testing.T) {
true,
},
models.ScenePartial{
Organized: &setOrganised,
Organized: models.NewOptionalBool(setOrganised),
},
},
{

View File

@ -13,7 +13,7 @@ import (
type PerformerCreator interface {
Create(ctx context.Context, newPerformer models.Performer) (*models.Performer, error)
UpdateStashIDs(ctx context.Context, performerID int, stashIDs []models.StashID) error
UpdateStashIDs(ctx context.Context, performerID int, stashIDs []*models.StashID) error
}
func getPerformerID(ctx context.Context, endpoint string, w PerformerCreator, p *models.ScrapedPerformer, createMissing bool) (*int, error) {
@ -39,7 +39,7 @@ func createMissingPerformer(ctx context.Context, endpoint string, w PerformerCre
}
if endpoint != "" && p.RemoteSiteID != nil {
if err := w.UpdateStashIDs(ctx, created.ID, []models.StashID{
if err := w.UpdateStashIDs(ctx, created.ID, []*models.StashID{
{
Endpoint: endpoint,
StashID: *p.RemoteSiteID,

View File

@ -141,13 +141,13 @@ func Test_createMissingPerformer(t *testing.T) {
return p.Name.String == invalidName
})).Return(nil, errors.New("error creating performer"))
mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []models.StashID{
mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []*models.StashID{
{
Endpoint: invalidEndpoint,
StashID: remoteSiteID,
},
}).Return(errors.New("error updating stash ids"))
mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []models.StashID{
mockPerformerReaderWriter.On("UpdateStashIDs", testCtx, performerID, []*models.StashID{
{
Endpoint: validEndpoint,
StashID: remoteSiteID,

View File

@ -16,9 +16,6 @@ import (
)
type SceneReaderUpdater interface {
GetPerformerIDs(ctx context.Context, sceneID int) ([]int, error)
GetTagIDs(ctx context.Context, sceneID int) ([]int, error)
GetStashIDs(ctx context.Context, sceneID int) ([]*models.StashID, error)
GetCover(ctx context.Context, sceneID int) ([]byte, error)
scene.Updater
}
@ -37,7 +34,7 @@ type sceneRelationships struct {
fieldOptions map[string]*FieldOptions
}
func (g sceneRelationships) studio(ctx context.Context) (*int64, error) {
func (g sceneRelationships) studio(ctx context.Context) (*int, error) {
existingID := g.scene.StudioID
fieldStrategy := g.fieldOptions["studio"]
createMissing := fieldStrategy != nil && utils.IsTrue(fieldStrategy.CreateMissing)
@ -45,19 +42,19 @@ func (g sceneRelationships) studio(ctx context.Context) (*int64, error) {
scraped := g.result.result.Studio
endpoint := g.result.source.RemoteSite
if scraped == nil || !shouldSetSingleValueField(fieldStrategy, existingID.Valid) {
if scraped == nil || !shouldSetSingleValueField(fieldStrategy, existingID != nil) {
return nil, nil
}
if scraped.StoredID != nil {
// existing studio, just set it
studioID, err := strconv.ParseInt(*scraped.StoredID, 10, 64)
studioID, err := strconv.Atoi(*scraped.StoredID)
if err != nil {
return nil, fmt.Errorf("error converting studio ID %s: %w", *scraped.StoredID, err)
}
// only return value if different to current
if existingID.Int64 != studioID {
if existingID == nil || *existingID != studioID {
return &studioID, nil
}
} else if createMissing {
@ -85,10 +82,7 @@ func (g sceneRelationships) performers(ctx context.Context, ignoreMale bool) ([]
endpoint := g.result.source.RemoteSite
var performerIDs []int
originalPerformerIDs, err := g.sceneReader.GetPerformerIDs(ctx, g.scene.ID)
if err != nil {
return nil, fmt.Errorf("error getting scene performers: %w", err)
}
originalPerformerIDs := g.scene.PerformerIDs
if strategy == FieldStrategyMerge {
// add to existing
@ -135,10 +129,7 @@ func (g sceneRelationships) tags(ctx context.Context) ([]int, error) {
}
var tagIDs []int
originalTagIDs, err := g.sceneReader.GetTagIDs(ctx, target.ID)
if err != nil {
return nil, fmt.Errorf("error getting scene tags: %w", err)
}
originalTagIDs := target.TagIDs
if strategy == FieldStrategyMerge {
// add to existing
@ -194,21 +185,13 @@ func (g sceneRelationships) stashIDs(ctx context.Context) ([]models.StashID, err
strategy = fieldStrategy.Strategy
}
var originalStashIDs []models.StashID
var stashIDs []models.StashID
stashIDPtrs, err := g.sceneReader.GetStashIDs(ctx, target.ID)
if err != nil {
return nil, fmt.Errorf("error getting scene tag: %w", err)
}
// convert existing to non-pointer types
for _, stashID := range stashIDPtrs {
originalStashIDs = append(originalStashIDs, *stashID)
}
originalStashIDs := target.StashIDs
if strategy == FieldStrategyMerge {
// add to existing
stashIDs = originalStashIDs
// make a copy so we don't modify the original
stashIDs = append(stashIDs, originalStashIDs...)
}
for i, stashID := range stashIDs {

View File

@ -16,7 +16,7 @@ import (
func Test_sceneRelationships_studio(t *testing.T) {
validStoredID := "1"
var validStoredIDInt int64 = 1
var validStoredIDInt = 1
invalidStoredID := "invalidStoredID"
createMissing := true
@ -39,7 +39,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
scene *models.Scene
fieldOptions *FieldOptions
result *models.ScrapedStudio
want *int64
want *int
wantErr bool
}{
{
@ -75,7 +75,7 @@ func Test_sceneRelationships_studio(t *testing.T) {
{
"same stored id",
&models.Scene{
StudioID: models.NullInt64(validStoredIDInt),
StudioID: &validStoredIDInt,
},
defaultOptions,
&models.ScrapedStudio{
@ -156,19 +156,25 @@ func Test_sceneRelationships_performers(t *testing.T) {
Strategy: FieldStrategyMerge,
}
mockSceneReaderWriter := &mocks.SceneReaderWriter{}
mockSceneReaderWriter.On("GetPerformerIDs", testCtx, sceneID).Return(nil, nil)
mockSceneReaderWriter.On("GetPerformerIDs", testCtx, sceneWithPerformerID).Return([]int{existingPerformerID}, nil)
mockSceneReaderWriter.On("GetPerformerIDs", testCtx, errSceneID).Return(nil, errors.New("error getting IDs"))
emptyScene := &models.Scene{
ID: sceneID,
}
sceneWithPerformer := &models.Scene{
ID: sceneWithPerformerID,
PerformerIDs: []int{
existingPerformerID,
},
}
tr := sceneRelationships{
sceneReader: mockSceneReaderWriter,
sceneReader: &mocks.SceneReaderWriter{},
fieldOptions: make(map[string]*FieldOptions),
}
tests := []struct {
name string
sceneID int
sceneID *models.Scene
fieldOptions *FieldOptions
scraped []*models.ScrapedPerformer
ignoreMale bool
@ -177,7 +183,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}{
{
"ignore",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyIgnore,
},
@ -192,27 +198,16 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
{
"none",
sceneID,
emptyScene,
defaultOptions,
[]*models.ScrapedPerformer{},
false,
nil,
false,
},
{
"error getting ids",
errSceneID,
defaultOptions,
[]*models.ScrapedPerformer{
{},
},
false,
nil,
true,
},
{
"merge existing",
sceneWithPerformerID,
sceneWithPerformer,
defaultOptions,
[]*models.ScrapedPerformer{
{
@ -226,7 +221,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
{
"merge add",
sceneWithPerformerID,
sceneWithPerformer,
defaultOptions,
[]*models.ScrapedPerformer{
{
@ -240,7 +235,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
{
"ignore male",
sceneID,
emptyScene,
defaultOptions,
[]*models.ScrapedPerformer{
{
@ -255,7 +250,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
{
"overwrite",
sceneWithPerformerID,
sceneWithPerformer,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
},
@ -271,7 +266,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
{
"ignore male (not male)",
sceneWithPerformerID,
sceneWithPerformer,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
},
@ -288,7 +283,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
},
{
"error getting tag ID",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
CreateMissing: &createMissing,
@ -306,9 +301,7 @@ func Test_sceneRelationships_performers(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tr.scene = &models.Scene{
ID: tt.sceneID,
}
tr.scene = tt.sceneID
tr.fieldOptions["performers"] = tt.fieldOptions
tr.result = &scrapeResult{
result: &scraper.ScrapedScene{
@ -347,11 +340,19 @@ func Test_sceneRelationships_tags(t *testing.T) {
Strategy: FieldStrategyMerge,
}
emptyScene := &models.Scene{
ID: sceneID,
}
sceneWithTag := &models.Scene{
ID: sceneWithTagID,
TagIDs: []int{
existingID,
},
}
mockSceneReaderWriter := &mocks.SceneReaderWriter{}
mockTagReaderWriter := &mocks.TagReaderWriter{}
mockSceneReaderWriter.On("GetTagIDs", testCtx, sceneID).Return(nil, nil)
mockSceneReaderWriter.On("GetTagIDs", testCtx, sceneWithTagID).Return([]int{existingID}, nil)
mockSceneReaderWriter.On("GetTagIDs", testCtx, errSceneID).Return(nil, errors.New("error getting IDs"))
mockTagReaderWriter.On("Create", testCtx, mock.MatchedBy(func(p models.Tag) bool {
return p.Name == validName
@ -370,7 +371,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
tests := []struct {
name string
sceneID int
scene *models.Scene
fieldOptions *FieldOptions
scraped []*models.ScrapedTag
want []int
@ -378,7 +379,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}{
{
"ignore",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyIgnore,
},
@ -392,25 +393,15 @@ func Test_sceneRelationships_tags(t *testing.T) {
},
{
"none",
sceneID,
emptyScene,
defaultOptions,
[]*models.ScrapedTag{},
nil,
false,
},
{
"error getting ids",
errSceneID,
defaultOptions,
[]*models.ScrapedTag{
{},
},
nil,
true,
},
{
"merge existing",
sceneWithTagID,
sceneWithTag,
defaultOptions,
[]*models.ScrapedTag{
{
@ -423,7 +414,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
},
{
"merge add",
sceneWithTagID,
sceneWithTag,
defaultOptions,
[]*models.ScrapedTag{
{
@ -436,7 +427,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
},
{
"overwrite",
sceneWithTagID,
sceneWithTag,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
},
@ -451,7 +442,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
},
{
"error getting tag ID",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
},
@ -466,7 +457,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
},
{
"create missing",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
CreateMissing: &createMissing,
@ -481,7 +472,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
},
{
"error creating",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
CreateMissing: &createMissing,
@ -497,9 +488,7 @@ func Test_sceneRelationships_tags(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tr.scene = &models.Scene{
ID: tt.sceneID,
}
tr.scene = tt.scene
tr.fieldOptions["tags"] = tt.fieldOptions
tr.result = &scrapeResult{
result: &scraper.ScrapedScene{
@ -536,15 +525,21 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
Strategy: FieldStrategyMerge,
}
mockSceneReaderWriter := &mocks.SceneReaderWriter{}
mockSceneReaderWriter.On("GetStashIDs", testCtx, sceneID).Return(nil, nil)
mockSceneReaderWriter.On("GetStashIDs", testCtx, sceneWithStashID).Return([]*models.StashID{
{
StashID: remoteSiteID,
Endpoint: existingEndpoint,
emptyScene := &models.Scene{
ID: sceneID,
}
sceneWithStashIDs := &models.Scene{
ID: sceneWithStashID,
StashIDs: []models.StashID{
{
StashID: remoteSiteID,
Endpoint: existingEndpoint,
},
},
}, nil)
mockSceneReaderWriter.On("GetStashIDs", testCtx, errSceneID).Return(nil, errors.New("error getting IDs"))
}
mockSceneReaderWriter := &mocks.SceneReaderWriter{}
tr := sceneRelationships{
sceneReader: mockSceneReaderWriter,
@ -553,7 +548,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
tests := []struct {
name string
sceneID int
scene *models.Scene
fieldOptions *FieldOptions
endpoint string
remoteSiteID *string
@ -562,7 +557,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}{
{
"ignore",
sceneID,
emptyScene,
&FieldOptions{
Strategy: FieldStrategyIgnore,
},
@ -573,7 +568,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
},
{
"no endpoint",
sceneID,
emptyScene,
defaultOptions,
"",
&remoteSiteID,
@ -582,25 +577,16 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
},
{
"no site id",
sceneID,
emptyScene,
defaultOptions,
newEndpoint,
nil,
nil,
false,
},
{
"error getting ids",
errSceneID,
defaultOptions,
newEndpoint,
&remoteSiteID,
nil,
true,
},
{
"merge existing",
sceneWithStashID,
sceneWithStashIDs,
defaultOptions,
existingEndpoint,
&remoteSiteID,
@ -609,7 +595,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
},
{
"merge existing new value",
sceneWithStashID,
sceneWithStashIDs,
defaultOptions,
existingEndpoint,
&newRemoteSiteID,
@ -623,7 +609,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
},
{
"merge add",
sceneWithStashID,
sceneWithStashIDs,
defaultOptions,
newEndpoint,
&newRemoteSiteID,
@ -641,7 +627,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
},
{
"overwrite",
sceneWithStashID,
sceneWithStashIDs,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
},
@ -657,7 +643,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
},
{
"overwrite same",
sceneWithStashID,
sceneWithStashIDs,
&FieldOptions{
Strategy: FieldStrategyOverwrite,
},
@ -669,9 +655,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
tr.scene = &models.Scene{
ID: tt.sceneID,
}
tr.scene = tt.scene
tr.fieldOptions["stash_ids"] = tt.fieldOptions
tr.result = &scrapeResult{
source: ScraperSource{
@ -688,7 +672,7 @@ func Test_sceneRelationships_stashIDs(t *testing.T) {
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("sceneRelationships.stashIDs() = %v, want %v", got, tt.want)
t.Errorf("sceneRelationships.stashIDs() = %+v, want %+v", got, tt.want)
}
})
}

View File

@ -12,17 +12,17 @@ import (
type StudioCreator interface {
Create(ctx context.Context, newStudio models.Studio) (*models.Studio, error)
UpdateStashIDs(ctx context.Context, studioID int, stashIDs []models.StashID) error
UpdateStashIDs(ctx context.Context, studioID int, stashIDs []*models.StashID) error
}
func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator, studio *models.ScrapedStudio) (*int64, error) {
func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator, studio *models.ScrapedStudio) (*int, error) {
created, err := w.Create(ctx, scrapedToStudioInput(studio))
if err != nil {
return nil, fmt.Errorf("error creating studio: %w", err)
}
if endpoint != "" && studio.RemoteSiteID != nil {
if err := w.UpdateStashIDs(ctx, created.ID, []models.StashID{
if err := w.UpdateStashIDs(ctx, created.ID, []*models.StashID{
{
Endpoint: endpoint,
StashID: *studio.RemoteSiteID,
@ -32,8 +32,7 @@ func createMissingStudio(ctx context.Context, endpoint string, w StudioCreator,
}
}
createdID := int64(created.ID)
return &createdID, nil
return &created.ID, nil
}
func scrapedToStudioInput(studio *models.ScrapedStudio) models.Studio {

View File

@ -18,7 +18,6 @@ func Test_createMissingStudio(t *testing.T) {
validName := "validName"
invalidName := "invalidName"
createdID := 1
createdID64 := int64(createdID)
repo := mocks.NewTxnRepository()
mockStudioReaderWriter := repo.Studio.(*mocks.StudioReaderWriter)
@ -31,13 +30,13 @@ func Test_createMissingStudio(t *testing.T) {
return p.Name.String == invalidName
})).Return(nil, errors.New("error creating performer"))
mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []models.StashID{
mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []*models.StashID{
{
Endpoint: invalidEndpoint,
StashID: remoteSiteID,
},
}).Return(errors.New("error updating stash ids"))
mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []models.StashID{
mockStudioReaderWriter.On("UpdateStashIDs", testCtx, createdID, []*models.StashID{
{
Endpoint: validEndpoint,
StashID: remoteSiteID,
@ -51,7 +50,7 @@ func Test_createMissingStudio(t *testing.T) {
tests := []struct {
name string
args args
want *int64
want *int
wantErr bool
}{
{
@ -62,7 +61,7 @@ func Test_createMissingStudio(t *testing.T) {
Name: validName,
},
},
&createdID64,
&createdID,
false,
},
{
@ -85,7 +84,7 @@ func Test_createMissingStudio(t *testing.T) {
RemoteSiteID: &remoteSiteID,
},
},
&createdID64,
&createdID,
false,
},
{
@ -109,7 +108,7 @@ func Test_createMissingStudio(t *testing.T) {
return
}
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("createMissingStudio() = %v, want %v", got, tt.want)
t.Errorf("createMissingStudio() = %d, want %d", got, tt.want)
}
})
}

View File

@ -2,7 +2,6 @@ package manager
import (
"context"
"database/sql"
"errors"
"path/filepath"
"regexp"
@ -238,9 +237,10 @@ type sceneHolder struct {
func newSceneHolder(scene *models.Scene) *sceneHolder {
sceneCopy := models.Scene{
ID: scene.ID,
Checksum: scene.Checksum,
Path: scene.Path,
ID: scene.ID,
Files: scene.Files,
// Checksum: scene.Checksum,
// Path: scene.Path,
}
ret := sceneHolder{
scene: scene,
@ -307,11 +307,9 @@ func (h *sceneHolder) setDate(field *parserField, value string) {
// ensure the date is valid
// only set if new value is different from the old
if validateDate(fullDate) && h.scene.Date.String != fullDate {
h.result.Date = models.SQLiteDate{
String: fullDate,
Valid: true,
}
if validateDate(fullDate) && h.scene.Date != nil && h.scene.Date.String() != fullDate {
d := models.NewDate(fullDate)
h.result.Date = &d
}
}
@ -337,24 +335,17 @@ func (h *sceneHolder) setField(field parserField, value interface{}) {
switch field.field {
case "title":
h.result.Title = sql.NullString{
String: value.(string),
Valid: true,
}
v := value.(string)
h.result.Title = v
case "date":
if validateDate(value.(string)) {
h.result.Date = models.SQLiteDate{
String: value.(string),
Valid: true,
}
d := models.NewDate(value.(string))
h.result.Date = &d
}
case "rating":
rating, _ := strconv.Atoi(value.(string))
if validateRating(rating) {
h.result.Rating = sql.NullInt64{
Int64: int64(rating),
Valid: true,
}
h.result.Rating = &rating
}
case "performer":
// add performer to list
@ -394,9 +385,9 @@ func (m parseMapper) parse(scene *models.Scene) *sceneHolder {
// scene path in the match. Otherwise, use the default behaviour of just
// the file's basename
// must be double \ because of the regex escaping
filename := filepath.Base(scene.Path)
filename := filepath.Base(scene.Path())
if strings.Contains(m.regexString, `\\`) || strings.Contains(m.regexString, "/") {
filename = scene.Path
filename = scene.Path()
}
result := m.regex.FindStringSubmatch(filename)
@ -694,8 +685,8 @@ func (p *SceneFilenameParser) setMovies(ctx context.Context, qb MovieNameFinder,
}
func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFilenameParserRepository, h sceneHolder, result *SceneParserResult) {
if h.result.Title.Valid {
title := h.result.Title.String
if h.result.Title != "" {
title := h.result.Title
title = p.replaceWhitespaceCharacters(title)
if p.ParserInput.CapitalizeTitle != nil && *p.ParserInput.CapitalizeTitle {
@ -705,13 +696,13 @@ func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFil
result.Title = &title
}
if h.result.Date.Valid {
result.Date = &h.result.Date.String
if h.result.Date != nil {
dateStr := h.result.Date.String()
result.Date = &dateStr
}
if h.result.Rating.Valid {
rating := int(h.result.Rating.Int64)
result.Rating = &rating
if h.result.Rating != nil {
result.Rating = h.result.Rating
}
if len(h.performers) > 0 {
@ -725,5 +716,4 @@ func (p *SceneFilenameParser) setParserResult(ctx context.Context, repo SceneFil
if len(h.movies) > 0 {
p.setMovies(ctx, repo.Movie, h, result)
}
}

View File

@ -0,0 +1,88 @@
package manager
import (
"errors"
"fmt"
"io"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/hash/oshash"
)
type fingerprintCalculator struct {
Config *config.Instance
}
func (c *fingerprintCalculator) calculateOshash(f *file.BaseFile, o file.Opener) (*file.Fingerprint, error) {
r, err := o.Open()
if err != nil {
return nil, fmt.Errorf("opening file: %w", err)
}
defer r.Close()
rc, isRC := r.(io.ReadSeeker)
if !isRC {
return nil, errors.New("cannot calculate oshash for non-readcloser")
}
hash, err := oshash.FromReader(rc, f.Size)
if err != nil {
return nil, fmt.Errorf("calculating oshash: %w", err)
}
return &file.Fingerprint{
Type: file.FingerprintTypeOshash,
Fingerprint: hash,
}, nil
}
func (c *fingerprintCalculator) calculateMD5(o file.Opener) (*file.Fingerprint, error) {
r, err := o.Open()
if err != nil {
return nil, fmt.Errorf("opening file: %w", err)
}
defer r.Close()
hash, err := md5.FromReader(r)
if err != nil {
return nil, fmt.Errorf("calculating md5: %w", err)
}
return &file.Fingerprint{
Type: file.FingerprintTypeMD5,
Fingerprint: hash,
}, nil
}
func (c *fingerprintCalculator) CalculateFingerprints(f *file.BaseFile, o file.Opener) ([]file.Fingerprint, error) {
var ret []file.Fingerprint
calculateMD5 := true
if isVideo(f.Basename) {
// calculate oshash first
fp, err := c.calculateOshash(f, o)
if err != nil {
return nil, err
}
ret = append(ret, *fp)
// only calculate MD5 if enabled in config
calculateMD5 = c.Config.IsCalculateMD5()
}
if calculateMD5 {
fp, err := c.calculateMD5(o)
if err != nil {
return nil, err
}
ret = append(ret, *fp)
}
return ret, nil
}

View File

@ -8,10 +8,11 @@ import (
)
func DeleteGalleryFile(gallery *models.Gallery) {
if gallery.Path.Valid {
err := os.Remove(gallery.Path.String)
path := gallery.Path()
if path != "" {
err := os.Remove(path)
if err != nil {
logger.Warnf("Could not delete file %s: %s", gallery.Path.String, err.Error())
logger.Warnf("Could not delete file %s: %s", path, err.Error())
}
}
}

View File

@ -14,7 +14,7 @@ import (
)
type InteractiveHeatmapSpeedGenerator struct {
InteractiveSpeed int64
InteractiveSpeed int
Funscript Script
FunscriptPath string
HeatmapPath string
@ -175,7 +175,7 @@ func (g *InteractiveHeatmapSpeedGenerator) RenderHeatmap() error {
return err
}
func (funscript *Script) CalculateMedian() int64 {
func (funscript *Script) CalculateMedian() int {
sort.Slice(funscript.Actions, func(i, j int) bool {
return funscript.Actions[i].Speed < funscript.Actions[j].Speed
})
@ -183,10 +183,10 @@ func (funscript *Script) CalculateMedian() int64 {
mNumber := len(funscript.Actions) / 2
if len(funscript.Actions)%2 != 0 {
return int64(funscript.Actions[mNumber].Speed)
return int(funscript.Actions[mNumber].Speed)
}
return int64((funscript.Actions[mNumber-1].Speed + funscript.Actions[mNumber].Speed) / 2)
return int((funscript.Actions[mNumber-1].Speed + funscript.Actions[mNumber].Speed) / 2)
}
func (gt GradientTable) GetInterpolatedColorFor(t float64) colorful.Color {

View File

@ -1,59 +0,0 @@
package manager
import (
"archive/zip"
"strings"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
)
func walkGalleryZip(path string, walkFunc func(file *zip.File) error) error {
readCloser, err := zip.OpenReader(path)
if err != nil {
return err
}
defer readCloser.Close()
excludeImgRegex := generateRegexps(config.GetInstance().GetImageExcludes())
for _, f := range readCloser.File {
if f.FileInfo().IsDir() {
continue
}
if strings.Contains(f.Name, "__MACOSX") {
continue
}
if !isImage(f.Name) {
continue
}
if matchFileRegex(file.ZipFile(path, f).Path(), excludeImgRegex) {
continue
}
err := walkFunc(f)
if err != nil {
return err
}
}
return nil
}
func countImagesInZip(path string) int {
ret := 0
err := walkGalleryZip(path, func(file *zip.File) error {
ret++
return nil
})
if err != nil {
logger.Warnf("Error while walking gallery zip: %v", err)
}
return ret
}

View File

@ -18,17 +18,27 @@ import (
"github.com/stashapp/stash/internal/log"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
file_image "github.com/stashapp/stash/pkg/file/image"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/models/paths"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/scraper"
"github.com/stashapp/stash/pkg/session"
"github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/utils"
"github.com/stashapp/stash/ui"
// register custom migrations
_ "github.com/stashapp/stash/pkg/sqlite/migrations"
)
type SystemStatus struct {
@ -115,7 +125,14 @@ type Manager struct {
DLNAService *dlna.Service
Database *sqlite.Database
Repository models.Repository
Repository Repository
SceneService SceneService
ImageService ImageService
GalleryService GalleryService
Scanner *file.Scanner
Cleaner *file.Cleaner
scanSubs *subscriptionManager
}
@ -150,7 +167,7 @@ func initialize() error {
l := initLog()
initProfiling(cfg.GetCPUProfilePath())
db := &sqlite.Database{}
db := sqlite.NewDatabase()
instance = &Manager{
Config: cfg,
@ -159,24 +176,29 @@ func initialize() error {
DownloadStore: NewDownloadStore(),
PluginCache: plugin.NewCache(cfg),
Database: db,
Repository: models.Repository{
TxnManager: db,
Gallery: sqlite.GalleryReaderWriter,
Image: sqlite.ImageReaderWriter,
Movie: sqlite.MovieReaderWriter,
Performer: sqlite.PerformerReaderWriter,
Scene: sqlite.SceneReaderWriter,
SceneMarker: sqlite.SceneMarkerReaderWriter,
ScrapedItem: sqlite.ScrapedItemReaderWriter,
Studio: sqlite.StudioReaderWriter,
Tag: sqlite.TagReaderWriter,
SavedFilter: sqlite.SavedFilterReaderWriter,
},
Database: db,
Repository: sqliteRepository(db),
scanSubs: &subscriptionManager{},
}
instance.SceneService = &scene.Service{
File: db.File,
Repository: db.Scene,
MarkerDestroyer: instance.Repository.SceneMarker,
}
instance.ImageService = &image.Service{
File: db.File,
Repository: db.Image,
}
instance.GalleryService = &gallery.Service{
Repository: db.Gallery,
ImageFinder: db.Image,
ImageService: instance.ImageService,
}
instance.JobManager = initJobManager()
sceneServer := SceneServer{
@ -200,13 +222,15 @@ func initialize() error {
}
if err != nil {
panic(fmt.Sprintf("error initializing configuration: %s", err.Error()))
} else if err := instance.PostInit(ctx); err != nil {
return fmt.Errorf("error initializing configuration: %w", err)
}
if err := instance.PostInit(ctx); err != nil {
var migrationNeededErr *sqlite.MigrationNeededError
if errors.As(err, &migrationNeededErr) {
logger.Warn(err.Error())
} else {
panic(err)
return err
}
}
@ -228,6 +252,9 @@ func initialize() error {
logger.Warnf("could not initialize FFMPEG subsystem: %v", err)
}
instance.Scanner = makeScanner(db, instance.PluginCache)
instance.Cleaner = makeCleaner(db, instance.PluginCache)
// if DLNA is enabled, start it now
if instance.Config.GetDLNADefaultEnabled() {
if err := instance.DLNAService.Start(nil); err != nil {
@ -238,6 +265,71 @@ func initialize() error {
return nil
}
func videoFileFilter(f file.File) bool {
return isVideo(f.Base().Basename)
}
func imageFileFilter(f file.File) bool {
return isImage(f.Base().Basename)
}
func galleryFileFilter(f file.File) bool {
return isZip(f.Base().Basename)
}
type coverGenerator struct {
}
func (g *coverGenerator) GenerateCover(ctx context.Context, scene *models.Scene, f *file.VideoFile) error {
gg := generate.Generator{
Encoder: instance.FFMPEG,
LockManager: instance.ReadLockManager,
ScenePaths: instance.Paths.Scene,
}
return gg.Screenshot(ctx, f.Path, scene.GetHash(instance.Config.GetVideoFileNamingAlgorithm()), f.Width, f.Duration, generate.ScreenshotOptions{})
}
func makeScanner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Scanner {
return &file.Scanner{
Repository: file.Repository{
Manager: db,
DatabaseProvider: db,
Store: db.File,
FolderStore: db.Folder,
},
FileDecorators: []file.Decorator{
&file.FilteredDecorator{
Decorator: &video.Decorator{
FFProbe: instance.FFProbe,
},
Filter: file.FilterFunc(videoFileFilter),
},
&file.FilteredDecorator{
Decorator: &file_image.Decorator{},
Filter: file.FilterFunc(imageFileFilter),
},
},
FingerprintCalculator: &fingerprintCalculator{instance.Config},
FS: &file.OsFS{},
}
}
func makeCleaner(db *sqlite.Database, pluginCache *plugin.Cache) *file.Cleaner {
return &file.Cleaner{
FS: &file.OsFS{},
Repository: file.Repository{
Manager: db,
DatabaseProvider: db,
Store: db.File,
FolderStore: db.Folder,
},
Handlers: []file.CleanHandler{
&cleanHandler{},
},
}
}
func initJobManager() *job.Manager {
ret := job.NewManager()
@ -370,8 +462,12 @@ func (s *Manager) PostInit(ctx context.Context) error {
if err := fsutil.EmptyDir(instance.Paths.Generated.Downloads); err != nil {
logger.Warnf("could not empty Downloads directory: %v", err)
}
if err := fsutil.EmptyDir(instance.Paths.Generated.Tmp); err != nil {
logger.Warnf("could not empty Tmp directory: %v", err)
if err := fsutil.EnsureDir(instance.Paths.Generated.Tmp); err != nil {
logger.Warnf("could not create Tmp directory: %v", err)
} else {
if err := fsutil.EmptyDir(instance.Paths.Generated.Tmp); err != nil {
logger.Warnf("could not empty Tmp directory: %v", err)
}
}
}, deleteTimeout, func(done chan struct{}) {
logger.Info("Please wait. Deleting temporary files...") // print
@ -526,6 +622,8 @@ func (s *Manager) Setup(ctx context.Context, input SetupInput) error {
return fmt.Errorf("error initializing FFMPEG subsystem: %v", err)
}
instance.Scanner = makeScanner(instance.Database, instance.PluginCache)
return nil
}

View File

@ -13,18 +13,13 @@ import (
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
func isGallery(pathname string) bool {
func isZip(pathname string) bool {
gExt := config.GetInstance().GetGalleryExtensions()
return fsutil.MatchExtension(pathname, gExt)
}
func isCaptions(pathname string) bool {
return fsutil.MatchExtension(pathname, scene.CaptionExts)
}
func isVideo(pathname string) bool {
vidExt := config.GetInstance().GetVideoExtensions()
return fsutil.MatchExtension(pathname, vidExt)
@ -36,13 +31,15 @@ func isImage(pathname string) bool {
}
func getScanPaths(inputPaths []string) []*config.StashConfig {
stashPaths := config.GetInstance().GetStashPaths()
if len(inputPaths) == 0 {
return config.GetInstance().GetStashPaths()
return stashPaths
}
var ret []*config.StashConfig
for _, p := range inputPaths {
s := getStashFromDirPath(p)
s := getStashFromDirPath(stashPaths, p)
if s == nil {
logger.Warnf("%s is not in the configured stash paths", p)
continue
@ -84,7 +81,7 @@ func (s *Manager) Scan(ctx context.Context, input ScanMetadataInput) (int, error
}
scanJob := ScanJob{
txnManager: s.Repository,
scanner: s.Scanner,
input: input,
subscriptions: s.scanSubs,
}
@ -237,9 +234,12 @@ type CleanMetadataInput struct {
func (s *Manager) Clean(ctx context.Context, input CleanMetadataInput) int {
j := cleanJob{
txnManager: s.Repository,
input: input,
scanSubs: s.scanSubs,
cleaner: s.Cleaner,
txnManager: s.Repository,
sceneService: s.SceneService,
imageService: s.ImageService,
input: input,
scanSubs: s.scanSubs,
}
return s.JobManager.Add(ctx, "Cleaning...", &j)

View File

@ -0,0 +1,93 @@
package manager
import (
"context"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/sqlite"
"github.com/stashapp/stash/pkg/txn"
)
type ImageReaderWriter interface {
models.ImageReaderWriter
image.FinderCreatorUpdater
}
type GalleryReaderWriter interface {
models.GalleryReaderWriter
gallery.FinderCreatorUpdater
}
type SceneReaderWriter interface {
models.SceneReaderWriter
scene.CreatorUpdater
}
type FileReaderWriter interface {
file.Store
file.Finder
Query(ctx context.Context, options models.FileQueryOptions) (*models.FileQueryResult, error)
GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
}
type FolderReaderWriter interface {
file.FolderStore
Find(ctx context.Context, id file.FolderID) (*file.Folder, error)
}
type Repository struct {
models.TxnManager
File FileReaderWriter
Folder FolderReaderWriter
Gallery GalleryReaderWriter
Image ImageReaderWriter
Movie models.MovieReaderWriter
Performer models.PerformerReaderWriter
Scene SceneReaderWriter
SceneMarker models.SceneMarkerReaderWriter
ScrapedItem models.ScrapedItemReaderWriter
Studio models.StudioReaderWriter
Tag models.TagReaderWriter
SavedFilter models.SavedFilterReaderWriter
}
func (r *Repository) WithTxn(ctx context.Context, fn txn.TxnFunc) error {
return txn.WithTxn(ctx, r, fn)
}
func sqliteRepository(d *sqlite.Database) Repository {
txnRepo := d.TxnRepository()
return Repository{
TxnManager: txnRepo,
File: d.File,
Folder: d.Folder,
Gallery: d.Gallery,
Image: d.Image,
Movie: txnRepo.Movie,
Performer: txnRepo.Performer,
Scene: d.Scene,
SceneMarker: txnRepo.SceneMarker,
ScrapedItem: txnRepo.ScrapedItem,
Studio: txnRepo.Studio,
Tag: txnRepo.Tag,
SavedFilter: txnRepo.SavedFilter,
}
}
type SceneService interface {
Destroy(ctx context.Context, scene *models.Scene, fileDeleter *scene.FileDeleter, deleteGenerated, deleteFile bool) error
}
type ImageService interface {
Destroy(ctx context.Context, image *models.Image, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) error
}
type GalleryService interface {
Destroy(ctx context.Context, i *models.Gallery, fileDeleter *image.FileDeleter, deleteGenerated, deleteFile bool) ([]*models.Image, error)
}

View File

@ -38,7 +38,7 @@ func (c *StreamRequestContext) Cancel() {
}
func KillRunningStreams(scene *models.Scene, fileNamingAlgo models.HashAlgorithm) {
instance.ReadLockManager.Cancel(scene.Path)
instance.ReadLockManager.Cancel(scene.Path())
sceneHash := scene.GetHash(fileNamingAlgo)
@ -62,7 +62,7 @@ type SceneServer struct {
func (s *SceneServer) StreamSceneDirect(scene *models.Scene, w http.ResponseWriter, r *http.Request) {
fileNamingAlgo := config.GetInstance().GetVideoFileNamingAlgorithm()
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path, scene.GetHash(fileNamingAlgo))
filepath := GetInstance().Paths.Scene.GetStreamPath(scene.Path(), scene.GetHash(fileNamingAlgo))
streamRequestCtx := NewStreamRequestContext(w, r)
// #2579 - hijacking and closing the connection here causes video playback to fail in Safari

View File

@ -11,17 +11,18 @@ import (
func GetSceneFileContainer(scene *models.Scene) (ffmpeg.Container, error) {
var container ffmpeg.Container
if scene.Format.Valid {
container = ffmpeg.Container(scene.Format.String)
format := scene.Format()
if format != "" {
container = ffmpeg.Container(format)
} else { // container isn't in the DB
// shouldn't happen, fallback to ffprobe
ffprobe := GetInstance().FFProbe
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path)
tmpVideoFile, err := ffprobe.NewVideoFile(scene.Path())
if err != nil {
return ffmpeg.Container(""), fmt.Errorf("error reading video file: %v", err)
}
return ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path)
return ffmpeg.MatchContainer(tmpVideoFile.Container, scene.Path())
}
return container, nil
@ -32,7 +33,7 @@ func includeSceneStreamPath(scene *models.Scene, streamingResolution models.Stre
// resolution
convertedRes := models.ResolutionEnum(streamingResolution)
minResolution := int64(convertedRes.GetMinResolution())
minResolution := convertedRes.GetMinResolution()
sceneResolution := scene.GetMinResolution()
// don't include if scene resolution is smaller than the streamingResolution
@ -47,7 +48,7 @@ func includeSceneStreamPath(scene *models.Scene, streamingResolution models.Stre
// convert StreamingResolutionEnum to ResolutionEnum
maxStreamingResolution := models.ResolutionEnum(maxStreamingTranscodeSize)
return int64(maxStreamingResolution.GetMinResolution()) >= minResolution
return maxStreamingResolution.GetMinResolution() >= minResolution
}
type SceneStreamEndpoint struct {
@ -79,8 +80,8 @@ func GetSceneStreamPaths(scene *models.Scene, directStreamURL string, maxStreami
// direct stream should only apply when the audio codec is supported
audioCodec := ffmpeg.MissingUnsupported
if scene.AudioCodec.Valid {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec.String)
if scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(scene.AudioCodec())
}
// don't care if we can't get the container

View File

@ -19,7 +19,7 @@ import (
)
type autoTagJob struct {
txnManager models.Repository
txnManager Repository
input AutoTagMetadataInput
cache match.Cache
@ -165,13 +165,13 @@ func (j *autoTagJob) autoTagPerformers(ctx context.Context, progress *job.Progre
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
r := j.txnManager
if err := autotag.PerformerScenes(ctx, performer, paths, r.Scene, &j.cache); err != nil {
return err
return fmt.Errorf("processing scenes: %w", err)
}
if err := autotag.PerformerImages(ctx, performer, paths, r.Image, &j.cache); err != nil {
return err
return fmt.Errorf("processing images: %w", err)
}
if err := autotag.PerformerGalleries(ctx, performer, paths, r.Gallery, &j.cache); err != nil {
return err
return fmt.Errorf("processing galleries: %w", err)
}
return nil
@ -241,17 +241,17 @@ func (j *autoTagJob) autoTagStudios(ctx context.Context, progress *job.Progress,
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
aliases, err := r.Studio.GetAliases(ctx, studio.ID)
if err != nil {
return err
return fmt.Errorf("getting studio aliases: %w", err)
}
if err := autotag.StudioScenes(ctx, studio, paths, aliases, r.Scene, &j.cache); err != nil {
return err
return fmt.Errorf("processing scenes: %w", err)
}
if err := autotag.StudioImages(ctx, studio, paths, aliases, r.Image, &j.cache); err != nil {
return err
return fmt.Errorf("processing images: %w", err)
}
if err := autotag.StudioGalleries(ctx, studio, paths, aliases, r.Gallery, &j.cache); err != nil {
return err
return fmt.Errorf("processing galleries: %w", err)
}
return nil
@ -315,17 +315,17 @@ func (j *autoTagJob) autoTagTags(ctx context.Context, progress *job.Progress, pa
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
aliases, err := r.Tag.GetAliases(ctx, tag.ID)
if err != nil {
return err
return fmt.Errorf("getting tag aliases: %w", err)
}
if err := autotag.TagScenes(ctx, tag, paths, aliases, r.Scene, &j.cache); err != nil {
return err
return fmt.Errorf("processing scenes: %w", err)
}
if err := autotag.TagImages(ctx, tag, paths, aliases, r.Image, &j.cache); err != nil {
return err
return fmt.Errorf("processing images: %w", err)
}
if err := autotag.TagGalleries(ctx, tag, paths, aliases, r.Gallery, &j.cache); err != nil {
return err
return fmt.Errorf("processing galleries: %w", err)
}
return nil
@ -351,7 +351,7 @@ type autoTagFilesTask struct {
tags bool
progress *job.Progress
txnManager models.Repository
txnManager Repository
cache *match.Cache
}
@ -431,7 +431,7 @@ func (t *autoTagFilesTask) makeGalleryFilter() *models.GalleryFilterType {
return ret
}
func (t *autoTagFilesTask) getCount(ctx context.Context, r models.Repository) (int, error) {
func (t *autoTagFilesTask) getCount(ctx context.Context, r Repository) (int, error) {
pp := 0
findFilter := &models.FindFilterType{
PerPage: &pp,
@ -445,7 +445,7 @@ func (t *autoTagFilesTask) getCount(ctx context.Context, r models.Repository) (i
SceneFilter: t.makeSceneFilter(),
})
if err != nil {
return 0, err
return 0, fmt.Errorf("getting scene count: %w", err)
}
sceneCount := sceneResults.Count
@ -458,20 +458,20 @@ func (t *autoTagFilesTask) getCount(ctx context.Context, r models.Repository) (i
ImageFilter: t.makeImageFilter(),
})
if err != nil {
return 0, err
return 0, fmt.Errorf("getting image count: %w", err)
}
imageCount := imageResults.Count
_, galleryCount, err := r.Gallery.Query(ctx, t.makeGalleryFilter(), findFilter)
if err != nil {
return 0, err
return 0, fmt.Errorf("getting gallery count: %w", err)
}
return sceneCount + imageCount + galleryCount, nil
}
func (t *autoTagFilesTask) processScenes(ctx context.Context, r models.Repository) error {
func (t *autoTagFilesTask) processScenes(ctx context.Context, r Repository) error {
if job.IsCancelled(ctx) {
return nil
}
@ -483,9 +483,13 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r models.Repositor
more := true
for more {
scenes, err := scene.Query(ctx, r.Scene, sceneFilter, findFilter)
if err != nil {
var scenes []*models.Scene
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
scenes, err = scene.Query(ctx, r.Scene, sceneFilter, findFilter)
return err
}); err != nil {
return fmt.Errorf("querying scenes: %w", err)
}
for _, ss := range scenes {
@ -524,7 +528,7 @@ func (t *autoTagFilesTask) processScenes(ctx context.Context, r models.Repositor
return nil
}
func (t *autoTagFilesTask) processImages(ctx context.Context, r models.Repository) error {
func (t *autoTagFilesTask) processImages(ctx context.Context, r Repository) error {
if job.IsCancelled(ctx) {
return nil
}
@ -536,9 +540,13 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r models.Repositor
more := true
for more {
images, err := image.Query(ctx, r.Image, imageFilter, findFilter)
if err != nil {
var images []*models.Image
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
images, err = image.Query(ctx, r.Image, imageFilter, findFilter)
return err
}); err != nil {
return fmt.Errorf("querying images: %w", err)
}
for _, ss := range images {
@ -577,7 +585,7 @@ func (t *autoTagFilesTask) processImages(ctx context.Context, r models.Repositor
return nil
}
func (t *autoTagFilesTask) processGalleries(ctx context.Context, r models.Repository) error {
func (t *autoTagFilesTask) processGalleries(ctx context.Context, r Repository) error {
if job.IsCancelled(ctx) {
return nil
}
@ -589,9 +597,13 @@ func (t *autoTagFilesTask) processGalleries(ctx context.Context, r models.Reposi
more := true
for more {
galleries, _, err := r.Gallery.Query(ctx, galleryFilter, findFilter)
if err != nil {
var galleries []*models.Gallery
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
galleries, _, err = r.Gallery.Query(ctx, galleryFilter, findFilter)
return err
}); err != nil {
return fmt.Errorf("querying galleries: %w", err)
}
for _, ss := range galleries {
@ -639,36 +651,39 @@ func (t *autoTagFilesTask) process(ctx context.Context) {
}
t.progress.SetTotal(total)
logger.Infof("Starting autotag of %d files", total)
logger.Info("Autotagging scenes...")
if err := t.processScenes(ctx, r); err != nil {
return err
}
logger.Info("Autotagging images...")
if err := t.processImages(ctx, r); err != nil {
return err
}
logger.Info("Autotagging galleries...")
if err := t.processGalleries(ctx, r); err != nil {
return err
}
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
}
return nil
}); err != nil {
logger.Error(err.Error())
logger.Errorf("error getting count for autotag task: %v", err)
return
}
logger.Info("Autotagging scenes...")
if err := t.processScenes(ctx, r); err != nil {
logger.Errorf("error processing scenes: %w", err)
return
}
logger.Info("Autotagging images...")
if err := t.processImages(ctx, r); err != nil {
logger.Errorf("error processing images: %w", err)
return
}
logger.Info("Autotagging galleries...")
if err := t.processGalleries(ctx, r); err != nil {
logger.Errorf("error processing galleries: %w", err)
return
}
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
}
}
type autoTagSceneTask struct {
txnManager models.Repository
txnManager Repository
scene *models.Scene
performers bool
@ -684,17 +699,17 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers {
if err := autotag.ScenePerformers(ctx, t.scene, r.Scene, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path, err)
return fmt.Errorf("error tagging scene performers for %s: %v", t.scene.Path(), err)
}
}
if t.studios {
if err := autotag.SceneStudios(ctx, t.scene, r.Scene, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path, err)
return fmt.Errorf("error tagging scene studio for %s: %v", t.scene.Path(), err)
}
}
if t.tags {
if err := autotag.SceneTags(ctx, t.scene, r.Scene, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path, err)
return fmt.Errorf("error tagging scene tags for %s: %v", t.scene.Path(), err)
}
}
@ -705,7 +720,7 @@ func (t *autoTagSceneTask) Start(ctx context.Context, wg *sync.WaitGroup) {
}
type autoTagImageTask struct {
txnManager models.Repository
txnManager Repository
image *models.Image
performers bool
@ -721,17 +736,17 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers {
if err := autotag.ImagePerformers(ctx, t.image, r.Image, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path, err)
return fmt.Errorf("error tagging image performers for %s: %v", t.image.Path(), err)
}
}
if t.studios {
if err := autotag.ImageStudios(ctx, t.image, r.Image, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path, err)
return fmt.Errorf("error tagging image studio for %s: %v", t.image.Path(), err)
}
}
if t.tags {
if err := autotag.ImageTags(ctx, t.image, r.Image, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path, err)
return fmt.Errorf("error tagging image tags for %s: %v", t.image.Path(), err)
}
}
@ -742,7 +757,7 @@ func (t *autoTagImageTask) Start(ctx context.Context, wg *sync.WaitGroup) {
}
type autoTagGalleryTask struct {
txnManager models.Repository
txnManager Repository
gallery *models.Gallery
performers bool
@ -758,17 +773,17 @@ func (t *autoTagGalleryTask) Start(ctx context.Context, wg *sync.WaitGroup) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
if t.performers {
if err := autotag.GalleryPerformers(ctx, t.gallery, r.Gallery, r.Performer, t.cache); err != nil {
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path.String, err)
return fmt.Errorf("error tagging gallery performers for %s: %v", t.gallery.Path(), err)
}
}
if t.studios {
if err := autotag.GalleryStudios(ctx, t.gallery, r.Gallery, r.Studio, t.cache); err != nil {
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path.String, err)
return fmt.Errorf("error tagging gallery studio for %s: %v", t.gallery.Path(), err)
}
}
if t.tags {
if err := autotag.GalleryTags(ctx, t.gallery, r.Gallery, r.Tag, t.cache); err != nil {
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path.String, err)
return fmt.Errorf("error tagging gallery tags for %s: %v", t.gallery.Path(), err)
}
}

View File

@ -3,61 +3,45 @@ package manager
import (
"context"
"fmt"
"io/fs"
"path/filepath"
"time"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
"github.com/stashapp/stash/pkg/scene"
)
type cleaner interface {
Clean(ctx context.Context, options file.CleanOptions, progress *job.Progress)
}
type cleanJob struct {
txnManager models.Repository
input CleanMetadataInput
scanSubs *subscriptionManager
cleaner cleaner
txnManager Repository
input CleanMetadataInput
sceneService SceneService
imageService ImageService
scanSubs *subscriptionManager
}
func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
logger.Infof("Starting cleaning of tracked files")
start := time.Now()
if j.input.DryRun {
logger.Infof("Running in Dry Mode")
}
r := j.txnManager
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
total, err := j.getCount(ctx, r)
if err != nil {
return fmt.Errorf("error getting count: %w", err)
}
progress.SetTotal(total)
if job.IsCancelled(ctx) {
return nil
}
if err := j.processScenes(ctx, progress, r.Scene); err != nil {
return fmt.Errorf("error cleaning scenes: %w", err)
}
if err := j.processImages(ctx, progress, r.Image); err != nil {
return fmt.Errorf("error cleaning images: %w", err)
}
if err := j.processGalleries(ctx, progress, r.Gallery, r.Image); err != nil {
return fmt.Errorf("error cleaning galleries: %w", err)
}
return nil
}); err != nil {
logger.Error(err.Error())
return
}
j.cleaner.Clean(ctx, file.CleanOptions{
Paths: j.input.Paths,
DryRun: j.input.DryRun,
PathFilter: newCleanFilter(instance.Config),
}, progress)
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
@ -65,303 +49,91 @@ func (j *cleanJob) Execute(ctx context.Context, progress *job.Progress) {
}
j.scanSubs.notify()
logger.Info("Finished Cleaning")
elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Finished Cleaning (%s)", elapsed))
}
func (j *cleanJob) getCount(ctx context.Context, r models.Repository) (int, error) {
sceneFilter := scene.PathsFilter(j.input.Paths)
sceneResult, err := r.Scene.Query(ctx, models.SceneQueryOptions{
QueryOptions: models.QueryOptions{
Count: true,
type cleanFilter struct {
scanFilter
}
func newCleanFilter(c *config.Instance) *cleanFilter {
return &cleanFilter{
scanFilter: scanFilter{
stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(),
vidExt: c.GetVideoExtensions(),
imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
},
SceneFilter: sceneFilter,
})
if err != nil {
return 0, err
}
imageCount, err := r.Image.QueryCount(ctx, image.PathsFilter(j.input.Paths), nil)
if err != nil {
return 0, err
}
galleryCount, err := r.Gallery.QueryCount(ctx, gallery.PathsFilter(j.input.Paths), nil)
if err != nil {
return 0, err
}
return sceneResult.Count + imageCount + galleryCount, nil
}
func (j *cleanJob) processScenes(ctx context.Context, progress *job.Progress, qb scene.Queryer) error {
batchSize := 1000
func (f *cleanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
// #1102 - clean anything in generated path
generatedPath := f.generatedPath
findFilter := models.BatchFindFilter(batchSize)
sceneFilter := scene.PathsFilter(j.input.Paths)
sort := "path"
findFilter.Sort = &sort
var stash *config.StashConfig
fileOrFolder := "File"
var toDelete []int
more := true
for more {
if job.IsCancelled(ctx) {
return nil
}
scenes, err := scene.Query(ctx, qb, sceneFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for scenes: %w", err)
}
for _, scene := range scenes {
progress.ExecuteTask(fmt.Sprintf("Assessing scene %s for clean", scene.Path), func() {
if j.shouldCleanScene(scene) {
toDelete = append(toDelete, scene.ID)
} else {
// increment progress, no further processing
progress.Increment()
}
})
}
if len(scenes) != batchSize {
more = false
} else {
*findFilter.Page++
}
if info.IsDir() {
fileOrFolder = "Folder"
stash = getStashFromDirPath(f.stashPaths, path)
} else {
stash = getStashFromPath(f.stashPaths, path)
}
if j.input.DryRun && len(toDelete) > 0 {
// add progress for scenes that would've been deleted
progress.AddProcessed(len(toDelete))
}
fileNamingAlgorithm := instance.Config.GetVideoFileNamingAlgorithm()
if !j.input.DryRun && len(toDelete) > 0 {
progress.ExecuteTask(fmt.Sprintf("Cleaning %d scenes", len(toDelete)), func() {
for _, sceneID := range toDelete {
if job.IsCancelled(ctx) {
return
}
j.deleteScene(ctx, fileNamingAlgorithm, sceneID)
progress.Increment()
}
})
}
return nil
}
func (j *cleanJob) processGalleries(ctx context.Context, progress *job.Progress, qb gallery.Queryer, iqb models.ImageReader) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
galleryFilter := gallery.PathsFilter(j.input.Paths)
sort := "path"
findFilter.Sort = &sort
var toDelete []int
more := true
for more {
if job.IsCancelled(ctx) {
return nil
}
galleries, _, err := qb.Query(ctx, galleryFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for galleries: %w", err)
}
for _, gallery := range galleries {
progress.ExecuteTask(fmt.Sprintf("Assessing gallery %s for clean", gallery.GetTitle()), func() {
if j.shouldCleanGallery(ctx, gallery, iqb) {
toDelete = append(toDelete, gallery.ID)
} else {
// increment progress, no further processing
progress.Increment()
}
})
}
if len(galleries) != batchSize {
more = false
} else {
*findFilter.Page++
}
}
if j.input.DryRun && len(toDelete) > 0 {
// add progress for galleries that would've been deleted
progress.AddProcessed(len(toDelete))
}
if !j.input.DryRun && len(toDelete) > 0 {
progress.ExecuteTask(fmt.Sprintf("Cleaning %d galleries", len(toDelete)), func() {
for _, galleryID := range toDelete {
if job.IsCancelled(ctx) {
return
}
j.deleteGallery(ctx, galleryID)
progress.Increment()
}
})
}
return nil
}
func (j *cleanJob) processImages(ctx context.Context, progress *job.Progress, qb image.Queryer) error {
batchSize := 1000
findFilter := models.BatchFindFilter(batchSize)
imageFilter := image.PathsFilter(j.input.Paths)
// performance consideration: order by path since default ordering by
// title is slow
sortBy := "path"
findFilter.Sort = &sortBy
var toDelete []int
more := true
for more {
if job.IsCancelled(ctx) {
return nil
}
images, err := image.Query(ctx, qb, imageFilter, findFilter)
if err != nil {
return fmt.Errorf("error querying for images: %w", err)
}
for _, image := range images {
progress.ExecuteTask(fmt.Sprintf("Assessing image %s for clean", image.Path), func() {
if j.shouldCleanImage(image) {
toDelete = append(toDelete, image.ID)
} else {
// increment progress, no further processing
progress.Increment()
}
})
}
if len(images) != batchSize {
more = false
} else {
*findFilter.Page++
}
}
if j.input.DryRun && len(toDelete) > 0 {
// add progress for images that would've been deleted
progress.AddProcessed(len(toDelete))
}
if !j.input.DryRun && len(toDelete) > 0 {
progress.ExecuteTask(fmt.Sprintf("Cleaning %d images", len(toDelete)), func() {
for _, imageID := range toDelete {
if job.IsCancelled(ctx) {
return
}
j.deleteImage(ctx, imageID)
progress.Increment()
}
})
}
return nil
}
func (j *cleanJob) shouldClean(path string) bool {
// use image.FileExists for zip file checking
fileExists := image.FileExists(path)
// #1102 - clean anything in generated path
generatedPath := config.GetInstance().GetGeneratedPath()
if !fileExists || getStashFromPath(path) == nil || fsutil.IsPathInDir(generatedPath, path) {
logger.Infof("File not found. Marking to clean: \"%s\"", path)
return true
}
return false
}
func (j *cleanJob) shouldCleanScene(s *models.Scene) bool {
if j.shouldClean(s.Path) {
return true
}
stash := getStashFromPath(s.Path)
if stash.ExcludeVideo {
logger.Infof("File in stash library that excludes video. Marking to clean: \"%s\"", s.Path)
return true
}
config := config.GetInstance()
if !fsutil.MatchExtension(s.Path, config.GetVideoExtensions()) {
logger.Infof("File extension does not match video extensions. Marking to clean: \"%s\"", s.Path)
return true
}
if matchFile(s.Path, config.GetExcludes()) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", s.Path)
return true
}
return false
}
func (j *cleanJob) shouldCleanGallery(ctx context.Context, g *models.Gallery, qb models.ImageReader) bool {
// never clean manually created galleries
if !g.Path.Valid {
if stash == nil {
logger.Infof("%s not in any stash library directories. Marking to clean: \"%s\"", fileOrFolder, path)
return false
}
path := g.Path.String
if j.shouldClean(path) {
if fsutil.IsPathInDir(generatedPath, path) {
logger.Infof("%s is in generated path. Marking to clean: \"%s\"", fileOrFolder, path)
return false
}
if info.IsDir() {
return !f.shouldCleanFolder(path, stash)
}
return !f.shouldCleanFile(path, info, stash)
}
func (f *cleanFilter) shouldCleanFolder(path string, s *config.StashConfig) bool {
// only delete folders where it is excluded from everything
pathExcludeTest := path + string(filepath.Separator)
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) {
logger.Infof("Folder is excluded from both video and image. Marking to clean: \"%s\"", path)
return true
}
stash := getStashFromPath(path)
if stash.ExcludeImage {
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path)
return false
}
func (f *cleanFilter) shouldCleanFile(path string, info fs.FileInfo, stash *config.StashConfig) bool {
switch {
case info.IsDir() || fsutil.MatchExtension(path, f.zipExt):
return f.shouldCleanGallery(path, stash)
case fsutil.MatchExtension(path, f.vidExt):
return f.shouldCleanVideoFile(path, stash)
case fsutil.MatchExtension(path, f.imgExt):
return f.shouldCleanImage(path, stash)
default:
logger.Infof("File extension does not match any media extensions. Marking to clean: \"%s\"", path)
return true
}
}
func (f *cleanFilter) shouldCleanVideoFile(path string, stash *config.StashConfig) bool {
if stash.ExcludeVideo {
logger.Infof("File in stash library that excludes video. Marking to clean: \"%s\"", path)
return true
}
config := config.GetInstance()
if g.Zip {
if !fsutil.MatchExtension(path, config.GetGalleryExtensions()) {
logger.Infof("File extension does not match gallery extensions. Marking to clean: \"%s\"", path)
return true
}
if countImagesInZip(path) == 0 {
logger.Infof("Gallery has 0 images. Marking to clean: \"%s\"", path)
return true
}
} else {
// folder-based - delete if it has no images
count, err := qb.CountByGalleryID(ctx, g.ID)
if err != nil {
logger.Warnf("Error trying to count gallery images for %q: %v", path, err)
return false
}
if count == 0 {
return true
}
}
if matchFile(path, config.GetImageExcludes()) {
if matchFileRegex(path, f.videoExcludeRegex) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", path)
return true
}
@ -369,141 +141,186 @@ func (j *cleanJob) shouldCleanGallery(ctx context.Context, g *models.Gallery, qb
return false
}
func (j *cleanJob) shouldCleanImage(s *models.Image) bool {
if j.shouldClean(s.Path) {
return true
}
stash := getStashFromPath(s.Path)
func (f *cleanFilter) shouldCleanGallery(path string, stash *config.StashConfig) bool {
if stash.ExcludeImage {
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", s.Path)
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path)
return true
}
config := config.GetInstance()
if !fsutil.MatchExtension(s.Path, config.GetImageExtensions()) {
logger.Infof("File extension does not match image extensions. Marking to clean: \"%s\"", s.Path)
return true
}
if matchFile(s.Path, config.GetImageExcludes()) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", s.Path)
if matchFileRegex(path, f.imageExcludeRegex) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", path)
return true
}
return false
}
func (j *cleanJob) deleteScene(ctx context.Context, fileNamingAlgorithm models.HashAlgorithm, sceneID int) {
fileNamingAlgo := GetInstance().Config.GetVideoFileNamingAlgorithm()
func (f *cleanFilter) shouldCleanImage(path string, stash *config.StashConfig) bool {
if stash.ExcludeImage {
logger.Infof("File in stash library that excludes images. Marking to clean: \"%s\"", path)
return true
}
fileDeleter := &scene.FileDeleter{
Deleter: *file.NewDeleter(),
if matchFileRegex(path, f.imageExcludeRegex) {
logger.Infof("File matched regex. Marking to clean: \"%s\"", path)
return true
}
return false
}
type cleanHandler struct {
PluginCache *plugin.Cache
}
func (h *cleanHandler) HandleFile(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error {
if err := h.deleteRelatedScenes(ctx, fileDeleter, fileID); err != nil {
return err
}
if err := h.deleteRelatedGalleries(ctx, fileID); err != nil {
return err
}
if err := h.deleteRelatedImages(ctx, fileDeleter, fileID); err != nil {
return err
}
return nil
}
func (h *cleanHandler) HandleFolder(ctx context.Context, fileDeleter *file.Deleter, folderID file.FolderID) error {
return h.deleteRelatedFolderGalleries(ctx, folderID)
}
func (h *cleanHandler) deleteRelatedScenes(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error {
mgr := GetInstance()
sceneQB := mgr.Database.Scene
scenes, err := sceneQB.FindByFileID(ctx, fileID)
if err != nil {
return err
}
fileNamingAlgo := mgr.Config.GetVideoFileNamingAlgorithm()
sceneFileDeleter := &scene.FileDeleter{
Deleter: fileDeleter,
FileNamingAlgo: fileNamingAlgo,
Paths: GetInstance().Paths,
Paths: mgr.Paths,
}
var s *models.Scene
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
repo := j.txnManager
qb := repo.Scene
var err error
s, err = qb.Find(ctx, sceneID)
if err != nil {
for _, scene := range scenes {
// only delete if the scene has no other files
if len(scene.Files) <= 1 {
logger.Infof("Deleting scene %q since it has no other related files", scene.GetTitle())
if err := mgr.SceneService.Destroy(ctx, scene, sceneFileDeleter, true, false); err != nil {
return err
}
checksum := scene.Checksum()
oshash := scene.OSHash()
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, scene.ID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: checksum,
OSHash: oshash,
Path: scene.Path(),
}, nil)
}
}
return nil
}
func (h *cleanHandler) deleteRelatedGalleries(ctx context.Context, fileID file.ID) error {
mgr := GetInstance()
qb := mgr.Database.Gallery
galleries, err := qb.FindByFileID(ctx, fileID)
if err != nil {
return err
}
for _, g := range galleries {
// only delete if the gallery has no other files
if len(g.Files) <= 1 {
logger.Infof("Deleting gallery %q since it has no other related files", g.GetTitle())
if err := qb.Destroy(ctx, g.ID); err != nil {
return err
}
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum(),
Path: g.Path(),
}, nil)
}
}
return nil
}
func (h *cleanHandler) deleteRelatedFolderGalleries(ctx context.Context, folderID file.FolderID) error {
mgr := GetInstance()
qb := mgr.Database.Gallery
galleries, err := qb.FindByFolderID(ctx, folderID)
if err != nil {
return err
}
for _, g := range galleries {
logger.Infof("Deleting folder-based gallery %q since the folder no longer exists", g.GetTitle())
if err := qb.Destroy(ctx, g.ID); err != nil {
return err
}
return scene.Destroy(ctx, s, repo.Scene, repo.SceneMarker, fileDeleter, true, false)
}); err != nil {
fileDeleter.Rollback()
logger.Errorf("Error deleting scene from database: %s", err.Error())
return
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, g.ID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum(),
Path: g.Path(),
}, nil)
}
// perform the post-commit actions
fileDeleter.Commit()
GetInstance().PluginCache.ExecutePostHooks(ctx, sceneID, plugin.SceneDestroyPost, plugin.SceneDestroyInput{
Checksum: s.Checksum.String,
OSHash: s.OSHash.String,
Path: s.Path,
}, nil)
return nil
}
func (j *cleanJob) deleteGallery(ctx context.Context, galleryID int) {
var g *models.Gallery
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := j.txnManager.Gallery
var err error
g, err = qb.Find(ctx, galleryID)
if err != nil {
return err
}
return qb.Destroy(ctx, galleryID)
}); err != nil {
logger.Errorf("Error deleting gallery from database: %s", err.Error())
return
func (h *cleanHandler) deleteRelatedImages(ctx context.Context, fileDeleter *file.Deleter, fileID file.ID) error {
mgr := GetInstance()
imageQB := mgr.Database.Image
images, err := imageQB.FindByFileID(ctx, fileID)
if err != nil {
return err
}
GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryDestroyPost, plugin.GalleryDestroyInput{
Checksum: g.Checksum,
Path: g.Path.String,
}, nil)
}
func (j *cleanJob) deleteImage(ctx context.Context, imageID int) {
fileDeleter := &image.FileDeleter{
Deleter: *file.NewDeleter(),
imageFileDeleter := &image.FileDeleter{
Deleter: fileDeleter,
Paths: GetInstance().Paths,
}
var i *models.Image
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := j.txnManager.Image
for _, i := range images {
if len(i.Files) <= 1 {
logger.Infof("Deleting image %q since it has no other related files", i.GetTitle())
if err := mgr.ImageService.Destroy(ctx, i, imageFileDeleter, true, false); err != nil {
return err
}
var err error
i, err = qb.Find(ctx, imageID)
if err != nil {
return err
mgr.PluginCache.RegisterPostHooks(ctx, mgr.Database, i.ID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: i.Checksum(),
Path: i.Path(),
}, nil)
}
if i == nil {
return fmt.Errorf("image not found: %d", imageID)
}
return image.Destroy(ctx, i, qb, fileDeleter, true, false)
}); err != nil {
fileDeleter.Rollback()
logger.Errorf("Error deleting image from database: %s", err.Error())
return
}
// perform the post-commit actions
fileDeleter.Commit()
GetInstance().PluginCache.ExecutePostHooks(ctx, imageID, plugin.ImageDestroyPost, plugin.ImageDestroyInput{
Checksum: i.Checksum,
Path: i.Path,
}, nil)
return nil
}
func getStashFromPath(pathToCheck string) *config.StashConfig {
for _, s := range config.GetInstance().GetStashPaths() {
if fsutil.IsPathInDir(s.Path, filepath.Dir(pathToCheck)) {
return s
func getStashFromPath(stashes []*config.StashConfig, pathToCheck string) *config.StashConfig {
for _, f := range stashes {
if fsutil.IsPathInDir(f.Path, filepath.Dir(pathToCheck)) {
return f
}
}
return nil
}
func getStashFromDirPath(pathToCheck string) *config.StashConfig {
for _, s := range config.GetInstance().GetStashPaths() {
if fsutil.IsPathInDir(s.Path, pathToCheck) {
return s
func getStashFromDirPath(stashes []*config.StashConfig, pathToCheck string) *config.StashConfig {
for _, f := range stashes {
if fsutil.IsPathInDir(f.Path, pathToCheck) {
return f
}
}
return nil

View File

@ -32,7 +32,7 @@ import (
)
type ExportTask struct {
txnManager models.Repository
txnManager Repository
full bool
baseDir string
@ -286,7 +286,7 @@ func (t *ExportTask) zipFile(fn, outDir string, z *zip.Writer) error {
return nil
}
func (t *ExportTask) populateMovieScenes(ctx context.Context, repo models.Repository) {
func (t *ExportTask) populateMovieScenes(ctx context.Context, repo Repository) {
reader := repo.Movie
sceneReader := repo.Scene
@ -316,7 +316,7 @@ func (t *ExportTask) populateMovieScenes(ctx context.Context, repo models.Reposi
}
}
func (t *ExportTask) populateGalleryImages(ctx context.Context, repo models.Repository) {
func (t *ExportTask) populateGalleryImages(ctx context.Context, repo Repository) {
reader := repo.Gallery
imageReader := repo.Image
@ -346,7 +346,7 @@ func (t *ExportTask) populateGalleryImages(ctx context.Context, repo models.Repo
}
}
func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo Repository) {
var scenesWg sync.WaitGroup
sceneReader := repo.Scene
@ -380,7 +380,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo models.
if (i % 100) == 0 { // make progress easier to read
logger.Progressf("[scenes] %d of %d", index, len(scenes))
}
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path, Checksum: scene.GetHash(t.fileNamingAlgorithm)})
t.Mappings.Scenes = append(t.Mappings.Scenes, jsonschema.PathNameMapping{Path: scene.Path(), Checksum: scene.GetHash(t.fileNamingAlgorithm)})
jobCh <- scene // feed workers
}
@ -390,7 +390,7 @@ func (t *ExportTask) ExportScenes(ctx context.Context, workers int, repo models.
logger.Infof("[scenes] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo models.Repository, t *ExportTask) {
func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Scene, repo Repository, t *ExportTask) {
defer wg.Done()
sceneReader := repo.Scene
studioReader := repo.Studio
@ -443,15 +443,15 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
continue
}
newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(ctx, movieReader, sceneReader, s)
newSceneJSON.Movies, err = scene.GetSceneMoviesJSON(ctx, movieReader, s)
if err != nil {
logger.Errorf("[scenes] <%s> error getting scene movies JSON: %s", sceneHash, err.Error())
continue
}
if t.includeDependencies {
if s.StudioID.Valid {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, int(s.StudioID.Int64))
if s.StudioID != nil {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, *s.StudioID)
}
t.galleries.IDs = intslice.IntAppendUniques(t.galleries.IDs, gallery.GetIDs(galleries))
@ -463,7 +463,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
}
t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tagIDs)
movieIDs, err := scene.GetDependentMovieIDs(ctx, sceneReader, s)
movieIDs, err := scene.GetDependentMovieIDs(ctx, s)
if err != nil {
logger.Errorf("[scenes] <%s> error getting scene movies: %s", sceneHash, err.Error())
continue
@ -484,7 +484,7 @@ func exportScene(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
}
}
func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo Repository) {
var imagesWg sync.WaitGroup
imageReader := repo.Image
@ -518,7 +518,7 @@ func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo models.
if (i % 100) == 0 { // make progress easier to read
logger.Progressf("[images] %d of %d", index, len(images))
}
t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path, Checksum: image.Checksum})
t.Mappings.Images = append(t.Mappings.Images, jsonschema.PathNameMapping{Path: image.Path(), Checksum: image.Checksum()})
jobCh <- image // feed workers
}
@ -528,7 +528,7 @@ func (t *ExportTask) ExportImages(ctx context.Context, workers int, repo models.
logger.Infof("[images] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Image, repo models.Repository, t *ExportTask) {
func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Image, repo Repository, t *ExportTask) {
defer wg.Done()
studioReader := repo.Studio
galleryReader := repo.Gallery
@ -536,7 +536,7 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
tagReader := repo.Tag
for s := range jobChan {
imageHash := s.Checksum
imageHash := s.Checksum()
newImageJSON := image.ToBasicJSON(s)
@ -572,8 +572,8 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
newImageJSON.Tags = tag.GetNames(tags)
if t.includeDependencies {
if s.StudioID.Valid {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, int(s.StudioID.Int64))
if s.StudioID != nil {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, *s.StudioID)
}
t.galleries.IDs = intslice.IntAppendUniques(t.galleries.IDs, gallery.GetIDs(imageGalleries))
@ -594,12 +594,12 @@ func exportImage(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models
func (t *ExportTask) getGalleryChecksums(galleries []*models.Gallery) (ret []string) {
for _, g := range galleries {
ret = append(ret, g.Checksum)
ret = append(ret, g.Checksum())
}
return
}
func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo Repository) {
var galleriesWg sync.WaitGroup
reader := repo.Gallery
@ -634,10 +634,13 @@ func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo mode
logger.Progressf("[galleries] %d of %d", index, len(galleries))
}
title := gallery.Title
path := gallery.Path()
t.Mappings.Galleries = append(t.Mappings.Galleries, jsonschema.PathNameMapping{
Path: gallery.Path.String,
Name: gallery.Title.String,
Checksum: gallery.Checksum,
Path: path,
Name: title,
Checksum: gallery.Checksum(),
})
jobCh <- gallery
}
@ -648,14 +651,14 @@ func (t *ExportTask) ExportGalleries(ctx context.Context, workers int, repo mode
logger.Infof("[galleries] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Gallery, repo models.Repository, t *ExportTask) {
func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Gallery, repo Repository, t *ExportTask) {
defer wg.Done()
studioReader := repo.Studio
performerReader := repo.Performer
tagReader := repo.Tag
for g := range jobChan {
galleryHash := g.Checksum
galleryHash := g.Checksum()
newGalleryJSON, err := gallery.ToBasicJSON(g)
if err != nil {
@ -686,8 +689,8 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
newGalleryJSON.Tags = tag.GetNames(tags)
if t.includeDependencies {
if g.StudioID.Valid {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, int(g.StudioID.Int64))
if g.StudioID != nil {
t.studios.IDs = intslice.IntAppendUnique(t.studios.IDs, *g.StudioID)
}
t.tags.IDs = intslice.IntAppendUniques(t.tags.IDs, tag.GetIDs(tags))
@ -705,7 +708,7 @@ func exportGallery(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *mode
}
}
func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo Repository) {
var performersWg sync.WaitGroup
reader := repo.Performer
@ -745,7 +748,7 @@ func (t *ExportTask) ExportPerformers(ctx context.Context, workers int, repo mod
logger.Infof("[performers] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Performer, repo models.Repository) {
func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Performer, repo Repository) {
defer wg.Done()
performerReader := repo.Performer
@ -783,7 +786,7 @@ func (t *ExportTask) exportPerformer(ctx context.Context, wg *sync.WaitGroup, jo
}
}
func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo Repository) {
var studiosWg sync.WaitGroup
reader := repo.Studio
@ -824,7 +827,7 @@ func (t *ExportTask) ExportStudios(ctx context.Context, workers int, repo models
logger.Infof("[studios] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Studio, repo models.Repository) {
func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Studio, repo Repository) {
defer wg.Done()
studioReader := repo.Studio
@ -848,7 +851,7 @@ func (t *ExportTask) exportStudio(ctx context.Context, wg *sync.WaitGroup, jobCh
}
}
func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo Repository) {
var tagsWg sync.WaitGroup
reader := repo.Tag
@ -892,7 +895,7 @@ func (t *ExportTask) ExportTags(ctx context.Context, workers int, repo models.Re
logger.Infof("[tags] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Tag, repo models.Repository) {
func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Tag, repo Repository) {
defer wg.Done()
tagReader := repo.Tag
@ -919,7 +922,7 @@ func (t *ExportTask) exportTag(ctx context.Context, wg *sync.WaitGroup, jobChan
}
}
func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo models.Repository) {
func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo Repository) {
var moviesWg sync.WaitGroup
reader := repo.Movie
@ -960,7 +963,7 @@ func (t *ExportTask) ExportMovies(ctx context.Context, workers int, repo models.
logger.Infof("[movies] export complete in %s. %d workers used.", time.Since(startTime), workers)
}
func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Movie, repo models.Repository) {
func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobChan <-chan *models.Movie, repo Repository) {
defer wg.Done()
movieReader := repo.Movie
@ -993,7 +996,7 @@ func (t *ExportTask) exportMovie(ctx context.Context, wg *sync.WaitGroup, jobCha
}
}
func (t *ExportTask) ExportScrapedItems(ctx context.Context, repo models.Repository) {
func (t *ExportTask) ExportScrapedItems(ctx context.Context, repo Repository) {
qb := repo.ScrapedItem
sqb := repo.Studio
scrapedItems, err := qb.All(ctx)

View File

@ -2,7 +2,6 @@ package manager
import (
"context"
"errors"
"fmt"
"time"
@ -54,7 +53,7 @@ type GeneratePreviewOptionsInput struct {
const generateQueueSize = 200000
type GenerateJob struct {
txnManager models.Repository
txnManager Repository
input GenerateMetadataInput
overwrite bool
@ -192,36 +191,29 @@ func (j *GenerateJob) queueTasks(ctx context.Context, g *generate.Generator, que
findFilter := models.BatchFindFilter(batchSize)
if err := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
for more := true; more; {
if job.IsCancelled(ctx) {
return context.Canceled
}
scenes, err := scene.Query(ctx, j.txnManager.Scene, nil, findFilter)
if err != nil {
return err
}
for _, ss := range scenes {
if job.IsCancelled(ctx) {
return context.Canceled
}
j.queueSceneJobs(ctx, g, ss, queue, &totals)
}
if len(scenes) != batchSize {
more = false
} else {
*findFilter.Page++
}
for more := true; more; {
if job.IsCancelled(ctx) {
return totals
}
return nil
}); err != nil {
if !errors.Is(err, context.Canceled) {
scenes, err := scene.Query(ctx, j.txnManager.Scene, nil, findFilter)
if err != nil {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return totals
}
for _, ss := range scenes {
if job.IsCancelled(ctx) {
return totals
}
j.queueSceneJobs(ctx, g, ss, queue, &totals)
}
if len(scenes) != batchSize {
more = false
} else {
*findFilter.Page++
}
}
@ -351,17 +343,21 @@ func (j *GenerateJob) queueSceneJobs(ctx context.Context, g *generate.Generator,
}
if utils.IsTrue(j.input.Phashes) {
task := &GeneratePhashTask{
Scene: *scene,
fileNamingAlgorithm: j.fileNamingAlgo,
txnManager: j.txnManager,
Overwrite: j.overwrite,
}
// generate for all files in scene
for _, f := range scene.Files {
task := &GeneratePhashTask{
File: f,
fileNamingAlgorithm: j.fileNamingAlgo,
txnManager: j.txnManager,
fileUpdater: j.txnManager.File,
Overwrite: j.overwrite,
}
if task.shouldGenerate() {
totals.phashes++
totals.tasks++
queue <- task
if task.shouldGenerate() {
totals.phashes++
totals.tasks++
queue <- task
}
}
}

View File

@ -2,24 +2,23 @@ package manager
import (
"context"
"database/sql"
"fmt"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
)
type GenerateInteractiveHeatmapSpeedTask struct {
Scene models.Scene
Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
TxnManager models.Repository
TxnManager Repository
}
func (t *GenerateInteractiveHeatmapSpeedTask) GetDescription() string {
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path)
return fmt.Sprintf("Generating heatmap and speed for %s", t.Scene.Path())
}
func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
@ -28,7 +27,7 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
}
videoChecksum := t.Scene.GetHash(t.fileNamingAlgorithm)
funscriptPath := scene.GetFunscriptPath(t.Scene.Path)
funscriptPath := video.GetFunscriptPath(t.Scene.Path())
heatmapPath := instance.Paths.Scene.GetInteractiveHeatmapPath(videoChecksum)
generator := NewInteractiveHeatmapSpeedGenerator(funscriptPath, heatmapPath)
@ -40,30 +39,13 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
return
}
median := sql.NullInt64{
Int64: generator.InteractiveSpeed,
Valid: true,
}
var s *models.Scene
median := generator.InteractiveSpeed
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
s, err = t.TxnManager.Scene.FindByPath(ctx, t.Scene.Path)
return err
}); err != nil {
logger.Error(err.Error())
return
}
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := t.TxnManager.Scene
scenePartial := models.ScenePartial{
ID: s.ID,
InteractiveSpeed: &median,
}
_, err := qb.Update(ctx, scenePartial)
return err
primaryFile := t.Scene.PrimaryFile()
primaryFile.InteractiveSpeed = &median
qb := t.TxnManager.File
return qb.Update(ctx, primaryFile)
}); err != nil {
logger.Error(err.Error())
}
@ -71,7 +53,8 @@ func (t *GenerateInteractiveHeatmapSpeedTask) Start(ctx context.Context) {
}
func (t *GenerateInteractiveHeatmapSpeedTask) shouldGenerate() bool {
if !t.Scene.Interactive {
primaryFile := t.Scene.PrimaryFile()
if primaryFile == nil || !primaryFile.Interactive {
return false
}
sceneHash := t.Scene.GetHash(t.fileNamingAlgorithm)

View File

@ -13,7 +13,7 @@ import (
)
type GenerateMarkersTask struct {
TxnManager models.Repository
TxnManager Repository
Scene *models.Scene
Marker *models.SceneMarker
Overwrite bool
@ -27,7 +27,7 @@ type GenerateMarkersTask struct {
func (t *GenerateMarkersTask) GetDescription() string {
if t.Scene != nil {
return fmt.Sprintf("Generating markers for %s", t.Scene.Path)
return fmt.Sprintf("Generating markers for %s", t.Scene.Path())
} else if t.Marker != nil {
return fmt.Sprintf("Generating marker preview for marker ID %d", t.Marker.ID)
}
@ -57,7 +57,7 @@ func (t *GenerateMarkersTask) Start(ctx context.Context) {
}
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
@ -83,7 +83,7 @@ func (t *GenerateMarkersTask) generateSceneMarkers(ctx context.Context) {
}
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
@ -133,13 +133,9 @@ func (t *GenerateMarkersTask) generateMarker(videoFile *ffmpeg.VideoFile, scene
func (t *GenerateMarkersTask) markersNeeded(ctx context.Context) int {
markers := 0
var sceneMarkers []*models.SceneMarker
if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
var err error
sceneMarkers, err = t.TxnManager.SceneMarker.FindBySceneID(ctx, t.Scene.ID)
return err
}); err != nil {
logger.Errorf("errror finding scene markers: %s", err.Error())
sceneMarkers, err := t.TxnManager.SceneMarker.FindBySceneID(ctx, t.Scene.ID)
if err != nil {
logger.Errorf("error finding scene markers: %s", err.Error())
return 0
}

View File

@ -2,23 +2,25 @@ package manager
import (
"context"
"database/sql"
"fmt"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/hash/videophash"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
)
type GeneratePhashTask struct {
Scene models.Scene
File *file.VideoFile
Overwrite bool
fileNamingAlgorithm models.HashAlgorithm
txnManager models.Repository
txnManager txn.Manager
fileUpdater file.Updater
}
func (t *GeneratePhashTask) GetDescription() string {
return fmt.Sprintf("Generating phash for %s", t.Scene.Path)
return fmt.Sprintf("Generating phash for %s", t.File.Path)
}
func (t *GeneratePhashTask) Start(ctx context.Context) {
@ -26,34 +28,27 @@ func (t *GeneratePhashTask) Start(ctx context.Context) {
return
}
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return
}
hash, err := videophash.Generate(instance.FFMPEG, videoFile)
hash, err := videophash.Generate(instance.FFMPEG, t.File)
if err != nil {
logger.Errorf("error generating phash: %s", err.Error())
logErrorOutput(err)
return
}
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := t.txnManager.Scene
hashValue := sql.NullInt64{Int64: int64(*hash), Valid: true}
scenePartial := models.ScenePartial{
ID: t.Scene.ID,
Phash: &hashValue,
}
_, err := qb.Update(ctx, scenePartial)
return err
if err := txn.WithTxn(ctx, t.txnManager, func(ctx context.Context) error {
qb := t.fileUpdater
hashValue := int64(*hash)
t.File.Fingerprints = t.File.Fingerprints.AppendUnique(file.Fingerprint{
Type: file.FingerprintTypePhash,
Fingerprint: hashValue,
})
return qb.Update(ctx, t.File)
}); err != nil {
logger.Error(err.Error())
}
}
func (t *GeneratePhashTask) shouldGenerate() bool {
return t.Overwrite || !t.Scene.Phash.Valid
return t.Overwrite || t.File.Fingerprints.Get(file.FingerprintTypePhash) == nil
}

View File

@ -23,7 +23,7 @@ type GeneratePreviewTask struct {
}
func (t *GeneratePreviewTask) GetDescription() string {
return fmt.Sprintf("Generating preview for %s", t.Scene.Path)
return fmt.Sprintf("Generating preview for %s", t.Scene.Path())
}
func (t *GeneratePreviewTask) Start(ctx context.Context) {
@ -32,7 +32,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
}
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil {
logger.Errorf("error reading video file: %v", err)
return
@ -55,7 +55,7 @@ func (t *GeneratePreviewTask) Start(ctx context.Context) {
}
func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration float64) error {
videoFilename := t.Scene.Path
videoFilename := t.Scene.Path()
if err := t.generator.PreviewVideo(context.TODO(), videoFilename, videoDuration, videoChecksum, t.Options, true); err != nil {
logger.Warnf("[generator] failed generating scene preview, trying fallback")
@ -68,7 +68,7 @@ func (t GeneratePreviewTask) generateVideo(videoChecksum string, videoDuration f
}
func (t GeneratePreviewTask) generateWebp(videoChecksum string) error {
videoFilename := t.Scene.Path
videoFilename := t.Scene.Path()
return t.generator.PreviewWebp(context.TODO(), videoFilename, videoChecksum)
}

View File

@ -5,7 +5,6 @@ import (
"fmt"
"io"
"os"
"time"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
@ -17,11 +16,11 @@ type GenerateScreenshotTask struct {
Scene models.Scene
ScreenshotAt *float64
fileNamingAlgorithm models.HashAlgorithm
txnManager models.Repository
txnManager Repository
}
func (t *GenerateScreenshotTask) Start(ctx context.Context) {
scenePath := t.Scene.Path
scenePath := t.Scene.Path()
ffprobe := instance.FFProbe
probeResult, err := ffprobe.NewVideoFile(scenePath)
@ -76,11 +75,7 @@ func (t *GenerateScreenshotTask) Start(ctx context.Context) {
if err := t.txnManager.WithTxn(ctx, func(ctx context.Context) error {
qb := t.txnManager.Scene
updatedTime := time.Now()
updatedScene := models.ScenePartial{
ID: t.Scene.ID,
UpdatedAt: &models.SQLiteTimestamp{Timestamp: updatedTime},
}
updatedScene := models.NewScenePartial()
if err := scene.SetScreenshot(instance.Paths, checksum, coverImageData); err != nil {
return fmt.Errorf("error writing screenshot: %v", err)
@ -92,7 +87,7 @@ func (t *GenerateScreenshotTask) Start(ctx context.Context) {
}
// update the scene with the update date
_, err = qb.Update(ctx, updatedScene)
_, err = qb.UpdatePartial(ctx, t.Scene.ID, updatedScene)
if err != nil {
return fmt.Errorf("error updating scene: %v", err)
}

View File

@ -16,7 +16,7 @@ type GenerateSpriteTask struct {
}
func (t *GenerateSpriteTask) GetDescription() string {
return fmt.Sprintf("Generating sprites for %s", t.Scene.Path)
return fmt.Sprintf("Generating sprites for %s", t.Scene.Path())
}
func (t *GenerateSpriteTask) Start(ctx context.Context) {
@ -25,7 +25,7 @@ func (t *GenerateSpriteTask) Start(ctx context.Context) {
}
ffprobe := instance.FFProbe
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil {
logger.Errorf("error reading video file: %s", err.Error())
return

View File

@ -51,7 +51,8 @@ func (j *IdentifyJob) Execute(ctx context.Context, progress *job.Progress) {
// if scene ids provided, use those
// otherwise, batch query for all scenes - ordering by path
if err := txn.WithTxn(ctx, instance.Repository, func(ctx context.Context) error {
// don't use a transaction to query scenes
if err := txn.WithDatabase(ctx, instance.Repository, func(ctx context.Context) error {
if len(j.input.SceneIDs) == 0 {
return j.identifyAllScenes(ctx, sources)
}
@ -130,7 +131,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
}
var taskError error
j.progress.ExecuteTask("Identifying "+s.Path, func() {
j.progress.ExecuteTask("Identifying "+s.Path(), func() {
task := identify.SceneIdentifier{
SceneReaderUpdater: instance.Repository.Scene,
StudioCreator: instance.Repository.Studio,
@ -139,7 +140,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
DefaultOptions: j.input.Options,
Sources: sources,
ScreenshotSetter: &scene.PathsScreenshotSetter{
ScreenshotSetter: &scene.PathsCoverSetter{
Paths: instance.Paths,
FileNamingAlgorithm: instance.Config.GetVideoFileNamingAlgorithm(),
},
@ -150,7 +151,7 @@ func (j *IdentifyJob) identifyScene(ctx context.Context, s *models.Scene, source
})
if taskError != nil {
logger.Errorf("Error encountered identifying %s: %v", s.Path, taskError)
logger.Errorf("Error encountered identifying %s: %v", s.Path(), taskError)
}
j.progress.Increment()

View File

@ -28,7 +28,7 @@ import (
)
type ImportTask struct {
txnManager models.Repository
txnManager Repository
json jsonUtils
BaseDir string

View File

@ -14,13 +14,13 @@ type MigrateHashTask struct {
// Start starts the task.
func (t *MigrateHashTask) Start() {
if !t.Scene.OSHash.Valid || !t.Scene.Checksum.Valid {
if t.Scene.OSHash() == "" || t.Scene.Checksum() == "" {
// nothing to do
return
}
oshash := t.Scene.OSHash.String
checksum := t.Scene.Checksum.String
oshash := t.Scene.OSHash()
checksum := t.Scene.Checksum()
oldHash := oshash
newHash := checksum

View File

@ -4,327 +4,279 @@ import (
"context"
"errors"
"fmt"
"os"
"io/fs"
"path/filepath"
"regexp"
"time"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/file/video"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
"github.com/stashapp/stash/pkg/utils"
)
const scanQueueSize = 200000
type scanner interface {
Scan(ctx context.Context, handlers []file.Handler, options file.ScanOptions, progressReporter file.ProgressReporter)
}
type ScanJob struct {
txnManager models.Repository
scanner scanner
input ScanMetadataInput
subscriptions *subscriptionManager
}
type scanFile struct {
path string
info os.FileInfo
caseSensitiveFs bool
}
func (j *ScanJob) Execute(ctx context.Context, progress *job.Progress) {
input := j.input
paths := getScanPaths(input.Paths)
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
return
}
start := time.Now()
config := config.GetInstance()
parallelTasks := config.GetParallelTasksWithAutoDetection()
logger.Infof("Scan started with %d parallel tasks", parallelTasks)
fileQueue := make(chan scanFile, scanQueueSize)
go func() {
total, newFiles := j.queueFiles(ctx, paths, fileQueue, parallelTasks)
if !job.IsCancelled(ctx) {
progress.SetTotal(total)
logger.Infof("Finished counting files. Total files to scan: %d, %d new files found", total, newFiles)
}
}()
wg := sizedwaitgroup.New(parallelTasks)
fileNamingAlgo := config.GetVideoFileNamingAlgorithm()
calculateMD5 := config.IsCalculateMD5()
var err error
var galleries []string
mutexManager := utils.NewMutexManager()
for f := range fileQueue {
if job.IsCancelled(ctx) {
break
}
if isGallery(f.path) {
galleries = append(galleries, f.path)
}
if err := instance.Paths.Generated.EnsureTmpDir(); err != nil {
logger.Warnf("couldn't create temporary directory: %v", err)
}
wg.Add()
task := ScanTask{
TxnManager: j.txnManager,
file: file.FSFile(f.path, f.info),
UseFileMetadata: input.UseFileMetadata,
StripFileExtension: input.StripFileExtension,
fileNamingAlgorithm: fileNamingAlgo,
calculateMD5: calculateMD5,
GeneratePreview: input.ScanGeneratePreviews,
GenerateImagePreview: input.ScanGenerateImagePreviews,
GenerateSprite: input.ScanGenerateSprites,
GeneratePhash: input.ScanGeneratePhashes,
GenerateThumbnails: input.ScanGenerateThumbnails,
progress: progress,
CaseSensitiveFs: f.caseSensitiveFs,
mutexManager: mutexManager,
}
go func() {
task.Start(ctx)
wg.Done()
progress.Increment()
}()
sp := getScanPaths(input.Paths)
paths := make([]string, len(sp))
for i, p := range sp {
paths[i] = p.Path
}
wg.Wait()
start := time.Now()
if err := instance.Paths.Generated.EmptyTmpDir(); err != nil {
logger.Warnf("couldn't empty temporary directory: %v", err)
const taskQueueSize = 200000
taskQueue := job.NewTaskQueue(ctx, progress, taskQueueSize, instance.Config.GetParallelTasksWithAutoDetection())
j.scanner.Scan(ctx, getScanHandlers(j.input, taskQueue, progress), file.ScanOptions{
Paths: paths,
ScanFilters: []file.PathFilter{newScanFilter(instance.Config)},
ZipFileExtensions: instance.Config.GetGalleryExtensions(),
ParallelTasks: instance.Config.GetParallelTasksWithAutoDetection(),
}, progress)
taskQueue.Close()
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
return
}
elapsed := time.Since(start)
logger.Info(fmt.Sprintf("Scan finished (%s)", elapsed))
if job.IsCancelled(ctx) {
logger.Info("Stopping due to user request")
return
}
if err != nil {
return
}
progress.ExecuteTask("Associating galleries", func() {
for _, path := range galleries {
wg.Add()
task := ScanTask{
TxnManager: j.txnManager,
file: file.FSFile(path, nil), // hopefully info is not needed
UseFileMetadata: false,
}
go task.associateGallery(ctx, &wg)
wg.Wait()
}
logger.Info("Finished gallery association")
})
j.subscriptions.notify()
}
func (j *ScanJob) queueFiles(ctx context.Context, paths []*config.StashConfig, scanQueue chan<- scanFile, parallelTasks int) (total int, newFiles int) {
defer close(scanQueue)
var minModTime time.Time
if j.input.Filter != nil && j.input.Filter.MinModTime != nil {
minModTime = *j.input.Filter.MinModTime
}
wg := sizedwaitgroup.New(parallelTasks)
for _, sp := range paths {
csFs, er := fsutil.IsFsPathCaseSensitive(sp.Path)
if er != nil {
logger.Warnf("Cannot determine fs case sensitivity: %s", er.Error())
}
err := walkFilesToScan(sp, func(path string, info os.FileInfo, err error) error {
// check stop
if job.IsCancelled(ctx) {
return context.Canceled
}
// exit early on cutoff
if info.Mode().IsRegular() && info.ModTime().Before(minModTime) {
return nil
}
wg.Add()
go func() {
defer wg.Done()
// #1756 - skip zero length files and directories
if info.IsDir() {
return
}
if info.Size() == 0 {
logger.Infof("Skipping zero-length file: %s", path)
return
}
total++
if !j.doesPathExist(ctx, path) {
newFiles++
}
scanQueue <- scanFile{
path: path,
info: info,
caseSensitiveFs: csFs,
}
}()
return nil
})
wg.Wait()
if err != nil && !errors.Is(err, context.Canceled) {
logger.Errorf("Error encountered queuing files to scan: %s", err.Error())
return
}
}
return
type scanFilter struct {
stashPaths []*config.StashConfig
generatedPath string
vidExt []string
imgExt []string
zipExt []string
videoExcludeRegex []*regexp.Regexp
imageExcludeRegex []*regexp.Regexp
}
func (j *ScanJob) doesPathExist(ctx context.Context, path string) bool {
config := config.GetInstance()
vidExt := config.GetVideoExtensions()
imgExt := config.GetImageExtensions()
gExt := config.GetGalleryExtensions()
func newScanFilter(c *config.Instance) *scanFilter {
return &scanFilter{
stashPaths: c.GetStashPaths(),
generatedPath: c.GetGeneratedPath(),
vidExt: c.GetVideoExtensions(),
imgExt: c.GetImageExtensions(),
zipExt: c.GetGalleryExtensions(),
videoExcludeRegex: generateRegexps(c.GetExcludes()),
imageExcludeRegex: generateRegexps(c.GetImageExcludes()),
}
}
ret := false
txnErr := j.txnManager.WithTxn(ctx, func(ctx context.Context) error {
r := j.txnManager
switch {
case fsutil.MatchExtension(path, gExt):
g, _ := r.Gallery.FindByPath(ctx, path)
if g != nil {
ret = true
}
case fsutil.MatchExtension(path, vidExt):
s, _ := r.Scene.FindByPath(ctx, path)
if s != nil {
ret = true
}
case fsutil.MatchExtension(path, imgExt):
i, _ := r.Image.FindByPath(ctx, path)
if i != nil {
ret = true
}
}
return nil
})
if txnErr != nil {
logger.Warnf("error checking if file exists in database: %v", txnErr)
func (f *scanFilter) Accept(ctx context.Context, path string, info fs.FileInfo) bool {
if fsutil.IsPathInDir(f.generatedPath, path) {
return false
}
return ret
}
isVideoFile := fsutil.MatchExtension(path, f.vidExt)
isImageFile := fsutil.MatchExtension(path, f.imgExt)
isZipFile := fsutil.MatchExtension(path, f.zipExt)
type ScanTask struct {
TxnManager models.Repository
file file.SourceFile
UseFileMetadata bool
StripFileExtension bool
calculateMD5 bool
fileNamingAlgorithm models.HashAlgorithm
GenerateSprite bool
GeneratePhash bool
GeneratePreview bool
GenerateImagePreview bool
GenerateThumbnails bool
zipGallery *models.Gallery
progress *job.Progress
CaseSensitiveFs bool
// handle caption files
if fsutil.MatchExtension(path, video.CaptionExts) {
// we don't include caption files in the file scan, but we do need
// to handle them
video.AssociateCaptions(ctx, path, instance.Repository, instance.Database.File, instance.Database.File)
mutexManager *utils.MutexManager
}
return false
}
func (t *ScanTask) Start(ctx context.Context) {
var s *models.Scene
path := t.file.Path()
t.progress.ExecuteTask("Scanning "+path, func() {
switch {
case isGallery(path):
t.scanGallery(ctx)
case isVideo(path):
s = t.scanScene(ctx)
case isImage(path):
t.scanImage(ctx)
case isCaptions(path):
t.associateCaptions(ctx)
}
})
if !info.IsDir() && !isVideoFile && !isImageFile && !isZipFile {
return false
}
// #1756 - skip zero length files
if !info.IsDir() && info.Size() == 0 {
logger.Infof("Skipping zero-length file: %s", path)
return false
}
s := getStashFromDirPath(f.stashPaths, path)
if s == nil {
return
return false
}
// Handle the case of a scene
iwg := sizedwaitgroup.New(2)
// shortcut: skip the directory entirely if it matches both exclusion patterns
// add a trailing separator so that it correctly matches against patterns like path/.*
pathExcludeTest := path + string(filepath.Separator)
if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, f.videoExcludeRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, f.imageExcludeRegex)) {
return false
}
if t.GenerateSprite {
iwg.Add()
if isVideoFile && (s.ExcludeVideo || matchFileRegex(path, f.videoExcludeRegex)) {
return false
} else if (isImageFile || isZipFile) && s.ExcludeImage || matchFileRegex(path, f.imageExcludeRegex) {
return false
}
go t.progress.ExecuteTask(fmt.Sprintf("Generating sprites for %s", path), func() {
return true
}
type scanConfig struct {
isGenerateThumbnails bool
}
func (c *scanConfig) GetCreateGalleriesFromFolders() bool {
return instance.Config.GetCreateGalleriesFromFolders()
}
func (c *scanConfig) IsGenerateThumbnails() bool {
return c.isGenerateThumbnails
}
func getScanHandlers(options ScanMetadataInput, taskQueue *job.TaskQueue, progress *job.Progress) []file.Handler {
db := instance.Database
pluginCache := instance.PluginCache
return []file.Handler{
&file.FilteredHandler{
Filter: file.FilterFunc(imageFileFilter),
Handler: &image.ScanHandler{
CreatorUpdater: db.Image,
GalleryFinder: db.Gallery,
ThumbnailGenerator: &imageThumbnailGenerator{},
ScanConfig: &scanConfig{
isGenerateThumbnails: options.ScanGenerateThumbnails,
},
PluginCache: pluginCache,
},
},
&file.FilteredHandler{
Filter: file.FilterFunc(galleryFileFilter),
Handler: &gallery.ScanHandler{
CreatorUpdater: db.Gallery,
SceneFinderUpdater: db.Scene,
PluginCache: pluginCache,
},
},
&file.FilteredHandler{
Filter: file.FilterFunc(videoFileFilter),
Handler: &scene.ScanHandler{
CreatorUpdater: db.Scene,
PluginCache: pluginCache,
CoverGenerator: &coverGenerator{},
ScanGenerator: &sceneGenerators{
input: options,
taskQueue: taskQueue,
progress: progress,
},
},
},
}
}
// imageThumbnailGenerator generates preview thumbnails for scanned images.
type imageThumbnailGenerator struct{}

// GenerateThumbnail writes a thumbnail for i into the generated paths.
// It is a no-op when a thumbnail already exists or when the source image is
// no larger than the thumbnail width. Formats the encoder cannot handle
// (ErrNotSupportedForThumbnail) are skipped silently.
func (g *imageThumbnailGenerator) GenerateThumbnail(ctx context.Context, i *models.Image, f *file.ImageFile) error {
	thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum(), models.DefaultGthumbWidth)
	if exists, _ := fsutil.FileExists(thumbPath); exists {
		// already generated
		return nil
	}

	if f.Width <= models.DefaultGthumbWidth && f.Height <= models.DefaultGthumbWidth {
		// source already fits within the thumbnail size
		return nil
	}

	logger.Debugf("Generating thumbnail for %s", f.Path)

	thumb, err := image.NewThumbnailEncoder(instance.FFMPEG).GetThumbnail(f, models.DefaultGthumbWidth)
	if err != nil {
		if errors.Is(err, image.ErrNotSupportedForThumbnail) {
			// don't log for animated images
			return nil
		}
		return fmt.Errorf("getting thumbnail for image %s: %w", f.Path, err)
	}

	if err := fsutil.WriteFile(thumbPath, thumb); err != nil {
		return fmt.Errorf("writing thumbnail for image %s: %w", f.Path, err)
	}
	return nil
}
// sceneGenerators queues post-scan generation work (sprites, phashes,
// previews) for scanned scene files.
type sceneGenerators struct {
	input     ScanMetadataInput // scan options controlling which artifacts to generate
	taskQueue *job.TaskQueue    // queue that generation tasks are added to
	progress  *job.Progress     // progress tracker shared with the scan job
}
func (g *sceneGenerators) Generate(ctx context.Context, s *models.Scene, f *file.VideoFile) error {
const overwrite = false
progress := g.progress
t := g.input
path := f.Path
config := instance.Config
fileNamingAlgorithm := config.GetVideoFileNamingAlgorithm()
if t.ScanGenerateSprites {
progress.AddTotal(1)
g.taskQueue.Add(fmt.Sprintf("Generating sprites for %s", path), func(ctx context.Context) {
taskSprite := GenerateSpriteTask{
Scene: *s,
Overwrite: false,
fileNamingAlgorithm: t.fileNamingAlgorithm,
Overwrite: overwrite,
fileNamingAlgorithm: fileNamingAlgorithm,
}
taskSprite.Start(ctx)
iwg.Done()
progress.Increment()
})
}
if t.GeneratePhash {
iwg.Add()
go t.progress.ExecuteTask(fmt.Sprintf("Generating phash for %s", path), func() {
if t.ScanGeneratePhashes {
progress.AddTotal(1)
g.taskQueue.Add(fmt.Sprintf("Generating phash for %s", path), func(ctx context.Context) {
taskPhash := GeneratePhashTask{
Scene: *s,
fileNamingAlgorithm: t.fileNamingAlgorithm,
txnManager: t.TxnManager,
File: f,
fileNamingAlgorithm: fileNamingAlgorithm,
txnManager: instance.Database,
fileUpdater: instance.Database.File,
Overwrite: overwrite,
}
taskPhash.Start(ctx)
iwg.Done()
progress.Increment()
})
}
if t.GeneratePreview {
iwg.Add()
go t.progress.ExecuteTask(fmt.Sprintf("Generating preview for %s", path), func() {
if t.ScanGeneratePreviews {
progress.AddTotal(1)
g.taskQueue.Add(fmt.Sprintf("Generating preview for %s", path), func(ctx context.Context) {
options := getGeneratePreviewOptions(GeneratePreviewOptionsInput{})
const overwrite = false
g := &generate.Generator{
Encoder: instance.FFMPEG,
@ -336,73 +288,16 @@ func (t *ScanTask) Start(ctx context.Context) {
taskPreview := GeneratePreviewTask{
Scene: *s,
ImagePreview: t.GenerateImagePreview,
ImagePreview: t.ScanGenerateImagePreviews,
Options: options,
Overwrite: overwrite,
fileNamingAlgorithm: t.fileNamingAlgorithm,
fileNamingAlgorithm: fileNamingAlgorithm,
generator: g,
}
taskPreview.Start(ctx)
iwg.Done()
progress.Increment()
})
}
iwg.Wait()
}
// walkFilesToScan walks the stash path s, invoking f for each file whose
// extension matches the configured video, image/gallery or caption
// extensions and which is not excluded by the configured exclusion regexps.
//
// Directories under the generated path are skipped, as are directories
// excluded for both video and image scanning.
//
// Fix: the unreachable `return nil` that followed the SymWalk return (a diff
// artifact) has been removed.
func walkFilesToScan(s *config.StashConfig, f filepath.WalkFunc) error {
	config := config.GetInstance()
	vidExt := config.GetVideoExtensions()
	imgExt := config.GetImageExtensions()
	gExt := config.GetGalleryExtensions()
	capExt := scene.CaptionExts
	excludeVidRegex := generateRegexps(config.GetExcludes())
	excludeImgRegex := generateRegexps(config.GetImageExcludes())

	// don't scan zip images directly
	if file.IsZipPath(s.Path) {
		logger.Warnf("Cannot rescan zip image %s. Rescan zip gallery instead.", s.Path)
		return nil
	}

	generatedPath := config.GetGeneratedPath()

	return fsutil.SymWalk(s.Path, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			logger.Warnf("error scanning %s: %s", path, err.Error())
			return nil
		}

		if info.IsDir() {
			// #1102 - ignore files in generated path
			if fsutil.IsPathInDir(generatedPath, path) {
				return filepath.SkipDir
			}

			// shortcut: skip the directory entirely if it matches both exclusion patterns
			// add a trailing separator so that it correctly matches against patterns like path/.*
			pathExcludeTest := path + string(filepath.Separator)
			if (s.ExcludeVideo || matchFileRegex(pathExcludeTest, excludeVidRegex)) && (s.ExcludeImage || matchFileRegex(pathExcludeTest, excludeImgRegex)) {
				return filepath.SkipDir
			}

			return nil
		}

		if !s.ExcludeVideo && fsutil.MatchExtension(path, vidExt) && !matchFileRegex(path, excludeVidRegex) {
			return f(path, info, err)
		}

		if !s.ExcludeImage {
			if (fsutil.MatchExtension(path, imgExt) || fsutil.MatchExtension(path, gExt)) && !matchFileRegex(path, excludeImgRegex) {
				return f(path, info, err)
			}
		}

		if fsutil.MatchExtension(path, capExt) {
			return f(path, info, err)
		}

		return nil
	})
}

View File

@ -1,170 +1,160 @@
package manager
import (
"archive/zip"
"context"
"fmt"
"path/filepath"
"strings"
// func (t *ScanTask) scanGallery(ctx context.Context) {
// var g *models.Gallery
// path := t.file.Path()
// images := 0
// scanImages := false
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
)
// if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
// var err error
// g, err = t.TxnManager.Gallery.FindByPath(ctx, path)
// scanGallery scans the gallery (zip) file at the task's path: it looks up
// any existing gallery record, runs the gallery scanner against the file,
// then scans or regenerates the zip's contained images as appropriate.
// Errors are logged rather than returned.
//
// NOTE(review): a commented-out replacement based on gallery.ScanHandler was
// interleaved here by the diff; the stale commented lines have been replaced
// by this documentation.
func (t *ScanTask) scanGallery(ctx context.Context) {
	var g *models.Gallery
	path := t.file.Path()
	images := 0
	scanImages := false

	// look up an existing gallery by path, and count its images so we can
	// decide below whether the zip contents need (re)scanning
	if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
		var err error
		g, err = t.TxnManager.Gallery.FindByPath(ctx, path)
		if g != nil && err == nil {
			images, err = t.TxnManager.Image.CountByGalleryID(ctx, g.ID)
			if err != nil {
				return fmt.Errorf("error getting images for zip gallery %s: %s", path, err.Error())
			}
		}
		return err
	}); err != nil {
		logger.Error(err.Error())
		return
	}

	scanner := gallery.Scanner{
		Scanner:            gallery.FileScanner(&file.FSHasher{}),
		ImageExtensions:    instance.Config.GetImageExtensions(),
		StripFileExtension: t.StripFileExtension,
		CaseSensitiveFs:    t.CaseSensitiveFs,
		CreatorUpdater:     t.TxnManager.Gallery,
		Paths:              instance.Paths,
		PluginCache:        instance.PluginCache,
		MutexManager:       t.mutexManager,
	}

	var err error
	if g != nil {
		g, scanImages, err = scanner.ScanExisting(ctx, g, t.file)
		if err != nil {
			logger.Error(err.Error())
			return
		}

		// scan the zip files if the gallery has no images
		scanImages = scanImages || images == 0
	} else {
		g, scanImages, err = scanner.ScanNew(ctx, t.file)
		if err != nil {
			logger.Error(err.Error())
		}
	}

	if g != nil {
		if scanImages {
			t.scanZipImages(ctx, g)
		} else {
			// in case thumbnails have been deleted, regenerate them
			t.regenerateZipImages(ctx, g)
		}
	}
}
// if g != nil {
// if scanImages {
// t.scanZipImages(ctx, g)
// } else {
// // in case thumbnails have been deleted, regenerate them
// t.regenerateZipImages(ctx, g)
// }
// }
// }
// associateGallery associates a gallery to scene/s with the same basename:
// for each configured video extension it derives a candidate scene path from
// the gallery's basename and, if a scene exists at that path and is not
// already associated, links the gallery to it. Runs after scanning.
func (t *ScanTask) associateGallery(ctx context.Context, wg *sizedwaitgroup.SizedWaitGroup) {
	path := t.file.Path()
	if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
		r := t.TxnManager
		qb := r.Gallery
		sqb := r.Scene
		g, err := qb.FindByPath(ctx, path)
		if err != nil {
			return err
		}

		if g == nil {
			// associate is run after scan is finished
			// should only happen if gallery is a directory or an io error occurs during hashing
			logger.Warnf("associate: gallery %s not found in DB", path)
			return nil
		}

		basename := strings.TrimSuffix(path, filepath.Ext(path))
		var relatedFiles []string
		vExt := config.GetInstance().GetVideoExtensions()
		// make a list of media files that can be related to the gallery
		for _, ext := range vExt {
			related := basename + "." + ext
			// exclude gallery extensions from the related files
			if !isGallery(related) {
				relatedFiles = append(relatedFiles, related)
			}
		}

		for _, scenePath := range relatedFiles {
			scene, _ := sqb.FindByPath(ctx, scenePath)
			// found related Scene
			if scene != nil {
				sceneGalleries, _ := sqb.FindByGalleryID(ctx, g.ID) // check if gallery is already associated to the scene
				isAssoc := false
				for _, sg := range sceneGalleries {
					if scene.ID == sg.ID {
						isAssoc = true
						break
					}
				}
				if !isAssoc {
					logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID)
					if err := sqb.UpdateGalleries(ctx, scene.ID, []int{g.ID}); err != nil {
						return err
					}
				}
			}
		}
		return nil
	}); err != nil {
		logger.Error(err.Error())
	}
	// signal the caller's wait group regardless of outcome
	wg.Done()
}
// basename := strings.TrimSuffix(path, filepath.Ext(path))
// var relatedFiles []string
// vExt := config.GetInstance().GetVideoExtensions()
// // make a list of media files that can be related to the gallery
// for _, ext := range vExt {
// related := basename + "." + ext
// // exclude gallery extensions from the related files
// if !isGallery(related) {
// relatedFiles = append(relatedFiles, related)
// }
// }
// for _, scenePath := range relatedFiles {
// scene, _ := sqb.FindByPath(ctx, scenePath)
// // found related Scene
// if scene != nil {
// sceneGalleries, _ := sqb.FindByGalleryID(ctx, g.ID) // check if gallery is already associated to the scene
// isAssoc := false
// for _, sg := range sceneGalleries {
// if scene.ID == sg.ID {
// isAssoc = true
// break
// }
// }
// if !isAssoc {
// logger.Infof("associate: Gallery %s is related to scene: %d", path, scene.ID)
// if _, err := sqb.UpdatePartial(ctx, scene.ID, models.ScenePartial{
// GalleryIDs: &models.UpdateIDs{
// IDs: []int{g.ID},
// Mode: models.RelationshipUpdateModeAdd,
// },
// }); err != nil {
// return err
// }
// }
// }
// }
// return nil
// }); err != nil {
// logger.Error(err.Error())
// }
// wg.Done()
// }
// scanZipImages scans every file contained in the given zip gallery by
// running a copy of this scan task against each zip entry.
func (t *ScanTask) scanZipImages(ctx context.Context, zipGallery *models.Gallery) {
	zipPath := zipGallery.Path.String

	walkErr := walkGalleryZip(zipPath, func(f *zip.File) error {
		// copy this task and change the filename
		// filepath is the zip file and the internal file name, separated by a null byte
		sub := *t
		sub.file = file.ZipFile(zipPath, f)
		sub.zipGallery = zipGallery

		// run the subtask and wait for it to complete
		sub.Start(ctx)
		return nil
	})

	if walkErr != nil {
		logger.Warnf("failed to scan zip file images for %s: %s", zipPath, walkErr.Error())
	}
}
// regenerateZipImages regenerates thumbnails for every image belonging to
// the given zip gallery (used in case thumbnails have been deleted).
func (t *ScanTask) regenerateZipImages(ctx context.Context, zipGallery *models.Gallery) {
	var galleryImages []*models.Image

	err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
		found, err := t.TxnManager.Image.FindByGalleryID(ctx, zipGallery.ID)
		galleryImages = found
		return err
	})
	if err != nil {
		logger.Warnf("failed to find gallery images: %s", err.Error())
		return
	}

	for _, i := range galleryImages {
		t.generateThumbnail(i)
	}
}

View File

@ -1,184 +1,179 @@
package manager
import (
"context"
"database/sql"
"errors"
"os/exec"
"path/filepath"
"time"
// import (
// "context"
// "errors"
// "os/exec"
// "path/filepath"
// "time"
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/fsutil"
"github.com/stashapp/stash/pkg/gallery"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/image"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/plugin"
)
// "github.com/stashapp/stash/internal/manager/config"
// "github.com/stashapp/stash/pkg/file"
// "github.com/stashapp/stash/pkg/fsutil"
// "github.com/stashapp/stash/pkg/gallery"
// "github.com/stashapp/stash/pkg/hash/md5"
// "github.com/stashapp/stash/pkg/image"
// "github.com/stashapp/stash/pkg/logger"
// "github.com/stashapp/stash/pkg/models"
// "github.com/stashapp/stash/pkg/plugin"
// )
// scanImage scans the image file at the task's path: it looks up any
// existing image record, runs the image scanner, associates the image with
// its zip gallery or a folder gallery as configured, and generates a
// thumbnail. Errors are logged rather than returned.
//
// Fix: the closing brace of the ScanNew else-branch was lost in the diff
// interleave, leaving the function unbalanced; it is restored here.
func (t *ScanTask) scanImage(ctx context.Context) {
	var i *models.Image
	path := t.file.Path()

	if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
		var err error
		i, err = t.TxnManager.Image.FindByPath(ctx, path)
		return err
	}); err != nil {
		logger.Error(err.Error())
		return
	}

	scanner := image.Scanner{
		Scanner:            image.FileScanner(&file.FSHasher{}),
		StripFileExtension: t.StripFileExtension,
		TxnManager:         t.TxnManager,
		CreatorUpdater:     t.TxnManager.Image,
		CaseSensitiveFs:    t.CaseSensitiveFs,
		Paths:              GetInstance().Paths,
		PluginCache:        instance.PluginCache,
		MutexManager:       t.mutexManager,
	}

	var err error
	if i != nil {
		i, err = scanner.ScanExisting(ctx, i, t.file)
		if err != nil {
			logger.Error(err.Error())
			return
		}
	} else {
		i, err = scanner.ScanNew(ctx, t.file)
		if err != nil {
			logger.Error(err.Error())
			return
		}
	}

	if i != nil {
		if t.zipGallery != nil {
			// associate with gallery
			if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
				return gallery.AddImage(ctx, t.TxnManager.Gallery, t.zipGallery.ID, i.ID)
			}); err != nil {
				logger.Error(err.Error())
				return
			}
		} else if config.GetInstance().GetCreateGalleriesFromFolders() {
			// create gallery from folder or associate with existing gallery
			logger.Infof("Associating image %s with folder gallery", i.Path)
			var galleryID int
			var isNewGallery bool
			if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
				var err error
				galleryID, isNewGallery, err = t.associateImageWithFolderGallery(ctx, i.ID, t.TxnManager.Gallery)
				return err
			}); err != nil {
				logger.Error(err.Error())
				return
			}

			if isNewGallery {
				GetInstance().PluginCache.ExecutePostHooks(ctx, galleryID, plugin.GalleryCreatePost, nil, nil)
			}
		}
	}

	if i != nil {
		t.generateThumbnail(i)
	}
}
// GalleryImageAssociator is the subset of the gallery store needed to find
// or create a folder gallery and associate images with it.
type GalleryImageAssociator interface {
	FindByPath(ctx context.Context, path string) (*models.Gallery, error)
	Create(ctx context.Context, newGallery models.Gallery) (*models.Gallery, error)
	gallery.ImageUpdater
}
// type GalleryImageAssociator interface {
// FindByPath(ctx context.Context, path string) (*models.Gallery, error)
// Create(ctx context.Context, newGallery *models.Gallery) error
// gallery.ImageUpdater
// }
// associateImageWithFolderGallery finds or creates a gallery for the folder
// containing the task's file and associates the given image with it.
// It returns the gallery's ID and whether the gallery was newly created.
func (t *ScanTask) associateImageWithFolderGallery(ctx context.Context, imageID int, qb GalleryImageAssociator) (galleryID int, isNew bool, err error) {
	// find a gallery with the path specified
	path := filepath.Dir(t.file.Path())
	var g *models.Gallery
	g, err = qb.FindByPath(ctx, path)
	if err != nil {
		return
	}

	if g == nil {
		// no existing folder gallery; create one titled after the folder name
		checksum := md5.FromString(path)

		// create the gallery
		currentTime := time.Now()

		newGallery := models.Gallery{
			Checksum: checksum,
			Path: sql.NullString{
				String: path,
				Valid:  true,
			},
			CreatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			UpdatedAt: models.SQLiteTimestamp{Timestamp: currentTime},
			Title: sql.NullString{
				String: fsutil.GetNameFromPath(path, false),
				Valid:  true,
			},
		}

		logger.Infof("Creating gallery for folder %s", path)
		g, err = qb.Create(ctx, newGallery)
		if err != nil {
			return 0, false, err
		}

		isNew = true
	}

	// associate image with gallery
	err = gallery.AddImage(ctx, qb, g.ID, imageID)
	galleryID = g.ID
	return
}
// isNew = true
// }
// generateThumbnail generates a thumbnail for the given image when thumbnail
// generation is enabled and no thumbnail exists yet. Errors are logged
// rather than returned.
func (t *ScanTask) generateThumbnail(i *models.Image) {
	if !t.GenerateThumbnails {
		return
	}

	thumbPath := GetInstance().Paths.Generated.GetThumbnailPath(i.Checksum, models.DefaultGthumbWidth)
	exists, _ := fsutil.FileExists(thumbPath)
	if exists {
		// already generated
		return
	}

	config, _, err := image.DecodeSourceImage(i)
	if err != nil {
		logger.Errorf("error reading image %s: %s", i.Path, err.Error())
		return
	}

	// only generate when the source is larger than the thumbnail width
	if config.Height > models.DefaultGthumbWidth || config.Width > models.DefaultGthumbWidth {
		encoder := image.NewThumbnailEncoder(instance.FFMPEG)
		data, err := encoder.GetThumbnail(i, models.DefaultGthumbWidth)

		if err != nil {
			// don't log for animated images
			if !errors.Is(err, image.ErrNotSupportedForThumbnail) {
				logger.Errorf("error getting thumbnail for image %s: %s", i.Path, err.Error())

				// surface ffmpeg's stderr when the encoder subprocess failed
				var exitErr *exec.ExitError
				if errors.As(err, &exitErr) {
					logger.Errorf("stderr: %s", string(exitErr.Stderr))
				}
			}
			return
		}

		err = fsutil.WriteFile(thumbPath, data)
		if err != nil {
			logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err)
		}
	}
}
// var exitErr *exec.ExitError
// if errors.As(err, &exitErr) {
// logger.Errorf("stderr: %s", string(exitErr.Stderr))
// }
// }
// return
// }
// err = fsutil.WriteFile(thumbPath, data)
// if err != nil {
// logger.Errorf("error writing thumbnail for image %s: %s", i.Path, err)
// }
// }
// }

View File

@ -1,129 +1,116 @@
package manager
import (
"context"
"path/filepath"
// type sceneScreenshotter struct {
// g *generate.Generator
// }
"github.com/stashapp/stash/internal/manager/config"
"github.com/stashapp/stash/pkg/ffmpeg"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/scene"
"github.com/stashapp/stash/pkg/scene/generate"
)
// func (ss *sceneScreenshotter) GenerateScreenshot(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error {
// return ss.g.Screenshot(ctx, probeResult.Path, hash, probeResult.Width, probeResult.Duration, generate.ScreenshotOptions{})
// }
// sceneScreenshotter adapts a generate.Generator to the scene scanner's
// screenshot interface.
type sceneScreenshotter struct {
	g *generate.Generator
}

// GenerateScreenshot generates a full-size screenshot for the probed video
// file identified by hash.
func (ss *sceneScreenshotter) GenerateScreenshot(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error {
	return ss.g.Screenshot(ctx, probeResult.Path, hash, probeResult.Width, probeResult.Duration, generate.ScreenshotOptions{})
}

// GenerateThumbnail generates a thumbnail-sized screenshot for the probed
// video file identified by hash.
func (ss *sceneScreenshotter) GenerateThumbnail(ctx context.Context, probeResult *ffmpeg.VideoFile, hash string) error {
	return ss.g.Thumbnail(ctx, probeResult.Path, hash, probeResult.Duration, generate.ScreenshotOptions{})
}
// var retScene *models.Scene
// var s *models.Scene
// scanScene scans the video file at the task's path. If a scene already
// exists for the path it is rescanned in place and nil is returned;
// otherwise the newly created scene is returned so the caller can
// post-process it. On error, the error is logged and nil returned.
func (t *ScanTask) scanScene(ctx context.Context) *models.Scene {
	logError := func(err error) *models.Scene {
		logger.Error(err.Error())
		return nil
	}

	var retScene *models.Scene
	var s *models.Scene

	if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
		var err error
		s, err = t.TxnManager.Scene.FindByPath(ctx, t.file.Path())
		return err
	}); err != nil {
		logger.Error(err.Error())
		return nil
	}

	g := &generate.Generator{
		Encoder:     instance.FFMPEG,
		LockManager: instance.ReadLockManager,
		ScenePaths:  instance.Paths.Scene,
	}

	scanner := scene.Scanner{
		Scanner:             scene.FileScanner(&file.FSHasher{}, t.fileNamingAlgorithm, t.calculateMD5),
		StripFileExtension:  t.StripFileExtension,
		FileNamingAlgorithm: t.fileNamingAlgorithm,
		TxnManager:          t.TxnManager,
		CreatorUpdater:      t.TxnManager.Scene,
		Paths:               GetInstance().Paths,
		CaseSensitiveFs:     t.CaseSensitiveFs,
		Screenshotter: &sceneScreenshotter{
			g: g,
		},
		VideoFileCreator: &instance.FFProbe,
		PluginCache:      instance.PluginCache,
		MutexManager:     t.mutexManager,
		UseFileMetadata:  t.UseFileMetadata,
	}

	if s != nil {
		if err := scanner.ScanExisting(ctx, s, t.file); err != nil {
			return logError(err)
		}

		return nil
	}

	var err error
	retScene, err = scanner.ScanNew(ctx, t.file)
	if err != nil {
		return logError(err)
	}

	return retScene
}
// associateCaptions associates captions to scene/s with the same basename.
// For each candidate scene path derived from the caption file's basename,
// any matching scene gains the caption — unless a caption in that language
// and format is already present.
func (t *ScanTask) associateCaptions(ctx context.Context) {
	vExt := config.GetInstance().GetVideoExtensions()
	captionPath := t.file.Path()
	captionLang := scene.GetCaptionsLangFromPath(captionPath)

	relatedFiles := scene.GenerateCaptionCandidates(captionPath, vExt)
	if err := t.TxnManager.WithTxn(ctx, func(ctx context.Context) error {
		// NOTE(review): err is never assigned inside this closure; per-scene
		// errors are logged (via `er`) and skipped, so the txn always commits.
		var err error
		sqb := t.TxnManager.Scene

		for _, scenePath := range relatedFiles {
			s, er := sqb.FindByPath(ctx, scenePath)
			if er != nil {
				logger.Errorf("Error searching for scene %s: %v", scenePath, er)
				continue
			}
			if s != nil { // found related Scene
				logger.Debugf("Matched captions to scene %s", s.Path)
				captions, er := sqb.GetCaptions(ctx, s.ID)
				if er == nil {
					fileExt := filepath.Ext(captionPath)
					ext := fileExt[1:]
					if !scene.IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present
						newCaption := &models.SceneCaption{
							LanguageCode: captionLang,
							Filename:     filepath.Base(captionPath),
							CaptionType:  ext,
						}
						captions = append(captions, newCaption)
						er = sqb.UpdateCaptions(ctx, s.ID, captions)
						if er == nil {
							logger.Debugf("Updated captions for scene %s. Added %s", s.Path, captionLang)
						}
					}
				}
			}
		}
		return err
	}); err != nil {
		logger.Error(err.Error())
	}
}
// if er != nil {
// logger.Errorf("Error searching for scene %s: %v", scenePath, er)
// continue
// }
// if s != nil { // found related Scene
// logger.Debugf("Matched captions to scene %s", s.Path)
// captions, er := sqb.GetCaptions(ctx, s.ID)
// if er == nil {
// fileExt := filepath.Ext(captionPath)
// ext := fileExt[1:]
// if !scene.IsLangInCaptions(captionLang, ext, captions) { // only update captions if language code is not present
// newCaption := &models.SceneCaption{
// LanguageCode: captionLang,
// Filename: filepath.Base(captionPath),
// CaptionType: ext,
// }
// captions = append(captions, newCaption)
// er = sqb.UpdateCaptions(ctx, s.ID, captions)
// if er == nil {
// logger.Debugf("Updated captions for scene %s. Added %s", s.Path, captionLang)
// }
// }
// }
// }
// }
// return err
// }); err != nil {
// logger.Error(err.Error())
// }
// }

View File

@ -166,7 +166,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
_, err := r.Performer.Update(ctx, partial)
if !t.refresh {
err = r.Performer.UpdateStashIDs(ctx, t.performer.ID, []models.StashID{
err = r.Performer.UpdateStashIDs(ctx, t.performer.ID, []*models.StashID{
{
Endpoint: t.box.Endpoint,
StashID: *performer.RemoteSiteID,
@ -231,7 +231,7 @@ func (t *StashBoxPerformerTagTask) stashBoxPerformerTag(ctx context.Context) {
return err
}
err = r.Performer.UpdateStashIDs(ctx, createdPerformer.ID, []models.StashID{
err = r.Performer.UpdateStashIDs(ctx, createdPerformer.ID, []*models.StashID{
{
Endpoint: t.box.Endpoint,
StashID: *performer.RemoteSiteID,

View File

@ -23,7 +23,7 @@ type GenerateTranscodeTask struct {
}
func (t *GenerateTranscodeTask) GetDescription() string {
return fmt.Sprintf("Generating transcode for %s", t.Scene.Path)
return fmt.Sprintf("Generating transcode for %s", t.Scene.Path())
}
func (t *GenerateTranscodeTask) Start(ctc context.Context) {
@ -42,10 +42,15 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
return
}
videoCodec := t.Scene.VideoCodec.String
var videoCodec string
if t.Scene.VideoCodec() != "" {
videoCodec = t.Scene.VideoCodec()
}
audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec.Valid {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec.String)
if t.Scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec())
}
if !t.Force && ffmpeg.IsStreamable(videoCodec, audioCodec, container) == nil {
@ -54,7 +59,7 @@ func (t *GenerateTranscodeTask) Start(ctc context.Context) {
// TODO - move transcode generation logic elsewhere
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path)
videoFile, err := ffprobe.NewVideoFile(t.Scene.Path())
if err != nil {
logger.Errorf("[transcode] error reading video file: %s", err.Error())
return
@ -104,15 +109,18 @@ func (t *GenerateTranscodeTask) isTranscodeNeeded() bool {
return true
}
videoCodec := t.Scene.VideoCodec.String
var videoCodec string
if t.Scene.VideoCodec() != "" {
videoCodec = t.Scene.VideoCodec()
}
container := ""
audioCodec := ffmpeg.MissingUnsupported
if t.Scene.AudioCodec.Valid {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec.String)
if t.Scene.AudioCodec() != "" {
audioCodec = ffmpeg.ProbeAudioCodec(t.Scene.AudioCodec())
}
if t.Scene.Format.Valid {
container = t.Scene.Format.String
if t.Scene.Format() != "" {
container = t.Scene.Format()
}
if ffmpeg.IsStreamable(videoCodec, audioCodec, ffmpeg.Container(container)) == nil {

View File

@ -167,6 +167,9 @@ func parse(filePath string, probeJSON *FFProbeJSON) (*VideoFile, error) {
} else {
framerate, _ = strconv.ParseFloat(videoStream.AvgFrameRate, 64)
}
if math.IsNaN(framerate) {
framerate = 0
}
result.FrameRate = math.Round(framerate*100) / 100
if rotate, err := strconv.ParseInt(videoStream.Tags.Rotate, 10, 64); err == nil && rotate != 180 {
result.Width = videoStream.Height

411
pkg/file/clean.go Normal file
View File

@ -0,0 +1,411 @@
package file
import (
"context"
"errors"
"fmt"
"io/fs"
"github.com/stashapp/stash/pkg/job"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/txn"
)
// Cleaner scans through stored file and folder instances and removes those that are no longer present on disk.
type Cleaner struct {
	// FS is the file system used to check whether entries still exist on disk.
	FS FS
	// Repository provides transaction management and access to the file and folder stores.
	Repository Repository
	// Handlers are fired for each file/folder before its record is destroyed.
	Handlers []CleanHandler
}
// cleanJob carries the per-run state of a single Clean invocation.
type cleanJob struct {
	*Cleaner
	// progress reports clean progress back to the caller.
	progress *job.Progress
	// options controls which paths are cleaned and whether this is a dry run.
	options CleanOptions
}
// CleanOptions provides options for cleaning files.
type CleanOptions struct {
	// Paths limits the clean to files/folders under these paths.
	Paths []string

	// Do a dry run. Don't delete any files
	DryRun bool

	// PathFilter are used to determine if a file should be included.
	// Excluded files are marked for cleaning.
	PathFilter PathFilter
}
// Clean starts the clean process. It assesses the stored files and folders
// under options.Paths and removes any that are no longer present on disk
// (or that options.PathFilter rejects). Progress is reported via progress.
// Errors are logged rather than returned.
func (s *Cleaner) Clean(ctx context.Context, options CleanOptions, progress *job.Progress) {
	j := &cleanJob{
		Cleaner:  s,
		progress: progress,
		options:  options,
	}

	if err := j.execute(ctx); err != nil {
		// logger.Errorf is not fmt.Errorf: the %w verb only has meaning when
		// wrapping with fmt.Errorf, so use %v to render the error here.
		logger.Errorf("error cleaning files: %v", err)
		return
	}
}
// fileOrFolder is one entry in the ordered deletion list; exactly one of
// fileID or folderID is non-zero.
type fileOrFolder struct {
	fileID   ID
	folderID FolderID
}

// deleteSet is an insertion-ordered set of files and folders flagged for
// deletion. The maps guarantee uniqueness and remember each entry's path;
// orderedList preserves the order in which entries were flagged.
type deleteSet struct {
	orderedList []fileOrFolder
	fileIDSet   map[ID]string
	folderIDSet map[FolderID]string
}

// newDeleteSet returns an empty deleteSet ready for use.
func newDeleteSet() deleteSet {
	return deleteSet{
		fileIDSet:   make(map[ID]string),
		folderIDSet: make(map[FolderID]string),
	}
}

// add flags the file with the given id and path for deletion.
// Ids already present are ignored.
func (s *deleteSet) add(id ID, path string) {
	if s.has(id) {
		return
	}

	s.orderedList = append(s.orderedList, fileOrFolder{fileID: id})
	s.fileIDSet[id] = path
}

// has reports whether the file id is already flagged.
func (s *deleteSet) has(id ID) bool {
	_, found := s.fileIDSet[id]
	return found
}

// addFolder flags the folder with the given id and path for deletion.
// Ids already present are ignored.
func (s *deleteSet) addFolder(id FolderID, path string) {
	if s.hasFolder(id) {
		return
	}

	s.orderedList = append(s.orderedList, fileOrFolder{folderID: id})
	s.folderIDSet[id] = path
}

// hasFolder reports whether the folder id is already flagged.
func (s *deleteSet) hasFolder(id FolderID) bool {
	_, found := s.folderIDSet[id]
	return found
}

// len returns the total number of flagged files and folders.
func (s *deleteSet) len() int {
	return len(s.orderedList)
}
// execute runs the full clean: it counts the candidate files and folders
// for definite progress reporting, assesses which should be removed, and
// then (unless this is a dry run) deletes them from the database.
func (j *cleanJob) execute(ctx context.Context) error {
	progress := j.progress

	// collect everything to delete first, then delete in a second pass
	toDelete := newDeleteSet()

	var (
		fileCount   int
		folderCount int
	)

	// count candidates up-front so progress can be reported as definite
	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		var err error
		fileCount, err = j.Repository.CountAllInPaths(ctx, j.options.Paths)
		if err != nil {
			return err
		}

		folderCount, err = j.Repository.FolderStore.CountAllInPaths(ctx, j.options.Paths)
		if err != nil {
			return err
		}

		return nil
	}); err != nil {
		return err
	}

	progress.AddTotal(fileCount + folderCount)
	progress.Definite()

	if err := j.assessFiles(ctx, &toDelete); err != nil {
		return err
	}

	if err := j.assessFolders(ctx, &toDelete); err != nil {
		return err
	}

	if j.options.DryRun && toDelete.len() > 0 {
		// add progress for files that would've been deleted
		progress.AddProcessed(toDelete.len())
		return nil
	}

	progress.ExecuteTask(fmt.Sprintf("Cleaning %d files and folders", toDelete.len()), func() {
		for _, ff := range toDelete.orderedList {
			if job.IsCancelled(ctx) {
				return
			}

			// exactly one of fileID/folderID is set per entry
			if ff.fileID != 0 {
				j.deleteFile(ctx, ff.fileID, toDelete.fileIDSet[ff.fileID])
			}

			if ff.folderID != 0 {
				j.deleteFolder(ctx, ff.folderID, toDelete.folderIDSet[ff.folderID])
			}

			progress.Increment()
		}
	})

	return nil
}
// assessFiles pages through all stored files under the configured paths
// (in batches of 1000) and adds any that should be cleaned to toDelete.
func (j *cleanJob) assessFiles(ctx context.Context, toDelete *deleteSet) error {
	const batchSize = 1000
	offset := 0
	progress := j.progress

	more := true
	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		for more {
			if job.IsCancelled(ctx) {
				return nil
			}

			files, err := j.Repository.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
			if err != nil {
				return fmt.Errorf("error querying for files: %w", err)
			}

			for _, f := range files {
				path := f.Base().Path
				err = nil
				fileID := f.Base().ID

				// short-cut, don't assess if already added
				if toDelete.has(fileID) {
					continue
				}

				// err is captured by the closure so flag failures propagate out
				progress.ExecuteTask(fmt.Sprintf("Assessing file %s for clean", path), func() {
					if j.shouldClean(ctx, f) {
						err = j.flagFileForDelete(ctx, toDelete, f)
					} else {
						// increment progress, no further processing
						progress.Increment()
					}
				})
				if err != nil {
					return err
				}
			}

			// a short batch means we've reached the end
			if len(files) != batchSize {
				more = false
			} else {
				offset += batchSize
			}
		}

		return nil
	}); err != nil {
		return err
	}

	return nil
}
// flagFileForDelete adds the file to the toDelete set, together with any
// files and folders contained within it (when the file is a zip), with the
// contained entries added first.
func (j *cleanJob) flagFileForDelete(ctx context.Context, toDelete *deleteSet, f File) error {
	// add contained files first
	containedFiles, err := j.Repository.FindByZipFileID(ctx, f.Base().ID)
	if err != nil {
		return fmt.Errorf("error finding contained files for %q: %w", f.Base().Path, err)
	}

	for _, cf := range containedFiles {
		logger.Infof("Marking contained file %q to clean", cf.Base().Path)
		toDelete.add(cf.Base().ID, cf.Base().Path)
	}

	// add contained folders as well
	containedFolders, err := j.Repository.FolderStore.FindByZipFileID(ctx, f.Base().ID)
	if err != nil {
		return fmt.Errorf("error finding contained folders for %q: %w", f.Base().Path, err)
	}

	for _, cf := range containedFolders {
		logger.Infof("Marking contained folder %q to clean", cf.Path)
		toDelete.addFolder(cf.ID, cf.Path)
	}

	toDelete.add(f.Base().ID, f.Base().Path)

	return nil
}
// assessFolders pages through all stored folders under the configured
// paths (in batches of 1000) and adds any that should be cleaned to
// toDelete. Mirrors assessFiles for folders.
func (j *cleanJob) assessFolders(ctx context.Context, toDelete *deleteSet) error {
	const batchSize = 1000
	offset := 0
	progress := j.progress

	more := true
	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		for more {
			if job.IsCancelled(ctx) {
				return nil
			}

			folders, err := j.Repository.FolderStore.FindAllInPaths(ctx, j.options.Paths, batchSize, offset)
			if err != nil {
				return fmt.Errorf("error querying for folders: %w", err)
			}

			for _, f := range folders {
				path := f.Path
				folderID := f.ID

				// short-cut, don't assess if already added
				if toDelete.hasFolder(folderID) {
					continue
				}

				// err is captured by the closure so flag failures propagate out
				err = nil
				progress.ExecuteTask(fmt.Sprintf("Assessing folder %s for clean", path), func() {
					if j.shouldCleanFolder(ctx, f) {
						if err = j.flagFolderForDelete(ctx, toDelete, f); err != nil {
							return
						}
					} else {
						// increment progress, no further processing
						progress.Increment()
					}
				})
				if err != nil {
					return err
				}
			}

			// a short batch means we've reached the end
			if len(folders) != batchSize {
				more = false
			} else {
				offset += batchSize
			}
		}

		return nil
	}); err != nil {
		return err
	}

	return nil
}
// flagFolderForDelete adds the folder to the toDelete set.
func (j *cleanJob) flagFolderForDelete(ctx context.Context, toDelete *deleteSet, folder *Folder) error {
	// it is possible that child folders may be included while parent folders are not
	// so we need to check child folders separately
	toDelete.addFolder(folder.ID, folder.Path)

	return nil
}
// shouldClean reports whether the file should be removed from the
// database: either it no longer exists on disk, or the configured
// PathFilter rejects its path.
func (j *cleanJob) shouldClean(ctx context.Context, f File) bool {
	path := f.Base().Path

	info, err := f.Base().Info(j.FS)
	if err != nil && !errors.Is(err, fs.ErrNotExist) {
		// unexpected stat failure - keep the entry rather than risk a bad delete
		logger.Errorf("error getting file info for %q, not cleaning: %v", path, err)
		return false
	}

	if info == nil {
		// info is nil - file not exist
		logger.Infof("File not found. Marking to clean: \"%s\"", path)
		return true
	}

	// run through path filter, if returns false then the file should be cleaned
	filter := j.options.PathFilter

	// don't log anything - assume filter will have logged the reason
	return !filter.Accept(ctx, path, info)
}
// shouldCleanFolder reports whether the folder should be removed from the
// database: either it no longer exists on disk, or the configured
// PathFilter rejects its path.
func (j *cleanJob) shouldCleanFolder(ctx context.Context, f *Folder) bool {
	folderPath := f.Path

	info, err := f.Info(j.FS)

	switch {
	case err != nil && !errors.Is(err, fs.ErrNotExist):
		// unexpected stat failure - err on the side of keeping the entry
		logger.Errorf("error getting folder info for %q, not cleaning: %v", folderPath, err)
		return false
	case info == nil:
		// no info means the folder is gone from the filesystem
		logger.Infof("Folder not found. Marking to clean: \"%s\"", folderPath)
		return true
	default:
		// excluded paths are cleaned; the filter logs its own reasoning
		return !j.options.PathFilter.Accept(ctx, folderPath, info)
	}
}
// deleteFile fires the clean handlers and destroys the file record inside
// a transaction. fn is the file's path, used for logging only. Errors are
// logged rather than returned so the clean continues with other entries.
func (j *cleanJob) deleteFile(ctx context.Context, fileID ID, fn string) {
	// delete associated objects
	fileDeleter := NewDeleter()

	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		// tie the deleter's commit/rollback to the transaction outcome
		fileDeleter.RegisterHooks(ctx, j.Repository)

		if err := j.fireHandlers(ctx, fileDeleter, fileID); err != nil {
			return err
		}

		return j.Repository.Destroy(ctx, fileID)
	}); err != nil {
		logger.Errorf("Error deleting file %q from database: %s", fn, err.Error())
		return
	}
}
// deleteFolder fires the folder clean handlers and destroys the folder
// record inside a transaction. fn is the folder's path, used for logging
// only. Errors are logged rather than returned so the clean continues.
func (j *cleanJob) deleteFolder(ctx context.Context, folderID FolderID, fn string) {
	// delete associated objects
	fileDeleter := NewDeleter()

	if err := txn.WithTxn(ctx, j.Repository, func(ctx context.Context) error {
		// tie the deleter's commit/rollback to the transaction outcome
		fileDeleter.RegisterHooks(ctx, j.Repository)

		if err := j.fireFolderHandlers(ctx, fileDeleter, folderID); err != nil {
			return err
		}

		return j.Repository.FolderStore.Destroy(ctx, folderID)
	}); err != nil {
		logger.Errorf("Error deleting folder %q from database: %s", fn, err.Error())
		return
	}
}
// fireHandlers invokes each registered CleanHandler for the file being
// deleted, stopping at the first error.
func (j *cleanJob) fireHandlers(ctx context.Context, fileDeleter *Deleter, fileID ID) error {
	for i := range j.Handlers {
		if err := j.Handlers[i].HandleFile(ctx, fileDeleter, fileID); err != nil {
			return err
		}
	}

	return nil
}

// fireFolderHandlers invokes each registered CleanHandler for the folder
// being deleted, stopping at the first error.
func (j *cleanJob) fireFolderHandlers(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error {
	for i := range j.Handlers {
		if err := j.Handlers[i].HandleFolder(ctx, fileDeleter, folderID); err != nil {
			return err
		}
	}

	return nil
}

View File

@ -1,12 +1,14 @@
package file
import (
"context"
"errors"
"fmt"
"io/fs"
"os"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/txn"
)
const deleteFileSuffix = ".delete"
@ -66,6 +68,19 @@ func NewDeleter() *Deleter {
}
}
// RegisterHooks registers post-commit and post-rollback hooks.
// The deleter's Commit runs only after the transaction commits, and its
// Rollback runs if the transaction rolls back, so on-disk deletions track
// the database outcome.
func (d *Deleter) RegisterHooks(ctx context.Context, mgr txn.Manager) {
	mgr.AddPostCommitHook(ctx, func(ctx context.Context) error {
		d.Commit()
		return nil
	})

	mgr.AddPostRollbackHook(ctx, func(ctx context.Context) error {
		d.Rollback()
		return nil
	})
}
// Files designates files to be deleted. Each file marked will be renamed to add
// a `.delete` suffix. An error is returned if a file could not be renamed.
// Note that if an error is returned, then some files may be left renamed.
@ -159,3 +174,17 @@ func (d *Deleter) renameForDelete(path string) error {
func (d *Deleter) renameForRestore(path string) error {
return d.RenamerRemover.Rename(path+deleteFileSuffix, path)
}
// Destroy removes the file record using destroyer and, when deleteFile is
// true, additionally marks the file's on-disk path for deletion via
// fileDeleter (finalised on commit).
func Destroy(ctx context.Context, destroyer Destroyer, f File, fileDeleter *Deleter, deleteFile bool) error {
	if err := destroyer.Destroy(ctx, f.Base().ID); err != nil {
		return err
	}

	if deleteFile {
		if err := fileDeleter.Files([]string{f.Base().Path}); err != nil {
			return err
		}
	}

	return nil
}

View File

@ -1,31 +1,205 @@
package file
import (
"context"
"io"
"io/fs"
"os"
"net/http"
"strconv"
"time"
"github.com/stashapp/stash/pkg/logger"
)
type fsFile struct {
path string
info fs.FileInfo
// ID represents an ID of a file.
type ID int32

// String renders the ID as its base-10 decimal representation.
func (i ID) String() string {
	return strconv.FormatInt(int64(i), 10)
}
func (f *fsFile) Open() (io.ReadCloser, error) {
return os.Open(f.path)
// DirEntry represents a file or directory in the file system.
type DirEntry struct {
ZipFileID *ID `json:"zip_file_id"`
// transient - not persisted
// only guaranteed to have id, path and basename set
ZipFile File
ModTime time.Time `json:"mod_time"`
}
func (f *fsFile) Path() string {
return f.path
func (e *DirEntry) info(fs FS, path string) (fs.FileInfo, error) {
if e.ZipFile != nil {
zipPath := e.ZipFile.Base().Path
zfs, err := fs.OpenZip(zipPath)
if err != nil {
return nil, err
}
defer zfs.Close()
fs = zfs
}
// else assume os file
ret, err := fs.Lstat(path)
return ret, err
}
func (f *fsFile) FileInfo() fs.FileInfo {
return f.info
// File represents a file in the file system.
type File interface {
Base() *BaseFile
SetFingerprints(fp []Fingerprint)
Open(fs FS) (io.ReadCloser, error)
}
func FSFile(path string, info fs.FileInfo) SourceFile {
return &fsFile{
path: path,
info: info,
// BaseFile represents a file in the file system.
type BaseFile struct {
ID ID `json:"id"`
DirEntry
// resolved from parent folder and basename only - not stored in DB
Path string `json:"path"`
Basename string `json:"basename"`
ParentFolderID FolderID `json:"parent_folder_id"`
Fingerprints Fingerprints `json:"fingerprints"`
Size int64 `json:"size"`
CreatedAt time.Time `json:"created_at"`
UpdatedAt time.Time `json:"updated_at"`
}
// SetFingerprints sets the fingerprints of the file.
// If a fingerprint of the same type already exists, it is overwritten.
func (f *BaseFile) SetFingerprints(fp []Fingerprint) {
	for _, v := range fp {
		f.SetFingerprint(v)
	}
}

// SetFingerprint sets the fingerprint of the file.
// If a fingerprint of the same type already exists, it is overwritten.
func (f *BaseFile) SetFingerprint(fp Fingerprint) {
	// replace in place if a fingerprint of this type is already present
	for i, existing := range f.Fingerprints {
		if existing.Type == fp.Type {
			f.Fingerprints[i] = fp
			return
		}
	}

	f.Fingerprints = append(f.Fingerprints, fp)
}
// Base is used to fulfil the File interface.
func (f *BaseFile) Base() *BaseFile {
return f
}
func (f *BaseFile) Open(fs FS) (io.ReadCloser, error) {
if f.ZipFile != nil {
zipPath := f.ZipFile.Base().Path
zfs, err := fs.OpenZip(zipPath)
if err != nil {
return nil, err
}
return zfs.OpenOnly(f.Path)
}
return fs.Open(f.Path)
}
func (f *BaseFile) Info(fs FS) (fs.FileInfo, error) {
return f.info(fs, f.Path)
}
// Serve writes the file's content to the HTTP response. If the opened
// reader supports seeking, http.ServeContent is used so range requests and
// modification-time caching work; otherwise the content is copied directly.
func (f *BaseFile) Serve(fs FS, w http.ResponseWriter, r *http.Request) {
	// NOTE(review): max-age is specified in seconds; 604800000 is ~19 years,
	// while the comment says 1 week (604800) - confirm the intended value.
	w.Header().Add("Cache-Control", "max-age=604800000") // 1 Week

	reader, err := f.Open(fs)
	if err != nil {
		// assume not found
		http.Error(w, http.StatusText(http.StatusNotFound), http.StatusNotFound)
		return
	}

	defer reader.Close()

	rsc, ok := reader.(io.ReadSeeker)
	if !ok {
		// fallback to direct copy
		data, err := io.ReadAll(reader)
		if err != nil {
			http.Error(w, err.Error(), http.StatusInternalServerError)
			return
		}

		if k, err := w.Write(data); err != nil {
			logger.Warnf("failure while serving image (wrote %v bytes out of %v): %v", k, len(data), err)
		}

		return
	}

	http.ServeContent(w, r, f.Basename, f.ModTime, rsc)
}
type Finder interface {
Find(ctx context.Context, id ...ID) ([]File, error)
}
// Getter provides methods to find Files.
type Getter interface {
FindByPath(ctx context.Context, path string) (File, error)
FindByFingerprint(ctx context.Context, fp Fingerprint) ([]File, error)
FindByZipFileID(ctx context.Context, zipFileID ID) ([]File, error)
FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]File, error)
}
type Counter interface {
CountAllInPaths(ctx context.Context, p []string) (int, error)
}
// Creator provides methods to create Files.
type Creator interface {
Create(ctx context.Context, f File) error
}
// Updater provides methods to update Files.
type Updater interface {
Update(ctx context.Context, f File) error
}
type Destroyer interface {
Destroy(ctx context.Context, id ID) error
}
// Store provides methods to find, create and update Files.
type Store interface {
Getter
Counter
Creator
Updater
Destroyer
}
// Decorator wraps the Decorate method to add additional functionality while scanning files.
type Decorator interface {
Decorate(ctx context.Context, fs FS, f File) (File, error)
}
// FilteredDecorator applies its Decorator only to files accepted by its Filter.
type FilteredDecorator struct {
	Decorator
	Filter
}

// Decorate runs the decorator if the filter accepts the file.
// Rejected files are returned unchanged.
func (d *FilteredDecorator) Decorate(ctx context.Context, fs FS, f File) (File, error) {
	if d.Accept(f) {
		return d.Decorator.Decorate(ctx, fs, f)
	}
	return f, nil
}

43
pkg/file/fingerprint.go Normal file
View File

@ -0,0 +1,43 @@
package file
// Fingerprint type identifiers used as Fingerprint.Type values.
var (
	FingerprintTypeOshash = "oshash"
	FingerprintTypeMD5    = "md5"
	FingerprintTypePhash  = "phash"
)
// Fingerprint represents a fingerprint of a file.
type Fingerprint struct {
	Type        string
	Fingerprint interface{}
}

// Fingerprints is a list of Fingerprint values, holding at most one
// fingerprint per type when built via AppendUnique.
type Fingerprints []Fingerprint

// Get returns the fingerprint value of the given type, or nil when the
// list holds no fingerprint of that type.
func (f Fingerprints) Get(type_ string) interface{} {
	for i := range f {
		if f[i].Type == type_ {
			return f[i].Fingerprint
		}
	}

	return nil
}

// AppendUnique appends a fingerprint to the list if a Fingerprint of the same type does not already exist in the list. If one does, then it is updated with o's Fingerprint value.
func (f Fingerprints) AppendUnique(o Fingerprint) Fingerprints {
	for i := range f {
		if f[i].Type == o.Type {
			// same type present: overwrite in place and keep the list as-is
			f[i] = o
			return f
		}
	}

	return append(f, o)
}
// FingerprintCalculator calculates a fingerprint for the provided file.
type FingerprintCalculator interface {
	// CalculateFingerprints computes the fingerprints for f, reading its
	// content via o.
	CalculateFingerprints(f *BaseFile, o Opener) ([]Fingerprint, error)
}

66
pkg/file/folder.go Normal file
View File

@ -0,0 +1,66 @@
package file
import (
"context"
"io/fs"
"strconv"
"time"
)
// FolderID represents an ID of a folder.
type FolderID int32

// String converts the ID to a string.
func (i FolderID) String() string {
	return strconv.FormatInt(int64(i), 10)
}
// Folder represents a folder in the file system.
type Folder struct {
	ID FolderID `json:"id"`
	DirEntry
	// Path is the full path of the folder.
	Path string `json:"path"`
	// ParentFolderID is nil for top-level folders.
	ParentFolderID *FolderID `json:"parent_folder_id"`
	CreatedAt      time.Time `json:"created_at"`
	UpdatedAt      time.Time `json:"updated_at"`
}

// Info returns the file system information for the folder, resolving
// through the containing zip file if one is set on the DirEntry.
func (f *Folder) Info(fs FS) (fs.FileInfo, error) {
	return f.info(fs, f.Path)
}
// FolderGetter provides methods to find Folders.
type FolderGetter interface {
	FindByPath(ctx context.Context, path string) (*Folder, error)
	FindByZipFileID(ctx context.Context, zipFileID ID) ([]*Folder, error)
	FindAllInPaths(ctx context.Context, p []string, limit, offset int) ([]*Folder, error)
	FindByParentFolderID(ctx context.Context, parentFolderID FolderID) ([]*Folder, error)
}

// FolderCounter provides methods to count Folders.
type FolderCounter interface {
	CountAllInPaths(ctx context.Context, p []string) (int, error)
}

// FolderCreator provides methods to create Folders.
type FolderCreator interface {
	Create(ctx context.Context, f *Folder) error
}

// FolderUpdater provides methods to update Folders.
type FolderUpdater interface {
	Update(ctx context.Context, f *Folder) error
}

// FolderDestroyer provides methods to destroy Folders.
type FolderDestroyer interface {
	Destroy(ctx context.Context, id FolderID) error
}

// FolderStore provides methods to find, create and update Folders.
type FolderStore interface {
	FolderGetter
	FolderCounter
	FolderCreator
	FolderUpdater
	FolderDestroyer
}

48
pkg/file/fs.go Normal file
View File

@ -0,0 +1,48 @@
package file
import (
"io"
"io/fs"
"os"
)
// Opener provides an interface to open a file.
type Opener interface {
	Open() (io.ReadCloser, error)
}

// fsOpener opens a named file from an FS, deferring the actual open until
// Open is called.
type fsOpener struct {
	fs   FS
	name string
}

// Open opens the named file from the wrapped file system.
func (o *fsOpener) Open() (io.ReadCloser, error) {
	return o.fs.Open(o.name)
}
// FS represents a file system.
type FS interface {
	// Lstat returns file info without following symlinks.
	Lstat(name string) (fs.FileInfo, error)
	Open(name string) (fs.ReadDirFile, error)
	// OpenZip opens the named file as a zip file system.
	OpenZip(name string) (*ZipFS, error)
}

// OsFS is a file system backed by the OS.
type OsFS struct{}

func (f *OsFS) Lstat(name string) (fs.FileInfo, error) {
	return os.Lstat(name)
}

func (f *OsFS) Open(name string) (fs.ReadDirFile, error) {
	return os.Open(name)
}

// OpenZip stats the named file and opens it as a zip file system.
func (f *OsFS) OpenZip(name string) (*ZipFS, error) {
	info, err := f.Lstat(name)
	if err != nil {
		return nil, err
	}

	return newZipFS(f, name, info)
}

53
pkg/file/handler.go Normal file
View File

@ -0,0 +1,53 @@
package file
import (
"context"
"io/fs"
)
// PathFilter provides a filter function for paths.
type PathFilter interface {
	Accept(ctx context.Context, path string, info fs.FileInfo) bool
}

// PathFilterFunc adapts a plain path predicate to an Accept method.
// NOTE(review): this Accept signature (path only) does not satisfy the
// PathFilter interface above (ctx, path, info) - confirm whether this
// adapter is still used or should be updated to match PathFilter.
type PathFilterFunc func(path string) bool

func (pff PathFilterFunc) Accept(path string) bool {
	return pff(path)
}
// Filter provides a filter function for Files.
type Filter interface {
	Accept(f File) bool
}

// FilterFunc adapts a plain function to the Filter interface.
type FilterFunc func(f File) bool

// Accept calls the wrapped function.
func (ff FilterFunc) Accept(f File) bool {
	return ff(f)
}
// Handler provides a handler for Files.
type Handler interface {
	Handle(ctx context.Context, f File) error
}

// FilteredHandler is a Handler that runs only if the filter accepts the file.
type FilteredHandler struct {
	Handler
	Filter
}

// Handle runs the handler if the filter accepts the file.
// Rejected files are a no-op.
func (h *FilteredHandler) Handle(ctx context.Context, f File) error {
	if h.Accept(f) {
		return h.Handler.Handle(ctx, f)
	}
	return nil
}
// CleanHandler provides a handler for cleaning Files and Folders.
// Implementations remove or update objects associated with the given file
// or folder before its record is destroyed.
type CleanHandler interface {
	HandleFile(ctx context.Context, fileDeleter *Deleter, fileID ID) error
	HandleFolder(ctx context.Context, fileDeleter *Deleter, folderID FolderID) error
}

View File

@ -1,18 +0,0 @@
package file
import (
"io"
"github.com/stashapp/stash/pkg/hash/md5"
"github.com/stashapp/stash/pkg/hash/oshash"
)
type FSHasher struct{}
func (h *FSHasher) OSHash(src io.ReadSeeker, size int64) (string, error) {
return oshash.FromReader(src, size)
}
func (h *FSHasher) MD5(src io.Reader) (string, error) {
return md5.FromReader(src)
}

39
pkg/file/image/scan.go Normal file
View File

@ -0,0 +1,39 @@
package image
import (
"context"
"fmt"
"image"
_ "image/gif"
_ "image/jpeg"
_ "image/png"
"github.com/stashapp/stash/pkg/file"
_ "golang.org/x/image/webp"
)
// Decorator adds image specific fields to a File.
type Decorator struct {
}

// Decorate opens the image file, decodes its header, and returns the file
// wrapped in a file.ImageFile carrying the detected format and dimensions.
// On any error the original file is returned alongside the error.
func (d *Decorator) Decorate(ctx context.Context, fs file.FS, f file.File) (file.File, error) {
	base := f.Base()
	r, err := fs.Open(base.Path)
	if err != nil {
		return f, fmt.Errorf("reading image file %q: %w", base.Path, err)
	}
	defer r.Close()

	// DecodeConfig reads only the image header; supported formats are those
	// registered via this package's blank imports (gif/jpeg/png/webp).
	c, format, err := image.DecodeConfig(r)
	if err != nil {
		return f, fmt.Errorf("decoding image file %q: %w", base.Path, err)
	}

	return &file.ImageFile{
		BaseFile: base,
		Format:   format,
		Width:    c.Width,
		Height:   c.Height,
	}, nil
}

9
pkg/file/image_file.go Normal file
View File

@ -0,0 +1,9 @@
package file
// ImageFile is an extension of BaseFile to represent image files.
type ImageFile struct {
	*BaseFile
	// Format is the decoded image format name (e.g. "png", "jpeg").
	Format string `json:"format"`
	// Width and Height are the image dimensions in pixels.
	Width  int `json:"width"`
	Height int `json:"height"`
}

View File

@ -1,190 +1,845 @@
package file
import (
"context"
"errors"
"fmt"
"io"
"io/fs"
"os"
"strconv"
"path/filepath"
"strings"
"sync"
"time"
"github.com/remeh/sizedwaitgroup"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
)
type SourceFile interface {
Open() (io.ReadCloser, error)
Path() string
FileInfo() fs.FileInfo
const scanQueueSize = 200000
// Repository provides access to storage methods for files and folders.
type Repository struct {
txn.Manager
txn.DatabaseProvider
Store
FolderStore FolderStore
}
type FileBased interface {
File() models.File
// Scanner scans files into the database.
//
// The scan process works using two goroutines. The first walks through the provided paths
// in the filesystem. It runs each directory entry through the provided ScanFilters. If none
// of the filter Accept methods return true, then the file/directory is ignored.
// Any folders found are handled immediately. Files inside zip files are also handled immediately.
// All other files encountered are sent to the second goroutine queue.
//
// Folders are handled by checking if the folder exists in the database, by its full path.
// If a folder entry already exists, then its mod time is updated (if applicable).
// If the folder does not exist in the database, then a new folder entry its created.
//
// Files are handled by first querying for the file by its path. If the file entry exists in the
// database, then the mod time is compared to the value in the database. If the mod time is different
// then file is marked as updated - it recalculates any fingerprints and fires decorators, then
// the file entry is updated and any applicable handlers are fired.
//
// If the file entry does not exist in the database, then fingerprints are calculated for the file.
// It then determines if the file is a rename of an existing file by querying for file entries with
// the same fingerprint. If any are found, it checks each to see if any are missing in the file
// system. If one is, then the file is treated as renamed and its path is updated. If none are missing,
// or many are, then the file is treated as a new file.
//
// If the file is not a renamed file, then the decorators are fired and the file is created, then
// the applicable handlers are fired.
type Scanner struct {
FS FS
Repository Repository
FingerprintCalculator FingerprintCalculator
// FileDecorators are applied to files as they are scanned.
FileDecorators []Decorator
}
type Hasher interface {
OSHash(src io.ReadSeeker, size int64) (string, error)
MD5(src io.Reader) (string, error)
// ProgressReporter is used to report progress of the scan.
type ProgressReporter interface {
AddTotal(total int)
Increment()
Definite()
ExecuteTask(description string, fn func())
}
type Scanned struct {
Old *models.File
New *models.File
type scanJob struct {
*Scanner
// handlers are called after a file has been scanned.
handlers []Handler
ProgressReports ProgressReporter
options ScanOptions
startTime time.Time
fileQueue chan scanFile
dbQueue chan func(ctx context.Context) error
retryList []scanFile
retrying bool
folderPathToID sync.Map
zipPathToID sync.Map
count int
txnMutex sync.Mutex
}
// FileUpdated returns true if both old and new files are present and not equal.
func (s Scanned) FileUpdated() bool {
if s.Old == nil || s.New == nil {
return false
// ScanOptions provides options for scanning files.
type ScanOptions struct {
Paths []string
// ZipFileExtensions is a list of file extensions that are considered zip files.
// Extension does not include the . character.
ZipFileExtensions []string
// ScanFilters are used to determine if a file should be scanned.
ScanFilters []PathFilter
ParallelTasks int
}
// Scan starts the scanning process.
func (s *Scanner) Scan(ctx context.Context, handlers []Handler, options ScanOptions, progressReporter ProgressReporter) {
job := &scanJob{
Scanner: s,
handlers: handlers,
ProgressReports: progressReporter,
options: options,
}
return !s.Old.Equal(*s.New)
job.execute(ctx)
}
// ContentsChanged returns true if both old and new files are present and the file content is different.
func (s Scanned) ContentsChanged() bool {
if s.Old == nil || s.New == nil {
return false
type scanFile struct {
*BaseFile
fs FS
info fs.FileInfo
zipFile *scanFile
}
func (s *scanJob) withTxn(ctx context.Context, fn func(ctx context.Context) error) error {
// get exclusive access to the database
s.txnMutex.Lock()
defer s.txnMutex.Unlock()
return txn.WithTxn(ctx, s.Repository, fn)
}
func (s *scanJob) withDB(ctx context.Context, fn func(ctx context.Context) error) error {
return txn.WithDatabase(ctx, s.Repository, fn)
}
func (s *scanJob) execute(ctx context.Context) {
paths := s.options.Paths
logger.Infof("scanning %d paths", len(paths))
s.startTime = time.Now()
s.fileQueue = make(chan scanFile, scanQueueSize)
s.dbQueue = make(chan func(ctx context.Context) error, scanQueueSize)
go func() {
if err := s.queueFiles(ctx, paths); err != nil {
if errors.Is(err, context.Canceled) {
return
}
logger.Errorf("error queuing files for scan: %v", err)
return
}
logger.Infof("Finished adding files to queue. %d files queued", s.count)
}()
done := make(chan struct{}, 1)
go func() {
if err := s.processDBOperations(ctx); err != nil {
if errors.Is(err, context.Canceled) {
return
}
logger.Errorf("error processing database operations for scan: %v", err)
}
close(done)
}()
if err := s.processQueue(ctx); err != nil {
if errors.Is(err, context.Canceled) {
return
}
logger.Errorf("error scanning files: %v", err)
return
}
if s.Old.Checksum != s.New.Checksum {
return true
// wait for database operations to complete
<-done
}
func (s *scanJob) queueFiles(ctx context.Context, paths []string) error {
var err error
s.ProgressReports.ExecuteTask("Walking directory tree", func() {
for _, p := range paths {
err = symWalk(s.FS, p, s.queueFileFunc(ctx, s.FS, nil))
if err != nil {
return
}
}
})
close(s.fileQueue)
if s.ProgressReports != nil {
s.ProgressReports.AddTotal(s.count)
s.ProgressReports.Definite()
}
if s.Old.OSHash != s.New.OSHash {
return true
return err
}
func (s *scanJob) queueFileFunc(ctx context.Context, f FS, zipFile *scanFile) fs.WalkDirFunc {
return func(path string, d fs.DirEntry, err error) error {
if err != nil {
return err
}
if err = ctx.Err(); err != nil {
return err
}
info, err := d.Info()
if err != nil {
return fmt.Errorf("reading info for %q: %w", path, err)
}
if !s.acceptEntry(ctx, path, info) {
if info.IsDir() {
return fs.SkipDir
}
return nil
}
ff := scanFile{
BaseFile: &BaseFile{
DirEntry: DirEntry{
ModTime: modTime(info),
},
Path: path,
Basename: filepath.Base(path),
Size: info.Size(),
},
fs: f,
info: info,
// there is no guarantee that the zip file has been scanned
// so we can't just plug in the id.
zipFile: zipFile,
}
if info.IsDir() {
// handle folders immediately
if err := s.handleFolder(ctx, ff); err != nil {
logger.Errorf("error processing %q: %v", path, err)
// skip the directory since we won't be able to process the files anyway
return fs.SkipDir
}
return nil
}
// if zip file is present, we handle immediately
if zipFile != nil {
s.ProgressReports.ExecuteTask("Scanning "+path, func() {
if err := s.handleFile(ctx, ff); err != nil {
logger.Errorf("error processing %q: %v", path, err)
// don't return an error, just skip the file
}
})
return nil
}
s.fileQueue <- ff
s.count++
return nil
}
}
func (s *scanJob) acceptEntry(ctx context.Context, path string, info fs.FileInfo) bool {
// always accept if there's no filters
accept := len(s.options.ScanFilters) == 0
for _, filter := range s.options.ScanFilters {
// accept if any filter accepts the file
if filter.Accept(ctx, path, info) {
accept = true
break
}
}
return accept
}
func (s *scanJob) scanZipFile(ctx context.Context, f scanFile) error {
zipFS, err := f.fs.OpenZip(f.Path)
if err != nil {
if errors.Is(err, errNotReaderAt) {
// can't walk the zip file
// just return
return nil
}
return err
}
defer zipFS.Close()
return symWalk(zipFS, f.Path, s.queueFileFunc(ctx, zipFS, &f))
}
func (s *scanJob) processQueue(ctx context.Context) error {
parallelTasks := s.options.ParallelTasks
if parallelTasks < 1 {
parallelTasks = 1
}
wg := sizedwaitgroup.New(parallelTasks)
for f := range s.fileQueue {
if err := ctx.Err(); err != nil {
return err
}
wg.Add()
ff := f
go func() {
defer wg.Done()
s.processQueueItem(ctx, ff)
}()
}
wg.Wait()
s.retrying = true
for _, f := range s.retryList {
if err := ctx.Err(); err != nil {
return err
}
wg.Add()
ff := f
go func() {
defer wg.Done()
s.processQueueItem(ctx, ff)
}()
}
wg.Wait()
close(s.dbQueue)
return nil
}
func (s *scanJob) incrementProgress() {
if s.ProgressReports != nil {
s.ProgressReports.Increment()
}
}
func (s *scanJob) processDBOperations(ctx context.Context) error {
for fn := range s.dbQueue {
if err := ctx.Err(); err != nil {
return err
}
_ = s.withTxn(ctx, fn)
}
return nil
}
func (s *scanJob) processQueueItem(ctx context.Context, f scanFile) {
s.ProgressReports.ExecuteTask("Scanning "+f.Path, func() {
var err error
if f.info.IsDir() {
err = s.handleFolder(ctx, f)
} else {
err = s.handleFile(ctx, f)
}
if err != nil {
logger.Errorf("error processing %q: %v", f.Path, err)
}
})
}
func (s *scanJob) getFolderID(ctx context.Context, path string) (*FolderID, error) {
// check the folder cache first
if f, ok := s.folderPathToID.Load(path); ok {
v := f.(FolderID)
return &v, nil
}
ret, err := s.Repository.FolderStore.FindByPath(ctx, path)
if err != nil {
return nil, err
}
if ret == nil {
return nil, nil
}
s.folderPathToID.Store(path, ret.ID)
return &ret.ID, nil
}
func (s *scanJob) getZipFileID(ctx context.Context, zipFile *scanFile) (*ID, error) {
if zipFile == nil {
return nil, nil
}
if zipFile.ID != 0 {
return &zipFile.ID, nil
}
path := zipFile.Path
// check the folder cache first
if f, ok := s.zipPathToID.Load(path); ok {
v := f.(ID)
return &v, nil
}
ret, err := s.Repository.FindByPath(ctx, path)
if err != nil {
return nil, fmt.Errorf("getting zip file ID for %q: %w", path, err)
}
if ret == nil {
return nil, fmt.Errorf("zip file %q doesn't exist in database", zipFile.Path)
}
s.zipPathToID.Store(path, ret.Base().ID)
return &ret.Base().ID, nil
}
func (s *scanJob) handleFolder(ctx context.Context, file scanFile) error {
path := file.Path
return s.withTxn(ctx, func(ctx context.Context) error {
defer s.incrementProgress()
// determine if folder already exists in data store (by path)
f, err := s.Repository.FolderStore.FindByPath(ctx, path)
if err != nil {
return fmt.Errorf("checking for existing folder %q: %w", path, err)
}
// if folder not exists, create it
if f == nil {
f, err = s.onNewFolder(ctx, file)
} else {
f, err = s.onExistingFolder(ctx, file, f)
}
if err != nil {
return err
}
if f != nil {
s.folderPathToID.Store(f.Path, f.ID)
}
return nil
})
}
// onNewFolder creates a data store entry for a folder seen for the first
// time, linking it to its containing zip file and parent folder when those
// are known. Returns the newly created folder.
func (s *scanJob) onNewFolder(ctx context.Context, file scanFile) (*Folder, error) {
	now := time.Now()

	newFolder := &Folder{
		DirEntry: DirEntry{
			ModTime: file.ModTime,
		},
		Path:      file.Path,
		CreatedAt: now,
		UpdatedAt: now,
	}

	zipFileID, err := s.getZipFileID(ctx, file.zipFile)
	if err != nil {
		return nil, err
	}
	if zipFileID != nil {
		newFolder.ZipFileID = zipFileID
	}

	if dir := filepath.Dir(file.Path); dir != "." {
		parentID, err := s.getFolderID(ctx, dir)
		if err != nil {
			return nil, fmt.Errorf("getting parent folder %q: %w", dir, err)
		}

		// if parent folder doesn't exist, assume it's a top-level folder
		// this may not be true if we're using multiple goroutines
		if parentID != nil {
			newFolder.ParentFolderID = parentID
		}
	}

	logger.Infof("%s doesn't exist. Creating new folder entry...", file.Path)
	if err := s.Repository.FolderStore.Create(ctx, newFolder); err != nil {
		return nil, fmt.Errorf("creating folder %q: %w", file.Path, err)
	}

	return newFolder, nil
}
// onExistingFolder refreshes the stored modification time for a folder
// already present in the data store, persisting only when it has changed.
func (s *scanJob) onExistingFolder(ctx context.Context, f scanFile, existing *Folder) (*Folder, error) {
	// nothing to persist unless the on-disk mod time differs from the stored one
	if f.ModTime.Equal(existing.ModTime) {
		return existing, nil
	}

	existing.ModTime = f.ModTime
	if err := s.Repository.FolderStore.Update(ctx, existing); err != nil {
		return nil, fmt.Errorf("updating folder %q: %w", f.Path, err)
	}

	return existing, nil
}
func modTime(info fs.FileInfo) time.Time {
// truncate to seconds, since we don't store beyond that in the database
return info.ModTime().Truncate(time.Second)
}
// handleFile scans a single regular file: it is created in the data store
// if new, updated if changed, and — when it is a zip file — its contents
// are then scanned as well. A zip scan failure is logged rather than
// propagated so the outer scan keeps going.
func (s *scanJob) handleFile(ctx context.Context, f scanFile) error {
	var ff File

	// don't use a transaction to check if new or existing
	if err := s.withDB(ctx, func(ctx context.Context) error {
		existing, err := s.Repository.FindByPath(ctx, f.Path)
		if err != nil {
			return fmt.Errorf("checking for existing file %q: %w", f.Path, err)
		}

		if existing == nil {
			ff, err = s.onNewFile(ctx, f)
		} else {
			ff, err = s.onExistingFile(ctx, f, existing)
		}
		return err
	}); err != nil {
		return err
	}

	// descend into zip archives when the file was created/updated
	if ff != nil && s.isZipFile(f.info.Name()) {
		f.BaseFile = ff.Base()
		if err := s.scanZipFile(ctx, f); err != nil {
			logger.Errorf("Error scanning zip file %q: %v", f.Path, err)
		}
	}

	return nil
}
// isZipFile reports whether path has one of the configured zip file
// extensions (compared case-insensitively; the configured extensions are
// stored without a leading dot).
func (s *scanJob) isZipFile(path string) bool {
	pathExt := filepath.Ext(path)
	for _, zipExt := range s.options.ZipFileExtensions {
		if strings.EqualFold(pathExt, "."+zipExt) {
			return true
		}
	}

	return false
}
type Scanner struct {
Hasher Hasher
func (s *scanJob) onNewFile(ctx context.Context, f scanFile) (File, error) {
now := time.Now()
CalculateMD5 bool
CalculateOSHash bool
}
baseFile := f.BaseFile
path := baseFile.Path
func (o Scanner) ScanExisting(existing FileBased, file SourceFile) (h *Scanned, err error) {
info := file.FileInfo()
h = &Scanned{}
baseFile.CreatedAt = now
baseFile.UpdatedAt = now
existingFile := existing.File()
h.Old = &existingFile
// find the parent folder
parentFolderID, err := s.getFolderID(ctx, filepath.Dir(path))
if err != nil {
return nil, fmt.Errorf("getting parent folder for %q: %w", path, err)
}
updatedFile := existingFile
h.New = &updatedFile
if parentFolderID == nil {
// if parent folder doesn't exist, assume it's not yet created
// add this file to the queue to be created later
if s.retrying {
// if we're retrying and the folder still doesn't exist, then it's a problem
s.incrementProgress()
return nil, fmt.Errorf("parent folder for %q doesn't exist", path)
}
// update existing data if needed
// truncate to seconds, since we don't store beyond that in the database
updatedFile.FileModTime = info.ModTime().Truncate(time.Second)
updatedFile.Size = strconv.FormatInt(info.Size(), 10)
s.retryList = append(s.retryList, f)
return nil, nil
}
modTimeChanged := !existingFile.FileModTime.Equal(updatedFile.FileModTime)
baseFile.ParentFolderID = *parentFolderID
// regenerate hash(es) if missing or file mod time changed
if _, err = o.generateHashes(&updatedFile, file, modTimeChanged); err != nil {
zipFileID, err := s.getZipFileID(ctx, f.zipFile)
if err != nil {
s.incrementProgress()
return nil, err
}
// notify of changes as needed
// object exists, no further processing required
return
}
func (o Scanner) ScanNew(file SourceFile) (*models.File, error) {
info := file.FileInfo()
sizeStr := strconv.FormatInt(info.Size(), 10)
modTime := info.ModTime()
f := models.File{
Path: file.Path(),
Size: sizeStr,
FileModTime: modTime,
if zipFileID != nil {
baseFile.ZipFileID = zipFileID
}
if _, err := o.generateHashes(&f, file, true); err != nil {
fp, err := s.calculateFingerprints(f.fs, baseFile, path)
if err != nil {
s.incrementProgress()
return nil, err
}
return &f, nil
baseFile.SetFingerprints(fp)
// determine if the file is renamed from an existing file in the store
renamed, err := s.handleRename(ctx, baseFile, fp)
if err != nil {
s.incrementProgress()
return nil, err
}
if renamed != nil {
return renamed, nil
}
file, err := s.fireDecorators(ctx, f.fs, baseFile)
if err != nil {
s.incrementProgress()
return nil, err
}
// if not renamed, queue file for creation
if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error {
logger.Infof("%s doesn't exist. Creating new file entry...", path)
if err := s.Repository.Create(ctx, file); err != nil {
return fmt.Errorf("creating file %q: %w", path, err)
}
if err := s.fireHandlers(ctx, file); err != nil {
return err
}
return nil
}); err != nil {
return nil, err
}
return file, nil
}
// generateHashes regenerates and sets the hashes in the provided File.
// It will not recalculate unless specified.
func (o Scanner) generateHashes(f *models.File, file SourceFile, regenerate bool) (changed bool, err error) {
existing := *f
func (s *scanJob) queueDBOperation(ctx context.Context, path string, fn func(ctx context.Context) error) error {
// perform immediately if it is a zip file
if s.isZipFile(path) {
return s.withTxn(ctx, fn)
}
var src io.ReadCloser
if o.CalculateOSHash && (regenerate || f.OSHash == "") {
logger.Infof("Calculating oshash for %s ...", f.Path)
s.dbQueue <- fn
size := file.FileInfo().Size()
return nil
}
// #2196 for symlinks
// get the size of the actual file, not the symlink
if file.FileInfo().Mode()&os.ModeSymlink == os.ModeSymlink {
fi, err := os.Stat(f.Path)
if err != nil {
return false, err
}
logger.Debugf("File <%s> is symlink. Size changed from <%d> to <%d>", f.Path, size, fi.Size())
size = fi.Size()
}
src, err = file.Open()
func (s *scanJob) fireDecorators(ctx context.Context, fs FS, f File) (File, error) {
for _, h := range s.FileDecorators {
var err error
f, err = h.Decorate(ctx, fs, f)
if err != nil {
return false, err
}
defer src.Close()
seekSrc, valid := src.(io.ReadSeeker)
if !valid {
return false, fmt.Errorf("invalid source file type: %s", file.Path())
}
// regenerate hash
var oshash string
oshash, err = o.Hasher.OSHash(seekSrc, size)
if err != nil {
return false, fmt.Errorf("error generating oshash for %s: %w", file.Path(), err)
}
f.OSHash = oshash
// reset reader to start of file
_, err = seekSrc.Seek(0, io.SeekStart)
if err != nil {
return false, fmt.Errorf("error seeking to start of file in %s: %w", file.Path(), err)
return f, err
}
}
// always generate if MD5 is nil
// only regenerate MD5 if:
// - OSHash was not calculated, or
// - existing OSHash is different to generated one
// or if it was different to the previous version
if o.CalculateMD5 && (f.Checksum == "" || (regenerate && (!o.CalculateOSHash || existing.OSHash != f.OSHash))) {
logger.Infof("Calculating checksum for %s...", f.Path)
return f, nil
}
if src == nil {
src, err = file.Open()
if err != nil {
return false, err
}
defer src.Close()
func (s *scanJob) fireHandlers(ctx context.Context, f File) error {
for _, h := range s.handlers {
if err := h.Handle(ctx, f); err != nil {
return err
}
// regenerate checksum
var checksum string
checksum, err = o.Hasher.MD5(src)
if err != nil {
return
}
f.Checksum = checksum
}
changed = (o.CalculateOSHash && (f.OSHash != existing.OSHash)) || (o.CalculateMD5 && (f.Checksum != existing.Checksum))
return
return nil
}
// calculateFingerprints computes the fingerprints for the file at path
// using the configured FingerprintCalculator, opening the file via fs.
func (s *scanJob) calculateFingerprints(fs FS, f *BaseFile, path string) ([]Fingerprint, error) {
	logger.Infof("Calculating fingerprints for %s ...", path)

	opener := &fsOpener{
		fs:   fs,
		name: path,
	}

	fp, err := s.FingerprintCalculator.CalculateFingerprints(f, opener)
	if err != nil {
		return nil, fmt.Errorf("calculating fingerprint for file %q: %w", path, err)
	}

	return fp, nil
}
// appendFileUnique appends the files in toAdd to v, skipping any whose ID is
// already present, and returns the resulting slice.
func appendFileUnique(v []File, toAdd []File) []File {
	// track IDs already present so each candidate is a constant-time lookup
	// instead of a linear scan over v (the original was O(len(v)*len(toAdd)))
	seen := make(map[ID]struct{}, len(v)+len(toAdd))
	for _, f := range v {
		seen[f.Base().ID] = struct{}{}
	}

	for _, f := range toAdd {
		id := f.Base().ID
		if _, ok := seen[id]; ok {
			continue
		}

		seen[id] = struct{}{}
		v = append(v, f)
	}

	return v
}
// getFileFS returns the filesystem that contains f. Files not inside a zip
// live on the scan job's base filesystem; otherwise the containing zip's
// filesystem is resolved first (zips may be nested inside other zips) and
// this zip is opened within it.
func (s *scanJob) getFileFS(f *BaseFile) (FS, error) {
	if f.ZipFile == nil {
		return s.FS, nil
	}

	zipBase := f.ZipFile.Base()

	parentFS, err := s.getFileFS(zipBase)
	if err != nil {
		return nil, err
	}

	return parentFS.OpenZip(zipBase.Path)
}
// handleRename detects whether the (not yet persisted) file f is actually a
// rename/move of a file already in the store. It looks up stored files
// sharing any of f's fingerprints; if exactly one of them no longer exists
// on disk, that record is updated in place to f's new path and returned.
// Returns (nil, nil) when no rename is detected — including the ambiguous
// multiple-candidate case — leaving the caller to create f as a new file.
func (s *scanJob) handleRename(ctx context.Context, f *BaseFile, fp []Fingerprint) (File, error) {
	// collect all stored files matching any fingerprint, de-duplicated by ID
	var others []File
	for _, tfp := range fp {
		thisOthers, err := s.Repository.FindByFingerprint(ctx, tfp)
		if err != nil {
			return nil, fmt.Errorf("getting files by fingerprint %v: %w", tfp, err)
		}
		others = appendFileUnique(others, thisOthers)
	}
	// narrow candidates to those whose stored path is gone from disk
	var missing []File
	for _, other := range others {
		// if file does not exist, then update it to the new path
		// TODO - handle #1426 scenario
		fs, err := s.getFileFS(other.Base())
		if err != nil {
			return nil, fmt.Errorf("getting FS for %q: %w", other.Base().Path, err)
		}
		// NOTE(review): any Lstat error (e.g. permission denied) is treated
		// as "missing" — TODO confirm that is intended
		if _, err := fs.Lstat(other.Base().Path); err != nil {
			missing = append(missing, other)
		}
	}
	n := len(missing)
	switch {
	case n == 1:
		// assume does not exist, update existing file
		other := missing[0]
		otherBase := other.Base()
		logger.Infof("%s moved to %s. Updating path...", otherBase.Path, f.Path)
		// carry the stored identity (ID, creation time, fingerprints) over to
		// f, then overwrite the existing record in place via its base pointer
		f.ID = otherBase.ID
		f.CreatedAt = otherBase.CreatedAt
		f.Fingerprints = otherBase.Fingerprints
		*otherBase = *f
		if err := s.queueDBOperation(ctx, f.Path, func(ctx context.Context) error {
			if err := s.Repository.Update(ctx, other); err != nil {
				return fmt.Errorf("updating file for rename %q: %w", f.Path, err)
			}
			return nil
		}); err != nil {
			return nil, err
		}
		return other, nil
	case n > 1:
		// multiple candidates
		// TODO - mark all as missing and just create a new file
		return nil, nil
	}
	return nil, nil
}
// onExistingFile re-scans a file already present in the store. If the mod
// time is unchanged, nothing is done and (nil, nil) is returned; otherwise
// the size, mod time, and fingerprints are refreshed and an update is
// queued. The file is returned only when it was actually updated.
func (s *scanJob) onExistingFile(ctx context.Context, f scanFile, existing File) (File, error) {
	base := existing.Base()
	path := base.Path

	// an unchanged mod time is treated as "file contents unchanged"
	if f.ModTime.Equal(base.ModTime) {
		s.incrementProgress()
		return nil, nil
	}

	logger.Infof("%s has been updated: rescanning", path)

	base.ModTime = f.ModTime
	base.Size = f.Size
	base.UpdatedAt = time.Now()

	// recompute fingerprints against the new contents
	fp, err := s.calculateFingerprints(f.fs, base, path)
	if err != nil {
		s.incrementProgress()
		return nil, err
	}

	existing.SetFingerprints(fp)

	existing, err = s.fireDecorators(ctx, f.fs, existing)
	if err != nil {
		s.incrementProgress()
		return nil, err
	}

	// queue the update, firing post-update handlers within the same operation
	if err := s.queueDBOperation(ctx, path, func(ctx context.Context) error {
		if err := s.Repository.Update(ctx, existing); err != nil {
			return fmt.Errorf("updating file %q: %w", path, err)
		}

		return s.fireHandlers(ctx, existing)
	}); err != nil {
		return nil, err
	}

	return existing, nil
}

View File

@ -1,14 +1,18 @@
package scene
package video
import (
"context"
"fmt"
"os"
"path/filepath"
"strings"
"golang.org/x/text/language"
"github.com/asticode/go-astisub"
"github.com/stashapp/stash/pkg/file"
"github.com/stashapp/stash/pkg/logger"
"github.com/stashapp/stash/pkg/models"
"github.com/stashapp/stash/pkg/txn"
"golang.org/x/text/language"
)
// CaptionExts lists the caption file extensions recognized by the scanner.
var CaptionExts = []string{"vtt", "srt"} // in a case where vtt and srt files are both provided prioritize vtt file due to native support
@ -46,7 +50,7 @@ func IsValidLanguage(lang string) bool {
// IsLangInCaptions returns true if lang is present
// in the captions
func IsLangInCaptions(lang string, ext string, captions []*models.SceneCaption) bool {
func IsLangInCaptions(lang string, ext string, captions []*models.VideoCaption) bool {
for _, caption := range captions {
if lang == caption.LanguageCode && ext == caption.CaptionType {
return true
@ -55,41 +59,8 @@ func IsLangInCaptions(lang string, ext string, captions []*models.SceneCaption)
return false
}
// GenerateCaptionCandidates generates a list of filenames with exts as
// extensions that can be associated with the caption.
func GenerateCaptionCandidates(captionPath string, exts []string) []string {
	// strip the caption extension; what remains may still carry a language
	// suffix (e.g. scene_filename.en from scene_filename.en.srt)
	stem := strings.TrimSuffix(captionPath, filepath.Ext(captionPath))

	// drop a trailing language code when one is present and valid
	if langExt := filepath.Ext(stem); len(langExt) > 2 && IsValidLanguage(langExt[1:]) {
		stem = strings.TrimSuffix(stem, langExt)
	}

	var candidates []string
	for _, ext := range exts {
		candidates = append(candidates, stem+"."+ext)
	}

	return candidates
}
// GetCaptionsLangFromPath returns the language code from a given captions path
// If no valid language is present LangUknown is returned
func GetCaptionsLangFromPath(captionPath string) string {
langCode := LangUnknown
basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension
languageExt := filepath.Ext(basename)
if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) {
langCode = languageExt[1:]
}
return langCode
}
// CleanCaptions removes non existent/accessible language codes from captions
func CleanCaptions(scenePath string, captions []*models.SceneCaption) (cleanedCaptions []*models.SceneCaption, changed bool) {
func CleanCaptions(scenePath string, captions []*models.VideoCaption) (cleanedCaptions []*models.VideoCaption, changed bool) {
changed = false
for _, caption := range captions {
found := false
@ -104,3 +75,76 @@ func CleanCaptions(scenePath string, captions []*models.SceneCaption) (cleanedCa
}
return
}
// getCaptionPrefix returns the prefix used to search for video files for
// the provided caption path: the path with its extension — and any valid
// trailing language code — removed, followed by a dot.
func getCaptionPrefix(captionPath string) string {
	// drop the caption extension; the remainder may still end with a
	// language code (e.g. scene_filename.en from scene_filename.en.srt)
	prefix := strings.TrimSuffix(captionPath, filepath.Ext(captionPath))

	// remove a trailing language code when present and valid
	if langExt := filepath.Ext(prefix); len(langExt) > 2 && IsValidLanguage(langExt[1:]) {
		prefix = strings.TrimSuffix(prefix, langExt)
	}

	return prefix + "."
}
// GetCaptionsLangFromPath returns the language code from a given captions path
// If no valid language is present LangUknown is returned
func getCaptionsLangFromPath(captionPath string) string {
langCode := LangUnknown
basename := strings.TrimSuffix(captionPath, filepath.Ext(captionPath)) // caption filename without the extension
languageExt := filepath.Ext(basename)
if len(languageExt) > 2 && IsValidLanguage(languageExt[1:]) {
langCode = languageExt[1:]
}
return langCode
}
// CaptionUpdater provides methods to fetch and persist the captions
// associated with a given file.
type CaptionUpdater interface {
	// GetCaptions returns the captions currently associated with fileID.
	GetCaptions(ctx context.Context, fileID file.ID) ([]*models.VideoCaption, error)
	// UpdateCaptions replaces the captions associated with fileID.
	UpdateCaptions(ctx context.Context, fileID file.ID, captions []*models.VideoCaption) error
}
// AssociateCaptions associates the caption file at captionPath with the
// video file sharing the same basename, adding a caption entry for the
// caption's language unless one with the same language and type already
// exists. Errors are logged, not returned.
//
// Fix: the original declared an outer `err` that was never assigned, so
// failures from GetCaptions/UpdateCaptions were silently discarded; they are
// now propagated out of the transaction and logged.
func AssociateCaptions(ctx context.Context, captionPath string, txnMgr txn.Manager, fqb file.Getter, w CaptionUpdater) {
	captionLang := getCaptionsLangFromPath(captionPath)
	captionPrefix := getCaptionPrefix(captionPath)

	if err := txn.WithTxn(ctx, txnMgr, func(ctx context.Context) error {
		f, err := fqb.FindByPath(ctx, captionPrefix+"*")
		if err != nil {
			return fmt.Errorf("searching for scene %s: %w", captionPrefix, err)
		}
		if f == nil {
			// no related video file found; nothing to associate
			return nil
		}

		fileID := f.Base().ID
		path := f.Base().Path
		logger.Debugf("Matched captions to file %s", path)

		captions, err := w.GetCaptions(ctx, fileID)
		if err != nil {
			return fmt.Errorf("getting captions for file %s: %w", path, err)
		}

		ext := filepath.Ext(captionPath)[1:]

		// only update captions if language code is not present
		if IsLangInCaptions(captionLang, ext, captions) {
			return nil
		}

		captions = append(captions, &models.VideoCaption{
			LanguageCode: captionLang,
			Filename:     filepath.Base(captionPath),
			CaptionType:  ext,
		})
		if err := w.UpdateCaptions(ctx, fileID, captions); err != nil {
			return fmt.Errorf("updating captions for file %s: %w", path, err)
		}
		logger.Debugf("Updated captions for file %s. Added %s", path, captionLang)

		return nil
	}); err != nil {
		logger.Error(err.Error())
	}
}

View File

@ -0,0 +1,53 @@
package video
import (
"testing"
"github.com/stretchr/testify/assert"
)
// testCase pairs a caption path with the language code and caption prefix
// expected to be derived from it.
type testCase struct {
	captionPath string // input caption file path
	expectedLang string // expected result of getCaptionsLangFromPath
	expectedResult string // expected result of getCaptionPrefix
}

// testCases covers unix- and windows-style paths with valid, invalid, and
// absent language codes.
var testCases = []testCase{
	{
		captionPath: "/stash/video.vtt",
		expectedLang: LangUnknown,
		expectedResult: "/stash/video.",
	},
	{
		captionPath: "/stash/video.en.vtt",
		expectedLang: "en",
		expectedResult: "/stash/video.", // lang code valid, remove en part
	},
	{
		captionPath: "/stash/video.test.srt",
		expectedLang: LangUnknown,
		expectedResult: "/stash/video.test.", // no lang code/lang code invalid test should remain
	},
	{
		captionPath: "C:\\videos\\video.fr.srt",
		expectedLang: "fr",
		expectedResult: "C:\\videos\\video.",
	},
	{
		captionPath: "C:\\videos\\video.xx.srt",
		expectedLang: LangUnknown,
		expectedResult: "C:\\videos\\video.xx.", // no lang code/lang code invalid xx should remain
	},
}
// TestGetCaptionPrefix exercises getCaptionPrefix across the shared test
// cases. Renamed from TestGenerateCaptionCandidates: the function it was
// named after no longer exists, and this test exercises getCaptionPrefix.
func TestGetCaptionPrefix(t *testing.T) {
	for _, c := range testCases {
		assert.Equal(t, c.expectedResult, getCaptionPrefix(c.captionPath))
	}
}
// TestGetCaptionsLangFromPath exercises getCaptionsLangFromPath across the
// shared test cases.
func TestGetCaptionsLangFromPath(t *testing.T) {
	for _, tc := range testCases {
		assert.Equal(t, tc.expectedLang, getCaptionsLangFromPath(tc.captionPath))
	}
}

View File

@ -1,4 +1,4 @@
package scene
package video
import (
"path/filepath"

Some files were not shown because too many files have changed in this diff Show More