mirror of https://github.com/stashapp/CommunityScripts.git
synced 2025-12-11 14:55:10 -06:00
built for 6e873b9

parent: 9993bdd9e6
commit: 901adb1876

.github/workflows/deploy.yml (vendored, 51 lines)
@@ -1,51 +0,0 @@
name: Deploy repository to Github Pages

on:
  push:
    branches: [ main, stable ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# Sets permissions of the GITHUB_TOKEN to allow deployment to GitHub Pages
permissions:
  contents: read
  pages: write
  id-token: write

jobs:
  build:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout main
        uses: actions/checkout@v2
        with:
          path: main
          ref: main
          fetch-depth: '0'
      - run: |
          cd main
          ./build_site.sh ../_site/develop
      # uncomment this once we have a stable branch
      - name: Checkout Stable
        uses: actions/checkout@v2
        with:
          path: stable
          ref: stable
          fetch-depth: '0'
      - run: |
          cd stable
          ../main/build_site.sh ../_site/stable
      - uses: actions/upload-pages-artifact@v2

  deploy:
    environment:
      name: github-pages
      url: ${{ steps.deployment.outputs.page_url }}
    runs-on: ubuntu-22.04
    needs: build
    steps:
      - name: Deploy to GitHub Pages
        id: deployment
        uses: actions/deploy-pages@v2
.gitignore (vendored, 1 line)
@@ -1 +0,0 @@
/_site
@@ -1,318 +0,0 @@
import os
import dateutil.parser as dateparser
from urllib2 import quote

# preferences
preference = Prefs
DEBUG = preference['debug']
if DEBUG:
    Log('Agent debug logging is enabled!')
else:
    Log('Agent debug logging is disabled!')

def ValidatePrefs():
    pass


def Start():
    Log("Stash metadata agent started")
    HTTP.Headers['Accept'] = 'application/json'
    HTTP.CacheTime = 0.1
    ValidatePrefs()


def HttpReq(url, authenticate=True, retry=True):
    Log("Requesting: %s" % url)
    api_string = ''
    if Prefs['APIKey']:
        api_string = '&apikey=%s' % Prefs['APIKey']

    if Prefs['UseHTTPS']:
        connectstring = 'https://%s:%s/graphql?query=%s%s'
    else:
        connectstring = 'http://%s:%s/graphql?query=%s%s'
    try:
        connecttoken = connectstring % (Prefs['Hostname'].strip(), Prefs['Port'].strip(), url, api_string)
        Log(connecttoken)
        return JSON.ObjectFromString(
            HTTP.Request(connecttoken).content)
    except Exception as e:
        if not retry:
            raise e
        return HttpReq(url, authenticate, False)


class StashPlexAgent(Agent.Movies):
    name = 'Stash Plex Agent'
    languages = [Locale.Language.English]
    primary_provider = True
    accepts_from = ['com.plexapp.agents.localmedia', 'com.plexapp.agents.xbmcnfo', 'com.plexapp.agents.phoenixadult', 'com.plexapp.agents.data18-phoenix', 'com.plexapp.agents.adultdvdempire']

    def search(self, results, media, lang):
        DEBUG = Prefs['debug']
        file_query = r"""query{findScenes(scene_filter:{path:{value:"\"<FILENAME>\"",modifier:INCLUDES}}){scenes{id,title,date,studio{id,name}}}}"""
        mediaFile = media.items[0].parts[0].file
        filename = String.Unquote(mediaFile).encode('utf8', 'ignore')
        filename = os.path.splitext(os.path.basename(filename))[0]
        if filename:
            filename = str(quote(filename.encode('UTF-8')))
            query = file_query.replace("<FILENAME>", filename)
            request = HttpReq(query)
            if DEBUG:
                Log(request)
            movie_data = request['data']['findScenes']['scenes']
            score = 100 if len(movie_data) == 1 else 85
            for scene in movie_data:
                if scene['date']:
                    title = scene['title'] + ' - ' + scene['date']
                else:
                    title = scene['title']
                Log("Title Found: " + title + " Score: " + str(score) + " ID:" + scene['id'])
                results.Append(MetadataSearchResult(id=str(scene['id']), name=title, score=int(score), lang=lang))

    def update(self, metadata, media, lang, force=False):
        DEBUG = Prefs['debug']
        Log("update(%s)" % metadata.id)
        mid = metadata.id
        id_query = "query{findScene(id:%s){path,id,title,details,url,date,rating,paths{screenshot,stream}movies{movie{id,name}}studio{id,name,image_path,parent_studio{id,name,details}}organized,stash_ids{stash_id}tags{id,name}performers{name,image_path,tags{id,name}}movies{movie{name}}galleries{id,title,url,images{id,title,path,file{size,width,height}}}}}"
        data = HttpReq(id_query % mid)
        data = data['data']['findScene']
        metadata.collections.clear()

        allow_scrape = False
        if (Prefs["RequireOrganized"] and data["organized"]) or not Prefs["RequireOrganized"]:
            if DEBUG and Prefs["RequireOrganized"]:
                Log("Passed 'Organized' Check, continuing...")
            if (Prefs["RequireURL"] and data["url"]) or not Prefs["RequireURL"]:
                if DEBUG and Prefs["RequireURL"]:
                    Log("Passed 'RequireURL' Check, continuing...")
                if (Prefs["RequireStashID"] and len(data["stash_ids"])) or not Prefs["RequireStashID"]:
                    if DEBUG and Prefs["RequireStashID"]:
                        Log("Passed 'RequireStashID' Check, continuing...")
                    allow_scrape = True
                else:
                    Log("Failed 'RequireStashID' Check, stopping.")
                    allow_scrape = False
            else:
                Log("Failed 'RequireURL' Check, stopping.")
                allow_scrape = False
        else:
            Log("Failed 'Organized' Check, stopping.")
            allow_scrape = False

        if allow_scrape:
            if data['date']:
                try:
                    Log("Trying to parse:" + data["date"])
                    # dateparser is the dateutil.parser module, so call parse() on it directly
                    date = dateparser.parse(data["date"])
                except Exception as ex:
                    Log(ex)
                    date = None
                # Set the date and year if found.
                if date is not None:
                    metadata.originally_available_at = date
                    metadata.year = date.year

            # Get the title
            if data['title']:
                metadata.title = data["title"]

            # Get the Studio
            if data["studio"] is not None:
                metadata.studio = data["studio"]["name"]

            # Get the rating
            if data["rating"] is not None:
                metadata.rating = float(data["rating"]) * 2
                if Prefs["CreateRatingTags"]:
                    if int(data["rating"]) > 0:
                        rating = str(int(data["rating"]))
                        ratingstring = "Rating: " + rating + " Stars"
                        try:
                            metadata.collections.add(ratingstring)
                        except:
                            pass

            # Set the summary
            if data['details']:
                summary = data["details"].replace("\n", " ").replace("\r", "").replace("\t", "")
                metadata.summary = summary

            # Set series and add to collections
            if Prefs["CreateSiteCollectionTags"]:
                if data["studio"] is not None:
                    if Prefs["PrefixSiteCollectionTags"]:
                        SitePrefix = Prefs["PrefixSiteCollectionTags"]
                    else:
                        SitePrefix = "Site: "
                    site = SitePrefix + data["studio"]["name"]
                    try:
                        if DEBUG:
                            Log("Adding Site Collection: " + site)
                        metadata.collections.add(site)
                    except:
                        pass
            if Prefs["CreateStudioCollectionTags"]:
                if data["studio"] is not None:
                    if Prefs["PrefixStudioCollectionTags"]:
                        StudioPrefix = Prefs["PrefixStudioCollectionTags"]
                    else:
                        StudioPrefix = "Studio: "
                    if data["studio"]["parent_studio"] is not None:
                        site = StudioPrefix + data["studio"]["parent_studio"]["name"]
                    else:
                        if Prefs["UseSiteForStudioCollectionTags"]:
                            site = StudioPrefix + data["studio"]["name"]
                        else:
                            site = None
                    try:
                        # only log and add when a studio name was actually resolved
                        if site:
                            if DEBUG:
                                Log("Adding Studio Collection: " + site)
                            metadata.collections.add(site)
                    except:
                        pass
            if Prefs["CreateMovieCollectionTags"]:
                if data["movies"] is not None:
                    for movie in data["movies"]:
                        if Prefs["PrefixMovieCollectionTags"]:
                            MoviePrefix = Prefs["PrefixMovieCollectionTags"]
                        else:
                            MoviePrefix = "Movie: "
                        if "name" in movie["movie"]:
                            movie_collection = MoviePrefix + movie["movie"]["name"]
                            try:
                                if DEBUG:
                                    Log("Adding Movie Collection: " + movie_collection)
                                metadata.collections.add(movie_collection)
                            except:
                                pass
            if Prefs["CreatePerformerCollectionTags"]:
                if data["performers"] is not None:
                    for performer in data["performers"]:
                        if Prefs["PrefixPerformerCollectionTags"]:
                            PerformerPrefix = Prefs["PrefixPerformerCollectionTags"]
                        else:
                            PerformerPrefix = "Actor: "
                        if "name" in performer:
                            actor_collection = PerformerPrefix + performer["name"]
                            try:
                                if DEBUG:
                                    Log("Adding Performer Collection: " + actor_collection)
                                metadata.collections.add(actor_collection)
                            except:
                                pass

            # Add the genres
            metadata.genres.clear()
            if Prefs["IgnoreTags"]:
                ignore_tags = Prefs["IgnoreTags"].split(",")
                ignore_tags = list(map(lambda x: x.strip(), ignore_tags))
            else:
                ignore_tags = []
            if Prefs["CreateTagCollectionTags"]:
                collection_tags = Prefs["CreateTagCollectionTags"].split(",")
                collection_tags = list(map(lambda x: x.strip(), collection_tags))
            else:
                collection_tags = []
            try:
                if data["tags"]:
                    genres = data["tags"]
                    for genre in genres:
                        if genre["id"] not in ignore_tags and "ambiguous" not in genre["name"].lower():
                            metadata.genres.add(genre["name"])
                            if not Prefs["CreateAllTagCollectionTags"] and genre["id"] in collection_tags:
                                try:
                                    if DEBUG:
                                        Log("Adding Tag Collection: " + genre["name"])
                                    metadata.collections.add(genre["name"])
                                except:
                                    pass
                            elif Prefs["CreateAllTagCollectionTags"] and genre["id"] not in collection_tags:
                                try:
                                    if DEBUG:
                                        Log("Adding Tag Collection: " + genre["name"])
                                    metadata.collections.add(genre["name"])
                                except:
                                    pass
                if Prefs["AppendPerformerTags"]:
                    for performer in data["performers"]:
                        if performer["tags"]:
                            genres = performer["tags"]
                            for genre in genres:
                                if genre["id"] not in ignore_tags and "ambiguous" not in genre["name"].lower() and genre["name"] not in metadata.genres:
                                    if DEBUG:
                                        Log("Added Performer (" + performer['name'] + ") tag to scene: " + genre['name'])
                                    metadata.genres.add(genre["name"])
                                    if genre["id"] in collection_tags:
                                        try:
                                            if DEBUG:
                                                Log("Adding Tag Collection: " + genre["name"])
                                            metadata.collections.add(genre["name"])
                                        except:
                                            pass
            except:
                pass

            # Add the performers
            metadata.roles.clear()
            # Create and populate role with actor's name
            try:
                if data["performers"]:
                    api_string = ""
                    if Prefs['APIKey']:
                        api_string = '&apikey=%s' % Prefs['APIKey']
                    models = data["performers"]
                    for model in models:
                        if DEBUG:
                            Log("Pulling Model: " + model["name"] + " With Image: " + model["image_path"])
                        role = metadata.roles.new()
                        role.name = model["name"]
                        role.photo = model["image_path"] + api_string
            except:
                pass

            # Add posters and fan art.
            if data["paths"]["screenshot"] is not None:
                api_string = ""
                if Prefs['APIKey']:
                    api_string = '&apikey=%s' % Prefs['APIKey']
                try:
                    thumb = HTTP.Request(data["paths"]["screenshot"] + api_string)
                    metadata.posters[data["paths"]["screenshot"] + api_string] = Proxy.Preview(thumb, sort_order=0)
                    metadata.art[data["paths"]["screenshot"] + api_string] = Proxy.Preview(thumb, sort_order=0)
                except Exception as e:
                    pass

            if Prefs["IncludeGalleryImages"]:
                api_string = ""
                if Prefs['APIKey']:
                    api_string = '&apikey=%s' % Prefs['APIKey']
                if Prefs['UseHTTPS']:
                    imagestring = 'https://%s:%s/image/%s/image' + api_string
                else:
                    imagestring = 'http://%s:%s/image/%s/image' + api_string
                if data["galleries"] is not None:
                    for gallery in data["galleries"]:
                        for image in gallery["images"]:
                            if Prefs["SortGalleryImages"]:
                                if image["file"]["height"] > image["file"]["width"]:
                                    image_orientation = "poster"
                                else:
                                    image_orientation = "background"
                            else:
                                image_orientation = "all"
                            imageurl = imagestring % (Prefs['Hostname'], Prefs['Port'], image["id"])
                            try:
                                thumb = HTTP.Request(imageurl)
                                if image_orientation == "poster" or image_orientation == "all":
                                    if DEBUG:
                                        Log("Inserting Poster image: " + image["title"] + " (" + str(image["file"]["width"]) + "x" + str(image["file"]["height"]) + " WxH)")
                                    metadata.posters[imageurl] = Proxy.Preview(thumb)
                                if image_orientation == "background" or image_orientation == "all":
                                    if DEBUG:
                                        Log("Inserting Background image: " + image["title"] + " (" + str(image["file"]["width"]) + "x" + str(image["file"]["height"]) + " WxH)")
                                    metadata.art[imageurl] = Proxy.Preview(thumb)
                            except Exception as e:
                                pass
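For illustration, a minimal sketch of the URL HttpReq assembles with the shipped defaults (Hostname 127.0.0.1, Port 9999, no API key, per the DefaultPrefs.json below); the query value here is made up:

```python
# Sketch: mirrors the connectstring logic in HttpReq above.
hostname, port, api_string = '127.0.0.1', '9999', ''   # defaults from DefaultPrefs.json
query = 'query{findScene(id:123){title}}'              # illustrative GraphQL document
url = 'http://%s:%s/graphql?query=%s%s' % (hostname, port, query, api_string)
# -> http://127.0.0.1:9999/graphql?query=query{findScene(id:123){title}}
```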
@@ -1,146 +0,0 @@
[
  {
    "id": "Hostname",
    "label": "The host for Stash",
    "type": "text",
    "default": "127.0.0.1"
  },
  {
    "id": "Port",
    "label": "The port for Stash",
    "type": "text",
    "default": "9999"
  },
  {
    "id": "UseHTTPS",
    "label": "Use HTTPS instead of HTTP to connect",
    "type": "bool",
    "default": false
  },
  {
    "id": "APIKey",
    "label": "The API Key for Stash if Authentication is enabled",
    "type": "text",
    "default": ""
  },
  {
    "id": "IncludeGalleryImages",
    "label": "Include attached Gallery images in addition to default poster?",
    "type": "bool",
    "default": false
  },
  {
    "id": "SortGalleryImages",
    "label": "If including gallery images, auto sort into poster/background based on orientation?",
    "type": "bool",
    "default": false
  },
  {
    "id": "AppendPerformerTags",
    "label": "Include Performer Tags with the scraped scene tags?",
    "type": "bool",
    "default": false
  },
  {
    "id": "IgnoreTags",
    "label": "Stash Tag ID numbers to ignore (comma separated, 0 to disable)",
    "type": "text",
    "default": "1,2,3318,6279"
  },
  {
    "id": "CreateTagCollectionTags",
    "label": "Stash Tag ID numbers to create Collections from (comma separated, 0 to disable)",
    "type": "text",
    "default": "0"
  },
  {
    "id": "CreateAllTagCollectionTags",
    "label": "Create Collections from ALL Tags (If TRUE then option above will exclude instead of include tags)",
    "type": "bool",
    "default": false
  },
  {
    "id": "CreateSiteCollectionTags",
    "label": "Auto create Plex Collection tags for scene Site",
    "type": "bool",
    "default": true
  },
  {
    "id": "PrefixSiteCollectionTags",
    "label": "Prefix for Site Collection Names (The Site name will be appended to this value)",
    "type": "text",
    "default": "Site: "
  },
  {
    "id": "CreateStudioCollectionTags",
    "label": "Auto create Plex Collection tags for scene Studio",
    "type": "bool",
    "default": true
  },
  {
    "id": "UseSiteForStudioCollectionTags",
    "label": "If Studio is not defined, use Site instead (In Stash, Studio is the parent of the scene Studio)",
    "type": "bool",
    "default": false
  },
  {
    "id": "PrefixStudioCollectionTags",
    "label": "Prefix for Studio Collection Names (The Studio name (if available) will be appended to this value)",
    "type": "text",
    "default": "Studio: "
  },
  {
    "id": "CreateMovieCollectionTags",
    "label": "Auto create Plex Collection tags for associated scene Movie",
    "type": "bool",
    "default": false
  },
  {
    "id": "PrefixMovieCollectionTags",
    "label": "Prefix for Movie Collection Names (The Movie title (if available) will be appended to this value)",
    "type": "text",
    "default": "Movie: "
  },
  {
    "id": "CreatePerformerCollectionTags",
    "label": "Auto create Plex Collection tags for associated Performers",
    "type": "bool",
    "default": false
  },
  {
    "id": "PrefixPerformerCollectionTags",
    "label": "Prefix for Performer Collection Names (The performer (if available) will be appended to this value)",
    "type": "text",
    "default": "Actor: "
  },
  {
    "id": "CreateRatingTags",
    "label": "Auto create Plex Collection tags for Stash star rating",
    "type": "bool",
    "default": false
  },
  {
    "id": "RequireOrganized",
    "label": "Require Organized flag to be set in Stash to pull metadata",
    "type": "bool",
    "default": false
  },
  {
    "id": "RequireURL",
    "label": "Require scene URL to be set in Stash to pull metadata",
    "type": "bool",
    "default": false
  },
  {
    "id": "RequireStashID",
    "label": "Require a scene StashID to be set in Stash to pull metadata",
    "type": "bool",
    "default": false
  },
  {
    "id": "debug",
    "label": "Use debug logging",
    "type": "bool",
    "default": false
  }
]
@@ -1,28 +0,0 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
  <dict>
    <key>CFBundleDevelopmentRegion</key>
    <string>English</string>
    <key>CFBundleExecutable</key>
    <string>StashPlexAgent</string>
    <key>CFBundleIdentifier</key>
    <string>com.plexapp.agents.stashplexagent</string>
    <key>CFBundleInfoDictionaryVersion</key>
    <string>6.0</string>
    <key>CFBundleShortVersionString</key>
    <string>1.0</string>
    <key>CFBundleSignature</key>
    <string>????</string>
    <key>CFBundleVersion</key>
    <string>1.0</string>
    <key>PlexFrameworkVersion</key>
    <string>2</string>
    <key>PlexPluginClass</key>
    <string>Agent</string>
    <key>PlexPluginMode</key>
    <string>AlwaysOn</string>
    <key>PlexPluginCodePolicy</key>
    <string>Elevated</string>
  </dict>
</plist>
@@ -1,30 +0,0 @@
# StashPlexAgent.bundle
A very simplistic Plex agent to pull metadata from Stash.

Scenes are matched based on filename (without path or extension) against the Stash "Path", so files must be scanned into Stash with their current filename.

Preferences are set under the plugin, or in the library definition (if you set it as the primary agent for the library). I'm using "Video Files Scanner" with it.

By default it will create Plex "Site: <STUDIO>" and "Studio: <STUDIO PARENT>" collection tags, but this can be disabled in preferences. There are several collection tag options available, and the prefix prepended to each Collection name can be customized.

Stash "Tags" are placed into Plex "Genres", and attached Performer tags can optionally be pulled into the Plex genre list as well.

You can also set tag numbers to ignore on import; I've left mine in as an example. You probably want to change these unless your "temporary" tags miraculously line up with mine. (Also, initially you might need to try saving a couple of times; Plex seems to not want to keep changes in this field at first for some reason.)

Optionally, you can pull in any images from galleries attached to Scenes as Plex artwork. (There is an option to auto split gallery images into Poster/Background depending on basic orientation: if it's taller than wide, it's a poster.)

To install, just download the bundle and put it into your "\PlexMediaServer\Plex Media Server\Plug-ins" folder (the entire bundle as a directory, i.e. "\StashPlexAgent.bundle").

I guarantee there will be problems. When they pop up, feel free to get with me (@Darklyter) on either the TPDB or Stash Discord channels.

This agent only handles scenes currently. I haven't played with movies in Stash much yet, but I can take a look if there is interest (though it will optionally create Collections based on defined movies). Currently the Plex ADE agent handles that for me.

A bit of explanation for Sites vs Studios:

I help out with TPDB, so I'm very much in the Site -> Studio -> Network mentality. In Stash it is simply "Studio".

To my thinking, a Stash studio that is directly connected to the scene is the "Site". If that site has a parent studio, that is defined as "Studio". If the scene studio has a grandparent, that would be "Network" (though I'm not doing anything with that yet).

For example, in my Stash I have: Mind Geek as the parent of Brazzers, which is the parent of Brazzers Live.

Therefore a scene would have: Site = "Brazzers Live", Studio = "Brazzers", Network = "Mind Geek"
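To make the filename matching concrete, here is a small sketch (not from the source) of how search() in the agent code above fills its query template; the template string is copied verbatim from the agent, while the filename is a made-up example:

```python
# Sketch: the template is the file_query from search() above; the filename is hypothetical.
file_query = r"""query{findScenes(scene_filter:{path:{value:"\"<FILENAME>\"",modifier:INCLUDES}}){scenes{id,title,date,studio{id,name}}}}"""
filename = "My.Scene.2020"  # basename with extension stripped, then URL-quoted
print(file_query.replace("<FILENAME>", filename))
```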
README.md (80 lines changed)
@@ -1,31 +1,37 @@
# CommunityScripts Repository

This repository contains plugin and utility scripts created by the Stash community and hosted on the official GitHub repo. There is also [a list of third-party plugins on our wiki page](https://github.com/stashapp/stash/wiki/Plugins-&--Scripts).
This repository contains plugin and utility scripts created by the Stash community and hosted on the official GitHub repo.

There is also [a list of third-party plugins in our documentation](https://docs.stashapp.cc/add-ons/third-party-integrations).

## Please note: v24 now uses an installer
# We recommend you use that to install (and update) plugins.
Manual installs are not recommended, and you shouldn't do so unless you otherwise know what you are doing.

## How To Install
To download a plugin, either clone the git repo, or download the files directly.
In Stash v24, the CommunityScripts repo source is automatically installed by default.

It is recommended that plugins are placed in their own subdirectory of your `plugins` directory. The `plugins` directory should be created as a subdirectory in the directory containing your `config.yml` file. This will be in `$HOME/.stash` by default.
This default source is located at https://stashapp.github.io/CommunityScripts/stable/index.yml

When downloading directly, click on the file you want and then make sure to click the raw button:
# Plugin, Themes, and Scripts Directory
We used to list all community supported plugins, themes, and scripts in this repository,
but with the changes in v24, ANY items installable by the plugin installer will no longer be listed here.
Use the Plugin Installer built into Stash.

# Plugin and Script Directory
This list keeps track of scripts and plugins in this repository. Please ensure the list is kept in alphabetical order.
We will continue to list the items NOT otherwise installable in this way below.

## NOTE: BREAKING CHANGES
The upcoming v24 release (and the current development branch) have breaking changes to schema, and also plugin changes.
We're beginning to review plugins and the rest and patch them to work, but it's an ongoing process.
We'll update the table below as we do this, but we STRONGLY recommend you do not use the development branch unless you are prepared to help with the patching.
The recent v24 release (and future development branches) had major breaking changes to the old schema, as well as plugin changes.
We're beginning to review plugins and the rest and patch them to work, but it's an ongoing process...

We'll update the table below as we do this...
We will also be rearranging things a bit and updating documentation (including this page).

## Plugins
## Plugins will no longer be listed individually here...

Category|Triggers|Plugin Name|Description|Minimum Stash version|Updated for v24|
--------|--------|-----------|-----------|---------------------|-----
Scraper|Task|[GHScraper_Checker](plugins/GHScraper_Checker)|Compare local file against github file from the community scraper repo.|v0.8|:x:
Maintenance|Task<br />Scene.Update|[renamerOnUpdate](plugins/renamerOnUpdate)|Rename/Move your file based on Stash metadata.|v0.7|:x:
Maintenance|Task<br />Scene.Update|[renamerOnUpdate](plugins/renamerOnUpdate)|Rename/Move your file based on Stash metadata.|v2.4|:white_check_mark: STOPGAP
Maintenance|Set Scene Cover|[setSceneCoverFromFile](plugins/setSceneCoverFromFile)|Searches Stash for Scenes with a cover image in the same folder and sets the cover image in Stash to that image.|v0.7|:x:
Scenes|SceneMarker.Create<br />SceneMarker.Update|[markerTagToScene](plugins/markerTagToScene)|Adds primary tag of Scene Marker to the Scene on marker create/update.|v0.8 ([46bbede](https://github.com/stashapp/stash/commit/46bbede9a07144797d6f26cf414205b390ca88f9))|:x:
Scanning|Scene.Create<br />Gallery.Create<br />Image.Create|[defaultDataForPath](plugins/defaultDataForPath)|Adds configured Tags, Performers and/or Studio to all newly scanned Scenes, Images and Galleries.|v0.8|:x:
@@ -36,18 +42,46 @@ Reporting||[TagGraph](plugins/tagGraph)|Creates a visual of the Tag relations.|v

## Themes

Theme Name|Description |Updated for v24|
----------|--------------------------------------------|----
[Plex](themes/plex) |Theme inspired by the popular Plex Interface|:x:
# A variety of Themes are now available to be one-click installed via the Plugin Settings page in your Stash
We welcome new themes, as well as patches to existing themes.

## Utility Scripts

|Category|Userscript Name|Description|Updated for v24|
|Category|Name|Description|Updated for v24|
---------|---------------|-----------|----
StashDB |[StashDB Submission Helper](/userscripts/StashDB_Submission_Helper)|Adds handy functions for StashDB submissions, like buttons to add aliases in bulk to a performer|:x:

## Utility Scripts

Category|Plugin Name|Description|Minimum Stash version|Updated for v24|
--------|-----------|-----------|---------------------|----
Kodi|[Kodi Helper](scripts/kodi-helper)|Generates `nfo` and `strm` files for use with Kodi.|v0.7|:x:

## Contributing

### For plugins made with [stash-plugin-builder](https://github.com/Tetrax-10/stash-plugin-builder)

Please refer to its [docs](https://github.com/Tetrax-10/stash-plugin-builder#readme) for building.

### Formatting

Formatting is enforced on all files. Follow this setup guide:

1. **[Yarn](https://yarnpkg.com/en/docs/install)** and **its dependencies** must be installed to run the formatting tools.
```sh
yarn install --frozen-lockfile
```

2. **Python dependencies** must also be installed to format `py` files.
```sh
pip install -r requirements.txt
```

#### Formatting non-`py` files

```sh
yarn run format
```

#### Formatting `py` files

`py` files are formatted using [`black`](https://pypi.org/project/black/).

```sh
yarn run format-py
```
@@ -1,72 +0,0 @@
#!/bin/bash

# builds a repository of plugins
# outputs to _site with the following structure:
#   index.yml
#   <plugin_id>.zip
# Each zip file contains the plugin yml file and any other files in the same directory

outdir="$1"
if [ -z "$outdir" ]; then
    outdir="_site"
fi

rm -rf "$outdir"
mkdir -p "$outdir"

buildPlugin()
{
    f=$1

    if grep -q "^#pkgignore" "$f"; then
        return
    fi

    # get the plugin id from the yml filename
    dir=$(dirname "$f")
    plugin_id=$(basename "$f" .yml)

    echo "Processing $plugin_id"

    # derive the version hash and last-updated timestamp from git
    version=$(git log -n 1 --pretty=format:%h -- "$dir"/*)
    updated=$(TZ=UTC0 git log -n 1 --date="format-local:%F %T" --pretty=format:%ad -- "$dir"/*)

    # create the zip file, copying the yml and any other files in the directory
    zipfile=$(realpath "$outdir/$plugin_id.zip")

    pushd "$dir" > /dev/null
    zip -r "$zipfile" . > /dev/null
    popd > /dev/null

    name=$(grep "^name:" "$f" | head -n 1 | cut -d' ' -f2- | sed -e 's/\r//' -e 's/^"\(.*\)"$/\1/')
    description=$(grep "^description:" "$f" | head -n 1 | cut -d' ' -f2- | sed -e 's/\r//' -e 's/^"\(.*\)"$/\1/')
    ymlVersion=$(grep "^version:" "$f" | head -n 1 | cut -d' ' -f2- | sed -e 's/\r//' -e 's/^"\(.*\)"$/\1/')
    version="$ymlVersion-$version"
    dep=$(grep "^# requires:" "$f" | cut -c 12- | sed -e 's/\r//')

    # write to spec index
    echo "- id: $plugin_id
  name: $name
  metadata:
    description: $description
  version: $version
  date: $updated
  path: $plugin_id.zip
  sha256: $(sha256sum "$zipfile" | cut -d' ' -f1)" >> "$outdir"/index.yml

    # handle dependencies
    if [ ! -z "$dep" ]; then
        echo "  requires:" >> "$outdir"/index.yml
        for d in ${dep//,/ }; do
            echo "    - $d" >> "$outdir"/index.yml
        done
    fi

    echo "" >> "$outdir"/index.yml
}

find ./plugins -mindepth 1 -name "*.yml" | while read file; do
    buildPlugin "$file"
done
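For reference, each call to buildPlugin appends one entry of roughly this shape to index.yml; the structure follows the echo statements in the script, while all values here are illustrative placeholders:

```yaml
- id: examplePlugin              # hypothetical plugin id
  name: Example Plugin
  metadata:
    description: Description taken from the plugin yml
  version: 1.0-abc1234           # <yml version>-<short git hash>
  date: 2024-01-31 12:00:00      # last commit touching the plugin dir, UTC
  path: examplePlugin.zip
  sha256: 0123abcd...            # sha256sum of the zip
  requires:                      # only emitted when the yml has a "# requires:" line
    - anotherPlugin
```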
@@ -1,11 +0,0 @@
name: Cropper.JS
description: Exports cropper.js functionality for JS/Userscripts
version: 1.6.1
ui:
  css:
    - cropper.css
  javascript:
    - cropper.js

# note - not minimized for more transparency around updates & diffs against source code
# https://github.com/fengyuanchen/cropperjs/tree/main/dist
@@ -1,308 +0,0 @@
/*!
 * Cropper.js v1.6.1
 * https://fengyuanchen.github.io/cropperjs
 *
 * Copyright 2015-present Chen Fengyuan
 * Released under the MIT license
 *
 * Date: 2023-09-17T03:44:17.565Z
 */

.cropper-container {
  direction: ltr;
  font-size: 0;
  line-height: 0;
  position: relative;
  -ms-touch-action: none;
  touch-action: none;
  -webkit-user-select: none;
  -moz-user-select: none;
  -ms-user-select: none;
  user-select: none;
}

.cropper-container img {
  backface-visibility: hidden;
  display: block;
  height: 100%;
  image-orientation: 0deg;
  max-height: none !important;
  max-width: none !important;
  min-height: 0 !important;
  min-width: 0 !important;
  width: 100%;
}

.cropper-wrap-box,
.cropper-canvas,
.cropper-drag-box,
.cropper-crop-box,
.cropper-modal {
  bottom: 0;
  left: 0;
  position: absolute;
  right: 0;
  top: 0;
}

.cropper-wrap-box,
.cropper-canvas {
  overflow: hidden;
}

.cropper-drag-box {
  background-color: #fff;
  opacity: 0;
}

.cropper-modal {
  background-color: #000;
  opacity: 0.5;
}

.cropper-view-box {
  display: block;
  height: 100%;
  outline: 1px solid #39f;
  outline-color: rgba(51, 153, 255, 0.75);
  overflow: hidden;
  width: 100%;
}

.cropper-dashed {
  border: 0 dashed #eee;
  display: block;
  opacity: 0.5;
  position: absolute;
}

.cropper-dashed.dashed-h {
  border-bottom-width: 1px;
  border-top-width: 1px;
  height: calc(100% / 3);
  left: 0;
  top: calc(100% / 3);
  width: 100%;
}

.cropper-dashed.dashed-v {
  border-left-width: 1px;
  border-right-width: 1px;
  height: 100%;
  left: calc(100% / 3);
  top: 0;
  width: calc(100% / 3);
}

.cropper-center {
  display: block;
  height: 0;
  left: 50%;
  opacity: 0.75;
  position: absolute;
  top: 50%;
  width: 0;
}

.cropper-center::before,
.cropper-center::after {
  background-color: #eee;
  content: ' ';
  display: block;
  position: absolute;
}

.cropper-center::before {
  height: 1px;
  left: -3px;
  top: 0;
  width: 7px;
}

.cropper-center::after {
  height: 7px;
  left: 0;
  top: -3px;
  width: 1px;
}

.cropper-face,
.cropper-line,
.cropper-point {
  display: block;
  height: 100%;
  opacity: 0.1;
  position: absolute;
  width: 100%;
}

.cropper-face {
  background-color: #fff;
  left: 0;
  top: 0;
}

.cropper-line {
  background-color: #39f;
}

.cropper-line.line-e {
  cursor: ew-resize;
  right: -3px;
  top: 0;
  width: 5px;
}

.cropper-line.line-n {
  cursor: ns-resize;
  height: 5px;
  left: 0;
  top: -3px;
}

.cropper-line.line-w {
  cursor: ew-resize;
  left: -3px;
  top: 0;
  width: 5px;
}

.cropper-line.line-s {
  bottom: -3px;
  cursor: ns-resize;
  height: 5px;
  left: 0;
}

.cropper-point {
  background-color: #39f;
  height: 5px;
  opacity: 0.75;
  width: 5px;
}

.cropper-point.point-e {
  cursor: ew-resize;
  margin-top: -3px;
  right: -3px;
  top: 50%;
}

.cropper-point.point-n {
  cursor: ns-resize;
  left: 50%;
  margin-left: -3px;
  top: -3px;
}

.cropper-point.point-w {
  cursor: ew-resize;
  left: -3px;
  margin-top: -3px;
  top: 50%;
}

.cropper-point.point-s {
  bottom: -3px;
  cursor: s-resize;
  left: 50%;
  margin-left: -3px;
}

.cropper-point.point-ne {
  cursor: nesw-resize;
  right: -3px;
  top: -3px;
}

.cropper-point.point-nw {
  cursor: nwse-resize;
  left: -3px;
  top: -3px;
}

.cropper-point.point-sw {
  bottom: -3px;
  cursor: nesw-resize;
  left: -3px;
}

.cropper-point.point-se {
  bottom: -3px;
  cursor: nwse-resize;
  height: 20px;
  opacity: 1;
  right: -3px;
  width: 20px;
}

@media (min-width: 768px) {
  .cropper-point.point-se {
    height: 15px;
    width: 15px;
  }
}

@media (min-width: 992px) {
  .cropper-point.point-se {
    height: 10px;
    width: 10px;
  }
}

@media (min-width: 1200px) {
  .cropper-point.point-se {
    height: 5px;
    opacity: 0.75;
    width: 5px;
  }
}

.cropper-point.point-se::before {
  background-color: #39f;
  bottom: -50%;
  content: ' ';
  display: block;
  height: 200%;
  opacity: 0;
  position: absolute;
  right: -50%;
  width: 200%;
}

.cropper-invisible {
  opacity: 0;
}

.cropper-bg {
  background-image: url('data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAABAAAAAQAQMAAAAlPW0iAAAAA3NCSVQICAjb4U/gAAAABlBMVEXMzMz////TjRV2AAAACXBIWXMAAArrAAAK6wGCiw1aAAAAHHRFWHRTb2Z0d2FyZQBBZG9iZSBGaXJld29ya3MgQ1M26LyyjAAAABFJREFUCJlj+M/AgBVhF/0PAH6/D/HkDxOGAAAAAElFTkSuQmCC');
}

.cropper-hide {
  display: block;
  height: 0;
  position: absolute;
  width: 0;
}

.cropper-hidden {
  display: none !important;
}

.cropper-move {
  cursor: move;
}

.cropper-crop {
  cursor: crosshair;
}

.cropper-disabled .cropper-drag-box,
.cropper-disabled .cropper-face,
.cropper-disabled .cropper-line,
.cropper-disabled .cropper-point {
  cursor: not-allowed;
}
(File diff suppressed because it is too large.)
@@ -1,66 +0,0 @@
import sys, json
from pathlib import Path

import stashapi.log as log
from stashapi.stashapp import StashInterface
import re
from dateparser import parse
from datetime import datetime


def main():
    global stash
    global pattern

    pattern = re.compile(r"\D(\d{4}|\d{1,2})[\._\- /\\](\d{1,2}|[a-zA-Z]{3,}\.*)[\._\- /\\](\d{4}|\d{1,2})\D")
    json_input = json.loads(sys.stdin.read())
    mode_arg = json_input['args']['mode']

    stash = StashInterface(json_input["server_connection"])

    if mode_arg == "gallery":
        find_date_for_galleries()


def find_date_for_galleries():
    galleries = stash.find_galleries(f={
        "is_missing": "date",
        "path": {
            "modifier": "MATCHES_REGEX",
            "value": ".zip$"
        },
        "file_count": {
            "modifier": "EQUALS",
            "value": 1
        }
    })

    total = len(galleries)

    log.info(f"Found {total} galleries")

    for i, gallery in enumerate(galleries):
        log.progress(i / total)
        acceptableDate = None
        for file in gallery.get("files", []):
            for match in pattern.finditer(file["path"]):
                g1 = match.group(1)
                g2 = match.group(2)
                g3 = match.group(3)
                temp = parse(g1 + " " + g2 + " " + g3)
                if temp:
                    acceptableDate = temp.strftime("%Y-%m-%d")
        if acceptableDate:
            log.info("Gallery ID (" + gallery.get("id") + ") has matched the date : " + acceptableDate)
            updateObject = {
                "id": gallery.get("id"),
                "date": acceptableDate
            }
            stash.update_gallery(updateObject)


if __name__ == '__main__':
    main()
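As a quick illustration of how the pattern and dateparser combine above, a sketch with a made-up gallery path (the regex is copied verbatim from the plugin):

```python
import re
from dateparser import parse

pattern = re.compile(r"\D(\d{4}|\d{1,2})[\._\- /\\](\d{1,2}|[a-zA-Z]{3,}\.*)[\._\- /\\](\d{4}|\d{1,2})\D")
path = "/galleries/site-2021.03.05-name.zip"   # hypothetical path
m = next(pattern.finditer(path))               # groups: ('2021', '03', '05')
print(parse(" ".join(m.groups())).strftime("%Y-%m-%d"))  # -> 2021-03-05
```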
@@ -1,14 +0,0 @@
name: Date Parser
description: Find date in path or filename and add it
version: 0.2
exec:
  - python
  - "{pluginDir}/date_parser.py"
interface: raw
tasks:
  - name: Find gallery dates
    description: Add the date on galleries based on their path
    defaultArgs:
      mode: gallery
@@ -1,14 +0,0 @@
certifi>=2022.9.24
charset-normalizer>=2.1.1
dateparser>=1.1.3
idna>=3.4
python-dateutil>=2.8.2
pytz>=2022.6
pytz-deprecation-shim>=0.1.0.post0
regex>=2022.3.2
requests>=2.28.1
six>=1.16.0
stashapp-tools>=0.2.17
tzdata>=2022.6
tzlocal>=4.2
urllib3>=1.26.12
@@ -1,208 +0,0 @@
import json
import os
import re
import sys
import zipfile
from datetime import datetime

import requests

import log

FRAGMENT = json.loads(sys.stdin.read())
FRAGMENT_SERVER = FRAGMENT["server_connection"]
FRAGMENT_ARG = FRAGMENT['args']['mode']
log.LogDebug("Starting Plugin: Github Scraper Checker")

CHECK_LOG = False
GET_NEW_FILE = False
OVERWRITE = False

if FRAGMENT_ARG == "CHECK":
    CHECK_LOG = True
if FRAGMENT_ARG == "NEWFILE":
    GET_NEW_FILE = True
if FRAGMENT_ARG == "OVERWRITE":
    OVERWRITE = True

# Don't write to the log if the file doesn't exist locally.
IGNORE_MISS_LOCAL = False


def graphql_getScraperPath():
    query = """
    query Configuration {
        configuration {
            general {
                scrapersPath
            }
        }
    }
    """
    result = callGraphQL(query)
    return result["configuration"]["general"]["scrapersPath"]


def callGraphQL(query, variables=None):
    # Session cookie for authentication
    graphql_port = FRAGMENT_SERVER['Port']
    graphql_scheme = FRAGMENT_SERVER['Scheme']
    graphql_cookies = {
        'session': FRAGMENT_SERVER.get('SessionCookie').get('Value')
    }
    graphql_headers = {
        "Accept-Encoding": "gzip, deflate, br",
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Connection": "keep-alive",
        "DNT": "1"
    }
    if FRAGMENT_SERVER.get('Domain'):
        graphql_domain = FRAGMENT_SERVER['Domain']
    else:
        if FRAGMENT_SERVER.get('Host'):
            graphql_domain = FRAGMENT_SERVER['Host']
        else:
            graphql_domain = 'localhost'
    # Because i don't understand how host work...
    graphql_domain = 'localhost'
    # Stash GraphQL endpoint
    graphql_url = graphql_scheme + "://" + \
        graphql_domain + ":" + str(graphql_port) + "/graphql"

    json = {'query': query}
    if variables is not None:
        json['variables'] = variables
    try:
        response = requests.post(
            graphql_url, json=json, headers=graphql_headers, cookies=graphql_cookies, timeout=10)
    except:
        sys.exit("[FATAL] Error with the graphql request, are you sure the GraphQL endpoint ({}) is correct?".format(
            graphql_url))
    if response.status_code == 200:
        result = response.json()
        if result.get("error"):
            for error in result["error"]["errors"]:
                raise Exception("GraphQL error: {}".format(error))
        if result.get("data"):
            return result.get("data")
    elif response.status_code == 401:
        sys.exit("HTTP Error 401, Unauthorised.")
    else:
        raise ConnectionError("GraphQL query failed:{} - {}. Query: {}. Variables: {}".format(
            response.status_code, response.content, query, variables))


def file_getlastline(path):
    with open(path, 'r', encoding="utf-8") as f:
        for line in f:
            u_match = re.search(r"^\s*#\s*last updated", line.lower())
            if u_match:
                return line.strip()
    return None


def get_date(line):
    try:
        date = datetime.strptime(re.sub(r".*#.*Last Updated\s*", "", line), "%B %d, %Y")
    except:
        return None
    return date


scraper_folder_path = graphql_getScraperPath()
GITHUB_LINK = "https://github.com/stashapp/CommunityScrapers/archive/refs/heads/master.zip"

try:
    r = requests.get(GITHUB_LINK, timeout=10)
except:
    sys.exit("Failed to download the zip file.")
zip_path = os.path.join(scraper_folder_path, "github.zip")
log.LogDebug(zip_path)
with open(zip_path, "wb") as zip_file:
    zip_file.write(r.content)

with zipfile.ZipFile(zip_path) as z:
    change_detected = False

    for filename in z.namelist():
        # Only care about the scrapers folders
        if "/scrapers/" in filename and filename.endswith(".yml"):
            # read the file
            line = bytes()
            # Filename abc.yml
            gh_file = os.path.basename(filename)

            # Filename /scrapers/<subdir>/abc.yml
            if filename.endswith(f"/scrapers/{gh_file}") == False:
                log.LogDebug("Subdirectory detected: " + filename)
                subdir = re.findall(r'/scrapers/(.*)/.*\.yml', filename)

                if len(subdir) != 1:
                    log.LogError(f"Unexpected number of matching subdirectories found. Expected 1. Found {len(subdir)}.")
                    exit(1)

                gh_file = subdir[0] + "/" + gh_file

            log.LogDebug(gh_file)
            path_local = os.path.join(scraper_folder_path, gh_file)
            gh_line = None
            yml_script = None
            if OVERWRITE:
                with z.open(filename) as f:
                    scraper_content = f.read()
                with open(path_local, 'wb') as yml_file:
                    yml_file.write(scraper_content)
                log.LogInfo("Replacing/Creating {}".format(gh_file))
                continue
            with z.open(filename) as f:
                for line in f:
                    script_match = re.search(r"action:\sscript", line.decode().lower())
                    update_match = re.search(r"^\s*#\s*last updated", line.decode().lower())
                    if script_match:
                        yml_script = True
                    if update_match:
                        gh_line = line.decode().strip()
                        break
            # Got last line
            if gh_line is None:
                log.LogError("[Github] Line Error ({}) ".format(gh_file))
                continue
            gh_date = get_date(gh_line)
            if gh_date is None:
                log.LogError("[Github] Date Error ({}) ".format(gh_file))
                continue
            elif os.path.exists(path_local):
                # Local part
                local_line = file_getlastline(path_local)
                if local_line is None:
                    log.LogError("[Local] Line Error ({}) ".format(gh_file))
                    continue
                local_date = get_date(local_line.strip())
                if local_date is None:
                    log.LogError("[Local] Date Error ({}) ".format(gh_file))
                    continue
                if gh_date > local_date and CHECK_LOG:
                    change_detected = True

                    if yml_script:
                        log.LogInfo("[{}] New version on github (Can be any of the related files)".format(gh_file))
                    else:
                        log.LogInfo("[{}] New version on github".format(gh_file))
            elif GET_NEW_FILE:
                change_detected = True
                # File doesn't exist locally, so take the github version.
                with z.open(filename) as f:
                    scraper_content = f.read()
                with open(path_local, 'wb') as yml_file:
                    yml_file.write(scraper_content)
                log.LogInfo("Creating {}".format(gh_file))
                continue
            elif CHECK_LOG and IGNORE_MISS_LOCAL == False:
                change_detected = True

                log.LogWarning("[{}] File doesn't exist locally".format(gh_file))

if change_detected == False:
    log.LogInfo("Scrapers appear to be in sync with GitHub version.")

os.remove(zip_path)
@@ -1,21 +0,0 @@
name: GHScraper_Checker
description: Check the community scraper repo.
version: 0.1.1
url: https://github.com/stashapp/CommunityScripts/tree/main/plugins/GHScraper_Checker
exec:
  - python
  - "{pluginDir}/GHScraper_Checker.py"
interface: raw
tasks:
  - name: 'Status Check'
    description: "Show in the log if you don't have a scraper or a new version is available."
    defaultArgs:
      mode: CHECK
  - name: 'Getting new files'
    description: "Download scrapers that don't exist in your scraper folder."
    defaultArgs:
      mode: NEWFILE
  # - name: 'Overwrite everything'
  #   description: 'Replace your scrapers with the github version. Overwrites anything existing.'
  #   defaultArgs:
  #     mode: OVERWRITE
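The version comparison in the checker above keys off a "Last Updated" comment line in each scraper yml, which get_date() parses with the `%B %d, %Y` format. An illustrative header line (made-up date):

```yaml
# Last Updated March 05, 2023
```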
@@ -1,52 +0,0 @@
import sys


# Log messages sent from a plugin instance are transmitted via stderr and are
# encoded with a prefix consisting of special character SOH, then the log
# level (one of t, d, i, w, e, or p - corresponding to trace, debug, info,
# warning, error and progress levels respectively), then special character
# STX.
#
# The LogTrace, LogDebug, LogInfo, LogWarning, and LogError methods, and their
# equivalent formatted methods, are intended for use by plugin instances to
# transmit log messages. The LogProgress method is also intended for sending
# progress data.
#


def __prefix(level_char):
    start_level_char = b'\x01'
    end_level_char = b'\x02'

    ret = start_level_char + level_char + end_level_char
    return ret.decode()


def __log(level_char, s):
    if level_char == "":
        return

    print(__prefix(level_char) + s + "\n", file=sys.stderr, flush=True)


def LogTrace(s):
    __log(b't', s)


def LogDebug(s):
    __log(b'd', s)


def LogInfo(s):
    __log(b'i', s)


def LogWarning(s):
    __log(b'w', s)


def LogError(s):
    __log(b'e', s)


def LogProgress(p):
    progress = min(max(0, p), 1)
    __log(b'p', str(progress))
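A minimal sketch of the wire format these helpers produce, following the prefix logic above (print adds a trailing newline on top of the explicit one):

```python
import sys

# What log.LogInfo("hello") writes to stderr:
# SOH, the level character ('i' for info), STX, then the message.
sys.stderr.write("\x01" + "i" + "\x02" + "hello" + "\n")
```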
@@ -1,308 +0,0 @@
(function() {
    let running = false;
    const buttons = [];
    let maxCount = 0;

    function resolveToggle(el) {
        let button = null;
        if (el?.classList.contains('optional-field-content')) {
            button = el.previousElementSibling;
        } else if (el?.tagName === 'SPAN' && el?.classList.contains('ml-auto')) {
            button = el.querySelector('.optional-field button');
        } else if (el?.parentElement?.classList.contains('optional-field-content')) {
            button = el.parentElement.previousElementSibling;
        }
        const state = button?.classList.contains('text-success');
        return {
            button,
            state
        };
    }

    function toggleSearchItem(searchItem, toggleMode) {
        const searchResultItem = searchItem.querySelector('li.search-result.selected-result.active');
        if (!searchResultItem) return;

        const {
            urlNode,
            url,
            id,
            data,
            nameNode,
            name,
            queryInput,
            performerNodes
        } = stash.parseSearchItem(searchItem);

        const {
            remoteUrlNode,
            remoteId,
            remoteUrl,
            remoteData,
            urlNode: matchUrlNode,
            detailsNode,
            imageNode,
            titleNode,
            codeNode,
            dateNode,
            studioNode,
            performerNodes: matchPerformerNodes,
            matches
        } = stash.parseSearchResultItem(searchResultItem);

        const studioMatchNode = matches.find(o => o.matchType === 'studio')?.matchNode;
        const performerMatchNodes = matches.filter(o => o.matchType === 'performer').map(o => o.matchNode);

        const includeTitle = document.getElementById('result-toggle-title').checked;
        const includeCode = document.getElementById('result-toggle-code').checked;
        const includeDate = document.getElementById('result-toggle-date').checked;
        const includeCover = document.getElementById('result-toggle-cover').checked;
        const includeStashID = document.getElementById('result-toggle-stashid').checked;
        const includeURL = document.getElementById('result-toggle-url').checked;
        const includeDetails = document.getElementById('result-toggle-details').checked;
        const includeStudio = document.getElementById('result-toggle-studio').checked;
        const includePerformers = document.getElementById('result-toggle-performers').checked;

        let options = [];

        options.push(['title', includeTitle, titleNode, resolveToggle(titleNode)]);
        options.push(['code', includeCode, codeNode, resolveToggle(codeNode)]);
        options.push(['date', includeDate, dateNode, resolveToggle(dateNode)]);
        options.push(['cover', includeCover, imageNode, resolveToggle(imageNode)]);
        options.push(['stashid', includeStashID, remoteUrlNode, resolveToggle(remoteUrlNode)]);
        options.push(['url', includeURL, matchUrlNode, resolveToggle(matchUrlNode)]);
        options.push(['details', includeDetails, detailsNode, resolveToggle(detailsNode)]);
        options.push(['studio', includeStudio, studioMatchNode, resolveToggle(studioMatchNode)]);
        options = options.concat(performerMatchNodes.map(o => ['performer', includePerformers, o, resolveToggle(o)]));

        for (const [optionType, optionValue, optionNode, {
            button,
            state
        }] of options) {
            let wantedState = optionValue;
            if (toggleMode === 1) {
                wantedState = true;
            } else if (toggleMode === -1) {
                wantedState = false;
            }
            if (optionNode && wantedState !== state) {
                button.click();
            }
        }
    }

    function run() {
        if (!running) return;
        const button = buttons.pop();
        stash.setProgress((maxCount - buttons.length) / maxCount * 100);
        if (button) {
            const searchItem = getClosestAncestor(button, '.search-item');
            let toggleMode = 0;
            if (btn === btnOn) {
                toggleMode = 1;
            } else if (btn === btnOff) {
                toggleMode = -1;
            } else if (btn === btnMixed) {
                toggleMode = 0;
            }
            toggleSearchItem(searchItem, toggleMode);
            setTimeout(run, 0);
        } else {
            stop();
        }
    }

    const btnGroup = document.createElement('div');
    const btnGroupId = 'batch-result-toggle';
    btnGroup.setAttribute('id', btnGroupId);
    btnGroup.classList.add('btn-group', 'ml-3');

    const checkLabel = '<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="check" class="svg-inline--fa fa-check fa-w-16 fa-icon fa-fw" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M173.898 439.404l-166.4-166.4c-9.997-9.997-9.997-26.206 0-36.204l36.203-36.204c9.997-9.998 26.207-9.998 36.204 0L192 312.69 432.095 72.596c9.997-9.997 26.207-9.997 36.204 0l36.203 36.204c9.997 9.997 9.997 26.206 0 36.204l-294.4 294.401c-9.998 9.997-26.207 9.997-36.204-.001z"></path></svg>';
    const timesLabel = '<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="times" class="svg-inline--fa fa-times fa-w-11 fa-icon fa-fw" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 352 512"><path fill="currentColor" d="M242.72 256l100.07-100.07c12.28-12.28 12.28-32.19 0-44.48l-22.24-22.24c-12.28-12.28-32.19-12.28-44.48 0L176 189.28 75.93 89.21c-12.28-12.28-32.19-12.28-44.48 0L9.21 111.45c-12.28 12.28-12.28 32.19 0 44.48L109.28 256 9.21 356.07c-12.28 12.28-12.28 32.19 0 44.48l22.24 22.24c12.28 12.28 32.2 12.28 44.48 0L176 322.72l100.07 100.07c12.28 12.28 32.2 12.28 44.48 0l22.24-22.24c12.28-12.28 12.28-32.19 0-44.48L242.72 256z"></path></svg>';
    const startLabel = '<svg aria-hidden="true" focusable="false" data-prefix="fas" data-icon="circle" class="svg-inline--fa fa-circle fa-w-16 fa-icon fa-fw" role="img" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 512 512"><path fill="currentColor" d="M512 256C512 397.4 397.4 512 256 512C114.6 512 0 397.4 0 256C0 114.6 114.6 0 256 0C397.4 0 512 114.6 512 256zM256 48C141.1 48 48 141.1 48 256C48 370.9 141.1 464 256 464C370.9 464 464 370.9 464 256C464 141.1 370.9 48 256 48z"/></svg>';
    let btn;

    const btnOffId = 'batch-result-toggle-off';
    const btnOff = document.createElement("button");
    btnOff.setAttribute("id", btnOffId);
    btnOff.title = 'Result Toggle All Off';
    btnOff.classList.add('btn', 'btn-primary');
    btnOff.innerHTML = timesLabel;
    btnOff.onclick = () => {
        if (running) {
            stop();
        } else {
            btn = btnOff;
            start();
        }
    };
    btnGroup.appendChild(btnOff);

    const btnMixedId = 'batch-result-toggle-mixed';
    const btnMixed = document.createElement("button");
    btnMixed.setAttribute("id", btnMixedId);
    btnMixed.title = 'Result Toggle All';
    btnMixed.classList.add('btn', 'btn-primary');
    btnMixed.innerHTML = startLabel;
    btnMixed.onclick = () => {
        if (running) {
            stop();
        } else {
            btn = btnMixed;
            start();
        }
    };
    btnGroup.appendChild(btnMixed);

    const btnOnId = 'batch-result-toggle-on';
    const btnOn = document.createElement("button");
    btnOn.setAttribute("id", btnOnId);
    btnOn.title = 'Result Toggle All On';
    btnOn.classList.add('btn', 'btn-primary');
    btnOn.innerHTML = checkLabel;
    btnOn.onclick = () => {
        if (running) {
            stop();
        } else {
            btn = btnOn;
            start();
        }
    };
    btnGroup.appendChild(btnOn);

    function start() {
        // btn.innerHTML = stopLabel;
        btn.classList.remove('btn-primary');
        btn.classList.add('btn-danger');
        btnMixed.disabled = true;
        btnOn.disabled = true;
        btnOff.disabled = true;
        btn.disabled = false;
        running = true;
        stash.setProgress(0);
        buttons.length = 0;
        for (const button of document.querySelectorAll('.btn.btn-primary')) {
            if (button.innerText === 'Search') {
                buttons.push(button);
            }
        }
        maxCount = buttons.length;
        run();
    }

    function stop() {
        // btn.innerHTML = startLabel;
        btn.classList.remove('btn-danger');
        btn.classList.add('btn-primary');
        running = false;
        stash.setProgress(0);
        btnMixed.disabled = false;
        btnOn.disabled = false;
        btnOff.disabled = false;
    }

    stash.addEventListener('tagger:mutations:header', evt => {
        const el = getElementByXpath("//button[text()='Scrape All']");
        if (el && !document.getElementById(btnGroupId)) {
            const container = el.parentElement;
            container.appendChild(btnGroup);
            sortElementChildren(container);
            el.classList.add('ml-3');
        }
    });

    const resultToggleConfigId = 'result-toggle-config';

    stash.addEventListener('tagger:configuration', evt => {
        const el = evt.detail;
        if (!document.getElementById(resultToggleConfigId)) {
            const configContainer = el.parentElement;
            const resultToggleConfig = createElementFromHTML(`
<div id="${resultToggleConfigId}" class="col-md-6 mt-4">
  <h5>Result Toggle ${startLabel} Configuration</h5>
  <div class="row">
    <div class="align-items-center form-group col-md-6">
      <div class="form-check">
        <input type="checkbox" id="result-toggle-title" class="form-check-input" data-default="true">
        <label title="" for="result-toggle-title" class="form-check-label">Title</label>
      </div>
    </div>
    <div class="align-items-center form-group col-md-6">
      <div class="form-check">
        <input type="checkbox" id="result-toggle-code" class="form-check-input" data-default="true">
        <label title="" for="result-toggle-code" class="form-check-label">Code</label>
      </div>
    </div>
    <div class="align-items-center form-group col-md-6">
      <div class="form-check">
        <input type="checkbox" id="result-toggle-date" class="form-check-input" data-default="true">
        <label title="" for="result-toggle-date" class="form-check-label">Date</label>
      </div>
    </div>
    <div class="align-items-center form-group col-md-6">
      <div class="form-check">
        <input type="checkbox" id="result-toggle-cover" class="form-check-input" data-default="true">
        <label title="" for="result-toggle-cover" class="form-check-label">Cover</label>
      </div>
    </div>
    <div class="align-items-center form-group col-md-6">
      <div class="form-check">
        <input type="checkbox" id="result-toggle-stashid" class="form-check-input" data-default="true">
|
||||
<label title="" for="result-toggle-stashid" class="form-check-label">Stash ID</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="align-items-center form-group col-md-6">
|
||||
<div class="form-check">
|
||||
<input type="checkbox" id="result-toggle-url" class="form-check-input" data-default="true">
|
||||
<label title="" for="result-toggle-url" class="form-check-label">URL</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="align-items-center form-group col-md-6">
|
||||
<div class="form-check">
|
||||
<input type="checkbox" id="result-toggle-details" class="form-check-input" data-default="true">
|
||||
<label title="" for="result-toggle-details" class="form-check-label">Details</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="align-items-center form-group col-md-6">
|
||||
<div class="form-check">
|
||||
<input type="checkbox" id="result-toggle-studio" class="form-check-input" data-default="true">
|
||||
<label title="" for="result-toggle-studio" class="form-check-label">Studio</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="align-items-center form-group col-md-6">
|
||||
<div class="form-check">
|
||||
<input type="checkbox" id="result-toggle-performers" class="form-check-input" data-default="true">
|
||||
<label title="" for="result-toggle-performers" class="form-check-label">Performers</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`);
|
||||
configContainer.appendChild(resultToggleConfig);
|
||||
loadSettings();
|
||||
}
|
||||
});
|
||||
|
||||
async function loadSettings() {
    for (const input of document.querySelectorAll(`#${resultToggleConfigId} input`)) {
        // Web Storage getItem takes a single key argument; fall back to the
        // data-default attribute when no value has been stored yet
        const stored = await sessionStorage.getItem(input.id);
        input.checked = stored === null ? input.dataset.default === 'true' : stored === 'true';
        input.addEventListener('change', async () => {
            await sessionStorage.setItem(input.id, input.checked);
        });
    }
}

stash.addEventListener('tagger:mutation:add:remoteperformer', evt => toggleSearchItem(getClosestAncestor(evt.detail.node, '.search-item'), 0));
stash.addEventListener('tagger:mutation:add:remotestudio', evt => toggleSearchItem(getClosestAncestor(evt.detail.node, '.search-item'), 0));
stash.addEventListener('tagger:mutation:add:local', evt => toggleSearchItem(getClosestAncestor(evt.detail.node, '.search-item'), 0));
stash.addEventListener('tagger:mutation:add:container', evt => toggleSearchItem(getClosestAncestor(evt.detail.node, '.search-item'), 0));
stash.addEventListener('tagger:mutation:add:subcontainer', evt => toggleSearchItem(getClosestAncestor(evt.detail.node, '.search-item'), 0));

function checkSaveButtonDisplay() {
    const taggerContainer = document.querySelector('.tagger-container');
    const saveButton = getElementByXpath("//button[text()='Save']", taggerContainer);
    btnGroup.style.display = saveButton ? 'inline-block' : 'none';
}

stash.addEventListener('tagger:mutations:searchitems', checkSaveButtonDisplay);
})();
@ -1,9 +0,0 @@
name: Stash Batch Result Toggle
# requires: StashUserscriptLibrary
description: In Scene Tagger, adds buttons to toggle all StashDB scene match result fields at once. Saves clicks when you only want to save a few metadata fields - instead of turning off every field individually, you batch toggle them off, then toggle on the ones you want.
version: 1.0
ui:
  requires:
    - StashUserscriptLibrary
  javascript:
    - stashBatchResultToggle.js
@ -1,12 +0,0 @@
# Comic Archive Metadata Extractor
Follows the ComicRack standard for saving comic metadata in .cbz files: it reads the ComicInfo.xml file in the archive and writes the result into the Stash gallery.
Use the ImportList in config.yml to define which XML names should be mapped to which Stash fields.
Currently, Bookmark and Type page attributes are recognized as chapters and imported as well.
The current configuration will overwrite any value you try to set that is already set in the ComicInfo.xml. To change that, adjust the hook condition in the yml.

### Installation
Move the `comicInfoExtractor` directory into Stash's plugins directory, then reload plugins.

### Tasks
* Load all cbz Metadata - Fetch metadata for all galleries.
* Post update hook - Fetch metadata for the updated gallery.
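As a quick illustration (not shipped with the plugin; the element names are just examples that happen to match the default ImportList), this sketch shows roughly how a ComicInfo.xml inside a .cbz is read, and how `Bookmark`/`Type` page attributes turn into chapters:

```python
import xml.etree.ElementTree as ET

# Hypothetical ComicInfo.xml payload, for illustration only.
SAMPLE = """
<ComicInfo>
  <Title>My Comic</Title>
  <Genre>Outdoor, Blonde</Genre>
  <Writer>Some Studio</Writer>
  <Year>2021</Year>
  <Pages>
    <Page Image="0" Type="FrontCover"/>
    <Page Image="4" Bookmark="Chapter 1"/>
  </Pages>
</ComicInfo>
"""

info = ET.fromstring(SAMPLE)
print(info.find("Title").text)  # -> My Comic

# Pages carrying a Bookmark or Type attribute become gallery chapters;
# the plugin stores image_index as 1-based.
for page in info.find("Pages"):
    for attr in ("Bookmark", "Type"):
        if page.get(attr):
            print(int(page.get("Image")) + 1, page.get(attr))
```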
@ -1,124 +0,0 @@
import stashapi.log as log
from stashapi.stashapp import StashInterface
import yaml
import json
import os
import sys
import xml.etree.ElementTree as ET
import zipfile

per_page = 100

def processGallery(g):
    # Read the ComicInfo.xml file
    if len(g["files"]) == 0:
        log.info(g["id"] + " is not an archive. Not scanning for comic metadata.")
        return
    comicInfo = None
    with zipfile.ZipFile(g["files"][0]["path"], 'r') as archive:
        archivecontent = [x.lower() for x in archive.namelist()]
        for archivefile in archivecontent:
            if archivefile == "comicinfo.xml":
                comicInfo = ET.fromstring(archive.read("ComicInfo.xml"))
    if comicInfo is None:
        log.info(g["files"][0]["path"] + " does not contain a ComicInfo.xml file. No scan will be triggered.")
        return

    # Adjust names to the id-based field names
    for key in ImportList.keys():
        if ImportList[key] == "tags":
            ImportList[key] = "tag_ids"
        if ImportList[key] == "performers":
            ImportList[key] = "performer_ids"
        if ImportList[key] == "studio":
            ImportList[key] = "studio_id"

    # Get metadata from ComicInfo.xml
    galleryData = {"id": g["id"]}
    for item in ImportList.keys():
        value = comicInfo.find(item)
        if value is not None:
            galleryData[ImportList[item]] = value.text
    chapterData = []
    pageData = comicInfo.find("Pages")
    if pageData is not None:
        for page in pageData:
            if page.get("Bookmark"):
                chapterData.append({"image_index": int(page.get("Image")) + 1, "title": page.get("Bookmark")})
            if page.get("Type"):
                chapterData.append({"image_index": int(page.get("Image")) + 1, "title": page.get("Type")})

    # Adjust the retrieved data where necessary
    for data in galleryData.keys():
        if data in ["tag_ids", "performer_ids"]:
            galleryData[data] = [x.strip() for x in galleryData[data].split(",")]
        if data == "tag_ids":
            tagids = []
            for tag in galleryData[data]:
                tagids.append(stash.find_tag(tag, create=True)["id"])
            galleryData[data] = tagids
        if data == "performer_ids":
            performerids = []
            for performer in galleryData[data]:
                performerids.append(stash.find_performer(performer, create=True)["id"])
            galleryData[data] = performerids
        if data == "studio_id":
            galleryData[data] = stash.find_studio(galleryData[data], create=True)["id"]
        if data == "date":
            # ComicInfo.xml only stores a year; expand it to a full date
            galleryData[data] = galleryData[data] + "-01-01"
        if data == "organized":
            galleryData[data] = galleryData[data].strip().lower() == "true"
        if data == "rating100":
            galleryData[data] = int(galleryData[data])

    # Add chapters that do not exist yet, then update the gallery metadata
    for chapter in chapterData:
        addChapter = True
        for existingChapter in g["chapters"]:
            if existingChapter["title"] == chapter["title"] and existingChapter["image_index"] == chapter["image_index"]:
                addChapter = False
        if addChapter:
            stash.create_gallery_chapter({"title": chapter["title"], "image_index": chapter["image_index"], "gallery_id": g["id"]})
    stash.update_gallery(galleryData)


def processAll():
    log.info('Getting gallery count')
    count = stash.find_galleries(f={}, filter={"per_page": 1}, get_count=True)[0]
    log.info(str(count) + ' galleries to scan.')
    # ceiling division so a final partial page is not skipped
    page_count = (count + per_page - 1) // per_page
    for r in range(1, page_count + 1):
        log.info('processing ' + str(min(r * per_page, count)) + ' - ' + str(count))
        galleries = stash.find_galleries(f={}, filter={"page": r, "per_page": per_page})
        for g in galleries:
            processGallery(g)


# Start of the program
json_input = json.loads(sys.stdin.read())
FRAGMENT_SERVER = json_input["server_connection"]
stash = StashInterface(FRAGMENT_SERVER)

# Load config
with open(os.path.join(os.path.dirname(os.path.abspath(__file__)), "config.yml"), "r") as f:
    try:
        config = yaml.safe_load(f)
    except yaml.YAMLError as exc:
        log.error("Could not load config.yml: " + str(exc))
        sys.exit(1)
try:
    ImportList = config["ImportList"]
except KeyError as key:
    log.error(str(key) + " is not defined in config.yml, but is needed for this script to proceed")
    sys.exit(1)

if 'mode' in json_input['args']:
    PLUGIN_ARGS = json_input['args']["mode"]
    if 'process' in PLUGIN_ARGS:
        processAll()
elif 'hookContext' in json_input['args']:
    id = json_input['args']['hookContext']['id']
    gallery = stash.find_gallery(id)
    processGallery(gallery)
@ -1,19 +0,0 @@
name: Comic Info Extractor
description: Extract metadata from cbz files following the ComicRack standard (ComicInfo.xml)
version: 0.1
url: https://github.com/stashapp/CommunityScripts/
exec:
  - "/usr/bin/python3"
  - "{pluginDir}/comicInfoExtractor.py"
interface: raw
hooks:
  - name: Add Metadata to Gallery
    description: Update metadata for a gallery by evaluating its ComicInfo.xml.
    triggeredBy:
      - Gallery.Update.Post
      - Gallery.Create.Post
tasks:
  - name: Load all cbz Metadata
    description: Get metadata for all galleries by looking for ComicInfo.xml files in the archive.
    defaultArgs:
      mode: process
@ -1,12 +0,0 @@
#pkgignore
#ImportList is a dictionary
#that matches an XML element name from ComicInfo.xml to the corresponding value in stash (using the graphql naming)
#Fields that refer to different types of media are resolved by name and created if necessary (tags, studio, performers)
#Fields that can contain multiple values (tags, performers) are expected as a comma separated string, like
#<Genre>Outdoor, Blonde</Genre>
ImportList:
  Genre: tags
  Title: title
  Writer: studio
  Year: date
  Summary: details
@ -1,2 +0,0 @@
stashapp-tools
pyyaml
@ -1,151 +0,0 @@
# Path Default Tags
Define default tags/performers/studio for Scenes, Images, and Galleries by file path.
Big thanks to @WithoutPants - I based this entire script off of yours (markerTagToScene) and learned about Stash plugins during the process :)

## Requirements
- Stash

## Installation

- Download the whole folder 'defaultDataForPath' (defaultDataForPath.js, defaultDataForPath.yml)
- Place it in your **plugins** folder
- Reload plugins (Settings > Plugins)
- Default Data For Path (1.0) should appear.

## Usage

- This plugin executes on Tasks->Scan. Any new file added to Stash will be created with the specified data if configured.

## Configuration

- Edit the **_jsonData_** array. Refer to the Examples.

## Notes
- Remember to escape file paths!
- Note this script only works on NEWLY created Scenes/Images/Galleries. To run on existing content, the content will need to be removed from Stash and then rescanned.
- There is NO validation of tags/performers/studios being performed. They must exist in Stash and be spelled exactly the same to work. These values are not updated when they change in Stash; they will have to be reconfigured manually. If there is a mismatch, an error will be logged and the affected file will not have any default data added. The Scan task will continue to execute, however.
- If you misconfigure the script, the Task->Scan will still complete and files will be created, but you can remove those files from Stash, fix the script, and try again.
- I recommend using VS Code but any text editor should do. I especially recommend an editor with collapse functionality as your config JSON grows.
- This requires a decent bit of manual effort and verbosity to configure, but only needs to be maintained after that.
- This may slow down your Task->Scan. This script is probably sloppily written, I was not sober when I wrote it, and haven't looked much at it since it works ¯\\\_(ツ)_/¯

## Examples

> Here is a simple config Object. It defines data for any Scene/Image/Gallery found within the paths listed (it includes all subfolders). Matching files will be assigned Studio **'Brazzers'** and Tag **'Default_Data_For_Path_Tagged'**, assuming that Studio and Tag exist in Stash and are spelled this way.
<br>**_'name'_** is optional and not used by the script. Feel free to include it for the purposes of organization.
<br>**_'paths'_** is optional and defines which file paths the current config Object should apply to. If it is not included, then no files will be matched to this config, unless **_'children'_** is used, in which case the files matched in **_'children'_** will inherit from this config. See the next example.
<br>**_'studio'_** is optional and defines a Studio to apply to file matches. The Studio must exist in Stash and be spelled the same.
<br>**_'tags'_** is optional and defines Tags to apply to file matches. The Tags must exist in Stash and be spelled the same.
```
var jsonData = [
    {
        "name": "OPTIONAL NAME - NOT USED IN SCRIPT",
        "paths": [
            "C:\\Users\\UserName\\Desktop\\NOTPORN\\Brazzers",
            "D:\\SecretStorage\\Porn\\Brazzers"
        ],
        "studio": "Brazzers",
        "tags": [
            "Default_Data_For_Path_Tagged"
        ]
    }
];
```

> This config introduces a new concept. Note the _'Instagram Root'_ config object has no paths. It defines a studio and then children. This means all child config objects of it will receive the Studio _'Instagram'_ (it will overwrite any differing studio definitions on child config objects). You may also specify Performers and Tags in this way; those will be appended to the child config objects' definitions. See how the _'Celebrities'_ config object is used in a similar way to add the tag _'PERFORMER - Celebrity'_ to its underlying children (which also receive the _Instagram_ studio as it is their ancestor). It saves you from having to add the tag to each config object separately and allows for more logical config groupings to be created.

> If you also add a **_'paths'_** value to _'Instagram Root'_, then the data specified on the _'Instagram Root'_ config object will be applied to files in that path as well. Data from children will not be carried over. For example, _'PornHub Root'_ applies studio PornHub to all files in **_"C:\\Users\\UserName\\Desktop\\Pornhub"_**, and has children objects with more specific config. Instagram Root does not have such a paths specification. So a file in path **_"C:\\Users\\UserName\\Desktop\\Pornhub\\SweetBunny"_** will have Studio PornHub added, while a file in **_"C:\\Users\\UserName\\Desktop\\Instagram\\Kylie Jenner"_** will not have Studio Instagram added.

> So say a file is scanned that has file path **_"C:\\Users\\UserName\\Desktop\\Instagram\\alexisfawx\\video1.mp4"_**. The data added will be:
<br /> **Studio:** _Instagram_ - because the "Alexis Fawx" Config object is a descendant of the Instagram config object, and the scanned file matches "Alexis Fawx" Config object paths.
<br /> **Tag:** _ORGANIZED - Unorganized_ - because the scanned file matches "Default Tag - Matches all scanned files" Config object paths.
<br /> **Tag:** _PERFORMER - Pornstar_ - because the "Alexis Fawx" Config object is a child of the Pornstars config object, and the scanned file matches "Alexis Fawx" Config object paths.
<br /> **Tag:** _PERFORMER - Caucasian_ - because the scanned file matches "Alexis Fawx" Config object paths.
<br /> **Tag:** _PERFORMER - Fake Tits_ - because the scanned file matches "Alexis Fawx" Config object paths.
<br /> **Performer:** _Alexis Fawx_ - because the scanned file matches "Alexis Fawx" Config object paths.
<br />

```
var jsonData = [
    {
        "name": "Default Tag - Matches all scanned files",
        "paths": [
            ""
        ],
        "tags": [
            "ORGANIZED - Unorganized"
        ]
    },
    {
        "name": "Instagram Root",
        "studio": "Instagram",
        "children": [
            {
                "name": "Celebrities",
                "tags": [
                    "PERFORMER - Celebrity"
                ],
                "children": [
                    {
                        "name": "Kim Kardashian",
                        "paths": [
                            "C:\\Users\\UserName\\Desktop\\Instagram\\kimkardashian"
                        ],
                        "performers": [
                            "Kim Kardashian"
                        ],
                        "tags": [
                            "PERFORMER - Armenian",
                            "PERFORMER - Big Ass"
                        ]
                    },
                    {
                        "name": "Katy Perry",
                        "paths": [
                            "C:\\Users\\UserName\\Desktop\\Instagram\\katyperry"
                        ],
                        "performers": [
                            "Katy Perry"
                        ],
                        "tags": [
                            "PERFORMER - Caucasian",
                            "PERFORMER - Big Tits"
                        ]
                    }
                ]
            },
            {
                "name": "Pornstars",
                "tags": [
                    "PERFORMER - Pornstar"
                ],
                "children": [
                    {
                        "name": "Alexis Fawx",
                        "paths": [
                            "C:\\Users\\UserName\\Desktop\\Instagram\\alexisfawx"
                        ],
                        "performers": [
                            "Alexis Fawx"
                        ],
                        "tags": [
                            "PERFORMER - Caucasian",
                            "PERFORMER - Fake Tits"
                        ]
                    }
                ]
            }
        ]
    },
    {
        "name": "PornHub Root",
        "paths": [
            "C:\\Users\\UserName\\Desktop\\PornHub"
        ],
        "studio": "PornHub",
        "children": [
            (etc...)
        ]
    }
];
```
@ -1,450 +0,0 @@
var jsonData = [
    {
        "name": "OPTIONAL NAME - NOT USED IN SCRIPT",
        "paths": [
            "C:\\Users\\UserName\\Desktop\\NOTPORN\\Brazzers",
            "D:\\SecretStorage\\Porn\\Brazzers"
        ],
        "studio": "Brazzers",
        "tags": [
            "Default_Data_For_Path_Tagged"
        ]
    }
];

function ok() {
    return {
        output: "ok"
    };
}

function main() {
    var hookContext = input.Args.hookContext;
    var type = hookContext.type;
    var ID = hookContext.ID;

    if (!type || !ID) {
        // just return
        return ok();
    }

    var itemPath;
    var name = "";
    if (type === 'Scene.Create.Post') {
        itemPath = getScenePath(ID);
        name = "scene";
    } else if (type === 'Gallery.Create.Post') {
        itemPath = getGalleryPath(ID);
        name = "gallery";
    } else if (type === 'Image.Create.Post') {
        itemPath = getImagePath(ID);
        name = "image";
    }

    // nothing to do for unsupported hook types or unresolved paths
    if (!itemPath) {
        return ok();
    }

    var defaultData = getDefaultData(itemPath);

    // collect tags
    var defaultTags = [];
    for (var p = 0; p < defaultData.length; p++) {
        var tags = defaultData[p].tags;
        if (tags) {
            for (var t = 0; t < tags.length; t++) {
                var tag = tags[t];
                if (stringNotAlreadyPresent(tag, defaultTags))
                    defaultTags.push(tag);
            }
        }
    }

    // collect studio
    var addStudio = false;
    var defaultStudioId = null;
    var defaultStudio;
    for (var p = 0; p < defaultData.length; p++) {
        var studio = defaultData[p].studio;
        if (studio) {
            var studioId = getStudioId(studio);
            if (studioId) {
                defaultStudioId = studioId;
                addStudio = true;
                defaultStudio = studio;
            }
        }
    }

    // collect performers
    var defaultPerformers = [];
    for (var p = 0; p < defaultData.length; p++) {
        var performers = defaultData[p].performers;
        if (performers) {
            for (var t = 0; t < performers.length; t++) {
                var performer = performers[t];
                if (stringNotAlreadyPresent(performer, defaultPerformers))
                    defaultPerformers.push(performer);
            }
        }
    }

    // convert tags to tagIds, dropping any tag that does not exist in Stash
    var addTags = false;
    var defaultTagIds = [];
    if (defaultTags) {
        for (var i = defaultTags.length - 1; i >= 0; i--) {
            var tagId = getTagId(defaultTags[i]);
            // splice out the current entry (pop would remove the wrong one)
            tagId ? defaultTagIds.push(tagId) : defaultTags.splice(i, 1);
        }
        if (defaultTagIds && defaultTagIds.length != 0) {
            addTags = true;
        }
    }

    // convert performers to performerIds, dropping any that do not exist in Stash
    var addPerformers = false;
    var defaultPerformerIds = [];
    if (defaultPerformers) {
        for (var i = defaultPerformers.length - 1; i >= 0; i--) {
            var performerId = getPerformerId(defaultPerformers[i]);
            performerId ? defaultPerformerIds.push(performerId) : defaultPerformers.splice(i, 1);
        }
        if (defaultPerformerIds && defaultPerformerIds.length != 0) {
            addPerformers = true;
        }
    }

    // Apply all and log
    var tags = addTags ? defaultTagIds : null;
    var studio = addStudio ? defaultStudioId : null;
    var performers = addPerformers ? defaultPerformerIds : null;

    if (type === 'Scene.Create.Post') {
        setSceneData(ID, tags, studio, performers);
    } else if (type === 'Gallery.Create.Post') {
        setGalleryData(ID, tags, studio, performers);
    } else if (type === 'Image.Create.Post') {
        setImageData(ID, tags, studio, performers);
    }

    for (var o = 0; o < defaultTags.length; o++) {
        log.Info("[DefaultDataForPath]: Added tag " + defaultTags[o] + " to " + name + " " + ID);
    }
    for (var o = 0; o < defaultPerformers.length; o++) {
        log.Info("[DefaultDataForPath]: Added performer " + defaultPerformers[o] + " to " + name + " " + ID);
    }
    addStudio ? log.Info("[DefaultDataForPath]: Added studio " + defaultStudio + " to " + name + " " + ID) : "";
}

function getScenePath(ID) {
    var query = "\
    query findScene($id: ID) {\
        findScene(id: $id) {\
            path\
        }\
    }";

    var variables = {
        id: ID
    };

    var result = gql.Do(query, variables);
    var findScene = result.findScene;
    if (!findScene) {
        return null;
    }

    var path = findScene.path;
    return path;
}

function getImagePath(ID) {
    var query = "\
    query findImage($id: ID) {\
        findImage(id: $id) {\
            path\
        }\
    }";

    var variables = {
        id: ID
    };

    var result = gql.Do(query, variables);
    var findImage = result.findImage;
    if (!findImage) {
        return null;
    }

    var path = findImage.path;
    return path;
}

function getGalleryPath(ID) {
    var query = "\
    query findGallery($id: ID) {\
        findGallery(id: $id) {\
            path\
        }\
    }";

    var variables = {
        id: ID
    };

    var result = gql.Do(query, variables);
    var findGallery = result.findGallery;
    if (!findGallery) {
        return null;
    }

    var path = findGallery.path;
    return path;
}

function getTagId(tagName) {
    var query = "\
    query findTagId($filter: FindFilterType!) {\
        findTags(filter: $filter) {\
            tags {\
                id\
            }\
        }\
    }";

    var variables = {
        filter: {
            q: tagName
        }
    };

    var result = gql.Do(query, variables);
    if (result.findTags.tags[0]) {
        return result.findTags.tags[0].id;
    }
    else {
        log.Error("TAG " + tagName + " DOES NOT EXIST IN STASH!");
        return null;
    }
}

function getPerformerId(performerName) {
    var query = "\
    query findPerformerId($filter: FindFilterType!) {\
        findPerformers(filter: $filter) {\
            performers {\
                id\
            }\
        }\
    }";

    var variables = {
        filter: {
            q: performerName
        }
    };

    var result = gql.Do(query, variables);
    if (result.findPerformers.performers[0]) {
        return result.findPerformers.performers[0].id;
    }
    else {
        log.Error("PERFORMER " + performerName + " DOES NOT EXIST IN STASH!");
        return null;
    }
}

function getStudioId(studioName) {
    var query = "\
    query findStudioId($filter: FindFilterType!) {\
        findStudios(filter: $filter) {\
            studios {\
                id\
            }\
        }\
    }";

    var variables = {
        filter: {
            q: studioName
        }
    };

    var result = gql.Do(query, variables);
    if (result.findStudios.studios[0]) {
        return result.findStudios.studios[0].id;
    }
    else {
        log.Error("STUDIO " + studioName + " DOES NOT EXIST IN STASH!");
        return null;
    }
}

function stringNotAlreadyPresent(text, itemArray) {
    for (var i = 0; i < itemArray.length; i++) {
        if (itemArray[i] === text) {
            return false;
        }
    }
    return true;
}

// Recursively walk a config object: objects with matching paths collect
// their own data plus anything inherited from ancestors; objects without
// paths pass their data down to their children instead
function addAllData(obj, lowerItemPath, defaultData, pTags, pPerformers, pStudio) {
    if (obj) {
        if (obj.paths) {
            var paths = obj.paths;
            if (containsPath(paths, lowerItemPath)) {
                // inject data from parent if avail
                if (pTags) {
                    if (!obj.tags) {
                        obj.tags = [];
                    }
                    obj.tags = obj.tags.concat(pTags);
                }
                if (pPerformers) {
                    if (!obj.performers) {
                        obj.performers = [];
                    }
                    obj.performers = obj.performers.concat(pPerformers);
                }
                if (pStudio) {
                    obj.studio = pStudio;
                }
                defaultData.push(obj);
            }
        }
        else {
            // add defaults to children
            if (obj.tags) {
                if (!pTags) {
                    pTags = obj.tags;
                }
                else {
                    pTags = pTags.concat(obj.tags);
                }
            }
            if (obj.performers) {
                if (!pPerformers) {
                    pPerformers = obj.performers;
                }
                else {
                    pPerformers = pPerformers.concat(obj.performers);
                }
            }
            if (obj.studio) {
                pStudio = obj.studio;
            }
        }
        if (obj.children) {
            for (var o = 0; o < obj.children.length; o++) {
                defaultData = addAllData(obj.children[o], lowerItemPath, defaultData, pTags, pPerformers, pStudio);
            }
        }
    }
    return defaultData;
}

function getDefaultData(itemPath) {
    var defaultData = [];
    var lowerItemPath = itemPath.toLowerCase();
    for (var i = 0; i < jsonData.length; i++) {
        var obj = jsonData[i];
        defaultData = addAllData(obj, lowerItemPath, defaultData, null, null, null);
    }

    return defaultData;
}

function containsPath(paths, inputPath) {
    for (var p = 0; p < paths.length; p++) {
        var path = paths[p].toLowerCase() + '';
        if (stringContains(inputPath, path)) {
            log.Info("[DefaultDataForPath]: " + inputPath + " MATCH " + path);
            return true;
        }
    }
    return false;
}

function stringContains(value, searchFor) {
    var v = (value || '').toLowerCase();
    var v2 = searchFor;
    if (v2) {
        v2 = v2.toLowerCase();
    }
    return v.indexOf(v2) > -1;
}

// (currently unused)
function containsElem(items, elem) {
    for (var i = 0; i < items.length; i++) {
        var item = items[i].toLowerCase();
        if (item === elem) {
            return true;
        }
    }
    return false;
}

function setSceneData(sceneID, tagIDs, studioID, performerIds) {
    var mutation = "\
    mutation sceneUpdate($input: SceneUpdateInput!) {\
        sceneUpdate(input: $input) {\
            id\
        }\
    }";

    var variables = {
        input: {
            id: sceneID,
            tag_ids: tagIDs,
            studio_id: studioID,
            performer_ids: performerIds
        }
    };

    gql.Do(mutation, variables);
}

function setImageData(imageID, tagIDs, studioID, performerIds) {
    var mutation = "\
    mutation imageUpdate($input: ImageUpdateInput!) {\
        imageUpdate(input: $input) {\
            id\
        }\
    }";

    var variables = {
        input: {
            id: imageID,
            tag_ids: tagIDs,
            studio_id: studioID,
            performer_ids: performerIds
        }
    };

    gql.Do(mutation, variables);
}

function setGalleryData(galleryID, tagIDs, studioID, performerIds) {
    var mutation = "\
    mutation galleryUpdate($input: GalleryUpdateInput!) {\
        galleryUpdate(input: $input) {\
            id\
        }\
    }";

    var variables = {
        input: {
            id: galleryID,
            tag_ids: tagIDs,
            studio_id: studioID,
            performer_ids: performerIds
        }
    };

    gql.Do(mutation, variables);
}

main();
@ -1,15 +0,0 @@
# example plugin config
name: Default Data For Path
description: Adds configured Tags, Performers and/or Studio to all newly scanned Scenes, Images and Galleries.
url: https://github.com/stashapp/CommunityScripts
version: 1.1
exec:
  - defaultDataForPath.js
interface: js
hooks:
  - name: Add Configured Data on Scan
    description: Adds configured tags/performers/studio on Task->Scan creation.
    triggeredBy:
      - Scene.Create.Post
      - Gallery.Create.Post
      - Image.Create.Post
@ -1,8 +0,0 @@
Marks duplicate markers with a tag: `[Marker: Duplicate]`

Tasks -> Search for duplicate markers

It will add the tag to any markers that have an **exact** match for title, time **and** primary tag.
It only tags existing markers; it is up to the user to go to the tag and navigate to the scene, where the duplicates will be highlighted with the tag.

(it's technically a Dupe Marker Marker)
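To make the matching rule concrete, here is a minimal sketch (illustrative only, mirroring the composite key the script builds): two markers count as duplicates only when title, time and primary tag all collide exactly.

```python
# Sketch of the duplicate test: a composite key of title, seconds and
# primary tag id; any difference in any component means "not a dupe".
def marker_key(marker):
    return (
        str(marker["title"]),
        str(marker["seconds"]),
        str(marker["primary_tag"]["id"]),
    )

a = {"title": "Intro", "seconds": 12.0, "primary_tag": {"id": "3"}}
b = {"title": "Intro", "seconds": 12.0, "primary_tag": {"id": "3"}}
c = {"title": "Intro", "seconds": 13.0, "primary_tag": {"id": "3"}}

assert marker_key(a) == marker_key(b)  # exact match -> duplicate
assert marker_key(a) != marker_key(c)  # time differs -> not a duplicate
```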
@ -1,69 +0,0 @@
import json
import sys

import stashapi.log as log
from stashapi.stashapp import StashInterface

FRAGMENT = json.loads(sys.stdin.read())
MODE = FRAGMENT['args']['mode']
stash = StashInterface(FRAGMENT["server_connection"])
dupe_marker_tag = stash.find_tag('[Marker: Duplicate]', create=True).get("id")

def findScenesWithMarkers():
    totalDupes = 0
    scenes = stash.find_scenes(f={"has_markers": "true"}, fragment="id")
    for scene in scenes:
        totalDupes += checkScene(scene)
    log.info("Found %d duplicate markers across %d scenes" % (totalDupes, len(scenes)))

def addMarkerTag(marker):
    query = """
    mutation SceneMarkerUpdate($input:SceneMarkerUpdateInput!) {
        sceneMarkerUpdate(input: $input) {
            id
        }
    }
    """
    oldTags = [tag["id"] for tag in marker["tags"]]
    if dupe_marker_tag in oldTags:
        return
    oldTags.append(dupe_marker_tag)
    newMarker = {
        "id": marker["id"],
        "tag_ids": oldTags
    }
    stash._callGraphQL(query, {"input": newMarker})
    #stash.update_scene_marker(newMarker, "id")

def checkScene(scene):
    seen = set()
    dupes = []
    markers = stash.find_scene_markers(scene['id'])
    # find duplicate pairs: a marker is a dupe when its title, time and
    # primary tag exactly match a marker we have already seen
    for marker in markers:
        sortidx = ";".join([
            str(marker["title"]),
            str(marker["seconds"]),
            str(marker["primary_tag"]["id"])
        ])
        if sortidx not in seen:
            seen.add(sortidx)
        else:
            dupes.append(marker)
    # add tag
    if dupes:
        log.debug("Found %d duplicate markers in scene %s" % (len(dupes), scene['id']))
        for dupe in dupes:
            addMarkerTag(dupe)
    return len(dupes)

def main():
    if MODE == "search":
        findScenesWithMarkers()
    log.exit("Plugin exited normally.")

if __name__ == '__main__':
    main()
@ -1,13 +0,0 @@
name: Dupe Marker Detector
description: Finds and marks duplicate markers
version: 0.1
url: https://github.com/stashapp/CommunityScripts/
exec:
  - python
  - "{pluginDir}/dupeMarker.py"
interface: raw
tasks:
  - name: 'Search'
    description: Search for duplicate markers
    defaultArgs:
      mode: search
@ -1 +0,0 @@
stashapp-tools
@ -1,398 +0,0 @@
function ok() {
    return {
        output: "ok"
    };
}

function main() {
    var hookContext = input.Args.hookContext;
    var type = hookContext.type;
    var ID = hookContext.ID;

    if (!ID) {
        return ok();
    }

    var filenameFetcher;
    var saver;
    if (type === 'Scene.Create.Post') {
        filenameFetcher = getSceneFilename;
        saver = updateScene;
    } else if (type === 'Gallery.Create.Post') {
        filenameFetcher = getGalleryFilename;
        saver = updateGallery;
    } else {
        return ok();
    }

    var filename = filenameFetcher(ID);
    if (!filename) {
        return ok();
    }

    filename = cleanFilename(filename);
    var parseResult = parseFilename(filename);

    saver(ID, parseResult);

    return ok();
}

function getSceneFilename(sceneID) {
    var query = "\
    query findScene($id: ID) {\
        findScene(id: $id) {\
            path\
        }\
    }";

    var variables = {
        id: sceneID
    };

    var result = gql.Do(query, variables);
    var findScene = result.findScene;
    if (!findScene) {
        return null;
    }

    var path = findScene.path;
    return path.substring(path.lastIndexOf('/') + 1);
}

function updateScene(sceneID, parseResult) {
    var query = "\
    mutation SceneUpdate($input: SceneUpdateInput!) {\
        sceneUpdate(input: $input) {\
            id\
        }\
    }";

    var variables = {
        input: parseResult
    };

    variables.input.id = sceneID;

    gql.Do(query, variables);
}

function getGalleryFilename(galleryID) {
    var query = "\
    query findGallery($id: ID!) {\
        findGallery(id: $id) {\
            path\
        }\
    }";

    var variables = {
        id: galleryID
    };

    var result = gql.Do(query, variables);
    var findGallery = result.findGallery;
    if (!findGallery) {
        return null;
    }

    var path = findGallery.path;
    return path.substring(path.lastIndexOf('/') + 1);
}

function updateGallery(galleryID, parseResult) {
    var query = "\
    mutation GalleryUpdate($input: GalleryUpdateInput!) {\
        galleryUpdate(input: $input) {\
            id\
        }\
    }";

    var variables = {
        input: parseResult
    };

    variables.input.id = galleryID;

    gql.Do(query, variables);
}

// Count how many leading filename parts match one of the given names
// (or aliases); returns the length of the longest such match
function matchNames(parts, name, aliases) {
    var names = [name].concat(aliases);

    var partRegexes = [];

    for (var i = 0; i < parts.length; i++) {
        partRegexes[i] = new RegExp('^' + parts[i].toLowerCase() + '[. \\-_]*');
    }

    var cleanRegex = /[. \-_]/g;
    var longestMatch = 0;
    for (var i = 0; i < names.length; i++) {
        var candidate = names[i].replace(cleanRegex, '').toLowerCase();
        for (var j = 0; j < partRegexes.length; j++) {
            if (!partRegexes[j].test(candidate)) {
                break;
            }

            candidate = candidate.replace(partRegexes[j], '');

            if (candidate.length === 0) {
                if (j + 1 > longestMatch) {
                    longestMatch = j + 1;
                }
            }
        }
    }

    return longestMatch;
}

function cleanFilename(name) {
    name = name
        // remove imageset-...[rarbg]
        .replace(/imageset-[\w\d]+\[rarbg]/i, '')
        // replace [...] with just ...
        .replace(/\[(.*?)]/g, '$1')
        // replace (...) with just ...
        .replace(/\((.*?)\)/g, '$1')
        // replace {...} with just ...
        .replace(/{(.*?)}/g, '$1')
    ;

    var blockList = [
        'mp4',
        'mov',
        'zip',
        'xxx',
        '4k',
        '4096x2160',
        '3840x2160',
        '2160p',
        '1080p',
        '1920x1080',
        '60fps',
        '30fps',
        'repack',
        'ktr',
    ];
    var regExp = new RegExp('(_|[^\\w\\d]|^)(' + blockList.join('|') + ')(_|[^\\w\\d]|$)', 'i');
    while (regExp.test(name)) {
        name = name.replace(regExp, '$1$3');
    }

    // If name starts with <...>.com remove the .com (sometimes names include studio name as site/domain)
    name = name.replace(/^([\w\d-]+?)\.com/, '$1');

    name = name
        // Remove everything except letters and digits at the start
        .replace(/^(_|[^\w\d])+/, '')
        // Remove everything except letters and digits at the end
        .replace(/(_|[^\w\d])+$/, '')
    ;

    return name;
}

function matchStudio(parts, result) {
    var query = "\
    query findStudios($studio_filter: StudioFilterType, $filter: FindFilterType!) {\
        findStudios(studio_filter: $studio_filter, filter: $filter) {\
            studios {\
                id\
                name\
                aliases\
            }\
        }\
    }";

    var searchTerm = parts[0].substring(0, 2);
    if (parts[0].substring(0, 1) === 'a') {
        searchTerm = parts[0].substring(1, 3);
    }
    var variables = {
        filter: {
            per_page: -1,
        },
        studio_filter: {
            name: {
                modifier: "INCLUDES",
                value: searchTerm
            },
            OR: {
                aliases: {
                    modifier: "INCLUDES",
                    value: searchTerm
                }
            }
        }
    };

    var queryResult = gql.Do(query, variables);
    var studios = queryResult.findStudios.studios;
    if (!studios.length && parts[0].substring(0, 1) === 'a') {
        variables.studio_filter.name.value = variables.studio_filter.OR.aliases.value = parts[0].substring(1, 3);
        queryResult = gql.Do(query, variables);
        studios = queryResult.findStudios.studios;
    }

    var matchingParts = 0;
    for (var i = 0; i < studios.length; i++) {
        var studio = studios[i];
        matchingParts = matchNames(parts, studio.name, studio.aliases);
        if (matchingParts === 0) {
            continue;
        }

        result.studio_id = studio.id;

        break;
    }

    return matchingParts;
}

function matchDate(parts, result) {
    if (
        parts.length < 3 ||
        !/^(\d{2}|\d{4})$/.test(parts[0]) ||
        !/^\d{2}$/.test(parts[1]) ||
        !/^\d{2}$/.test(parts[2])
    ) {
        return 0;
    }

    var year = parseInt(parts[0], 10);
    var month = parseInt(parts[1], 10);
    var day = parseInt(parts[2], 10);

    if (year < 100) {
        year += 2000;
    }

    if (
        year < 2000 || year > 2100 ||
        month < 1 || month > 12 ||
        day < 1 || day > 31
    ) {
        return 0;
    }

    result.date = year + "-" + (month < 10 ? "0" + month : month) + "-" + (day < 10 ? "0" + day : day);

    return 3;
}

function matchPerformers(parts, result) {
    var query = "\
    query findPerformers($performer_filter: PerformerFilterType, $filter: FindFilterType!) {\
        findPerformers(performer_filter: $performer_filter, filter: $filter) {\
            performers {\
                id\
                name\
                aliases\
            }\
        }\
    }";
    var variables = {
        filter: {
            per_page: -1
        },
        performer_filter: {
            name: {
                modifier: "INCLUDES"
            },
            OR: {
                aliases: {
                    modifier: "INCLUDES"
                }
            }
        }
    };

    var totalMatchingParts = 0;
    result.performer_ids = [];
    do {
        variables.performer_filter.name.value = variables.performer_filter.OR.aliases.value = parts[0].substring(0, 2);

        var queryResult = gql.Do(query, variables);
        var performers = queryResult.findPerformers.performers;
        if (!performers.length) {
            parts.shift();
            continue;
        }

        var maxMatchLength = 0;
        var matches = [];
        for (var i = 0; i < performers.length; i++) {
            var performer = performers[i];
            var aliases = performer.aliases ? performer.aliases.split(/\s*[,;]+\s*/) : [];
            var matchingParts = matchNames(parts, performer.name, aliases);
            if (matchingParts === 0) {
                continue;
            }

            if (matchingParts > maxMatchLength) {
                maxMatchLength = matchingParts;
                matches = [performer.id];
            } else if (matchingParts === maxMatchLength) {
                matches.push(performer.id);
            }
        }

        if (maxMatchLength === 0) {
            break;
        }

        result.performer_ids = result.performer_ids.concat(matches);

        totalMatchingParts += maxMatchLength;

        parts = parts.slice(maxMatchLength);
        while (parts.length > 0 && (parts[0].toLowerCase() === 'and' || parts[0] === '&')) {
            parts.shift();
            totalMatchingParts += 1;
        }
    } while (parts.length > 0);

    return totalMatchingParts;
}

function parseFilename(name) {
    var parts = name.split(/[. \-_,]+/);

    var matchers = [
        matchStudio,
        matchDate,
        matchPerformers,
    ];

    var result = {};
    var hasMatched = false;
    for (var matchTries = 0; matchTries < 3 && !hasMatched && parts.length; matchTries++) {
        for (var i = 0; i < matchers.length && parts.length > 0; i++) {
            var matchedParts = matchers[i](parts, result);

            if (matchedParts > 0) {
                hasMatched = true;
                parts = parts.slice(matchedParts);
            }
        }

        // If no matchers worked remove a part. Maybe the format is correct but the studio isn't found? etc.
        if (!hasMatched) {
            parts.shift();
        }
    }

    if (hasMatched && parts.length > 0) {
        var title = parts.join(' ');
        // Look-behind assertions are not supported, so we can't use `replace(/(?<=.)([A-Z])/g, ' $1')` and have to loop instead. Otherwise, for example, 'FooABar' would become 'Foo ABar' instead of 'Foo A Bar'
        while (/[^\s][A-Z]/.test(title)) {
            title = title.replace(/([^\s])([A-Z])/g, '$1 $2');
        }
        result.title = title.trim();
    }
    return result;
}

main();
@ -1,13 +0,0 @@
name: Filename parser
description: Parses filename into studio, date, performers and title
url:
version: 0.1
exec:
  - filenameParser.js
interface: js
hooks:
  - name: Prepopulates data based on filename
    description:
    triggeredBy:
      - Scene.Create.Post
      - Gallery.Create.Post
@ -1,81 +0,0 @@
function ok() {
    return {
        output: "ok"
    };
}

function main() {
    var hookContext = input.Args.hookContext;
    var opInput = hookContext.input;
    var primaryTagID = opInput.primary_tag_id;
    var sceneID = opInput.scene_id;

    // we can't currently find scene markers. If it's not in the input
    // then just return
    if (!primaryTagID || !sceneID) {
        // just return
        return ok();
    }

    // get the existing scene tags
    var sceneTags = getSceneTags(sceneID);
    var tagIDs = [];
    for (var i = 0; i < sceneTags.length; ++i) {
        var tagID = sceneTags[i].id;
        if (tagID == primaryTagID) {
            log.Debug("primary tag already exists on scene");
            return;
        }

        tagIDs.push(tagID);
    }

    // set the tag on the scene if not present
    tagIDs.push(primaryTagID);

    setSceneTags(sceneID, tagIDs);
    log.Info("added primary tag " + primaryTagID + " to scene " + sceneID);
}

function getSceneTags(sceneID) {
    var query = "\
    query findScene($id: ID) {\
        findScene(id: $id) {\
            tags {\
                id\
            }\
        }\
    }";

    var variables = {
        id: sceneID
    };

    var result = gql.Do(query, variables);
    var findScene = result.findScene;
    if (findScene) {
        return findScene.tags;
    }

    return [];
}

function setSceneTags(sceneID, tagIDs) {
    var mutation = "\
    mutation sceneUpdate($input: SceneUpdateInput!) {\
        sceneUpdate(input: $input) {\
            id\
        }\
    }";

    var variables = {
        input: {
            id: sceneID,
            tag_ids: tagIDs
        }
    };

    gql.Do(mutation, variables);
}

main();
@ -1,14 +0,0 @@
# example plugin config
name: Scene Marker Tags to Scene
description: Adds primary tag of Scene Marker to the Scene on marker create/update.
url: https://github.com/stashapp/CommunityScripts
version: 1.0
exec:
  - markerTagToScene.js
interface: js
hooks:
  - name: Update scene with scene marker tag
    description: Adds primary tag of Scene Marker to the Scene on marker create/update.
    triggeredBy:
      - SceneMarker.Create.Post
      - SceneMarker.Update.Post
@ -1,229 +0,0 @@
# Path Parser

Updates scene info based on the file path.

## Contents
* [Hooks](#hooks)
* [Triggers](#triggers)
* [Rules](#rules)
* [Patterns](#patterns)
* [Fields](#fields)
* [Examples](#examples)

## Hooks

### Run Rules on scan

Updates scene info whenever a new scene is added.

You can disable this hook by deleting the following section from `pathParser.yml`:

```yml
hooks:
  - name: Run Rules on scan
    description: Updates scene info whenever a new scene is added.
    triggeredBy:
      - Scene.Create.Post
```

## Triggers

### Create Tags

Adds the \[Run\] and \[Test\] tags (configurable from pathParser.yml).

You can remove this trigger by deleting the following section from `pathParser.yml`:

```yml
- name: Create Tags
  description: Create tags used by the path parser tasks.
  defaultArgs:
    task: createTags
    runTag: '[Run]'
    testTag: '[Test]'
```

### Remove Tags

Removes the \[Run\] and \[Test\] tags (configurable from pathParser.yml).

You can remove this trigger by deleting the following section from `pathParser.yml`:

```yml
- name: Remove Tags
  description: Remove tags used by the path parser tasks.
  defaultArgs:
    task: removeTags
    runTag: '[Run]'
    testTag: '[Test]'
```

### Run Rules

Run rules for scenes containing the \[Run\] tag (configurable from pathParser.yml).

You can remove this trigger by deleting the following section from `pathParser.yml`:

```yml
- name: Run Rules
  description: Run rules for scenes containing the run tag.
  defaultArgs:
    task: runRules
    runTag: '[Run]'
```

### Test Rules

Test rules for scenes containing the \[Test\] tag (configurable from pathParser.yml).

You can remove this trigger by deleting the following section from `pathParser.yml`:

```yml
- name: Test Rules
  description: Test rules for scenes containing the test tag.
  defaultArgs:
    task: testRules
    testTag: '[Test]'
```

## Rules

A single rule must have a name, pattern, and fields:

```jsonc
{
  name: 'Your Rule',

  // This pattern would match a scene with the path: folder/folder/file.mp4
  pattern: [
    'folder',
    'folder',
    'file'
  ],

  // The matched scene would update its title and studio
  fields: {
    title: 'Scene Title',
    studio: 'Studio'
  }
}
```

## Patterns

Each entry in pattern will match a folder or the filename (without extension).

Patterns behave differently depending on the type:

| Type     | Format                             | Description                                 |
|:---------|:-----------------------------------|:-------------------------------------------|
| null     | `null`                             | Matches any value                           |
| String   | `'string'`                         | Matches a specific value exactly            |
| RegExp   | `/regex/`                          | Match using a regex<sup>1</sup>             |
| Array    | `['string1', 'string2', /regex/]`  | Match any one of the sub-patterns           |
| Function | `function (path) { return path; }` | Match if function returns a non-null value  |

1. Parenthesis matches in the regex can be used in [field](#fields) replacements.

## Fields

The first matching rule will update the scene with the fields indicated:

| Field       | Format                            |
| :-----------|:----------------------------------|
| title       | `'New Title'`                     |
| studio      | `'Studio Name'`                   |
| movie_title | `'Movie Name'`                    |
| scene_index | `'1'`                             |
| performers  | `'Performer 1, Performer 2, ...'` |
| tags        | `'Tag 1, Tag 2, ...'`             |

Matched patterns can be inserted into any field by referencing their indexed value ([see examples](#examples) below).

## Examples

### Specific studio folders with scenes

```js
{
  name: 'Studio/Scene',
  pattern: [
    ['Specific Studio', 'Another Studio'], // A specific studio name
    null // Any filename
  ],
  fields: {
    title: '#1', // 1 refers to the second pattern (filename)
    studio: '#0' // 0 refers to the first pattern (folder)
  }
}
```

Input: `X:\DCE\Black Adam.mp4`

Output:

0. DCE
1. Black Adam

### Studio with movie sub-folder and scenes

```js
{
  name: 'Studio/Movie (YEAR)/Scene - Scene #',
  pattern: [
    null, // Any studio name
    /(.+) \(\d{4}\)/, // A sub-folder named like 'Movie Title (2022)'
    /(.+) - \w+ (\d+)/, // A filename like 'Scene Title - Scene 1'
  ],
  fields: {
    title: '#2',
    studio: '#0',
    movie_title: '#1',
    scene_index: '#3'
  }
}
```

Input: `X:\HBO\House of the Dragon (2022)\House of the Dragon - Episode 1.mp4`

Output:

0. HBO
1. House of the Dragon
2. House of the Dragon
3. 1

### Filename with performers using function

```js
{
  name: 'Studio/Scene.Performers.S##E##',
  pattern: [
    null, // Any studio name
    function (path) {
      var parts = path.split('.');
      var performers = parts[1].split('&').map(function (performer) { return performer.trim() }).join(',');
      var series = /S(\d{2})E(\d{2})/.exec(parts[2]);
      return [parts[0], performers, parseInt(series[1], 10), parseInt(series[2], 10)];
    }
  ],
  fields: {
    title: '#1',
    studio: '#0',
    performers: '#2',
    movie_title: '#1 - Season #3',
    scene_index: '#4'
  }
}
```

Input: `X:\Prime\The Boys.Karl Urban & Jack Quaid.S06E09.mp4`

Output:

0. Prime
1. The Boys
2. Karl Urban,Jack Quaid
3. 6
4. 9
@ -1,748 +0,0 @@
// Common Patterns
var patterns = {
    movieTitleAndYear: /(.+) \(\d{4}\)/,
    sceneTitleAndPerformers: /(.+) - ([A-Za-zÀ-ú, ]+)/
}

var rules = [
    {
        name: 'Rule 1',
        pattern: [
            'Specific Studio',
            null,
            null
        ],
        fields: {
            studio: '#0',
            title: '#2'
        }
    },
    {
        name: 'Rule 2',
        pattern: [
            ['One Studio', 'Another Studio'],
            patterns.movieTitleAndYear,
            patterns.sceneTitleAndPerformers
        ],
        fields: {
            title: '#2',
            studio: '#0',
            performers: '#3'
        }
    }
];

/* ----------------------------------------------------------------------------
// DO NOT EDIT BELOW!
---------------------------------------------------------------------------- */
function main()
{
    try
    {
        switch (getTask(input.Args))
        {
            case 'createTags':
                var runTag = getArg(input.Args, 'runTag');
                var testTag = getArg(input.Args, 'testTag');
                createTags([runTag, testTag]);
                break;

            case 'removeTags':
                var runTag = getArg(input.Args, 'runTag');
                var testTag = getArg(input.Args, 'testTag');
                removeTags([runTag, testTag]);
                break;

            case 'runRules':
                var runTag = getArg(input.Args, 'runTag');
                initBasePaths();
                runRules(runTag);
                break;

            case 'testRules':
                DEBUG = true;
                var testTag = getArg(input.Args, 'testTag');
                initBasePaths();
                runRules(testTag);
                break;

            case 'scene':
                var id = getId(input.Args);
                initBasePaths();
                matchRuleWithSceneId(id, applyRule);
                break;

            case 'image':
                // Image support is not implemented yet
                var id = getId(input.Args);
                initBasePaths();
                break;

            default:
                throw 'Unsupported task';
        }
    }
    catch (e)
    {
        return { Output: 'error', Error: e };
    }

    return { Output: 'ok' };
}

// Get an input arg
function getArg(inputArgs, arg)
{
    if (inputArgs.hasOwnProperty(arg))
    {
        return inputArgs[arg];
    }

    throw 'Input is missing ' + arg;
}

// Determine task based on input args
function getTask(inputArgs)
{
    if (inputArgs.hasOwnProperty('task'))
    {
        return inputArgs.task;
    }

    if (!inputArgs.hasOwnProperty('hookContext'))
    {
        return;
    }

    switch (inputArgs.hookContext.type)
    {
        case 'Scene.Create.Post':
            return 'scene';

        case 'Image.Create.Post':
            return 'image';
    }
}

// Get stash library paths from the configuration
function initBasePaths()
{
    var query = '\
query Query {\
    configuration {\
        general {\
            stashes {\
                path\
            }\
        }\
    }\
}';

    var result = gql.Do(query);
    if (!result.configuration)
    {
        throw 'Unable to get library paths';
    }

    BASE_PATHS = result.configuration.general.stashes.map(function (stash)
    {
        return stash.path;
    });

    if (BASE_PATHS == null || BASE_PATHS.length == 0)
    {
        throw 'Unable to get library paths';
    }
}

// Create tags if they do not already exist
function createTags(tags)
{
    var query = '\
mutation TagCreate($input: TagCreateInput!) {\
    tagCreate(input: $input) {\
        id\
    }\
}';

    tags.forEach(function (tag)
    {
        if (tryGetTag(tag) !== null)
        {
            return;
        }

        var variables = {
            input: {
                name: tag
            }
        };

        var result = gql.Do(query, variables);
        if (!result.tagCreate)
        {
            throw 'Could not create tag ' + tag;
        }
    });
}

// Remove tags if they exist
function removeTags(tags)
{
    tags.forEach(function (tag)
    {
        var tagId = tryGetTag(tag);
        if (tagId === null)
        {
            return;
        }

        var query = '\
mutation TagsDestroy($ids: [ID!]!) {\
    tagsDestroy(ids: $ids)\
}';

        var variables = {
            ids: [ tagId ]
        };

        var result = gql.Do(query, variables);
        if (!result.tagsDestroy)
        {
            throw 'Unable to remove tag ' + tag;
        }
    });
}

// Run rules for scenes containing tag
function runRules(tag)
{
    var tagId = tryGetTag(tag);
    if (tagId === null)
    {
        throw 'Tag ' + tag + ' does not exist';
    }

    var query = '\
query FindScenes($sceneFilter: SceneFilterType) {\
    findScenes(scene_filter: $sceneFilter) {\
        scenes {\
            id\
        }\
    }\
}';

    var variables = {
        sceneFilter: {
            tags: {
                value: tagId,
                modifier: 'INCLUDES'
            }
        }
    };

    var result = gql.Do(query, variables);
    if (!result.findScenes || result.findScenes.scenes.length == 0)
    {
        throw 'No scenes found with tag ' + tag;
    }

    result.findScenes.scenes.forEach(function (scene)
    {
        matchRuleWithSceneId(scene.id, applyRule);
    });
}

// Get scene/image id from input args
function getId(inputArgs)
{
    var id = inputArgs.hookContext.id;
    if (id == null)
    {
        throw 'Input is missing id';
    }

    return id;
}

// Apply callback function to first matching rule for id
function matchRuleWithSceneId(sceneId, cb)
{
    var query = '\
query FindScene($findSceneId: ID) {\
    findScene(id: $findSceneId) {\
        files {\
            path\
        }\
    }\
}';

    var variables = {
        findSceneId: sceneId
    };

    var result = gql.Do(query, variables);
    if (!result.findScene || result.findScene.files.length == 0)
    {
        throw 'Missing scene for id: ' + sceneId;
    }

    for (var i = 0; i < result.findScene.files.length; i++)
    {
        try
        {
            matchRuleWithPath(sceneId, result.findScene.files[i].path, cb);

            if (DEBUG && bufferedOutput !== null && bufferedOutput !== '')
            {
                log.Info('[PathParser] ' + bufferedOutput);
            }

            return;
        }
        catch (e)
        {
            continue;
        }
    }

    if (DEBUG && bufferedOutput !== null && bufferedOutput !== '')
    {
        log.Info('[PathParser] ' + bufferedOutput);
    }

    throw 'No rule matches id: ' + sceneId;
}

// Apply callback to first matching rule for path
function matchRuleWithPath(sceneId, path, cb)
{
    // Remove base path
    for (var i = 0; i < BASE_PATHS.length; i++)
    {
        if (path.slice(0, BASE_PATHS[i].length) === BASE_PATHS[i])
        {
            path = path.slice(BASE_PATHS[i].length);
        }
    }

    if (DEBUG)
    {
        bufferedOutput = path + '\n';
    }

    // Split path into parts
    var parts = path.split(/[\\/]/);

    // Remove extension from filename
    parts[parts.length - 1] = parts[parts.length - 1].slice(0, parts[parts.length - 1].lastIndexOf('.'));

    for (var i = 0; i < rules.length; i++)
    {
        var sceneData = testRule(rules[i].pattern, parts);
        if (sceneData !== null)
        {
            if (DEBUG)
            {
                bufferedOutput += 'Rule: ' + rules[i].name + '\n';
            }

            log.Debug('[PathParser] Rule: ' + rules[i].name + '\nPath: ' + path);
            cb(sceneId, rules[i].fields, sceneData);
            return;
        }
    }

    bufferedOutput += 'No matching rule!';
    throw 'No matching rule for path: ' + path;
}

// Test a single rule
function testRule(pattern, parts)
{
    if (pattern.length !== parts.length)
    {
        return null;
    }

    var matchedParts = [];
    for (var i = 0; i < pattern.length; i++)
    {
        var subMatches = testPattern(pattern[i], parts[i]);
        if (subMatches == null)
        {
            return null;
        }

        matchedParts = [].concat(matchedParts, subMatches);
    }

    return matchedParts;
}

function testPattern(pattern, part)
{
    // Match anything
    if (pattern == null)
    {
        return [part];
    }

    // Simple match
    if (typeof pattern === 'string')
    {
        if (pattern === part)
        {
            return [part];
        }

        return null;
    }

    // Predicate match
    if (typeof pattern == 'function')
    {
        var results = pattern(part);
        if (results != null)
        {
            return results;
        }

        return null;
    }

    // Array match
    if (pattern instanceof Array)
    {
        for (var i = 0; i < pattern.length; i++)
        {
            var results = testPattern(pattern[i], part);
            if (results != null)
            {
                return results;
            }
        }

        return null;
    }

    // RegExp match
    if (pattern instanceof RegExp)
    {
        var results = pattern.exec(part);
        if (results === null)
        {
            return null;
        }

        return results.slice(1);
    }
}

// Apply rule
function applyRule(sceneId, fields, data)
{
    var any = false;
    var variables = {
        input: {
            id: sceneId
        }
    };

    if (DEBUG)
    {
        for (var i = 0; i < data.length; i++)
        {
            bufferedOutput += '#' + i + ': ' + data[i] + '\n';
        }
    }

    for (var field in fields)
    {
        var value = fields[field];
        // Replace '#n' references, highest index first so '#1' does not clobber '#10'
        for (var i = data.length - 1; i >= 0; i--)
        {
            value = value.replace('#' + i, data[i]);
        }

        switch (field)
        {
            case 'title':
                if (DEBUG)
                {
                    bufferedOutput += field + ': ' + value + '\n';
                }

                variables.input['title'] = value;
                any = true;
                continue;

            case 'studio':
                var studioId = tryGetStudio(value);
                if (studioId == null)
                {
                    continue;
                }

                if (DEBUG)
                {
                    bufferedOutput += field + ': ' + value + '\n';
                    bufferedOutput += 'studio_id: ' + studioId + '\n';
                }

                variables.input['studio_id'] = studioId;
                any = true;
                continue;

            case 'movie_title':
                var movie_title = value.split(' ').join('[\\W]*');
                var movieId = tryGetMovie(movie_title);
                if (movieId == null)
                {
                    continue;
                }

                if (!variables.input.hasOwnProperty('movies'))
                {
                    variables.input['movies'] = [{}];
                }

                if (DEBUG)
                {
                    bufferedOutput += field + ': ' + value + '\n';
                    bufferedOutput += 'movie_id: ' + movieId + '\n';
                }

                variables.input['movies'][0]['movie_id'] = movieId;
                any = true;
                continue;

            case 'scene_index':
                var sceneIndex = parseInt(value, 10);
                if (isNaN(sceneIndex))
                {
                    continue;
                }

                if (!variables.input.hasOwnProperty('movies'))
                {
                    variables.input['movies'] = [{}];
                }

                if (DEBUG)
                {
                    bufferedOutput += 'scene_index: ' + sceneIndex + '\n';
                }

                variables.input['movies'][0]['scene_index'] = sceneIndex;
                continue;

            case 'performers':
                var performers = value.split(',').map(tryGetPerformer).filter(notNull);
                if (performers.length == 0)
                {
                    continue;
                }

                if (DEBUG)
                {
                    bufferedOutput += field + ': ' + value + '\n';
                    bufferedOutput += 'performer_ids: ' + performers.join(', ') + '\n';
                }

                variables.input['performer_ids'] = performers;
                any = true;
                continue;

            case 'tags':
                var tags = value.split(',').map(tryGetTag).filter(notNull);
                if (tags.length == 0)
                {
                    continue;
                }

                if (DEBUG)
                {
                    bufferedOutput += field + ': ' + value + '\n';
                    bufferedOutput += 'tag_ids: ' + tags.join(', ') + '\n';
                }

                variables.input['tag_ids'] = tags;
                any = true;
                continue;
        }
    }

    // Test only: report and stop before mutating anything
    if (DEBUG)
    {
        if (!any)
        {
            bufferedOutput += 'No fields to update!\n';
        }

        return;
    }

    // Remove movies if movie_id is missing
    if (variables.input.hasOwnProperty('movies') && !variables.input['movies'][0].hasOwnProperty('movie_id'))
    {
        delete variables.input['movies'];
    }

    // Apply updates
    var query = '\
mutation Mutation($input: SceneUpdateInput!) {\
    sceneUpdate(input: $input) {\
        id\
    }\
}';

    if (!any)
    {
        throw 'No fields to update for scene ' + sceneId;
    }

    var result = gql.Do(query, variables);
    if (!result.sceneUpdate)
    {
        throw 'Unable to update scene ' + sceneId;
    }
}

// Returns true for non-null elements
function notNull(ele)
{
    return ele != null;
}

// Get studio id from studio name
function tryGetStudio(studio)
{
    var query = '\
query FindStudios($studioFilter: StudioFilterType) {\
    findStudios(studio_filter: $studioFilter) {\
        studios {\
            id\
        }\
        count\
    }\
}';

    var variables = {
        studioFilter: {
            name: {
                value: studio.trim(),
                modifier: 'EQUALS'
            }
        }
    };

    var result = gql.Do(query, variables);
    if (!result.findStudios || result.findStudios.count == 0)
    {
        return null;
    }

    return result.findStudios.studios[0].id;
}

// Get movie id from a movie title regex
function tryGetMovie(movie_title)
{
    var query = '\
query FindMovies($movieFilter: MovieFilterType) {\
    findMovies(movie_filter: $movieFilter) {\
        movies {\
            id\
        }\
        count\
    }\
}';

    var variables = {
        movieFilter: {
            name: {
                value: movie_title.trim(),
                modifier: 'MATCHES_REGEX'
            }
        }
    };

    var result = gql.Do(query, variables);
    if (!result.findMovies || result.findMovies.count == 0)
    {
        return null;
    }

    return result.findMovies.movies[0].id;
}

// Get performer id from performer name
function tryGetPerformer(performer)
{
    var query = '\
query FindPerformers($performerFilter: PerformerFilterType) {\
    findPerformers(performer_filter: $performerFilter) {\
        performers {\
            id\
        }\
        count\
    }\
}';

    var variables = {
        performerFilter: {
            name: {
                value: performer.trim(),
                modifier: 'EQUALS'
            }
        }
    };

    var result = gql.Do(query, variables);
    if (!result.findPerformers || result.findPerformers.count == 0)
    {
        return null;
    }

    return result.findPerformers.performers[0].id;
}

// Get tag id from tag name
function tryGetTag(tag)
{
    var query = '\
query FindTags($tagFilter: TagFilterType) {\
    findTags(tag_filter: $tagFilter) {\
        tags {\
            id\
        }\
        count\
    }\
}';

    var variables = {
        tagFilter: {
            name: {
                value: tag.trim(),
                modifier: 'EQUALS'
            }
        }
    };

    var result = gql.Do(query, variables);
    if (!result.findTags || result.findTags.count == 0)
    {
        return null;
    }

    return result.findTags.tags[0].id;
}

var DEBUG = false;
var BASE_PATHS = [];
var bufferedOutput = '';
main();
@ -1,35 +0,0 @@
# example plugin config
name: Path Parser
description: Updates scene info based on the file path.
version: 1.0
exec:
  - pathParser.js
interface: js
hooks:
  - name: Run Rules on scan
    description: Updates scene info whenever a new scene is added.
    triggeredBy:
      - Scene.Create.Post
tasks:
  - name: Create Tags
    description: Create tags used by the path parser tasks.
    defaultArgs:
      task: createTags
      runTag: '[Run]'
      testTag: '[Test]'
  - name: Remove Tags
    description: Remove tags used by the path parser tasks.
    defaultArgs:
      task: removeTags
      runTag: '[Run]'
      testTag: '[Test]'
  - name: Run Rules
    description: Run rules for scenes containing the run tag.
    defaultArgs:
      task: runRules
      runTag: '[Run]'
  - name: Test Rules
    description: Test rules for scenes containing the test tag.
    defaultArgs:
      task: testRules
      testTag: '[Test]'
@ -1,49 +0,0 @@
# PHASH Duplicate Tagger

This plugin has four functions, described under the Tasks section below.

## Requirements
* Python >= 3.10
* `pip install -r requirements.txt`

## Title Syntax

This plugin will change the titles of scenes that are matched as duplicates to the following format:

`[PDT: 0.0GB|<group_id><keep_flag>] <Scene Title>`

* `group_id`: usually the scene ID of the scene that was selected to Keep
* `keep_flag`: K=Keep, R=Remove, U=Unknown
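If you need to recover the group id and keep flag from a tagged title in your own scripts, the prefix can be parsed back out with a regex. This is a minimal, hypothetical sketch based only on the format above; it is not part of the plugin.

```python
import re

# Matches e.g. "[PDT: 1.2GB|123K] Some Title" (format shown above)
PDT_RE = re.compile(r"^\[PDT: (?P<size>.+?)\|(?P<group>\d+)(?P<flag>[KRU])\]\s+(?P<title>.*)$")

m = PDT_RE.match("[PDT: 1.2GB|123K] Some Title")
if m:
    print(m.group("group"), m.group("flag"), m.group("title"))  # -> 123 K Some Title
```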
## Tags
Various tags may be created by this plugin:
* Keep - Applied to scenes that are determined to be the "best"
* Remove - Applied to scenes that are determined to be the "worst"
* Unknown - Applied to scenes where a best scene could not be determined
* Ignore - Applied by the user to scenes whose known duplicates should be ignored
* Reason - Applied to scenes marked for removal; the tag name matches the determining factor for why the scene was chosen to be removed

## Tasks
### Tag Dupes (EXACT/HIGH/MEDIUM)
These tasks search Stash for scenes with similar PHASHes. The closeness (distance) of the hashes to each other depends on which option you select:

* EXACT - Matches have a distance of 0 and should be exact matches
* HIGH - Matches have a distance of 3 and are very similar to each other
* MEDIUM - Matches have a distance of 6 and resemble each other

### Delete Managed Tags
Removes any tags generated by this plugin from Stash, excluding the `Ignore` tag, since that may be something you want to retain.

### Scene Cleanup
Reverts the changes made to scene titles and tags, restoring them to their state before they were tagged.

### Generate Scene PHASHs
Starts a generate task within Stash to generate PHASHes.

## Custom Compare Functions

You can create custom compare functions inside `config.py`; all current compare functions are provided there as references. A custom function must return two values when a better file is determined: the better object and a message string. Optionally, you can set `remove_reason` on the worse file and it will be tagged with that reason.

Custom function names must start with `compare_`, otherwise they will not be detected. Make sure to add your function name (minus the `compare_` prefix) to the `PRIORITY` list. A sketch is shown below.
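For illustration, here is a minimal custom compare function in the same style as those shipped in `config.py`. The `compare_duration` name and its five-second threshold are hypothetical choices for this sketch, not part of the plugin; `self` and `other` are the two `StashScene` objects being compared.

```python
# Hypothetical example for config.py: prefer the longer scene.
def compare_duration(self, other):
    # Ignore near-identical durations (5s threshold is an arbitrary choice)
    if abs(self.duration - other.duration) < 5:
        return
    if self.duration > other.duration:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "duration"
    return better, f"Longer duration {better.duration:.0f}s > {worse.duration:.0f}s"
```

To enable it, `'duration'` would be added to the `PRIORITY` list.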
@ -1,110 +0,0 @@
import stashapi.log as log
from stashapi.tools import human_bytes, human_bits

PRIORITY = ['bitrate_per_pixel', 'resolution', 'bitrate', 'encoding', 'size', 'age']
CODEC_PRIORITY = {'AV1': 0, 'H265': 1, 'HEVC': 1, 'H264': 2, 'MPEG4': 3, 'MPEG1VIDEO': 3, 'WMV3': 4, 'WMV2': 5, 'VC1': 6, 'SVQ3': 7}

KEEP_TAG_NAME = "[PDT: Keep]"
REMOVE_TAG_NAME = "[PDT: Remove]"
UNKNOWN_TAG_NAME = "[PDT: Unknown]"
IGNORE_TAG_NAME = "[PDT: Ignore]"


def compare_bitrate_per_pixel(self, other):
    try:
        self_bpp = self.bitrate / (self.width * self.height * self.frame_rate)
    except ZeroDivisionError:
        log.warning(f'scene {self.id} has 0 in file value ({self.width}x{self.height} {self.frame_rate}fps)')
        return
    try:
        other_bpp = other.bitrate / (other.width * other.height * other.frame_rate)
    except ZeroDivisionError:
        log.warning(f'scene {other.id} has 0 in file value ({other.width}x{other.height} {other.frame_rate}fps)')
        return

    bpp_diff = abs(self_bpp - other_bpp)
    if bpp_diff <= 0.01:
        return

    if self_bpp > other_bpp:
        better_bpp, worse_bpp = self_bpp, other_bpp
        better, worse = self, other
    else:
        worse_bpp, better_bpp = self_bpp, other_bpp
        worse, better = self, other
    worse.remove_reason = "bitrate_per_pxl"
    message = f'bitrate/pxl {better_bpp:.3f}bpp > {worse_bpp:.3f}bpp Δ:{bpp_diff:.3f}'
    return better, message


def compare_frame_rate(self, other):
    if not self.frame_rate:
        log.warning(f'scene {self.id} has no value for frame_rate')
        return
    if not other.frame_rate:
        log.warning(f'scene {other.id} has no value for frame_rate')
        return

    if abs(self.frame_rate - other.frame_rate) < 5:
        return

    if self.frame_rate > other.frame_rate:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "frame_rate"
    return better, f'Better FPS {better.frame_rate} vs {worse.frame_rate}'


def compare_resolution(self, other):
    if self.height == other.height:
        return
    if self.height > other.height:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "resolution"
    return better, f"Better Resolution {better.id}:{better.height}p > {worse.id}:{worse.height}p"


def compare_bitrate(self, other):
    if self.bitrate == other.bitrate:
        return
    if self.bitrate > other.bitrate:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "bitrate"
    return better, f"Better Bitrate {human_bits(better.bitrate)}ps > {human_bits(worse.bitrate)}ps Δ:({human_bits(better.bitrate - worse.bitrate)}ps)"


def compare_size(self, other):
    if abs(self.size - other.size) <= 100000:  # diff is <= 0.1 MB
        return
    if self.size > other.size:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "file_size"
    return better, f"Better Size {human_bytes(better.size)} > {human_bytes(worse.size)} Δ:({human_bytes(better.size - worse.size)})"


def compare_age(self, other):
    if not (self.mod_time and other.mod_time):
        return
    if self.mod_time == other.mod_time:
        return
    if self.mod_time < other.mod_time:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "age"
    return better, f"Choose Oldest: Δ:{worse.mod_time - better.mod_time} | {better.id} older than {worse.id}"


def compare_encoding(self, other):
    if self.codec_priority == other.codec_priority:
        return
    if not (isinstance(self.codec_priority, int) and isinstance(other.codec_priority, int)):
        return

    if self.codec_priority < other.codec_priority:
        better, worse = self, other
    else:
        worse, better = self, other
    worse.remove_reason = "video_codec"
    return better, f"Prefer Codec {better.codec}({better.id}) over {worse.codec}({worse.id})"

@ -1,270 +0,0 @@
import re, sys, json
import datetime as dt
from inspect import getmembers, isfunction

try:
    import stashapi.log as log
    from stashapi.tools import human_bytes, human_bits
    from stashapi.stash_types import PhashDistance
    from stashapi.stashapp import StashInterface
except ModuleNotFoundError:
    print("You need to install the stashapi module. (pip install stashapp-tools)",
          file=sys.stderr)
    sys.exit(1)

import config

FRAGMENT = json.loads(sys.stdin.read())
MODE = FRAGMENT['args']['mode']
stash = StashInterface(FRAGMENT["server_connection"])

SLIM_SCENE_FRAGMENT = """
id
title
date
tags { id }
files {
    size
    path
    width
    height
    bit_rate
    mod_time
    duration
    frame_rate
    video_codec
}
"""


def main():
    if MODE == "remove":
        clean_scenes()
        for tag in get_managed_tags():
            stash.destroy_tag(tag["id"])

    if MODE == "tag_exact":
        process_duplicates(PhashDistance.EXACT)
    if MODE == "tag_high":
        process_duplicates(PhashDistance.HIGH)
    if MODE == "tag_medium":
        process_duplicates(PhashDistance.MEDIUM)

    if MODE == "clean_scenes":
        clean_scenes()
    if MODE == "generate_phash":
        generate_phash()

    log.exit("Plugin exited normally.")


def parse_timestamp(ts, format="%Y-%m-%dT%H:%M:%S%z"):
    ts = re.sub(r'\.\d+', "", ts)  # remove fractional seconds
    return dt.datetime.strptime(ts, format)


class StashScene:

    def __init__(self, scene=None) -> None:
        file = scene["files"][0]

        self.id = int(scene['id'])
        self.mod_time = parse_timestamp(file['mod_time'])
        if scene.get("date"):
            self.date = parse_timestamp(scene['date'], format="%Y-%m-%d")
        else:
            self.date = None
        self.width = file['width']
        self.height = file['height']
        # File size in bytes
        self.size = int(file['size'])
        self.frame_rate = int(file['frame_rate'])
        self.bitrate = int(file['bit_rate'])
        self.duration = float(file['duration'])
        # replace any existing tagged title
        self.title = re.sub(r'^\[Dupe: \d+[KR]\]\s+', '', scene['title'])
        self.path = file['path']
        self.tag_ids = [t["id"] for t in scene["tags"]]

        self.remove_reason = None

        self.codec = file['video_codec'].upper()
        if self.codec in config.CODEC_PRIORITY:
            self.codec_priority = config.CODEC_PRIORITY[self.codec]
        else:
            self.codec_priority = None
            log.warning(f"could not find codec {self.codec} used in SceneID:{self.id}")

    def __repr__(self) -> str:
        return f'<StashScene ({self.id})>'

    def __str__(self) -> str:
        return f'id:{self.id}, height:{self.height}, size:{human_bytes(self.size)}, file_mod_time:{self.mod_time}, title:{self.title}'

    def compare(self, other):
        if not (isinstance(other, StashScene)):
            raise Exception(f"can only compare to <StashScene> not <{type(other)}>")

        if self.id == other.id:
            return None, f"Matching IDs {self.id}=={other.id}"

        def compare_not_found(*args, **kwargs):
            raise Exception("comparison not found")
        for compare_type in config.PRIORITY:
            try:
                compare_function = getattr(self, f'compare_{compare_type}', compare_not_found)
                result = compare_function(other)
                if result and len(result) == 2:
                    best, msg = result
                    return best, msg
            except Exception as e:
                log.error(f"Issue Comparing {self.id} {other.id} using <{compare_type}> {e}")

        return None, f"{self.id} worse than {other.id}"


def process_duplicates(distance: PhashDistance = PhashDistance.EXACT):
    clean_scenes()  # clean old results

    ignore_tag_id = stash.find_tag(config.IGNORE_TAG_NAME, create=True).get("id")
    duplicate_list = stash.find_duplicate_scenes(distance, fragment=SLIM_SCENE_FRAGMENT)

    total = len(duplicate_list)
    log.info(f"Found {total} sets of duplicates.")

    for i, group in enumerate(duplicate_list):
        group = [StashScene(s) for s in group]
        filtered_group = []
        for scene in group:
            if ignore_tag_id in scene.tag_ids:
                log.debug(f"Ignore {scene.id} {scene.title}")
            else:
                filtered_group.append(scene)

        if len(filtered_group) > 1:
            tag_files(filtered_group)

        log.progress(i / total)


def tag_files(group):
    keep_reasons = []
    keep_scene = None

    total_size = group[0].size
    for scene in group[1:]:
        total_size += scene.size
        better, msg = scene.compare(group[0])
        if better:
            keep_scene = better
            keep_reasons.append(msg)
    total_size = human_bytes(total_size, round=2, prefix='G')

    if not keep_scene:
        log.info(f"could not determine better scene from {group}")
        if config.UNKNOWN_TAG_NAME:
            group_id = group[0].id
            for scene in group:
                tag_ids = [stash.find_tag(config.UNKNOWN_TAG_NAME, create=True).get("id")]
                stash.update_scenes({
                    'ids': [scene.id],
                    'title': f'[PDT: {total_size}|{group_id}U] {scene.title}',
                    'tag_ids': {
                        'mode': 'ADD',
                        'ids': tag_ids
                    }
                })
        return

    log.info(f"{keep_scene.id} best of:{[s.id for s in group]} {keep_reasons}")

    for scene in group:
        if scene.id == keep_scene.id:
            tag_ids = [stash.find_tag(config.KEEP_TAG_NAME, create=True).get("id")]
            stash.update_scenes({
                'ids': [scene.id],
                'title': f'[PDT: {total_size}|{keep_scene.id}K] {scene.title}',
                'tag_ids': {
                    'mode': 'ADD',
                    'ids': tag_ids
                }
            })
        else:
            tag_ids = []
            tag_ids.append(stash.find_tag(config.REMOVE_TAG_NAME, create=True).get("id"))
            if scene.remove_reason:
                tag_ids.append(stash.find_tag(f'[Reason: {scene.remove_reason}]', create=True).get('id'))
            stash.update_scenes({
                'ids': [scene.id],
                'title': f'[PDT: {total_size}|{keep_scene.id}R] {scene.title}',
                'tag_ids': {
                    'mode': 'ADD',
                    'ids': tag_ids
                }
            })


def clean_scenes():
    scene_count, scenes = stash.find_scenes(f={
        "title": {
            "modifier": "MATCHES_REGEX",
            "value": "^\\[PDT: .+?\\]"
        }
    }, fragment="id title", get_count=True)

    log.info(f"Cleaning Titles/Tags of {scene_count} Scenes ")

    # Clean scene titles
    for i, scene in enumerate(scenes):
        title = re.sub(r'\[PDT: .+?\]\s+', '', scene['title'])
        stash.update_scenes({
            'ids': [scene['id']],
            'title': title
        })
        log.progress(i / scene_count)

    # Remove tags
    for tag in get_managed_tags():
        scene_count, scenes = stash.find_scenes(f={
            "tags": {"value": [tag['id']], "modifier": "INCLUDES", "depth": 0}
        }, fragment="id", get_count=True)
        if scene_count == 0:
            continue
        log.info(f'removing tag {tag["name"]} from {scene_count} scenes')
        stash.update_scenes({
            'ids': [s["id"] for s in scenes],
            'tag_ids': {
                'mode': 'REMOVE',
                'ids': [tag['id']]
            }
        })


def get_managed_tags(fragment="id name"):
    tags = stash.find_tags(f={
        "name": {
            "value": "^\\[Reason",
            "modifier": "MATCHES_REGEX"
        }}, fragment=fragment)
    tag_name_list = [
        config.REMOVE_TAG_NAME,
        config.KEEP_TAG_NAME,
        config.UNKNOWN_TAG_NAME,
        # config.IGNORE_TAG_NAME,
    ]
    for tag_name in tag_name_list:
        if tag := stash.find_tag(tag_name):
            tags.append(tag)
    return tags


def generate_phash():
    query = """mutation MetadataGenerate($input: GenerateMetadataInput!) {
        metadataGenerate(input: $input)
    }"""
    variables = {"input": {"phashes": True}}
    stash._callGraphQL(query, variables)


if __name__ == '__main__':
    # Attach compare_* functions from config.py to StashScene
    for name, func in getmembers(config, isfunction):
        if re.match(r'^compare_', name):
            setattr(StashScene, name, func)
    main()
@ -1,33 +0,0 @@
name: "PHash Duplicate Tagger"
description: Will tag scenes based on duplicate PHashes for easier/safer removal.
version: 0.1.3
url: https://github.com/stashapp/CommunityScripts/tree/main/plugins/phashDuplicateTagger
exec:
  - python
  - "{pluginDir}/phashDuplicateTagger.py"
interface: raw
tasks:
  - name: 'Tag Dupes (EXACT)'
    description: 'Assign duplicates tags to Exact Match (Dist 0) scenes'
    defaultArgs:
      mode: tag_exact
  - name: 'Tag Dupes (HIGH)'
    description: 'Assign duplicates tags to High Match (Dist 3) scenes'
    defaultArgs:
      mode: tag_high
  - name: 'Tag Dupes (MEDIUM)'
    description: 'Assign duplicates tags to Medium Match (Dist 6) scenes (BE CAREFUL WITH THIS LEVEL)'
    defaultArgs:
      mode: tag_medium
  - name: 'Delete Managed Tags'
    description: 'Deletes tags managed by this plugin from stash'
    defaultArgs:
      mode: remove
  - name: 'Scene Cleanup'
    description: 'Removes titles from scenes and any generated tags excluding [Dupe: Ignore]'
    defaultArgs:
      mode: clean_scenes
  - name: 'Generate Scene PHASHs'
    description: 'Generate PHASHs for all scenes where they are missing'
    defaultArgs:
      mode: generate_phash
@ -1 +0,0 @@
stashapp-tools>=0.2.33
@ -1,205 +0,0 @@
# *renamerOnUpdate*
Using metadata from your Stash to rename/move your files.

## Table of Contents

- [*renamerOnUpdate*](#renameronupdate)
  - [Table of Contents](#table-of-contents)
- [Requirement](#requirement)
- [Installation](#installation)
    - [:exclamation: Make sure to configure the plugin by editing `config.py` before running it :exclamation:](#exclamation-make-sure-to-configure-the-plugin-by-editing-configpy-before-running-it-exclamation)
- [Usage](#usage)
- [Configuration](#configuration)
- [Config.py explained](#configpy-explained)
  - [Template](#template)
    - [- You can find the list of available variables in `config.py`](#--you-can-find-the-list-of-available-variables-in-configpy)
  - [Filename](#filename)
    - [- Based on a Tag](#--based-on-a-tag)
    - [- Based on a Studio](#--based-on-a-studio)
    - [- Change filename no matter what](#--change-filename-no-matter-what)
  - [Path](#path)
    - [- Based on a Tag](#--based-on-a-tag-1)
    - [- Based on a Studio](#--based-on-a-studio-1)
    - [- Based on a Path](#--based-on-a-path)
    - [- Change path no matter what](#--change-path-no-matter-what)
    - [- Special Variables](#--special-variables)
  - [Advanced](#advanced)
    - [Groups](#groups)
    - [Option](#option)
      - [*p_tag_option*](#p_tag_option)
      - [*field_replacer*](#field_replacer)
      - [*replace_words*](#replace_words)
      - [*removecharac_Filename*](#removecharac_filename)
      - [*performer_limit*](#performer_limit)

# Requirement
- Stash (v0.15+)
- Python 3.6+ (Tested on Python v3.9.1 64bit, Win10)
- Requests module (https://pypi.org/project/requests/)
- Tested on Windows 10/Synology/docker.

# Installation

- Download the whole folder '**renamerOnUpdate**' (config.py, log.py, renamerOnUpdate.py/.yml)
- Place it in your **plugins** folder (where the `config.yml` is)
- Reload plugins (Settings > Plugins > Reload)
- *renamerOnUpdate* appears

### :exclamation: Make sure to configure the plugin by editing `config.py` before running it :exclamation:

# Usage

- Every time you update a scene, the plugin will check/rename your file. An update can be:
  - Saving in **Scene Edit**.
  - Clicking the **Organized** button.
  - Running a scan that **updates** the path.

- By pressing the button in the Task menu:
  - It will go through each of your scenes.
  - :warning: It's recommended to understand how this plugin works and to use **DryRun** first.

# Configuration

- Read/Edit `config.py`:
  - Change the template filename/path
  - Add a `log_file` path

- There are multiple buttons in the Task menu:
  - Enable: (default) Enable the update trigger
  - Disable: Disable the update trigger
  - Dry-run: A switch to enable/disable dry-run mode

- Dry-run mode:
  - It prevents editing the file; the change is only shown in your log.
  - This mode can write the changes the plugin would make into a file (`dryrun_renamerOnUpdate.txt`); you need to set a path for `log_file` in `config.py`.
  - The format will be: `scene_id|current path|new path` (e.g. `100|C:\Temp\foo.mp4|C:\Temp\bar.mp4`). A revert sketch using this format is shown below.
  - This file will be overwritten every time the plugin is triggered.
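Because each line records the old and the new path, the rename log can be used to undo a batch of renames. The following is a minimal, hypothetical sketch based only on the `scene_id|old path|new path` format above; it is not part of the plugin, and `LOG_FILE` should point at your own `log_file` setting.

```python
import os

# Hypothetical revert helper for renamerOnUpdate's rename log.
# Each line is "scene_id|old path|new path"; moving new back to old
# undoes the rename.
LOG_FILE = r"C:\Users\USERNAME\.stash\plugins\Hooks\rename_log.txt"

with open(LOG_FILE, encoding="utf-8") as fh:
    for line in fh:
        scene_id, old_path, new_path = line.rstrip("\n").split("|")
        if os.path.isfile(new_path):
            os.rename(new_path, old_path)  # move the file back
```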
# Config.py explained
## Template
To modify your path/filename, you can use **variables**. These are elements that will change based on your **metadata**.

- Variables are represented by a word preceded with a `$` symbol. (E.g. `$date`)
- If the metadata exists, this term will be replaced by it:
  - Scene date = 2006-01-02, so `$date` = 2006-01-02
- You can find the list of available variables in `config.py`
-----
In the examples below, we will use:
- Path: `C:\Temp\QmlnQnVja0J1bm55.mp4`
- This file is [Big Buck Bunny](https://en.wikipedia.org/wiki/Big_Buck_Bunny).

## Filename
Change your filename (C:\Temp\\**QmlnQnVja0J1bm55.mp4**)

------
**Priority**: Tags > Studios > Default
### - Based on a Tag
```py
tag_templates = {
    "rename_tag": "$year $title - $studio $resolution $video_codec",
    "rename_tag2": "$title"
}
```
| tag | new path |
|--|--|
| rename_tag | `C:\Temp\2008 Big Buck Bunny - Blender Institute 1080p H264.mp4` |
| rename_tag2 | `C:\Temp\Big Buck Bunny.mp4` |

### - Based on a Studio
```py
studio_templates = {
    "Blender Institute": "$date - $title [$studio]",
    "Pixar": "$title [$studio]"
}
```
| studio | new path |
|--|--|
| Blender Institute | `C:\Temp\2008-05-20 - Big Buck Bunny [Blender Institute].mp4` |
| Pixar | `C:\Temp\Big Buck Bunny [Pixar].mp4` |

### - Change filename no matter what
```py
use_default_template = True
default_template = "$date $title"
```
The file becomes: `C:\Temp\2008-05-20 Big Buck Bunny.mp4`

## Path
Change your path (**C:\Temp**\\QmlnQnVja0J1bm55.mp4)
### - Based on a Tag
```py
p_tag_templates = {
    "rename_tag": r"D:\Video",
    "rename_tag2": r"E:\Video\$year"
}
```
| tag | new path |
|--|--|
| rename_tag | `D:\Video\QmlnQnVja0J1bm55.mp4` |
| rename_tag2 | `E:\Video\2008\QmlnQnVja0J1bm55.mp4` |

### - Based on a Studio
```py
p_studio_templates = {
    "Blender Institute": r"D:\Video\Blender",
    "Pixar": r"E:\Video\$studio"
}
```
| studio | new path |
|--|--|
| Blender Institute | `D:\Video\Blender\QmlnQnVja0J1bm55.mp4` |
| Pixar | `E:\Video\Pixar\QmlnQnVja0J1bm55.mp4` |

### - Based on a Path
```py
p_path_templates = {
    r"C:\Temp": r"D:\Video",
    r"C:\Video": r"E:\Video\Win"
}
```
| file path | new path |
|--|--|
| `C:\Temp` | `D:\Video\QmlnQnVja0J1bm55.mp4` |
| `C:\Video` | `E:\Video\Win\QmlnQnVja0J1bm55.mp4` |

### - Change path no matter what
```py
p_use_default_template = True
p_default_template = r"D:\Video"
```
The file is moved to: `D:\Video\QmlnQnVja0J1bm55.mp4`

### - Special Variables
`$studio_hierarchy` - Creates the entire studio hierarchy as folders (e.g. `../MindGeek/Brazzers/Hot And Mean/video.mp4`), using your parent studios.

`^*` - The current directory of the file.
Explanation:
- **If**: `p_default_template = r"^*\$performer"`
  - It creates a folder with the performer name in the current directory where the file is.
  - `C:\Temp\video.mp4` gives `^* = C:\Temp\`, result: `C:\Temp\Jane Doe\video.mp4`
  - If you don't use the `prevent_consecutive` option, the plugin will create a new folder every time (`C:\Temp\Jane Doe\Jane Doe\...\video.mp4`).

## Advanced

### Groups
You can group elements in the template with `{}`; this is used when you want to remove a character if a variable is null.

Example:

**With** date in Stash:
- `[$studio] $date - $title` -> `[Blender] 2008-05-20 - Big Buck Bunny`

**Without** date in Stash:
- `[$studio] $date - $title` -> `[Blender]  - Big Buck Bunny`

If you want to use the `-` only when you have a date, you can group the `-` with `$date`.

**Without** date in Stash:
- `[$studio] {$date -} $title` -> `[Blender] Big Buck Bunny`
@ -1,52 +0,0 @@
import sys


# Log messages sent from a plugin instance are transmitted via stderr and are
# encoded with a prefix consisting of special character SOH, then the log
# level (one of t, d, i, w, e, or p - corresponding to trace, debug, info,
# warning, error and progress levels respectively), then special character
# STX.
#
# The LogTrace, LogDebug, LogInfo, LogWarning, and LogError methods, and their equivalent
# formatted methods are intended for use by plugin instances to transmit log
# messages. The LogProgress method is also intended for sending progress data.
#

def __prefix(level_char):
    start_level_char = b'\x01'
    end_level_char = b'\x02'

    ret = start_level_char + level_char + end_level_char
    return ret.decode()


def __log(level_char, s):
    if level_char == "":
        return

    print(__prefix(level_char) + s + "\n", file=sys.stderr, flush=True)


def LogTrace(s):
    __log(b't', s)


def LogDebug(s):
    __log(b'd', s)


def LogInfo(s):
    __log(b'i', s)


def LogWarning(s):
    __log(b'w', s)


def LogError(s):
    __log(b'e', s)


def LogProgress(p):
    progress = min(max(0, p), 1)
    __log(b'p', str(progress))
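As a usage illustration (not part of the file itself), calling one of these helpers writes a prefixed line to stderr, which Stash then parses into its log. This assumes the file is saved as `log.py` alongside the plugin:

```python
import log

log.LogInfo("processing scene 42")  # emits "\x01i\x02processing scene 42\n" on stderr
log.LogProgress(0.5)                # reports 50% task progress to Stash
```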
File diff suppressed because it is too large
@ -1,31 +0,0 @@
name: renamerOnUpdate
description: Rename/move filename based on a template.
url: https://github.com/stashapp/CommunityScripts
version: 2.4.4
exec:
  - python
  - "{pluginDir}/renamerOnUpdate.py"
interface: raw
hooks:
  - name: hook_rename
    description: Rename/move file when you update a scene.
    triggeredBy:
      - Scene.Update.Post
tasks:
  - name: 'Disable'
    description: Disable the hook
    defaultArgs:
      mode: disable
  - name: 'Enable'
    description: Enable the hook
    defaultArgs:
      mode: enable
  - name: 'Dryrun'
    description: Enable/disable dry-run
    defaultArgs:
      mode: dryrun
  - name: 'Rename scenes'
    description: Rename all your scenes based on your config.
    defaultArgs:
      mode: bulk
@ -1,275 +0,0 @@
###################################################################
#                    General information                          #
# -----------------------------------------------------------------
# Available elements for renaming:
# $oshash
# $checksum
# $date
# $date_format
# $year
# $performer
# $title
# $height
# $resolution
# $duration
# $bitrate (megabits per second)
# $studio
# $parent_studio
# $studio_family
# $rating
# $tags
# $video_codec
# $audio_codec
# $movie_title
# $movie_year
# $movie_scene
# $stashid_scene
# $stashid_performer
# $studio_code
#
# Note:
# $date_format: can be edited with the date_format setting
# $duration: can be edited with the duration_format setting
# $studio_family: If a parent studio exists, use it; else use the studio name.
# $performer: If more performers than the limit are linked to the scene, this field will be ignored. Set the limit in the Settings section below (default: 3)
# $resolution: SD/HD/UHD/VERTICAL (for phone) | $height: 720p 1080p 4k 5k 6k 8k
# $movie_scene: "scene #" where # is the scene's index in the movie
# -----------------------------------------------------------------
# Example templates:
#
# $title == Her Fantasy Ball
# $date $title == 2016-12-29 Her Fantasy Ball
# $date.$title == 2016-12-29.Her Fantasy Ball
# $year $title $height == 2016 Her Fantasy Ball 1080p
# $year_$title-$height == 2016_Her Fantasy Ball-1080p
# $date $performer - $title [$studio] == 2016-12-29 Eva Lovia - Her Fantasy Ball [Sneaky Sex]
# $parent_studio $date $performer - $title == Reality Kings 2016-12-29 Eva Lovia - Her Fantasy Ball
# $date $title - $tags == 2016-12-29 Her Fantasy Ball - Blowjob Cumshot Facial Tattoo
#

####################################################################
# TEMPLATE FILENAME (Rename your files)

# Priority: Tags > Studios > Default

# Templates to use for given tags
# Add or remove as needed, or leave it empty/commented out
# You can group terms with {}. Example: [$studio] {$date -} $title, the '-' will be removed if there is no date
tag_templates = {
    # "!1. Western": "$date $performer - $title [$studio]",
    # "!1. JAV": "$title",
    # "!1. Anime": "$title $date [$studio]"
}

# Adjust the below if you want to use studio names instead of tags for the renaming templates
studio_templates = {

}

# Change to True to use the default template if no specific tag/studio is found
use_default_template = False
# Default template, adjust as needed
default_template = "$date $title"

####################################################################
# TEMPLATE PATH (Move your files)

# $studio_hierarchy: create the whole hierarchy as folders (MindGeek/Brazzers/Hot And Mean/video.mp4)
# ^* = parent of folder (E:\Movies\video.mp4 -> E:\Movies\)

# trigger with a specific tag
# "tagname": "path"
# ex: "plugin_move": r"E:\Movies\R18\$studio_hierarchy"
p_tag_templates = {
}


p_studio_templates = {
}

# match a path
# "match path": "destination"
# ex: r"E:\Film\R18\2. Test\A trier": r"E:\Film\R18\2. Test\A trier\$performer",
p_path_templates = {
}

# change to True to use the default template if no specific tag/studio is found
p_use_default_template = False
# default template, adjust as needed
p_default_template = r"^*\$performer"

# if unorganized, ignore other templates and use this path
p_non_organized = r""

# option if tag is present
# "tagname": [option]
# clean_tag: remove the tag after the rename
# inverse_performer: swap the last/first name (Jane Doe -> Doe Jane)
# dry_run: activate dry_run for this scene
# ex: "plugin_move": ["clean_tag"]
p_tag_option = {
}
######################################
#              Logging               #

# File to save what is renamed; can be useful if you need to revert changes.
# Will look like: IDSCENE|OLD_PATH|NEW_PATH
# Leave blank ("") or use None if you don't want to use a log file, or set a working path like: C:\Users\USERNAME\.stash\plugins\Hooks\rename_log.txt
log_file = r""

######################################
#              Settings              #

# rename associated files (subtitle, funscript) if present
associated_extension = ["srt", "vtt", "funscript"]

# use the filename as title if no title is set
# It can cause problems if you update the same scene without a title multiple times.
filename_as_title = False

# Character which replaces every space in the filename
# Common values are "." and "_"
# e.g.:
# "."
# 2016-12-29.Eva.Lovia.-.Her.Fantasy.Ball
filename_splitchar = " "

# Replace spaces inside stash fields (title, performer...); a title 'I love Stash' can become 'I_love_Stash'
field_whitespaceSeperator = ""
# Remove/Replace characters in a field (not using regex)
# "field": {"replace": "foo", "with": "bar"}
# ex: "$studio": {"replace": "'", "with": ""} My Dad's Hot Girlfriend --> My Dads Hot Girlfriend
field_replacer = {
}

# Match and replace.
# "match": ["replace with", "system"]; the second element of the list determines the system used. If you don't supply this element, the default is word.
# regex: match a regex, word: match a word, any: match a term
# Difference between 'word' & 'any': word is between separators (space, _, -); any matches anything ('ring' would replace part of 'during')
# ex: "Scene": ["Sc.", "word"] - Replace Scene with Sc.
# r"S\d+:E\d+": ["", "regex"] - Remove Sxx:Exx (x is a digit)
replace_words = {
}

# Date format for the $date_format field, check: https://docs.python.org/3/library/datetime.html#strftime-and-strptime-format-codes
date_format = r"%Y-%m-%d"
# Duration format, check table: https://docs.python.org/3/library/time.html#time.strftime
# example: %H;%M;%S -> 00;35;20 (You can't have the ':' character in a filename)
# If empty, it will give you the duration in seconds
duration_format = r""

# put the filename in lowercase
lowercase_Filename = False
# filename in title case (capitalises each word and lowercases the rest)
titlecase_Filename = False
# remove these characters if they are present in the filename
removecharac_Filename = ",#"

# Character to use as a performer separator.
performer_splitchar = " "
# Maximum number of performer names in the filename. If there are more than that in a scene, the filename will not include any performer name!
performer_limit = 3
# If True, the filename keeps performer names up to the limit (with limit=3, the filename can contain 3 performers for a 4-performer scene)
performer_limit_keep = False
# performer sorting (name, id, rating, favorite, mix (favorite > rating > name), mixid (..>..> id))
performer_sort = "id"
# Ignore certain genders. Available: "MALE" "FEMALE" "TRANSGENDER_MALE" "TRANSGENDER_FEMALE" "INTERSEX" "NON_BINARY" "UNDEFINED"
performer_ignoreGender = []

# Suffix appended if there are multiple files for the same scene [FileRefactor]
duplicate_suffix = ["", "_1", "_2", "_3", "_4", "_5", "_6", "_7", "_8", "_9", "_10"]

# If $performer is before $title, prevent duplicated text.
# e.g.:
# Template used: $year $performer - $title
# 2016 Dani Daniels - Dani Daniels in ***.mp4 --> 2016 Dani Daniels in ***.mp4
prevent_title_performer = False

## Path mover related
# Remove consecutive duplicate folders (/FolderName/FolderName/video.mp4 -> /FolderName/video.mp4)
prevent_consecutive = True
# After the file has moved, check whether the old directory is empty; if so, remove it.
remove_emptyfolder = True
# If True, the folder name contains only one performer name; otherwise it behaves the same as for the filename.
path_one_performer = True
# If there is no performer on the scene, the $performer field will be replaced by "NoPerformer", so a folder "NoPerformer" will be created
path_noperformer_folder = False
# If the folder already has a performer name, it won't be changed
path_keep_alrperf = True

# Removes prepositions from the beginning of titles
prepositions_list = ['The', 'A', 'An']
prepositions_removal = False

# Squeeze studio names: removes all spaces in studio, parent studio and studio family names
# e.g.:
# Reality Kings --> RealityKings
# Team Skeet Extras --> TeamSkeetExtras
squeeze_studio_names = False

# Rating indicator option to identify the number correctly in your OS file search
# Separated from the template handling above to avoid having only "RTG" in the filename for scenes without ratings
# e.g.:
# "{}" with scene rating of 5 == 5
# "RTG{}" with scene rating of 5 == RTG5
# "{}-stars" with scene rating 3 == 3-stars
rating_format = "{}"

# Character to use as a tag separator.
tags_splitchar = " "
# Include and exclude tags
# Tags are compared strictly. "pantyhose" != "Pantyhose" and "panty hose" != "pantyhose"
# Option 1: If you use the whitelist, every tag not listed there will be ignored in the filename
# Option 2: All tags in the tags_blacklist array will be ignored in the filename. Every other tag will be used.
# Option 3: Leave both arrays empty if you want every tag linked to the scene.
# Attention: Only recommended if the number of tags linked to the scene is small, due to the maximum filename length
tags_whitelist = [
    # "Brunette", "Blowjob"
]

tags_blacklist = [
    # ignored tags...
]

# Only rename 'Organized' scenes.
only_organized = False

# If the new path is over 240 characters, the plugin will try to reduce it. Set to True to ignore that.
ignore_path_length = False

# Fields to remove if the path is too long. The first in the list is removed, then the second, and so on while the length is still too long.
order_field = ["$video_codec", "$audio_codec", "$resolution", "tags", "rating", "$height", "$studio_family", "$studio", "$parent_studio", "$performer"]

# Alternate way to show the diff. Not useful at all.
alt_diff_display = False

# Number of scenes processed by the renamer task. -1 = all scenes
batch_number_scene = -1

# disable/enable the hook. You can edit this value in 'Plugin Tasks' inside of Stash.
enable_hook = True
# disable/enable dry-run mode. Does a trial run with no permanent changes. Can write into a file (dryrun_renamerOnUpdate.txt); set a path for log_file.
# You can edit this value in 'Plugin Tasks' inside of Stash.
dry_run = False
# Choose whether to append to (True) or overwrite (False) the dry-run log file.
dry_run_append = True
######################################
#           Module Related           #

# ! OPTIONAL module settings. Not needed for basic operation !

# = psutil module (https://pypi.org/project/psutil/) =
# Gets a list of all processes instead of stopping after the first one. Enabling it slows down the plugin.
process_getall = False
# If the file is used by a process, the plugin will kill it. IT CAN MAKE STASH CRASH TOO.
process_kill_attach = False
# =========================

# = Unidecode module (https://pypi.org/project/Unidecode/) =
# Check the site mentioned for more details.
# TL;DR: Prevents non-common characters by replacing them.
# Warning: If you have non-Latin characters (Cyrillic, Kanji, Arabic, ...), the result will be extremely different.
use_ascii = False
# =========================

@ -1,145 +0,0 @@
// By ScruffyNerf
// Ported by feederbox826

(function () {
    let cropping = false;
    let cropper = null;

    try {
        const img = document.createElement('img');
        new Cropper(img)
    } catch (e) {
        console.error("Cropper not loaded - please install 4. CropperJS from CommunityScripts")
    }
    try {
        stash.getVersion()
    } catch (e) {
        console.error("Stash not loaded - please install 1. stashUserscriptLibrary from CommunityScripts")
    }

    function setupCropper() {
        const cropBtnContainerId = "crop-btn-container";
        if (document.getElementById(cropBtnContainerId)) return
        const sceneId = window.location.pathname.replace('/scenes/', '').split('/')[0];
        const sceneImage = document.querySelector("img.scene-cover")

        var cropperModal = document.createElement("dialog");
        cropperModal.style.width = "90%";
        cropperModal.style.border = "none";
        cropperModal.classList.add('bg-dark');
        document.body.appendChild(cropperModal);

        var cropperContainer = document.createElement("div");
        cropperContainer.style.width = "100%";
        cropperContainer.style.height = "auto";
        cropperContainer.style.margin = "auto";
        cropperModal.appendChild(cropperContainer);

        var image = sceneImage.cloneNode();
        image.style.display = "block";
        image.style.maxWidth = "100%";
        cropperContainer.appendChild(image);

        var cropBtnContainer = document.createElement('div');
        cropBtnContainer.setAttribute("id", cropBtnContainerId);
        cropBtnContainer.classList.add('d-flex','flex-row','justify-content-center','align-items-center');
        cropBtnContainer.style.gap = "10px";
        cropperModal.appendChild(cropBtnContainer);


        sceneImage.parentElement.parentElement.style.flexFlow = 'column';

        const cropInfo = document.createElement('p');
        cropInfo.style.all = "revert";
        cropInfo.classList.add('text-white');

        const cropStart = document.createElement('button');
        cropStart.setAttribute("id", "crop-start");
        cropStart.classList.add('btn', 'btn-primary');
        cropStart.innerText = 'Crop Image';
        cropStart.addEventListener('click', evt => {
            cropping = true;
            cropStart.style.display = 'none';
            cropCancel.style.display = 'inline-block';

            //const isVertical = image.naturalHeight > image.naturalWidth;
            //const aspectRatio = isVertical ? 3/2 : NaN
            const aspectRatio = NaN

            cropper = new Cropper(image, {
                viewMode: 1,
                initialAspectRatio: aspectRatio,
                movable: false,
                rotatable: false,
                scalable: false,
                zoomable: false,
                zoomOnTouch: false,
                zoomOnWheel: false,
                ready() {
                    cropAccept.style.display = 'inline-block';
                },
                crop(e) {
                    cropInfo.innerText = `X: ${Math.round(e.detail.x)}, Y: ${Math.round(e.detail.y)}, Width: ${Math.round(e.detail.width)}px, Height: ${Math.round(e.detail.height)}px`;
                }
            });
            cropperModal.showModal();
        });
        sceneImage.parentElement.appendChild(cropStart);

        const cropAccept = document.createElement('button');
        cropAccept.setAttribute("id", "crop-accept");
        cropAccept.classList.add('btn', 'btn-success', 'mr-2');
        cropAccept.innerText = 'OK';
        cropAccept.addEventListener('click', async evt => {
            cropping = false;
            cropStart.style.display = 'inline-block';
            cropAccept.style.display = 'none';
            cropCancel.style.display = 'none';
            cropInfo.innerText = '';

            const reqData = {
                "operationName": "SceneUpdate",
                "variables": {
                    "input": {
                        "cover_image": cropper.getCroppedCanvas().toDataURL(),
                        "id": sceneId
                    }
                },
                "query": `mutation SceneUpdate($input: SceneUpdateInput!) {
                    sceneUpdate(input: $input) {
                        id
                    }
                }`
            }
            await stash.callGQL(reqData);
            reloadImg(image.src);
            cropper.destroy();
            cropperModal.close("cropAccept");
        });
        cropBtnContainer.appendChild(cropAccept);

        const cropCancel = document.createElement('button');
        cropCancel.setAttribute("id", "crop-cancel");
        cropCancel.classList.add('btn', 'btn-danger');
        cropCancel.innerText = 'Cancel';
        cropCancel.addEventListener('click', evt => {
            cropping = false;
            cropStart.style.display = 'inline-block';
            cropAccept.style.display = 'none';
            cropCancel.style.display = 'none';
            cropInfo.innerText = '';

            cropper.destroy();
            cropperModal.close("cropCancel");
        });
        cropBtnContainer.appendChild(cropCancel);
        cropAccept.style.display = 'none';
        cropCancel.style.display = 'none';

        cropBtnContainer.appendChild(cropInfo);
    }

    stash.addEventListener('page:scene', function () {
        waitForElementId('scene-edit-details', setupCropper);
    });
})();
@ -1,10 +0,0 @@
name: Scene Cover Cropper
# requires: CropperJS
description: Crop Scene Cover Images
version: 1.0
ui:
  requires:
    - CropperJS
  css:
  javascript:
    - sceneCoverCropper.js
@ -1,77 +0,0 @@
import os
import re
import sys
import json
import base64

try:
    import stashapi.log as log
    from stashapi.tools import file_to_base64
    from stashapi.stashapp import StashInterface
except ModuleNotFoundError:
    print("You need to install the stashapi module. (pip install stashapp-tools)",
          file=sys.stderr)
    sys.exit(1)

MANUAL_ROOT = None  # /some/other/path to override scanning all stashes
cover_pattern = r'(?:thumb|poster|cover)\.(?:jpg|png)'

def main():
    global stash, mode_arg
    json_input = json.loads(sys.stdin.read())

    stash = StashInterface(json_input["server_connection"])
    mode_arg = json_input['args']['mode']

    try:
        if MANUAL_ROOT:
            scan(MANUAL_ROOT, handle_cover)
        else:
            for stash_path in get_stash_paths():
                scan(stash_path, handle_cover)
    except Exception as e:
        log.error(e)

    out = json.dumps({"output": "ok"})
    print(out + "\n")


def handle_cover(path, file):
    filepath = os.path.join(path, file)

    b64img = file_to_base64(filepath)
    if not b64img:
        log.warning(f"Could not parse {filepath} to b64image")
        return

    scenes = stash.find_scenes(f={
        "path": {
            "modifier": "INCLUDES",
            "value": f"{path}\""
        }
    }, fragment="id")

    log.info(f'Found Cover: {[int(s["id"]) for s in scenes]}|{filepath}')

    if mode_arg == "set_cover":
        for scene in scenes:
            stash.update_scene({
                "id": scene["id"],
                "cover_image": b64img
            })
        log.info(f'Applied cover to {len(scenes)} scenes')

def get_stash_paths():
    config = stash.get_configuration("general { stashes { path excludeVideo } }")
    stashes = config["configuration"]["general"]["stashes"]
    return [s["path"] for s in stashes if not s["excludeVideo"]]

def scan(ROOT_PATH, _callback):
    log.info(f'Scanning {ROOT_PATH}')
    for root, dirs, files in os.walk(ROOT_PATH):
        for file in files:
            if re.match(cover_pattern, file, re.IGNORECASE):
                _callback(root, file)

if __name__ == '__main__':
    main()
@ -1,17 +0,0 @@
name: Set Scene Cover
description: searches Stash for Scenes with a cover image in the same folder and sets the cover image in stash to that image
version: 0.4
url: https://github.com/stg-annon/CommunityScripts/tree/main/plugins/setSceneCoverFromFile
exec:
  - python
  - "{pluginDir}/set_cover.py"
interface: raw
tasks:
  - name: Scan
    description: searches stash dirs for cover images and logs results
    defaultArgs:
      mode: scan
  - name: Set Cover
    description: searches for cover images and sets any stash scene found in the same dir to that image
    defaultArgs:
      mode: set_cover
@ -1,6 +0,0 @@
name: Stash Userscript Library
description: Exports utility functions and a Stash class that emits events whenever a GQL response is received and whenever a page navigation change is detected
version: 1.0
ui:
  javascript:
    - stashUserscriptLibrary.js
File diff suppressed because it is too large
@ -1,136 +0,0 @@
(function() {
    function createStatElement(container, title, heading) {
        const statEl = document.createElement('div');
        statEl.classList.add('stats-element');
        container.appendChild(statEl);

        const statTitle = document.createElement('p');
        statTitle.classList.add('title');
        statTitle.innerText = title;
        statEl.appendChild(statTitle);

        const statHeading = document.createElement('p');
        statHeading.classList.add('heading');
        statHeading.innerText = heading;
        statEl.appendChild(statHeading);
    }

    async function createSceneStashIDPct(row) {
        const reqData = {
            "variables": {
                "scene_filter": {
                    "stash_id": {
                        "value": "",
                        "modifier": "NOT_NULL"
                    }
                }
            },
            "query": "query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene_ids: [Int!]) {\n findScenes(filter: $filter, scene_filter: $scene_filter, scene_ids: $scene_ids) {\n count\n }\n}"
        };
        const stashIdCount = (await stash.callGQL(reqData)).data.findScenes.count;

        const reqData2 = {
            "variables": {
                "scene_filter": {}
            },
            "query": "query FindScenes($filter: FindFilterType, $scene_filter: SceneFilterType, $scene_ids: [Int!]) {\n findScenes(filter: $filter, scene_filter: $scene_filter, scene_ids: $scene_ids) {\n count\n }\n}"
        };
        const totalCount = (await stash.callGQL(reqData2)).data.findScenes.count;

        createStatElement(row, (stashIdCount / totalCount * 100).toFixed(2) + '%', 'Scene StashIDs');
    }

    async function createPerformerStashIDPct(row) {
        const reqData = {
            "variables": {
                "performer_filter": {
                    "stash_id": {
                        "value": "",
                        "modifier": "NOT_NULL"
                    }
                }
            },
            "query": "query FindPerformers($filter: FindFilterType, $performer_filter: PerformerFilterType) {\n findPerformers(filter: $filter, performer_filter: $performer_filter) {\n count\n }\n}"
        };
        const stashIdCount = (await stash.callGQL(reqData)).data.findPerformers.count;

        const reqData2 = {
            "variables": {
                "performer_filter": {}
            },
            "query": "query FindPerformers($filter: FindFilterType, $performer_filter: PerformerFilterType) {\n findPerformers(filter: $filter, performer_filter: $performer_filter) {\n count\n }\n}"
        };
        const totalCount = (await stash.callGQL(reqData2)).data.findPerformers.count;

        createStatElement(row, (stashIdCount / totalCount * 100).toFixed(2) + '%', 'Performer StashIDs');
    }

    async function createStudioStashIDPct(row) {
        const reqData = {
            "variables": {
                "studio_filter": {
                    "stash_id": {
                        "value": "",
                        "modifier": "NOT_NULL"
                    }
                }
            },
            "query": "query FindStudios($filter: FindFilterType, $studio_filter: StudioFilterType) {\n findStudios(filter: $filter, studio_filter: $studio_filter) {\n count\n }\n}"
        };
        const stashIdCount = (await stash.callGQL(reqData)).data.findStudios.count;

        const reqData2 = {
            "variables": {
                "studio_filter": {}
            },
            "query": "query FindStudios($filter: FindFilterType, $studio_filter: StudioFilterType) {\n findStudios(filter: $filter, studio_filter: $studio_filter) {\n count\n }\n}"
        };
        const totalCount = (await stash.callGQL(reqData2)).data.findStudios.count;

        createStatElement(row, (stashIdCount / totalCount * 100).toFixed(2) + '%', 'Studio StashIDs');
    }

    async function createPerformerFavorites(row) {
        const reqData = {
            "variables": {
                "performer_filter": {
                    "filter_favorites": true
                }
            },
            "query": "query FindPerformers($filter: FindFilterType, $performer_filter: PerformerFilterType) {\n findPerformers(filter: $filter, performer_filter: $performer_filter) {\n count\n }\n}"
        };
        const perfCount = (await stash.callGQL(reqData)).data.findPerformers.count;

        createStatElement(row, perfCount, 'Favorite Performers');
    }

    async function createMarkersStat(row) {
        const reqData = {
            "variables": {
                "scene_marker_filter": {}
            },
            "query": "query FindSceneMarkers($filter: FindFilterType, $scene_marker_filter: SceneMarkerFilterType) {\n findSceneMarkers(filter: $filter, scene_marker_filter: $scene_marker_filter) {\n count\n }\n}"
        };
        const totalCount = (await stash.callGQL(reqData)).data.findSceneMarkers.count;

        createStatElement(row, totalCount, 'Markers');
    }

    stash.addEventListener('page:stats', function() {
        waitForElementByXpath("//div[contains(@class, 'container-fluid')]/div[@class='mt-5']", function(xpath, el) {
            if (!document.getElementById('custom-stats-row')) {
                const changelog = el.querySelector('div.changelog');
                const row = document.createElement('div');
                row.setAttribute('id', 'custom-stats-row');
                row.classList.add('col', 'col-sm-8', 'm-sm-auto', 'row', 'stats');
                el.insertBefore(row, changelog);

                createSceneStashIDPct(row);
                createStudioStashIDPct(row);
                createPerformerStashIDPct(row);
                createPerformerFavorites(row);
                createMarkersStat(row);
            }
        });
    });
})();
@ -1,9 +0,0 @@
name: Extended Stats
# requires: StashUserscriptLibrary
description: Adds new stats to the stats page
version: 1.0
ui:
  requires:
    - StashUserscriptLibrary
  javascript:
    - stats.js
@ -1,30 +0,0 @@

# Tag Graph Generator

## Requirements
* python >= 3.7.X
* `pip install -r requirements.txt`

---

## Usage

### Running as a plugin
Move the `tagGraph` directory into Stash's plugins directory and reload plugins; you can then run the **Generate Graph** task.

### Running as a script
> **⚠️ Note:** use this if you are connecting to a remote instance of stash

Ensure `STASH_SETTINGS` is configured properly; you will likely need to change it.

Run `python .\tag_graph.py -script`

### View graph
A `tag_graph.html` file will be generated inside the tagGraph directory; open it with a browser to view/interact with the graph.

---

## Customizing the graph
Set `SHOW_OPTIONS` to `True` and you will get an interface to play around with that affects what the graph looks like.

For more info see the [pyvis docs](https://pyvis.readthedocs.io/en/latest/tutorial.html#using-the-configuration-ui-to-dynamically-tweak-network-settings)
@ -1,7 +0,0 @@
STASH_SETTINGS = {
    "Scheme": "http",
    "Domain": "localhost",
    "Port": "9999",
    "ApiKey": "YOUR_API_KEY_HERE"
}
SHOW_OPTIONS = False
@ -1,2 +0,0 @@
pyvis==0.1.9
requests==2.25.1
@ -1,247 +0,0 @@

import os, re, sys, copy, json, requests

# local dependencies
import config
# external dependencies
from pyvis.network import Network

class StashLogger:
    # Log messages sent from a script scraper instance are transmitted via stderr and are
    # encoded with a prefix consisting of special character SOH, then the log
    # level (one of t, d, i, w or e - corresponding to trace, debug, info,
    # warning and error levels respectively), then special character
    # STX.
    #
    # The log.trace, log.debug, log.info, log.warning, and log.error methods, and their equivalent
    # formatted methods are intended for use by script scraper instances to transmit log
    # messages.
    #
    def __log(self, level_char: bytes, s):
        if not level_char:
            return
        lvl_char = "\x01{}\x02".format(level_char.decode())
        s = re.sub(r"data:image.+?;base64(.+?')", "[...]", str(s))
        for x in s.split("\n"):
            print(lvl_char, x, file=sys.stderr, flush=True)
    def trace(self, s):
        self.__log(b't', s)
    def debug(self, s):
        self.__log(b'd', s)
    def info(self, s):
        self.__log(b'i', s)
    def warning(self, s):
        self.__log(b'w', s)
    def error(self, s):
        self.__log(b'e', s)
    def progress(self, p):
        progress = min(max(0, p), 1)
        self.__log(b'p', str(progress))

class StashInterface:
    port = ""
    url = ""
    headers = {
        "Accept-Encoding": "gzip, deflate",
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Connection": "keep-alive",
        "DNT": "1"
    }
    cookies = {}

    def __init__(self, conn, fragments={}):
        global log

        if conn.get("Logger"):
            log = conn.get("Logger")
        else:
            raise Exception("No logger passed to StashInterface")

        self.port = conn['Port'] if conn.get('Port') else '9999'
        scheme = conn['Scheme'] if conn.get('Scheme') else 'http'

        api_key = conn.get("ApiKey")
        if api_key:
            self.headers["ApiKey"] = api_key

        # Session cookie for authentication
        self.cookies = {}
        if conn.get('SessionCookie'):
            self.cookies.update({
                'session': conn['SessionCookie']['Value']
            })

        domain = conn['Domain'] if conn.get('Domain') else 'localhost'

        # Stash GraphQL endpoint
        self.url = f'{scheme}://{domain}:{self.port}/graphql'

        try:
            self.get_stash_config()
        except Exception:
            log.error(f"Could not connect to Stash at {self.url}")
            sys.exit()

        log.info(f"Using Stash's GraphQl endpoint at {self.url}")

        self.fragments = fragments

    def __resolveFragments(self, query):
        fragmentReferences = list(set(re.findall(r'(?<=\.\.\.)\w+', query)))
        fragments = []
        for ref in fragmentReferences:
            fragments.append({
                "fragment": ref,
                "defined": bool(re.search("fragment {}".format(ref), query))
            })

        if all([f["defined"] for f in fragments]):
            return query
        else:
            for fragment in [f["fragment"] for f in fragments if not f["defined"]]:
                if fragment not in self.fragments:
                    raise Exception(f'GraphQL error: fragment "{fragment}" not defined')
                query += self.fragments[fragment]
            return self.__resolveFragments(query)

    def __callGraphQL(self, query, variables=None):
        query = self.__resolveFragments(query)

        json_request = {'query': query}
        if variables is not None:
            json_request['variables'] = variables

        response = requests.post(self.url, json=json_request, headers=self.headers, cookies=self.cookies)

        if response.status_code == 200:
            result = response.json()

            if result.get("errors"):
                for error in result["errors"]:
                    log.error(f"GraphQL error: {error}")
            if result.get("error"):
                for error in result["error"]["errors"]:
                    log.error(f"GraphQL error: {error}")
            if result.get("data"):
                return result['data']
        elif response.status_code == 401:
            sys.exit("HTTP Error 401, Unauthorized. Cookie authentication most likely failed")
        else:
            raise ConnectionError(
                "GraphQL query failed:{} - {}. Query: {}. Variables: {}".format(
                    response.status_code, response.content, query, variables)
            )

    def __match_alias_item(self, search, items):
        item_matches = {}
        for item in items:
            if re.match(rf'{search}$', item.name, re.IGNORECASE):
                log.debug(f'matched "{search}" to "{item.name}" ({item.id}) using primary name')
                item_matches[item.id] = item
            if not item.aliases:
                continue
            for alias in item.aliases:
                if re.match(rf'{search}$', alias.strip(), re.IGNORECASE):
                    log.debug(f'matched "{search}" to "{alias}" ({item.id}) using alias')
                    item_matches[item.id] = item
        return list(item_matches.values())

    def get_stash_config(self):
        query = """
        query Configuration {
            configuration { general { stashes{ path } } }
        }
        """
        result = self.__callGraphQL(query)
        return result['configuration']

    def get_tags_with_relations(self):
        query = """
        query FindTags($filter: FindFilterType, $tag_filter: TagFilterType) {
            findTags(filter: $filter, tag_filter: $tag_filter) {
                count
                tags {
                    id
                    name
                    parents { id }
                    children { id }
                }
            }
        }
        """

        variables = {
            "tag_filter": {
                "child_count": {"modifier": "GREATER_THAN", "value": 0},
                "OR": {
                    "parent_count": {"modifier": "GREATER_THAN", "value": 0}}
            },
            "filter": {"q": "", "per_page": -1}
        }
        result = self.__callGraphQL(query, variables)
        return result['findTags']['tags']

def script_init():
    import logging as log
    log.basicConfig(level=log.INFO, format='%(levelname)s: %(message)s')
    stash_connection = config.STASH_SETTINGS
    stash_connection["Logger"] = log
    generate_graph(stash_connection)

def plugin_init():
    log = StashLogger()
    stash_connection = json.loads(sys.stdin.read())["server_connection"]
    stash_connection["Logger"] = log
    generate_graph(stash_connection)
    print(json.dumps({"output": "ok"}))

def generate_graph(stash_connection):
    log = stash_connection["Logger"]

    stash = StashInterface(stash_connection)
    log.info("getting tags from stash...")
    tags = stash.get_tags_with_relations()

    log.info("generating graph...")
    if config.SHOW_OPTIONS:
        G = Network(directed=True, height="100%", width="66%", bgcolor="#202b33", font_color="white")
        G.show_buttons()
    else:
        G = Network(directed=True, height="100%", width="100%", bgcolor="#202b33", font_color="white")

    node_theme = {
        "border": "#adb5bd",
        "background": "#394b59",
        "highlight": {
            "border": "#137cbd",
            "background": "#FFFFFF"
        }
    }
    edge_theme = {
        "color": "#FFFFFF",
        "highlight": "#137cbd"
    }

    # create all nodes
    for tag in tags:
        G.add_node(tag["id"], label=tag["name"], color=node_theme)
    # create all edges
    for tag in tags:
        for child in tag["children"]:
            G.add_edge(tag["id"], child["id"], color=edge_theme)

    current_abs_path = os.path.dirname(os.path.abspath(__file__))
    save_path = os.path.join(current_abs_path, "tag_graph.html")

    G.save_graph(save_path)
    log.info(f'saved graph to "{save_path}"')


if __name__ == '__main__':
    if len(sys.argv) > 1:
        script_init()
    else:
        plugin_init()
@ -1,12 +0,0 @@
name: Tag Graph
description: Creates a visual of the Tag relations
version: 0.2
exec:
  - python
  - "{pluginDir}/tag_graph.py"
interface: raw
tasks:
  - name: Generate Graph
    description: generates graph from current tag data
    defaultArgs:
      mode: generate
@ -1,16 +0,0 @@
# timestampTrade
I've created the API at https://timestamp.trade to sync markers between stash instances and xbvr.
It sits alongside other metadata databases like stashdb while we wait for the feature to be added there.

The API does not currently require an API key, but one may be required in the future.

Fetching scenes requires a stashdb id on the scene.
Submitting markers does not require a stash id on the scene, but it is recommended.
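
For reference, here is a minimal sketch of the marker-fetch call; the endpoint and response shape are taken from `timestampTrade.py` below, while the stash id and scene payload are placeholders:
```py
import requests

stash_id = "00000000-0000-0000-0000-000000000000"  # placeholder stashdb id
scene = {"title": "Example Scene"}  # the plugin posts the scene's own JSON here

res = requests.post(f"https://timestamp.trade/get-markers/{stash_id}", json=scene)
for m in res.json().get("marker") or []:
    print(m["name"], m["start"] / 1000)  # start times are in milliseconds
```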

### Installation
Move the `timestampTrade` directory into Stash's plugins directory and reload plugins.

### Tasks
* Submit - Submit markers for all scenes that have markers.
* Sync - Fetch markers for all scenes with a stash id.
* Post update hook - Fetch markers for the updated scene.
@ -1,2 +0,0 @@
requests
stashapp-tools
@ -1,120 +0,0 @@
import stashapi.log as log
from stashapi.stashapp import StashInterface
import stashapi.marker_parse as mp
import os
import sys
import requests
import json
import time
import math

per_page = 100
request_s = requests.Session()

def processScene(s):
    if len(s['stash_ids']) == 0:
        log.debug('scene has no stash ids, skipping')
        return
    skip_sync_tag_id = stash.find_tag('[Timestamp: Skip Sync]', create=True).get("id")
    for sid in s['stash_ids']:
        try:
            if any(tag['id'] == str(skip_sync_tag_id) for tag in s['tags']):
                log.debug('scene has skip sync tag')
                return
            log.debug('looking up markers for stash id: ' + sid['stash_id'])
            res = requests.post('https://timestamp.trade/get-markers/' + sid['stash_id'], json=s)
            md = res.json()
            if md.get('marker'):
                log.info('api returned markers for scene: ' + s['title'] + ' marker count: ' + str(len(md['marker'])))
                markers = []
                for m in md['marker']:
                    # log.debug('-- ' + m['name'] + ", " + str(m['start'] / 1000))
                    marker = {}
                    marker["seconds"] = m['start'] / 1000
                    marker["primary_tag"] = m["tag"]
                    marker["tags"] = []
                    marker["title"] = m['name']
                    markers.append(marker)
                if len(markers) > 0:
                    log.info('Saving markers')
                    mp.import_scene_markers(stash, markers, s['id'], 15)
            else:
                log.debug('api returned no markers for scene: ' + s['title'])
        except json.decoder.JSONDecodeError:
            log.error('api returned invalid JSON for stash id: ' + sid['stash_id'])


def processAll():
    log.info('Getting scene count')
    skip_sync_tag_id = stash.find_tag('[Timestamp: Skip Sync]', create=True).get("id")
    count = stash.find_scenes(f={"stash_id": {"value": "", "modifier": "NOT_NULL"}, "has_markers": "false", "tags": {"depth": 0, "excludes": [skip_sync_tag_id], "modifier": "INCLUDES_ALL", "value": []}}, filter={"per_page": 1}, get_count=True)[0]
    log.info(str(count) + ' scenes to process.')
    i = 0
    for r in range(1, math.ceil(count / per_page) + 1):
        log.info('fetching data: %s - %s %0.1f%%' % ((r - 1) * per_page, r * per_page, (i / count) * 100,))
        scenes = stash.find_scenes(f={"stash_id": {"value": "", "modifier": "NOT_NULL"}, "has_markers": "false"}, filter={"page": r, "per_page": per_page})
        for s in scenes:
            processScene(s)
            i = i + 1
            log.progress((i / count))
        time.sleep(2)

def submit():
    scene_fgmt = """title
    details
    url
    date
    performers{
        name
        stash_ids{
            endpoint
            stash_id
        }
    }
    tags{
        name
    }
    studio{
        name
        stash_ids{
            endpoint
            stash_id
        }
    }
    stash_ids{
        endpoint
        stash_id
    }
    scene_markers{
        title
        seconds
        primary_tag{
            name
        }
    }"""
    skip_submit_tag_id = stash.find_tag('[Timestamp: Skip Submit]', create=True).get("id")
    count = stash.find_scenes(f={"has_markers": "true", "tags": {"depth": 0, "excludes": [skip_submit_tag_id], "modifier": "INCLUDES_ALL", "value": []}}, filter={"per_page": 1}, get_count=True)[0]
    i = 0
    for r in range(1, math.ceil(count / per_page) + 1):
        log.info('submitting scenes: %s - %s %0.1f%%' % ((r - 1) * per_page, r * per_page, (i / count) * 100,))
        scenes = stash.find_scenes(f={"has_markers": "true"}, filter={"page": r, "per_page": per_page}, fragment=scene_fgmt)
        for s in scenes:
            log.debug("submitting scene: " + str(s))
            request_s.post('https://timestamp.trade/submit-stash', json=s)
            i = i + 1
            log.progress((i / count))
        time.sleep(2)

json_input = json.loads(sys.stdin.read())
FRAGMENT_SERVER = json_input["server_connection"]
stash = StashInterface(FRAGMENT_SERVER)
if 'mode' in json_input['args']:
    PLUGIN_ARGS = json_input['args']["mode"]
    if 'submit' in PLUGIN_ARGS:
        submit()
    elif 'process' in PLUGIN_ARGS:
        processAll()
elif 'hookContext' in json_input['args']:
    id = json_input['args']['hookContext']['id']
    scene = stash.find_scene(id)
    processScene(scene)
@ -1,22 +0,0 @@
name: Timestamp Trade
description: Sync Markers with timestamp.trade, a new database for sharing markers.
version: 0.2
url: https://github.com/stashapp/CommunityScripts/
exec:
  - python
  - "{pluginDir}/timestampTrade.py"
interface: raw
hooks:
  - name: Add Marker to Scene
    description: Fetches markers for the scene from timestamp.trade when a scene is updated
    triggeredBy:
      - Scene.Update.Post
tasks:
  - name: 'Submit'
    description: Submit markers to timestamp.trade
    defaultArgs:
      mode: submit
  - name: 'Sync'
    description: Get markers for all scenes with a stash id
    defaultArgs:
      mode: process
@ -1,22 +0,0 @@

# titleFromFilename
Sets a scene's title from its filename.

## Requirements
- Stash (versions after the files refactor PR, API > 31)
- Python 3.10
- Requests Module (https://pypi.org/project/requests/)

## Installation

- Download the whole `titleFromFilename` folder
- Place it in your **plugins** folder (where the `config.yml` is). If it's not there, create it
- Reload plugins from stash (Settings > Plugins -> Reload Plugins)
- titleFromFilename should appear

## Usage
When a scene is created, the plugin will set the title to the filename.
By default the file extension will not be added to the title.
If you want to keep the file extension, open the `config.py` file and change `STRIP_EXT = True` to `STRIP_EXT = False`.

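For reference, a minimal sketch of the stripping behaviour, mirroring the `os.path.splitext` call in `titleFromFilename.py` (the filename is a placeholder):
```py
import os

STRIP_EXT = True  # from config.py
basename = "My Scene.mp4"  # placeholder filename

title = os.path.splitext(basename)[0] if STRIP_EXT else basename
print(title)  # -> "My Scene"
```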
@ -1,2 +0,0 @@
# strip file extension from title
STRIP_EXT = True
@ -1,99 +0,0 @@
import json
import sys

import requests

import log

def exit_plugin(msg=None, err=None):
    if msg is None and err is None:
        msg = "plugin ended"
    output_json = {"output": msg, "error": err}
    print(json.dumps(output_json))
    sys.exit()

def doRequest(query, variables=None, port=9999, session=None, scheme="http", raise_exception=True):
    # Session cookie for authentication
    graphql_port = port
    graphql_scheme = scheme
    graphql_cookies = {
        'session': session
    }

    graphql_headers = {
        "Accept-Encoding": "gzip, deflate",
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Connection": "keep-alive",
        "DNT": "1"
    }
    graphql_domain = 'localhost'
    # Stash GraphQL endpoint
    graphql_url = graphql_scheme + "://" + graphql_domain + ":" + str(graphql_port) + "/graphql"

    json_request = {'query': query}
    if variables is not None:
        json_request['variables'] = variables
    try:
        response = requests.post(graphql_url, json=json_request, headers=graphql_headers, cookies=graphql_cookies, timeout=20)
    except Exception as e:
        exit_plugin(err=f"[FATAL] Exception with GraphQL request. {e}")
    if response.status_code == 200:
        result = response.json()
        if result.get("error"):
            for error in result["error"]["errors"]:
                if raise_exception:
                    raise Exception(f"GraphQL error: {error}")
                else:
                    log.LogError(f"GraphQL error: {error}")
            return None
        if result.get("data"):
            return result.get("data")
    elif response.status_code == 401:
        exit_plugin(err="HTTP Error 401, Unauthorised.")
    else:
        raise ConnectionError(f"GraphQL query failed: {response.status_code} - {response.content}")

def update_scene_title(scene_id, scene_title, port, session, scheme):
    query = """
    mutation UpdateSceneTitle($id: ID!, $title: String) {
        sceneUpdate(
            input: {id: $id, title: $title}
        ) {
            title
        }
    }
    """
    variables = {
        "id": scene_id,
        "title": scene_title
    }
    result = doRequest(query=query, variables=variables, port=port, session=session, scheme=scheme)
    return result.get('sceneUpdate')

def get_scene_base(scene_id, port, session, scheme):
    query = """
    query FindScene($id: ID!, $checksum: String) {
        findScene(id: $id, checksum: $checksum) {
            files {
                basename
            }
        }
    }
    """
    variables = {
        "id": scene_id
    }
    result = doRequest(query=query, variables=variables, port=port, session=session, scheme=scheme)
    return result.get('findScene')

def get_api_version(port, session, scheme):
    query = """
    query SystemStatus {
        systemStatus {
            databaseSchema
            appSchema
        }
    }
    """
    result = doRequest(query=query, port=port, session=session, scheme=scheme)
    return result.get('systemStatus')

@ -1,52 +0,0 @@
import sys


# Log messages sent from a plugin instance are transmitted via stderr and are
# encoded with a prefix consisting of special character SOH, then the log
# level (one of t, d, i, w, e, or p - corresponding to trace, debug, info,
# warning, error and progress levels respectively), then special character
# STX.
#
# The LogTrace, LogDebug, LogInfo, LogWarning, and LogError methods, and their equivalent
# formatted methods are intended for use by plugin instances to transmit log
# messages. The LogProgress method is also intended for sending progress data.
#

def __prefix(level_char):
    start_level_char = b'\x01'
    end_level_char = b'\x02'

    ret = start_level_char + level_char + end_level_char
    return ret.decode()


def __log(level_char, s):
    if level_char == "":
        return

    print(__prefix(level_char) + s + "\n", file=sys.stderr, flush=True)


def LogTrace(s):
    __log(b't', s)


def LogDebug(s):
    __log(b'd', s)


def LogInfo(s):
    __log(b'i', s)


def LogWarning(s):
    __log(b'w', s)


def LogError(s):
    __log(b'e', s)


def LogProgress(p):
    progress = min(max(0, p), 1)
    __log(b'p', str(progress))
@ -1 +0,0 @@
requests
@ -1,69 +0,0 @@
import json
import os
import sys
import time

import config
#import log
import graphql

API_VERSION_BF_FILES = 31  # APP/DB Schema version prior to files refactor PR
MAX_RETRY_COUNT = 25
SLEEP_RETRY = 0.5

FRAGMENT = json.loads(sys.stdin.read())
#log.LogDebug(json.dumps(FRAGMENT))
FRAGMENT_SERVER = FRAGMENT["server_connection"]
FRAGMENT_SCENE_ID = FRAGMENT["args"].get("hookContext")

if FRAGMENT_SCENE_ID:
    scene_id = FRAGMENT_SCENE_ID["id"]
else:
    graphql.exit_plugin("No ID found")

graphql_port = FRAGMENT_SERVER['Port']
graphql_scheme = FRAGMENT_SERVER['Scheme']
graphql_session = FRAGMENT_SERVER.get('SessionCookie').get('Value')

system_status = graphql.get_api_version(port=graphql_port,
                                        session=graphql_session,
                                        scheme=graphql_scheme)

api_version = system_status.get("appSchema")

basename = None

if api_version > API_VERSION_BF_FILES:  # only needed for versions after files refactor
    files_base = graphql.get_scene_base(scene_id=scene_id,
                                        port=graphql_port,
                                        session=graphql_session,
                                        scheme=graphql_scheme)
    if len(files_base["files"]) > 0:
        basename = files_base["files"][0].get("basename")
else:
    graphql.exit_plugin(
        f"Stash with API version:{api_version} is not supported. You need at least {API_VERSION_BF_FILES}"
    )

if basename is None:
    graphql.exit_plugin("No basename found")  # file-less scene

if config.STRIP_EXT:
    basename = os.path.splitext(basename)[0]

i = MAX_RETRY_COUNT
while i >= 0:
    #log.LogDebug(f"TitleFromFilename: Retry attempt {i}")
    i -= 1
    updated_scene = graphql.update_scene_title(scene_id,
                                               basename,
                                               port=graphql_port,
                                               session=graphql_session,
                                               scheme=graphql_scheme)
    if updated_scene:
        graphql.exit_plugin(
            f"Scene title updated after {MAX_RETRY_COUNT - i} tries. Title:{updated_scene.get('title')}"
        )
    time.sleep(SLEEP_RETRY)

graphql.exit_plugin("Error updating scene")
@ -1,13 +0,0 @@
name: titleFromFilename
description: Set a scene's title from its filename
url: https://github.com/stashapp/CommunityScripts
version: 1.2
exec:
  - python
  - "{pluginDir}/titleFromFilename.py"
interface: raw
hooks:
  - name: hook_set_title_from_filename
    description: Set the title of the scene to its filename
    triggeredBy:
      - Scene.Create.Post
@ -1,94 +0,0 @@
# SQLITE Renamer for Stash
Using metadata from your database (SQLITE) to rename your files.

## :exclamation: Important :exclamation:
**By doing this, you will make definitive changes to your Database and Files!**
###### (You can keep a logfile (`USING_LOG`), so you can probably revert everything...)


## Requirements
- Python (Tested on Python v3.9.1 64bit, Win10)
- ProgressBar2 Module (https://github.com/WoLpH/python-progressbar)
- Stash Database (https://github.com/stashapp/stash)
- Windows 10? (No idea if this works on every OS)

## Usage

- I recommend making a copy of your database. (Use "backup" in Stash Settings)
- You need to set your Database path ([Line 9](Stash_Sqlite_Renamer.py#L9))
- Replace things between [Line 270 - 301](Stash_Sqlite_Renamer.py#L270)

## First Run
Set `DRY_RUN` to True ([Line 13](Stash_Sqlite_Renamer.py#L13)); by doing this nothing will be changed.
- This will create a file `renamer_dryrun.txt` that shows how the path/file will be changed.

You can uncomment the break ([Line 254](Stash_Sqlite_Renamer.py#L254)), so it will stop after the first file.

## Filename template
Available: `$date` `$performer` `$title` `$studio` `$height`

The script will replace these fields with the data from the database.
Example:
| Template | Result
| ------------- |:-------------:
$title|Her Fantasy Ball.mp4
$title $height|Her Fantasy Ball 1080p.mp4
$date $title|2016-12-29 Her Fantasy Ball.mp4
$date $performer - $title [$studio]|2016-12-29 Eva Lovia - Her Fantasy Ball [Sneaky Sex].mp4

Note:
- A regex will remove characters that are illegal on Windows.
- If your path is more than 240 characters, the script will try to reduce it. It will only use Date + Title.
- If the height of the video is 2160/4320, it will be replaced by `4k`/`8k`; otherwise it will be `height + p` (240p, 720p, 1080p...)
- If the scene contains more than 3 performers, $performer will be replaced with nothing.

## Change scenes by tags

If you want different formats per tag, create a dict with `tag` (The name of the tag in Stash) & `filename` (Filename template):
```py
tags_dict = {
    '1': {
        'tag': '1. JAV',
        'filename': '$title'
    },
    '2': {
        'tag': '1. Anime',
        'filename': '$date $title'
    }
}

for _, dict_section in tags_dict.items():
    tag_name = dict_section.get("tag")
    filename_template = dict_section.get("filename")
    id_tags = gettingTagsID(tag_name)
    if id_tags is not None:
        id_scene = get_SceneID_fromTags(id_tags)
        option_sqlite_query = "WHERE id in ({})".format(id_scene)
        edit_db(filename_template, option_sqlite_query)
    print("====================")
```

If you only want to change one tag:
```py
id_tags = gettingTagsID('1. JAV')
if id_tags is not None:
    id_scene = get_SceneID_fromTags(id_tags)
    option_sqlite_query = "WHERE id in ({})".format(id_scene)
    edit_db("$date $performer - $title [$studio]", option_sqlite_query)
```
## Change all scenes

```py
edit_db("$date $performer - $title [$studio]")
```

## Optional SQLITE

If you only want to change a specific path, use the second parameter of `edit_db()`; it will be appended to the sqlite query. [(Documentation ?)](https://www.tutorialspoint.com/sqlite/sqlite_where_clause.htm)

Example (only take files whose path starts with `E:\\Film\\R18`):
```py
option_sqlite_query = "WHERE path LIKE 'E:\\Film\\R18\\%'"
edit_db("$date $performer - $title [$studio]", option_sqlite_query)
```

@ -1,309 +0,0 @@
import os
import re
import sqlite3
import sys

import progressbar

# Your sqlite path
DB_PATH = r"C:\Users\Winter\.stash\Full.sqlite"
# Log keeps a trace of OldPath & new_path. Could be useful if you want to revert everything. Filename: rename_log.txt
USING_LOG = True
# DRY_RUN = True | Won't change anything in your database & on disk.
DRY_RUN = False
# Only take female performer names
FEMALE_ONLY = False
# Print debug messages
DEBUG_MODE = True

def logPrint(q):
    if "[DEBUG]" in q and DEBUG_MODE == False:
        return
    print(q)

logPrint("Database Path: {}".format(DB_PATH))
if DRY_RUN == True:
    try:
        os.remove("renamer_dryrun.txt")
    except FileNotFoundError:
        pass
    logPrint("[DRY_RUN] DRY-RUN Enabled")


def gettingTagsID(name):
    cursor.execute("SELECT id from tags WHERE name=?;", [name])
    result = cursor.fetchone()
    try:
        tag_id = str(result[0])
        logPrint("[Tag] [{}] {}".format(tag_id, name))
    except TypeError:
        tag_id = None
        logPrint("[Tag] Error when trying to get: {}".format(name))
    return tag_id


def get_SceneID_fromTags(tag_id):
    cursor.execute("SELECT scene_id from scenes_tags WHERE tag_id=?;", [tag_id])
    record = cursor.fetchall()
    logPrint("There is {} scene(s) with the tag_id {}".format(len(record), tag_id))
    array_ID = []
    for row in record:
        array_ID.append(row[0])
    id_list = ",".join(map(str, array_ID))
    return id_list


def get_Perf_fromSceneID(id_scene):
    perf_list = ""
    cursor.execute("SELECT performer_id from performers_scenes WHERE scene_id=?;", [id_scene])
    record = cursor.fetchall()
    #logPrint("Performer in scene: ", len(record))
    if len(record) > 3:
        logPrint("More than 3 performers.")
    else:
        perfcount = 0
        for row in record:
            perf_id = str(row[0])
            cursor.execute("SELECT name,gender from performers WHERE id=?;", [perf_id])
            perf = cursor.fetchall()
            if FEMALE_ONLY == True:
                # Only take female gender
                if str(perf[0][1]) == "FEMALE":
                    perf_list += str(perf[0][0]) + " "
                    perfcount += 1
                else:
                    continue
            else:
                perf_list += str(perf[0][0]) + " "
                perfcount += 1
    perf_list = perf_list.strip()
    return perf_list


def get_Studio_fromID(studio_id):
    cursor.execute("SELECT name from studios WHERE id=?;", [studio_id])
    record = cursor.fetchall()
    studio_name = str(record[0][0])
    return studio_name


def makeFilename(scene_info, query):
    # Query examples:
    # Available: $date $performer $title $studio $height
    # $title == SSNI-000.mp4
    # $date $title == 2017-04-27 Oni Chichi.mp4
    # $date $performer - $title [$studio] == 2016-12-29 Eva Lovia - Her Fantasy Ball [Sneaky Sex].mp4
    new_filename = str(query)
    if "$date" in new_filename:
        if scene_info.get('date') == "" or scene_info.get('date') is None:
            new_filename = re.sub(r'\$date\s*', '', new_filename)
        else:
            new_filename = new_filename.replace("$date", scene_info["date"])

    if "$performer" in new_filename:
        if scene_info.get('performer') == "" or scene_info.get('performer') is None:
            new_filename = re.sub(r'\$performer\s*', '', new_filename)
        else:
            new_filename = new_filename.replace("$performer", scene_info["performer"])

    if "$title" in new_filename:
        if scene_info.get('title') == "" or scene_info.get('title') is None:
            new_filename = re.sub(r'\$title\s*', '', new_filename)
        else:
            new_filename = new_filename.replace("$title", scene_info["title"])

    if "$studio" in new_filename:
        if scene_info.get('studio') == "" or scene_info.get('studio') is None:
            new_filename = re.sub(r'\$studio\s*', '', new_filename)
        else:
            new_filename = new_filename.replace("$studio", scene_info["studio"])

    if "$height" in new_filename:
        if scene_info.get('height') == "" or scene_info.get('height') is None:
            new_filename = re.sub(r'\$height\s*', '', new_filename)
        else:
            new_filename = new_filename.replace("$height", scene_info["height"])
    new_filename = re.sub(r'^\s*-\s*', '', new_filename)
    new_filename = re.sub(r'\s*-\s*$', '', new_filename)
    new_filename = re.sub(r'\[\W*]', '', new_filename)
    new_filename = re.sub(r'\s{2,}', ' ', new_filename)
    new_filename = new_filename.strip()
    return new_filename


def edit_db(query_filename, optional_query=None):
    query = "SELECT id,path,title,date,studio_id,height from scenes;"
    if optional_query is not None:
        query = "SELECT id,path,title,date,studio_id,height from scenes {};".format(optional_query)
    cursor.execute(query)
    record = cursor.fetchall()
    if len(record) == 0:
        logPrint("[Warn] There is no scene to change with this query")
        return
    logPrint("Number of scenes: {}".format(len(record)))
    progressbar_Index = 0
    progress = progressbar.ProgressBar(redirect_stdout=True).start(len(record))
    for row in record:
        progress.update(progressbar_Index + 1)
        progressbar_Index += 1
        scene_ID = str(row[0])
        # Fixing drive letter (X:Folder -> X:\Folder)
        current_path = re.sub(r"^(.):\\*", r"\1:\\", str(row[1]))
        current_directory = os.path.dirname(current_path)
        current_filename = os.path.basename(current_path)
        file_extension = os.path.splitext(current_path)[1]
        scene_title = str(row[2])
        scene_date = str(row[3])
        scene_Studio_id = str(row[4])
        file_height = str(row[5])
        # By default, the title contains the extension.
        scene_title = re.sub(file_extension + '$', '', scene_title)

        performer_name = get_Perf_fromSceneID(scene_ID)

        studio_name = ""
        if (scene_Studio_id and scene_Studio_id != "None"):
            studio_name = get_Studio_fromID(scene_Studio_id)

        if file_height == '4320':
            file_height = '8k'
        else:
            if file_height == '2160':
                file_height = '4k'
            else:
                file_height = "{}p".format(file_height)

        scene_info = {
            "title": scene_title,
            "date": scene_date,
            "performer": performer_name,
            "studio": studio_name,
            "height": file_height
        }
        logPrint("[DEBUG] Scene information: {}".format(scene_info))
        # Create the new filename
        new_filename = makeFilename(scene_info, query_filename) + file_extension

        # Remove characters that are illegal on Windows ('#' and ',' are not illegal, you can remove them)
        new_filename = re.sub(r'[\\/:"*?<>|#,]+', '', new_filename)

        # Replace the old filename with the new one in the filepath
        new_path = current_path.replace(current_filename, new_filename)

        if len(new_path) > 240:
            logPrint("[Warn] The Path is too long ({})".format(new_path))
            # We only use the date and title to get a shorter file (eg: 2017-04-27 - Oni Chichi.mp4)
            if scene_info.get("date"):
                reducePath = len(current_directory + scene_info["title"] + scene_info["date"] + file_extension) + 3
            else:
                reducePath = len(current_directory + scene_info["title"] + file_extension) + 3
            if reducePath < 240:
                if scene_info.get("date"):
                    new_filename = makeFilename(scene_info, "$date - $title") + file_extension
                else:
                    new_filename = makeFilename(scene_info, "$title") + file_extension
                #new_path = re.sub('{}$'.format(current_filename), new_filename, current_path)
                new_path = current_path.replace(current_filename, new_filename)
                logPrint("Reduced filename to: {}".format(new_filename))
            else:
                logPrint("[Error] Can't manage to reduce the path, ID: {}".format(scene_ID))
                continue

        # Looking for duplicate filenames
        cursor.execute("SELECT id FROM scenes WHERE path LIKE ? AND NOT id=?;", ["%" + new_filename, scene_ID])
        dupl_check = cursor.fetchall()
        if len(dupl_check) > 0:
            for dupl_row in dupl_check:
                logPrint("[Error] Same filename: [{}]".format(dupl_row[0]))
                print("[{}] - {}\n".format(dupl_row[0], new_filename),
                      file=open("renamer_duplicate.txt", "a", encoding='utf-8'))
            logPrint("\n")
            continue

        logPrint("[DEBUG] Filename: {} -> {}".format(current_filename, new_filename))
        logPrint("[DEBUG] Path: {} -> {}".format(current_path, new_path))
        if (new_path == current_path):
            logPrint("[DEBUG] File already good.\n")
            continue
        else:
            #
            # THIS PART WILL EDIT YOUR DATABASE, FILES (be careful and know what you do)
            #
            # Windows Rename
            if (os.path.isfile(current_path) == True):
                if DRY_RUN == False:
                    os.rename(current_path, new_path)
                    if (os.path.isfile(new_path) == True):
                        logPrint("[OS] File Renamed! ({})".format(current_filename))
                        if USING_LOG == True:
                            print("{}|{}|{}\n".format(scene_ID, current_path, new_path), file=open("rename_log.txt", "a", encoding='utf-8'))

                        # Database rename
                        cursor.execute("UPDATE scenes SET path=? WHERE id=?;", [new_path, scene_ID])
                        sqliteConnection.commit()
                        logPrint("[SQLITE] Database Updated!")
                    else:
                        logPrint("[OS] File failed to rename? ({})".format(current_filename))
                        print("{} -> {}\n".format(current_path, new_path), file=open("renamer_fail.txt", "a", encoding='utf-8'))
                else:
                    logPrint("[DRY_RUN][OS] File should be renamed")
                    print("{} -> {}\n".format(current_path, new_path), file=open("renamer_dryrun.txt", "a", encoding='utf-8'))
            else:
                logPrint("[OS] File doesn't exist on your disk/drive ({})".format(current_path))
        logPrint("\n")
        # break
    progress.finish()
    if DRY_RUN == False:
        sqliteConnection.commit()
    return


try:
    sqliteConnection = sqlite3.connect(DB_PATH)
    cursor = sqliteConnection.cursor()
    logPrint("Python successfully connected to SQLite\n")
except sqlite3.Error as error:
    logPrint("FATAL SQLITE Error: {}".format(error))
    input("Press Enter to continue...")
    sys.exit(1)

# THIS PART IS PERSONAL, YOU SHOULD CHANGE THINGS BELOW :)

# Select Scenes with Specific Tags
tags_dict = {
    '1': {
        'tag': '!1. JAV',
        'filename': '$title'
    },
    '2': {
        'tag': '!1. Anime',
        'filename': '$date $title'
    },
    '3': {
        'tag': '!1. Western',
        'filename': '$date $performer - $title [$studio]'
    }
}

for _, dict_section in tags_dict.items():
    tag_name = dict_section.get("tag")
    filename_template = dict_section.get("filename")
    id_tags = gettingTagsID(tag_name)
    if id_tags is not None:
        id_scene = get_SceneID_fromTags(id_tags)
        option_sqlite_query = "WHERE id in ({}) AND path LIKE 'E:\\Film\\R18\\%'".format(id_scene)
        edit_db(filename_template, option_sqlite_query)
    logPrint("====================")

# Select ALL scenes
#edit_db("$date $performer - $title [$studio]")

# END OF PERSONAL THINGS

if DRY_RUN == False:
    sqliteConnection.commit()
cursor.close()
sqliteConnection.close()
logPrint("The SQLite connection is closed")
# Input if you want to check the console.
input("Press Enter to continue...")
@ -1,47 +0,0 @@
|
||||
# Kodi helper
|
||||
|
||||
## Features
|
||||
|
||||
Kodi helper generates files that can be imported with Kodi, to integrate your stash metadata into your Kodi system.
|
||||
|
||||
Kodi helper can generate nfo files alongside the source files, or in a specific directory. For more information on how Kodi uses nfo files, see the [Kodi wiki page](https://kodi.wiki/view/NFO_files).
|
||||
|
||||
Kodi helper can also generate strm files, along with nfo files. For more information on how Kodi uses strm files, see the [Kodi wiki page](https://kodi.wiki/view/Internet_video_and_audio_streams).
|
||||
|
||||
## Configuration
|
||||
|
||||
Modify `config.py` to enter values for `API_KEY` and `SERVER_URL`.
|
||||
|
||||
## Generating NFO files
|
||||
|
||||
`python kodi-helper.py generate-nfo [--inline | --outdir=<output directory>] [--overwrite] [--filter=<filter string>] [--preserve-path --truncate-prefix=<prefix>] [--genre <genre> ...]`
|
||||
|
||||
All nfo files will be named using the same basename as the source file. For example: `foo.mp4` will have `foo.nfo` generated.
|
||||
|
||||
If `--inline` is used, then nfo files will be created alongside the source files. This requires the source files being accessible using the `path` stored by stash. This usually means that the script must be run on the same machine as stash, and if the stash libraries are relative, then the script must be run from the same directory as stash.
|
||||
|
||||
If `--outdir` is provided, then all nfo files will be created in the provided directory. Note that this may cause issues if there are source files with the same basename, as they will generate the same filename. If `--preserve-path` is included, then the full path of the source file will be added to the directory provided with `--outdir`. The path can be stripped of a prefix by providing a `--truncate-prefix` parameter.

The nfo files will not be overwritten by default. This can be overridden with `--overwrite`.

nfo files are generated for all scenes in the system. The scenes can be filtered by providing the `--filter` parameter. The filter parameter must be a JSON graphql string. For example:

`--filter='{"path": { "value": "foo", "modifier": "INCLUDES" }}'`

This will only generate files for scenes whose path includes `foo`.

Genres can be added to nfo files by providing `--genre` parameters. More than one `--genre <genre>` parameter may be provided (e.g. `--genre=foo --genre=bar`).
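
Putting it together, a hypothetical invocation (paths and genres are illustrative) might be:

`python kodi-helper.py generate-nfo --outdir=/nfo --preserve-path --truncate-prefix=/stash --overwrite --genre=foo --genre=bar`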

## Generating STRM files

`python kodi-helper.py generate-strm --outdir=<output directory> [--preserve-path --truncate-prefix=<prefix>] [--use-source-filenames] [--overwrite] [--filter=<filter string>] [--genre <genre> ...]`

This will generate strm and nfo files.

All strm files are named by the scene ID in stash, e.g. `30.strm`. If `--use-source-filenames` is provided, then the strm and nfo files are named after the source file instead.

All files are generated in the directory provided by `--outdir`. If `--preserve-path` is included, then the full path of the source file is appended to the directory provided with `--outdir`. The path can be stripped of a prefix by providing a `--truncate-prefix` parameter.

The generated files will not be overwritten by default. This can be overridden with `--overwrite`.

As with nfo generation, the scenes to generate for can be filtered using the `--filter` parameter.
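
For example, a hypothetical invocation that generates strm/nfo pairs named after the source files, only for scenes whose path contains `foo`:

`python kodi-helper.py generate-strm --outdir=/kodi --use-source-filenames --filter='{"path": { "value": "foo", "modifier": "INCLUDES" }}'`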
@ -1,2 +0,0 @@
api_key = ""
server_url = "http://localhost:9999/graphql"
@ -1,335 +0,0 @@
import argparse
import os
import requests
import math
import re
import json

import config

BATCH_SIZE = 100

def parseArgs():
    parser = argparse.ArgumentParser(description="Generate nfo and strm files for Kodi integration.")
    parser.add_argument("mode", metavar="MODE", choices=["generate-nfo", "generate-strm"], help="generate-nfo or generate-strm")
    parser.add_argument("--inline", action="store_true", help="Generate nfo files alongside video files")
    parser.add_argument("--outdir", metavar="<output directory>", help="Generate files in <outdir>")
    parser.add_argument("--preserve-path", action="store_true", help="Include source file directory structure in output directory (with --outdir only)")
    parser.add_argument("--truncate-prefix", type=str, metavar="<path prefix>", help="Remove prefix from output directory (with --preserve-path only)")
    parser.add_argument("--use-source-filenames", action="store_true", help="Use source filenames for strm files instead of stash id")
    parser.add_argument("--overwrite", action="store_true", help="Overwrite nfo/strm files if already present")
    parser.add_argument("--filter", metavar="<filter string>", help="JSON graphql string to filter scenes with")
    parser.add_argument("--genre", metavar="<genre>", help="Genre to assign. May be included multiple times", action="append")
    return parser.parse_args()

# This script is run directly from the command line and reads its
# options from the command-line parameters.
def main():
    args = parseArgs()

    if args.mode == "generate-nfo":
        generateNFOFiles(args)
    elif args.mode == "generate-strm":
        generateSTRMFiles(args)


def generateNFOFiles(args):
    if not args.inline and not args.outdir:
        print("--outdir or --inline must be set\n")
        return

    if args.filter:
        sceneFilter = json.loads(args.filter)
    else:
        sceneFilter = {}

    total = getCount(sceneFilter)
    pages = math.ceil(total / BATCH_SIZE)

    i = 1
    while i <= pages:
        print("Processing page {} of {}".format(i, pages))
        scenes = getScenes(i, sceneFilter)

        for scene in scenes:
            # don't regenerate if the file already exists and we're not overwriting
            output = getOutputNFOFile(scene["path"], args)
            if not args.overwrite and os.path.exists(output):
                continue

            nfo = generateNFO(scene, args)
            writeFile(output, nfo, True)

        i += 1

def generateSTRMFiles(args):
    if not args.outdir:
        print("--outdir must be set\n")
        return

    if args.filter:
        sceneFilter = json.loads(args.filter)
    else:
        sceneFilter = {}

    total = getCount(sceneFilter)
    pages = math.ceil(total / BATCH_SIZE)

    i = 1
    while i <= pages:
        print("Processing page {} of {}".format(i, pages))
        scenes = getScenes(i, sceneFilter)

        for scene in scenes:
            name = ""
            outdir = getOutputDir(scene["path"], args)

            if args.use_source_filenames:
                name = basename(os.path.splitext(scene["path"])[0])
            else:
                name = scene["id"]

            name = os.path.join(outdir, name)

            # don't regenerate if the file already exists and we're not overwriting
            strmOut = name + ".strm"
            if args.overwrite or not os.path.exists(strmOut):
                data = generateSTRM(scene)
                writeFile(strmOut, data, False)

            output = name + ".nfo"
            if args.overwrite or not os.path.exists(output):
                nfo = generateNFO(scene, args)
                writeFile(output, nfo, True)

        i += 1

def basename(f):
    f = os.path.normpath(f)
    return os.path.basename(f)

def getOutputSTRMFile(sceneID, args):
    return os.path.join(args.outdir, "{}.strm".format(sceneID))

def getOutputDir(sourceFile, args):
    ret = args.outdir

    if args.preserve_path:
        if args.truncate_prefix is not None:
            toRemove = args.truncate_prefix
            if sourceFile.startswith(toRemove):
                sourceFile = sourceFile[len(toRemove):]

        sourceFile = os.path.normpath(sourceFile)
        ret = os.path.join(args.outdir, os.path.dirname(sourceFile))

    return ret

def getOutputNFOFile(sourceFile, args):
    if args.inline:
        # just replace the extension
        return os.path.splitext(sourceFile)[0] + ".nfo"

    outdir = getOutputDir(sourceFile, args)

    ret = os.path.join(outdir, basename(sourceFile))
    return os.path.splitext(ret)[0] + ".nfo"

def getCount(sceneFilter):
    query = """
query findScenes($filter: FindFilterType!, $scene_filter: SceneFilterType!) {
  findScenes(filter: $filter, scene_filter: $scene_filter) {
    count
  }
}
"""
    variables = {
        'filter': {
            'per_page': 0,
        },
        'scene_filter': sceneFilter
    }

    result = __callGraphQL(query, variables)

    return result["findScenes"]["count"]


def getScenes(page, sceneFilter):
    query = """
query findScenes($filter: FindFilterType!, $scene_filter: SceneFilterType!) {
  findScenes(filter: $filter, scene_filter: $scene_filter) {
    scenes {
      id
      title
      path
      rating
      details
      date
      oshash
      paths {
        screenshot
        stream
      }
      studio {
        name
        image_path
      }
      performers {
        name
        image_path
      }
      tags {
        name
      }
      movies {
        movie {
          name
        }
      }
    }
  }
}
"""

    variables = {
        'filter': {
            'per_page': BATCH_SIZE,
            'page': page,
        },
        'scene_filter': sceneFilter
    }

    result = __callGraphQL(query, variables)

    return result["findScenes"]["scenes"]

def addAPIKey(url):
    if config.api_key:
        return url + "&apikey=" + config.api_key
    return url

def getSceneTitle(scene):
    if scene["title"] is not None and scene["title"] != "":
        return scene["title"]

    return basename(scene["path"])

def generateSTRM(scene):
    return scene["paths"]["stream"]

def generateNFO(scene, args):
    ret = """
<?xml version="1.0" encoding="UTF-8" standalone="yes" ?>
<movie>
    <title>{title}</title>
    <userrating>{rating}</userrating>
    <plot>{details}</plot>
    <uniqueid type="stash">{id}</uniqueid>
    {tags}
    <premiered>{date}</premiered>
    <studio>{studio}</studio>
    {performers}
    {thumbs}
    {fanart}
    {genres}
</movie>
"""
    tags = ""
    for t in scene["tags"]:
        tags = tags + """
    <tag>{}</tag>""".format(t["name"])

    rating = ""
    if scene["rating"] is not None:
        rating = scene["rating"]

    date = ""
    if scene["date"] is not None:
        date = scene["date"]

    studio = ""
    logo = ""
    if scene["studio"] is not None:
        studio = scene["studio"]["name"]
        logo = scene["studio"]["image_path"]
        if not logo.endswith("?default=true"):
            logo = addAPIKey(logo)
        else:
            logo = ""

    performers = ""
    i = 0
    for p in scene["performers"]:
        thumb = addAPIKey(p["image_path"])
        performers = performers + """
    <actor>
        <name>{}</name>
        <role></role>
        <order>{}</order>
        <thumb>{}</thumb>
    </actor>""".format(p["name"], i, thumb)
        i += 1

    thumbs = [
        """<thumb aspect="poster">{}</thumb>""".format(addAPIKey(scene["paths"]["screenshot"]))
    ]
    fanart = [
        """<thumb>{}</thumb>""".format(addAPIKey(scene["paths"]["screenshot"]))
    ]
    if logo != "":
        thumbs.append("""<thumb aspect="clearlogo">{}</thumb>""".format(logo))
        fanart.append("""<thumb>{}</thumb>""".format(logo))

    fanart = """<fanart>{}</fanart>""".format("\n".join(fanart))

    genres = []
    if args.genre is not None:
        for g in args.genre:
            genres.append("<genre>{}</genre>".format(g))

    ret = ret.format(title=getSceneTitle(scene), rating=rating, id=scene["id"], tags=tags, date=date, studio=studio, performers=performers, details=scene["details"] or "", thumbs="\n".join(thumbs), fanart=fanart, genres="\n".join(genres))

    return ret

def writeFile(fn, data, useUTF):
    encoding = None
    if useUTF:
        encoding = "utf-8-sig"
    # with --inline and a relative path, dirname may be empty; only create it if present
    outDir = os.path.dirname(fn)
    if outDir:
        os.makedirs(outDir, exist_ok=True)
    with open(fn, "w", encoding=encoding) as f:
        f.write(data)

def __callGraphQL(query, variables=None):
    headers = {
        "Accept-Encoding": "gzip, deflate, br",
        "Content-Type": "application/json",
        "Accept": "application/json",
        "Connection": "keep-alive",
        "DNT": "1",
        "ApiKey": config.api_key
    }

    payload = {}
    payload['query'] = query
    if variables is not None:
        payload['variables'] = variables

    response = requests.post(config.server_url, json=payload, headers=headers)

    if response.status_code == 200:
        result = response.json()
        # per the GraphQL spec, errors are reported in the top-level "errors" list
        if result.get("errors"):
            for error in result["errors"]:
                raise Exception("GraphQL error: {}".format(error))
        if result.get("data"):
            return result.get("data")
    else:
        raise Exception("GraphQL query failed:{} - {}. Query: {}. Variables: {}".format(response.status_code, response.content, query, variables))

main()
@ -1,15 +0,0 @@
FROM python:3.11.5-alpine3.18

WORKDIR /usr/src/app

COPY requirements.txt ./
RUN pip install --no-cache-dir -r requirements.txt

COPY . .

#Create an empty config file so that the defaults are used. This file can be mounted
#if it needs to be modified.
RUN touch /config.toml

#Run python unbuffered (-u) so that log output appears immediately
CMD [ "python", "-u", "./watcher.py", "/config.toml" ]
@ -1,63 +0,0 @@
# Stash Watcher
Stash Watcher is a service that watches your Stash library directories for changes and triggers a Metadata Scan when new files are added to those directories. It then waits a cooldown period before triggering another scan, to keep Stash from constantly scanning while you're making many changes. Changes are still detected during that window; the scan is merely delayed.

## Configuration
Modify [config.toml](config.toml) for your environment; a minimal example follows the list below. The defaults match the Stash docker defaults, so they may work for you as-is, but you are likely to have to update `Paths` and possibly `ApiKey`. Check out [default.toml](default.toml) for all configurable options. You can configure:
* Url (host, domain, port)
* Api Key (if your Stash is password protected)
* Paths
* Timeout - the minimum time between Metadata Scans
* Scan options - the options for the Metadata Scan
* Enable Polling - see [SMB/CIFS Shares](#smbcifs-shares)
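
A minimal `config.toml` for a docker setup might look like this (the `stash` hostname and the api key value are placeholders for your own values):
```
[Host]
Scheme = http
Host = stash
Port = 9999
ApiKey = your-api-key

[Config]
Paths = /data
```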

## Running Stash Watcher
You can run Stash Watcher directly from the [command line](#running-directly-with-python) or from inside [docker](#running-with-docker).

### Running directly with python
The directions below are for Linux, but they should work on other operating systems.
#### Step 0: Create a Virtual Environment (optional, but recommended)
```
python -m venv venv
. venv/bin/activate
```
#### Step 1: Install dependencies
```
pip install -r requirements.txt
```
#### Step 2: Create/Modify Configuration
Following the directions in [Configuration](#configuration), modify [config.toml](config.toml) if necessary.

#### Step 3: Execute
```
python watcher.py path_to_config.toml
```
That's it. Now when you make changes to watched directories, Stash Watcher will make an API call to trigger a metadata scan.

### Running with docker
There is currently no published docker image, so you'll have to build it yourself. The easiest way to do this is with docker compose:
```
version: "3.4"
services:
  stash-watcher:
    container_name: stash-watcher
    build: <path_to_stash-watcher_directory>
    volumes:
      #This is only required if you have to modify config.toml (if the defaults are fine you don't have to map this file)
      - ./config.toml:/config.toml:ro
      #This is the path to your stash content. If you have multiple paths, map them here
      - /stash:/data:ro
    restart: unless-stopped
```

Then you can run
```
docker compose up -d --build
```
to start the watcher.

## Notes
### SMB/CIFS shares
The library that Stash Watcher uses ([watchdog](https://pypi.org/project/watchdog/)) has some limitations when dealing with SMB/CIFS shares. If you encounter problems, set [PollInterval in your config.toml](https://github.com/DuctTape42/CommunityScripts/blob/main/scripts/stash-watcher/defaults.toml#L28). Polling is much less efficient than the default mechanism, but is more likely to work.

In my testing (from Windows to a share on another machine), if the machine running Stash Watcher wrote to the share, then the normal watcher worked fine. However, if a different machine wrote to the share, then Stash Watcher did not see the write unless polling was used.
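
For example, to force polling every 30 seconds (the interval is illustrative), set this in your config.toml:
```
[Config]
PollInterval = 30
```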

@ -1,16 +0,0 @@
#This is the information about your stash instance
[Host]
#The scheme (either http or https)
Scheme = http
#The full hostname for your stash instance. If you're running in docker you might want the
#service name and not localhost here.
Host = localhost
#The port number for your stash instance
Port = 9999
#The api key, if your stash instance is password protected
ApiKey =

#Configuration for the listener itself
[Config]
#A comma separated list of paths to watch.
Paths = /data
@ -1,48 +0,0 @@
#This is the information about your stash instance
[Host]
#The scheme (either http or https)
Scheme = http
#The full hostname for your stash instance. If you're running in docker you might want the
#service name and not localhost here.
Host = localhost
#The port number for your stash instance
Port = 9999
#The api key, if your stash instance is password protected
ApiKey =

#Configuration for the listener itself
[Config]
#A comma separated list of paths to watch.
Paths = /data
#The minimum time to wait between triggering scans
Cooldown = 300
#A list of file extensions to watch. If this is omitted, it uses the extensions that are defined
#in your Stash library (for videos, images, and galleries)
Extensions =
#If this is set to a non-zero numeric value, this forces the use of polling to
#determine file system changes. If it is left blank, then the OS appropriate
#mechanism is used. This is much less efficient than the OS mechanism, so it
#should be used with care. The docs claim that this is required to watch SMB
#shares, though in my testing I could watch them on Windows with the regular
#WindowsApiObserver
PollInterval=
#This enables debug logging
Debug=

#Options for the Stash Scan. Stash defaults to everything disabled, so this is the default
#Generate options that match up with what we can do in Scan
[ScanOptions]
#"Generate scene covers" from the UI
Covers=true
#"Generate previews" from the UI
Previews=true
#"Generate animated image previews" from the UI
ImagePreviews=false
#"Generate scrubber sprites" from the UI
Sprites=false
#"Generate perceptual hashes" from the UI
Phashes=true
#"Generate thumbnails for images" from the UI
Thumbnails=true
#"Generate previews for image clips" from the UI
ClipPreviews=false
@ -1,3 +0,0 @@
argparse
stashapp-tools
watchdog
@ -1,240 +0,0 @@
#!/usr/bin/env python3
import argparse
import configparser
import time
import os
from threading import Lock, Condition
from watchdog.observers import Observer
from watchdog.observers.polling import PollingObserver
from watchdog.events import PatternMatchingEventHandler
from stashapi.stashapp import StashInterface
import logging
import sys
from enum import Enum

#the type of watcher being used; controls how to interpret the events
WatcherType = Enum('WatcherType', ['INOTIFY', 'WINDOWS', 'POLLING', 'KQUEUE'])

#Setup logger
logger = logging.getLogger("stash-watcher")
logger.setLevel(logging.INFO)
ch = logging.StreamHandler()
ch.setLevel(logging.INFO)
ch.setFormatter(logging.Formatter("%(asctime)s %(message)s"))
logger.addHandler(ch)

#This signals that a metadata scan should be triggered
shouldUpdate = False
mutex = Lock()
signal = Condition(mutex)

modifiedFiles = {}


currentWatcherType = None


def log(msg):
    logger.info(msg)

def debug(msg):
    logger.debug(msg)

def handleEvent(event):
    global shouldUpdate
    global currentWatcherType
    debug("========EVENT========")
    debug(str(event))
    #log(modifiedFiles)
    #Record if the file was modified. When a file is closed, see if it was modified. If so, trigger
    shouldTrigger = False

    if event.is_directory:
        return
    #Depending on the watcher type, we have to handle these events differently
    if currentWatcherType == WatcherType.WINDOWS:
        #On windows here's what happens:
        # File moved into a watched directory - Created Event
        # File moved out of a watched directory - Deleted Event
        # Moved within a watched directory (src and dst in watched directory) - Moved event

        # echo blah > foo.mp4 - Created then Modified
        # copying a small file - Created then Modified
        # copying a large file - Created then two (or more) Modified events (appears to be one when the file is created and another when it's finished)

        #It looks like you can get an optional Created Event and then
        #either one or two Modified events. You can also get Moved events

        #For local files on Windows, they can't be opened if they're currently
        #being written to. Therefore, every time we get an event, attempt to
        #open the file. If we're successful, assume the write is finished and
        #trigger the update. Otherwise wait until the next event and try again
        if event.event_type == "created" or event.event_type == "modified":
            try:
                with open(event.src_path) as file:
                    debug("Successfully opened file; triggering")
                    shouldTrigger = True
            except OSError:
                #the file is still being written to; wait for the next event
                pass

        if event.event_type == "moved":
            shouldTrigger = True
    elif currentWatcherType == WatcherType.POLLING:
        #Every interval you get 1 event per changed file
        # - If the file was not present in the previous poll, then Created
        # - If the file was present and has a new size, then Modified
        # - If the file was moved within the directory, then Moved
        # - If the file is gone, then Deleted
        #
        # For now, just trigger on the created event. In the future, create
        # a timer at 2x polling interval. Reschedule the timer on each event;
        # when it fires, trigger the update.
        if event.event_type == "moved" or event.event_type == "created":
            shouldTrigger = True
    #Until someone tests this on mac, just do what INOTIFY does
    elif currentWatcherType == WatcherType.INOTIFY or currentWatcherType == WatcherType.KQUEUE:
        if event.event_type == "modified":
            modifiedFiles[event.src_path] = 1
        #These are for files being copied into the target
        elif event.event_type == "closed":
            if event.src_path in modifiedFiles:
                del modifiedFiles[event.src_path]
                shouldTrigger = True
        #For download managers and the like that write to a temporary file and then move to the destination (real)
        #path. Note that this actually triggers if the destination is in the watched location, and not just if it's
        #moved out of a watched directory
        elif event.event_type == "moved":
            shouldTrigger = True
    else:
        print("Unknown watcher type " + str(currentWatcherType))
        sys.exit(1)

    #Trigger the update
    if shouldTrigger:
        debug("Triggering updates")
        with mutex:
            shouldUpdate = True
            signal.notify()


def main(stash, scanFlags, paths, extensions, timeout, pollInterval):
    global shouldUpdate
    global currentWatcherType

    if len(extensions) == 1 and extensions[0] == "*":
        patterns = ["*"]
    else:
        patterns = ["*." + x for x in extensions]
    eventHandler = PatternMatchingEventHandler(patterns, None, False, True)
    eventHandler.on_any_event = handleEvent
    observer = Observer()
    observerName = type(observer).__name__
    if pollInterval is not None and pollInterval > 0:
        currentWatcherType = WatcherType.POLLING
        #pass the configured interval to the polling observer
        observer = PollingObserver(timeout=pollInterval)
    elif observerName == "WindowsApiObserver":
        currentWatcherType = WatcherType.WINDOWS
    elif observerName == "KqueueObserver":
        currentWatcherType = WatcherType.KQUEUE
    elif observerName == "InotifyObserver":
        currentWatcherType = WatcherType.INOTIFY
    else:
        print("Unknown watcher type " + str(observer))
        sys.exit(1)

    debug(str(observer))
    for path in paths:
        observer.schedule(eventHandler, path, recursive=True)
    observer.start()
    try:
        while True:
            with mutex:
                while not shouldUpdate:
                    signal.wait()
                shouldUpdate = False
            log("Triggering stash scan")
            stash.metadata_scan(flags=scanFlags)
            log("Sleeping for " + str(timeout) + " seconds")
            time.sleep(timeout)
    except KeyboardInterrupt:
        observer.stop()
        observer.join()

def listConverter(item):
    debug("listConverter(" + str(item) + ")")
    if not item:
        return None
    listItems = [i.strip() for i in item.split(',')]
    if not listItems or (len(listItems) == 1 and not listItems[0]):
        return None
    return listItems

def makeArgParser():
    parser = argparse.ArgumentParser(description='Stash file watcher')
    parser.add_argument('config_path', nargs=1, help='Config file path (toml)')
    return parser

def parseConfig(path):
    config = configparser.ConfigParser(converters={'list': listConverter})

    #Load the defaults first
    defaults_path = os.path.join(os.path.dirname(__file__), 'defaults.toml')
    config.read(defaults_path)

    #Now read the user config
    config.read(path)

    return config

if __name__ == '__main__':
    #Parse the arguments
    parser = makeArgParser()
    args = parser.parse_args()
    configPath = args.config_path[0]
    config = parseConfig(configPath)

    #Set up Stash
    stashArgs = {
        "scheme": config["Host"]["Scheme"],
        "host": config["Host"]["Host"],
        "port": config["Host"]["Port"]
    }

    if config["Host"]["ApiKey"]:
        stashArgs["ApiKey"] = config["Host"]["ApiKey"]

    stash = StashInterface(stashArgs)

    #And now the flags for the scan
    scanFlags = {
        "scanGenerateCovers": config["ScanOptions"].getboolean("Covers"),
        "scanGeneratePreviews": config["ScanOptions"].getboolean("Previews"),
        "scanGenerateImagePreviews": config["ScanOptions"].getboolean("ImagePreviews"),
        "scanGenerateSprites": config["ScanOptions"].getboolean("Sprites"),
        "scanGeneratePhashes": config["ScanOptions"].getboolean("Phashes"),
        "scanGenerateThumbnails": config["ScanOptions"].getboolean("Thumbnails"),
        "scanGenerateClipPreviews": config["ScanOptions"].getboolean("ClipPreviews")
    }

    paths = config.getlist("Config", "Paths")
    timeout = config["Config"].getint("Cooldown")

    #If the extensions are in the config, use them. Otherwise pull them from stash.
    extensions = config.getlist('Config', 'Extensions')
    if not extensions:
        stashConfig = stash.get_configuration()
        extensions = stashConfig['general']['videoExtensions'] + stashConfig['general']['imageExtensions'] + stashConfig['general']['galleryExtensions']

    pollIntervalStr = config.get('Config', 'PollInterval')
    if pollIntervalStr:
        pollInterval = int(pollIntervalStr)
    else:
        pollInterval = None

    if config.get('Config', 'Debug') == "true":
        logger.setLevel(logging.DEBUG)
        ch.setLevel(logging.DEBUG)


    main(stash, scanFlags, paths, extensions, timeout, pollInterval)
BIN
stable/CropperJS.zip
Normal file
Binary file not shown.
BIN
stable/StashUserscriptLibrary.zip
Normal file
Binary file not shown.
BIN
stable/TPDBMarkers.zip
Normal file
Binary file not shown.
BIN
stable/Theme-BlackHole.zip
Normal file
Binary file not shown.
BIN
stable/Theme-ModernDark.zip
Normal file
Binary file not shown.
BIN
stable/Theme-NeonDark.zip
Normal file
Binary file not shown.
BIN
stable/Theme-Night.zip
Normal file
Binary file not shown.
BIN
stable/Theme-Plex.zip
Normal file
Binary file not shown.
BIN
stable/Theme-PornHub.zip
Normal file
Binary file not shown.
BIN
stable/Theme-Pulsar.zip
Normal file
Binary file not shown.
BIN
stable/Theme-PulsarLight.zip
Normal file
Binary file not shown.
BIN
stable/Theme-RoundedYellow.zip
Normal file
Binary file not shown.
BIN
stable/VideoScrollWheel.zip
Normal file
Binary file not shown.
BIN
stable/comicInfoExtractor.zip
Normal file
Binary file not shown.
BIN
stable/date_parser.zip
Normal file
Binary file not shown.
BIN
stable/defaultDataForPath.zip
Normal file
Binary file not shown.
BIN
stable/dupeMarker.zip
Normal file
Binary file not shown.