Updates to timestamp.trade and stashdb-performer-gallery (#282)

Co-authored-by: Tweeticoats <Tweeticoats@github.com>

Authored by Tweeticoats on 2024-04-25 00:01:47 +09:30, committed by GitHub
parent eca29cac9c
commit 468e9df869
5 changed files with 562 additions and 110 deletions

View File: .editorconfig

@@ -9,4 +9,4 @@ insert_final_newline = true
trim_trailing_whitespace = true
[*.md]
-trim_trailing_whitespace = false
\ No newline at end of file
+trim_trailing_whitespace = false

View File: stashdb-performer-gallery plugin script (Python)

@@ -1,5 +1,5 @@
import stashapi.log as log
-from stashapi.stashapp import StashInterface,StashItem
+from stashapi.stashapp import StashInterface, StashItem
from stashapi.stashbox import StashBoxInterface
import os
import sys
@@ -15,10 +15,12 @@ import base64
per_page = 100
request_s = requests.Session()
stash_boxes = {}
-scrapers={}
+scrapers = {}
def processImages(img):
log.debug("image: %s" % (img,))
image_data = None
for file in [x["path"] for x in img["visual_files"]]:
if settings["path"] in file:
index_file = Path(Path(file).parent) / (Path(file).stem + ".json")
@@ -27,21 +29,25 @@ def processImages(img):
log.debug("loading index file %s" % (index_file,))
with open(index_file) as f:
index = json.load(f)
index["id"] = img["id"]
stash.update_image(index)
if image_data:
image_data["gallery_ids"].extend(index["gallery_ids"])
else:
image_data = index
if image_data:
# log.debug(image_data)
stash.update_image(image_data)
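For context: processImages() expects a JSON sidecar file next to each downloaded image, written by the scraper code later in this file. A minimal sketch of its shape (field names taken from the writer code below; values are illustrative only):

    # Hypothetical contents of /download_dir/<performer_id>/<scraper>-1.json.
    index = {
        "title": "example-scraper - 1",
        "details": "name: ...",                     # free-text details from the scrape
        "urls": ["https://example.com/performer"],  # source page for the image
        "performer_ids": ["123"],                   # stash performer to attach to
        "tag_ids": ["45"],                          # [Stashbox Performer Gallery] tag
        "gallery_ids": ["67"],                      # gallery the image belongs in
    }

processImages() merges the gallery_ids of every matching sidecar, sets "id" to the stash image id, and pushes the result through stash.update_image().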
def processPerformers():
-query={
-"tags": {
-"depth": 0,
-"excludes": [],
-"modifier": "INCLUDES_ALL",
-"value": [tag_stashbox_performer_gallery],
-}
+query = {
+"tags": {
+"depth": 0,
+"excludes": [],
+"modifier": "INCLUDES_ALL",
+"value": [tag_stashbox_performer_gallery],
+}
}
performers = stash.find_performers(f=query)
for performer in performers:
@@ -59,8 +65,6 @@ def processPerformer(performer):
def get_stashbox(endpoint):
# if endpoint in stash_boxes:
# return stash_boxes[endpoint]
for sbx_config in stash.get_configuration()["general"]["stashBoxes"]:
if sbx_config["endpoint"] == endpoint:
stashbox = StashBoxInterface(
@@ -186,11 +190,11 @@ def processPerformerStashid(endpoint, stashid, p):
log.debug("image already downloaded")
# scrape urls on the performer using the url scrapers in stash
-if settings['runPerformerScraper'] and len(perf['urls'])>0:
+if settings["runPerformerScraper"] and len(perf["urls"]) > 0:
# determine which scrapers are installed and which URL patterns each accepts; this lookup only needs to happen once
if len(scrapers) == 0:
-scrapers_graphql="""query ListPerformerScrapers {
+scrapers_graphql = """query ListPerformerScrapers {
listScrapers(types: [PERFORMER]) {
id
name
@@ -201,64 +205,132 @@ def processPerformerStashid(endpoint, stashid, p):
}
}"""
res = stash.callGQL(scrapers_graphql)
-for r in res['listScrapers']:
-if r['performer']['urls']:
-for url in r['performer']['urls']:
-scrapers[url]=r
+for r in res["listScrapers"]:
+if r["performer"]["urls"]:
+for url in r["performer"]["urls"]:
+scrapers[url] = r
-for u in perf['urls']:
+for u in perf["urls"]:
for url in scrapers.keys():
-if url in u['url']:
-log.info('Running stash scraper on performer url: %s' % (u['url'],))
-res=stash.scrape_performer_url(u['url'])
+if url in u["url"]:
+log.info(
+"Running stash scraper on performer url: %s" % (u["url"],)
+)
+res = stash.scrape_performer_url(u["url"])
# Check if the scraper returned a result
if res is not None:
log.debug(res)
# a scraper may return multiple images, so increment a counter for each one
image_id = 1
-if res['images']:
-for image in res['images']:
-image_index = Path(settings["path"]) / p["id"] / ("%s-%s.json" % (scrapers[url]['id'],image_id ,))
+if res["images"]:
+for image in res["images"]:
+image_index = (
+Path(settings["path"])
+/ p["id"]
+/ (
+"%s-%s.json"
+% (
+scrapers[url]["id"],
+image_id,
+)
+)
+)
if not image_index.exists():
with open(image_index, "w") as f:
image_data = {
"title": '%s - %s ' % (scrapers[url]['id'],image_id,),
"details": "name: %s\ngender: %s\nurl: %s\ntwitter: %s\ninstagram: %s\nbirthdate: %s\nethnicity: %s\ncountry: %s\neye_color: %s\nheight: %s\nmeasurements: %s\nfake tits: %s\npenis_length: %s\n career length: %s\ntattoos: %s\npiercings: %s\nhair_color: %s\nweight: %s\n description: %s\n" % (res['name'], res['gender'], res['url'], res['twitter'], res['instagram'], res['birthdate'], res['ethnicity'], res['country'], res['eye_color'], res['height'], res['measurements'], res['fake_tits'], res['penis_length'], res['career_length'], res['tattoos'], res['piercings'], res['hair_color'], res['weight'], res['details'],),
"urls": [u['url'],],
"title": "%s - %s "
% (
scrapers[url]["id"],
image_id,
),
"details": "name: %s\ngender: %s\nurl: %s\ntwitter: %s\ninstagram: %s\nbirthdate: %s\nethnicity: %s\ncountry: %s\neye_color: %s\nheight: %s\nmeasurements: %s\nfake tits: %s\npenis_length: %s\n career length: %s\ntattoos: %s\npiercings: %s\nhair_color: %s\nweight: %s\n description: %s\n"
% (
res["name"],
res["gender"],
res["url"],
res["twitter"],
res["instagram"],
res["birthdate"],
res["ethnicity"],
res["country"],
res["eye_color"],
res["height"],
res["measurements"],
res["fake_tits"],
res["penis_length"],
res["career_length"],
res["tattoos"],
res["piercings"],
res["hair_color"],
res["weight"],
res["details"],
),
"urls": [
u["url"],
],
"performer_ids": [p["id"]],
"tag_ids": [tag_stashbox_performer_gallery],
"gallery_ids": [index["galleries"][endpoint]],
"tag_ids": [
tag_stashbox_performer_gallery
],
"gallery_ids": [
index["galleries"][endpoint]
],
}
json.dump(image_data, f)
filename = Path(settings["path"]) / p["id"] / ("%s-%s.jpg" % (scrapers[url]['id'],image_id ,))
filename = (
Path(settings["path"])
/ p["id"]
/ (
"%s-%s.jpg"
% (
scrapers[url]["id"],
image_id,
)
)
)
if not filename.exists():
-if image.startswith('data:'):
+if image.startswith("data:"):
with open(filename, "wb") as f:
-f.write(base64.b64decode(image.split('base64,')[1]))
+f.write(
+base64.b64decode(
+image.split("base64,")[1]
+)
+)
f.close()
else:
with open(image_index, "w") as f:
image_data = {
"title": '%s - %s ' % (scrapers[url]['id'],image_id,),
"details": "%s"% (res,),
"urls": [u['url'],image],
"title": "%s - %s "
% (
scrapers[url]["id"],
image_id,
),
"details": "%s" % (res,),
"urls": [u["url"], image],
"performer_ids": [p["id"]],
"tag_ids": [tag_stashbox_performer_gallery],
"gallery_ids": [index["galleries"][endpoint]],
"tag_ids": [
tag_stashbox_performer_gallery
],
"gallery_ids": [
index["galleries"][endpoint]
],
}
json.dump(image_data, f)
filename = Path(settings["path"]) / p["id"] / ("%s.jpg" % (image_id,))
filename = (
Path(settings["path"])
/ p["id"]
/ ("%s.jpg" % (image_id,))
)
r = requests.get(img["url"])
-if r.status_code==200:
+if r.status_code == 200:
with open(filename, "wb") as f:
f.write(r.content)
f.close()
-image_id=image_id+1
+image_id = image_id + 1
-# log.debug('%s %s' % (url['url'],url['type'],))
# stash.scraper
# scrape=stash.scrape_performer_url(ur)
else:
log.error("endpoint %s not configured, skipping" % (endpoint,))
@@ -305,23 +377,35 @@ def processQueue():
]["queue"].removeprefix(settings["queue"])
},
)
stash.run_plugin_task("stashdb-performer-gallery", "Process Performers")
stash.run_plugin_task(
"stashdb-performer-gallery", "Process Performers", args={"full": False}
)
-def relink_images():
-query={
-"path": {"modifier": "INCLUDES", "value": settings["path"]},
-"is_missing": "galleries"
+def relink_images(performer_id=None):
+query = {
+"path": {"modifier": "INCLUDES", "value": settings["path"]},
+}
+if performer_id == None:
+query["is_missing"] = "galleries"
+query["path"] = {"modifier": "INCLUDES", "value": settings["path"]}
+else:
+query["path"] = {
+"modifier": "INCLUDES",
+"value": str(Path(settings["path"]) / performer_id / ""),
+}
-total = stash.find_images(f=query,get_count=True)[0]
+# else:
+#     query["file_count"] = {"modifier": "NOT_EQUALS", "value": 1}
+total = stash.find_images(f=query, get_count=True)[0]
i = 0
-images=[]
+images = []
while i < total:
-images = stash.find_images(f=query,filter={"page": 0, "per_page": per_page})
+images = stash.find_images(f=query, filter={"page": 0, "per_page": per_page})
for img in images:
-log.debug('image: %s' %(img,))
+log.debug("image: %s" % (img,))
processImages(img)
-i=i+1
+i = i + 1
log.progress((i / total))
@@ -333,7 +417,7 @@ stash = StashInterface(FRAGMENT_SERVER)
config = stash.get_configuration()["plugins"]
settings = {
"path": "/download_dir",
"runPerformerScraper":False,
"runPerformerScraper": False,
}
if "stashdb-performer-gallery" in config:
settings.update(config["stashdb-performer-gallery"])
@@ -354,10 +438,12 @@ if "mode" in json_input["args"]:
p = stash.find_performer(json_input["args"]["performer"])
if tag_stashbox_performer_gallery in [x["id"] for x in p["tags"]]:
processPerformer(p)
stash.metadata_scan(paths=[settings["path"]])
stash.run_plugin_task(
"stashdb-performer-gallery", "relink missing images", args={}
)
stash.metadata_scan(paths=[settings["path"]])
stash.run_plugin_task(
"stashdb-performer-gallery",
"relink missing images",
args={"performer_id": p["id"]},
)
elif "processPerformers" in PLUGIN_ARGS:
processPerformers()
stash.metadata_scan([settings["path"]])
@@ -365,7 +451,10 @@ if "mode" in json_input["args"]:
"stashdb-performer-gallery", "relink missing images", args={}
)
elif "processImages" in PLUGIN_ARGS:
-relink_images()
+if "performer_id" in json_input["args"]:
+relink_images(performer_id=json_input["args"]["performer_id"])
+else:
+relink_images()
elif "hookContext" in json_input["args"]:

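Both plugins in this commit share the same entry-point pattern: stash invokes the script with a JSON payload on stdin carrying the server connection fragment and the task or hook arguments, and the script dispatches on those args. A rough sketch of that boilerplate (key names follow the standard stash plugin interface; treat this as an approximation):

    import json, sys
    from stashapi.stashapp import StashInterface

    json_input = json.loads(sys.stdin.read())
    FRAGMENT_SERVER = json_input["server_connection"]
    stash = StashInterface(FRAGMENT_SERVER)

    if "mode" in json_input["args"]:
        # tasks: each task in the plugin .yml sets defaultArgs -> mode
        mode = json_input["args"]["mode"]
    elif "hookContext" in json_input["args"]:
        # hooks: {"id": ..., "type": "Scene.Update.Post", ...}
        ctx = json_input["args"]["hookContext"]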
View File: stashdb-performer-gallery plugin manifest (YAML)

@@ -1,6 +1,6 @@
name: stashdb performer gallery
description: Automatically download performer images from stashdb or other stash-boxes. Add the [Stashbox Performer Gallery] tag to a performer to create a gallery of images from that stash-box database. Apply the [Set Profile Image] tag to an image to set it as that performer's profile image. Note: you will need to configure the download path and add it as a path under Settings > Library.
-version: 0.1
+version: 0.2
url: https://github.com/stashapp/CommunityScripts/
exec:
- python

View File: Timestamp Trade plugin script (Python)

@@ -11,13 +11,264 @@ import math
per_page = 100
request_s = requests.Session()
scrapers = {}
def processScene(s):
if "https://timestamp.trade/scene/" in [u[:30] for u in s["urls"]]:
for url in s["urls"]:
log.debug(url)
if url.startswith("https://timestamp.trade/scene/"):
json_url = "https://timestamp.trade/json-scene/%s" % (url[30:],)
res = request_s.get(json_url)
if res.status_code == 200:
data = res.json()
if len(data) == 0:
log.debug("no scene metadata")
return
log.debug(data)
log.debug(s["scene_markers"])
log.debug(len(s["scene_markers"]) > 0)
if (
settings["createMarkers"]
and (len(s["scene_markers"]) == 0)
or settings["overwriteMarkers"]
):
log.debug("creating markers")
markers = []
for m in data["markers"]:
marker = {
"seconds": m["start_time"] / 1000,
"primary_tag": None,
"tags": [],
"title": m["name"],
}
if m["tag_name"]:
marker["primary_tag"] = m["tag_name"]
else:
marker["primary_tag"] = m["name"]
markers.append(marker)
# log.debug(marker)
if len(markers) > 0:
if settings["overwriteMarkers"]:
stash.destroy_scene_markers(s["id"])
mp.import_scene_markers(stash, markers, s["id"], 15)
new_scene = {
"id": s["id"],
}
needs_update = False
if settings["createGalleryFromScene"]:
for g in data["galleries"]:
for f in g["files"]:
res = stash.find_galleries(
f={
"checksum": {
"value": f["md5"],
"modifier": "EQUALS",
},
"tags": {
"depth": 0,
"excludes": [skip_sync_tag_id],
"modifier": "INCLUDES_ALL",
"value": [],
},
}
)
for gal in res:
# log.debug('Gallery=%s' %(gal,))
needs_update = False
gallery = {
"id": gal["id"],
"title": gal["title"],
"urls": gal["urls"],
"date": gal["date"],
"rating100": gal["rating100"],
"performer_ids": [
x["id"] for x in gal["performers"]
],
"tag_ids": [x["id"] for x in gal["tags"]],
"scene_ids": [x["id"] for x in gal["scenes"]],
"details": gal["details"],
}
if "studio" in gal:
log.debug(s["studio"])
if gal["studio"]:
gallery["studio_id"] = gal["studio"]["id"]
elif s["studio"]["id"]:
gallery["studio_id"] = s["studio"]["id"]
if len(gal["urls"]) == 0:
log.debug(
"no urls on gallery, needs new metadata"
)
gallery["urls"].extend(
[x["url"] for x in g["urls"]]
)
needs_update = True
if s["id"] not in gallery["scene_ids"]:
log.debug(
"attaching scene %s to gallery %s "
% (
s["id"],
gallery["id"],
)
)
gallery["scene_ids"].append(s["id"])
needs_update = True
if needs_update:
log.info("updating gallery: %s" % (gal["id"],))
stash.update_gallery(gallery_data=gallery)
if settings["extraUrls"]:
if "urls" in data and data["urls"]:
extra_urls = s["urls"]
for u in data["urls"]:
if u not in extra_urls:
extra_urls.append(u)
needs_update = True
if needs_update:
new_scene["urls"] = extra_urls
if settings["createMovieFromScene"]:
if "movies" in data:
movies_to_add = []
for m in data["movies"]:
log.debug("movie: %s" % (m,))
log.debug("scene: %s" % (s,))
movies = []
scene_index = None
for sc in m["scenes"]:
if sc["scene_id"] == data["scene_id"]:
scene_index = sc["scene_index"]
for u in m["urls"]:
sm = stash.find_movies(
f={
"url": {
"modifier": "EQUALS",
"value": u["url"],
}
}
)
log.debug("sm: %s" % (sm,))
movies.extend(sm)
if len(movies) == 0:
# determine which scrapers are installed and which URL patterns each accepts; this lookup only needs to happen once
if len(scrapers) == 0:
scrapers_graphql = """query ListPerformerScrapers {
listScrapers(types: [MOVIE]) {
id
name
movie {
urls
supported_scrapes
}
}
}"""
res = stash.callGQL(scrapers_graphql)
for r in res["listScrapers"]:
if r["movie"]["urls"]:
for url in r["movie"]["urls"]:
scrapers[url] = r
created = False
for u in m["urls"]:
# is there a scraper that can scrape this url
for su in scrapers.keys():
if su in u["url"]:
movie_scrape = stash.scrape_movie_url(
u["url"]
)
if movie_scrape and not created:
log.debug(
"move scrape: %s"
% (movie_scrape,)
)
new_movie = {
"name": movie_scrape["name"],
"aliases": movie_scrape[
"aliases"
],
"date": movie_scrape["date"],
"rating100": movie_scrape[
"rating"
],
"director": movie_scrape[
"director"
],
"synopsis": movie_scrape[
"synopsis"
],
"url": movie_scrape["url"],
"front_image": movie_scrape[
"front_image"
],
"back_image": movie_scrape[
"back_image"
],
}
if not new_movie["name"]:
new_movie["name"] = m["title"]
if new_movie["date"] == "1-01-01":
new_movie["date"] = None
if not movie_scrape["url"]:
new_movie["url"] = u["url"]
if movie_scrape["studio"]:
new_movie["studio_id"] = (
movie_scrape["studio"][
"stored_id"
]
)
log.debug(
"new movie: %s" % (new_movie,)
)
nm = stash.create_movie(new_movie)
if nm:
movies.append(nm)
created = True
# nothing above created a movie (no matching scraper, or a bad scrape), so create it manually
if not created:
new_movie = {
"name": m["title"],
"synopsis": m["description"],
"date": m["release_date"],
}
if len(m["urls"]) > 0:
new_movie["url"] = m["urls"][0]["url"]
log.debug("new movie: %s" % (new_movie,))
nm = stash.create_movie(new_movie)
if nm:
movies.append(nm)
movies_to_add.extend(
[
{
"movie_id": x["id"],
"scene_index": scene_index,
}
for x in movies
]
)
if len(movies_to_add) > 0:
new_scene["movies"] = []
for m in movies_to_add:
if m["movie_id"] not in [
x["movie"]["id"] for x in s["movies"]
]:
new_scene["movies"].append(m)
needs_update = True
if needs_update:
log.debug("updating scene: %s" % (new_scene,))
stash.update_scene(new_scene)
else:
processSceneStashid(s)
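The new processScene() path talks to timestamp.trade's JSON API directly: scenes that already carry a timestamp.trade URL are refreshed from the json-scene endpoint, and everything else falls through to the stash-id lookup in processSceneStashid() below. A minimal sketch of the fetch, assuming the URL layout used above:

    import requests

    def fetch_timestamp_trade_scene(scene_url, session=requests.Session()):
        # "https://timestamp.trade/scene/<id>" -> "https://timestamp.trade/json-scene/<id>"
        scene_id = scene_url[len("https://timestamp.trade/scene/"):]
        res = session.get("https://timestamp.trade/json-scene/%s" % (scene_id,))
        if res.status_code != 200:
            return None
        data = res.json()
        return data or None  # an empty payload means no metadata on file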
def processSceneStashid(s):
if len(s["stash_ids"]) == 0:
log.debug("no scenes to process")
return
skip_sync_tag_id = stash.find_tag("[Timestamp: Skip Sync]", create=True).get("id")
for sid in s["stash_ids"]:
try:
if any(tag["id"] == str(skip_sync_tag_id) for tag in s["tags"]):
@@ -25,9 +276,15 @@ def processScene(s):
return
log.debug("looking up markers for stash id: " + sid["stash_id"])
res = request_s.get(
"https://timestamp.trade/get-markers/" + sid["stash_id"], json=s
"https://timestamp.trade/get-markers/" + sid["stash_id"]
)
if res.status_code != 200:
log.debug("bad result from api, skipping")
return
md = res.json()
if not md:
log.debug("bad result from api, skipping")
return
if md.get("marker"):
log.info(
"api returned markers for scene: "
@@ -88,7 +345,10 @@ def processScene(s):
"details": gal["details"],
}
if "studio" in gal:
gallery["studio_id"] = gal["studio"]["id"]
if gal["studio"]:
gallery["studio_id"] = gal["studio"]["id"]
elif s["studio"]:
gallery["studio_id"] = s["studio"]["id"]
if len(gal["urls"]) == 0:
log.debug("no urls on gallery, needs new metadata")
gallery["urls"].extend(
@@ -176,6 +436,17 @@ def processScene(s):
needs_update = True
if needs_update:
new_scene["urls"] = extra_urls
if settings[
"addTimestampTradeUrl"
] and "https://timestamp.trade/scene/" not in [u[:30] for u in s["urls"]]:
if "urls" not in new_scene:
new_scene["urls"] = s["urls"]
log.debug(md)
if "scene_id" in md:
new_scene["urls"].append(
"https://timestamp.trade/scene/%s" % (md["scene_id"],)
)
needs_update = True
if needs_update:
log.debug("new scene update: %s" % (new_scene,))
stash.update_scene(new_scene)
@@ -184,30 +455,17 @@ def processScene(s):
log.error("api returned invalid JSON for stash id: " + sid["stash_id"])
-def processAll():
+def processAll(query):
+log.debug(query)
log.info("Getting scene count")
-skip_sync_tag_id = stash.find_tag("[Timestamp: Skip Sync]", create=True).get("id")
count = stash.find_scenes(
-f={
-"stash_id_endpoint": {
-"endpoint": "",
-"modifier": "NOT_NULL",
-"stash_id": "",
-},
-"has_markers": "false",
-"tags": {
-"depth": 0,
-"excludes": [skip_sync_tag_id],
-"modifier": "INCLUDES_ALL",
-"value": [],
-},
-},
+f=query,
filter={"per_page": 1},
get_count=True,
)[0]
log.info(str(count) + " scenes to submit.")
log.info(str(count) + " scenes to process.")
i = 0
-for r in range(1, int(count / per_page) + 1):
+for r in range(1, int(count / per_page) + 2):
log.info(
"fetching data: %s - %s %0.1f%%"
% (
@@ -217,14 +475,7 @@ def processAll():
)
)
scenes = stash.find_scenes(
-f={
-"stash_id_endpoint": {
-"endpoint": "",
-"modifier": "NOT_NULL",
-"stash_id": "",
-},
-"has_markers": "false",
-},
+f=query,
filter={"page": r, "per_page": per_page},
)
for s in scenes:
@@ -237,7 +488,7 @@ def processAll():
def submitScene(query):
scene_fgmt = """title
details
-url
+urls
date
performers{
name
@@ -269,7 +520,7 @@ def submitScene(query):
}
galleries{
title
-url
+urls
date
details
tags{
@@ -313,12 +564,11 @@ def submitScene(query):
stash_ids{
endpoint
stash_id
-}
}
}
}
}
"""
count = stash.find_scenes(f=query, filter={"per_page": 1}, get_count=True)[0]
i = 0
for r in range(1, math.ceil(count / per_page) + 1):
@@ -338,7 +588,7 @@ def submitScene(query):
request_s.post("https://timestamp.trade/submit-stash", json=s)
i = i + 1
log.progress((i / count))
-time.sleep(2)
+time.sleep(0.5)
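submitScene() pages through every scene matching the query and POSTs the selected fragment to timestamp.trade; this commit also shortens the per-scene delay from 2s to 0.5s. The loop reduces to roughly this (a sketch; scene_fgmt is the GraphQL fragment defined above):

    import math, time

    count = stash.find_scenes(f=query, filter={"per_page": 1}, get_count=True)[0]
    for page in range(1, math.ceil(count / per_page) + 1):
        for s in stash.find_scenes(
            f=query, fragment=scene_fgmt, filter={"page": page, "per_page": per_page}
        ):
            request_s.post("https://timestamp.trade/submit-stash", json=s)
            time.sleep(0.5)  # rate-limit submissions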
def submitGallery():
@@ -397,7 +647,7 @@ def submitGallery():
stash_ids{
endpoint
stash_id
}
}
}"""
skip_submit_tag_id = stash.find_tag("[Timestamp: Skip Submit]", create=True).get(
@@ -656,8 +906,11 @@ settings = {
"createGalleryFromScene": False,
"createMovieFromScene": False,
"extraUrls": False,
"addTimestampTradeUrl": False,
"disableSceneMarkersHook": False,
"disableGalleryLookupHook": False,
"createMarkers": True,
"overwriteMarkers": False,
}
if "timestampTrade" in config:
settings.update(config["timestampTrade"])
@@ -666,7 +919,7 @@ log.debug("settings: %s " % (settings,))
if "mode" in json_input["args"]:
PLUGIN_ARGS = json_input["args"]["mode"]
if "submitScene" in PLUGIN_ARGS:
if "submitScene" == PLUGIN_ARGS:
skip_submit_tag_id = stash.find_tag(
"[Timestamp: Skip Submit]", create=True
).get("id")
@@ -680,7 +933,7 @@ if "mode" in json_input["args"]:
},
}
submitScene(query)
elif "submitMovieScene" in PLUGIN_ARGS:
elif "submitMovieScene" == PLUGIN_ARGS:
skip_submit_tag_id = stash.find_tag(
"[Timestamp: Skip Submit]", create=True
).get("id")
@@ -694,19 +947,111 @@ if "mode" in json_input["args"]:
},
}
submitScene(query)
elif "submitGallery" in PLUGIN_ARGS:
elif "submitSLRScene" == PLUGIN_ARGS:
skip_submit_tag_id = stash.find_tag(
"[Timestamp: Skip Submit]", create=True
).get("id")
query = {
"tags": {
"depth": 0,
"excludes": [skip_submit_tag_id],
"modifier": "INCLUDES_ALL",
"value": [],
},
"url": {"modifier": "INCLUDES", "value": "sexlikereal.com"},
}
submitScene(query)
elif "submitEroscriptScene" == PLUGIN_ARGS:
skip_submit_tag_id = stash.find_tag(
"[Timestamp: Skip Submit]", create=True
).get("id")
query = {
"tags": {
"depth": 0,
"excludes": [skip_submit_tag_id],
"modifier": "INCLUDES_ALL",
"value": [],
},
"url": {"modifier": "INCLUDES", "value": "eroscripts.com"},
}
submitScene(query)
elif "submitGallery" == PLUGIN_ARGS:
submitGallery()
elif "processGallery" in PLUGIN_ARGS:
processGalleries()
elif "processScene" in PLUGIN_ARGS:
processAll()
elif "processGallery" == PLUGIN_ARGS:
if "gallery_id" in json_input["args"]:
gallery = stash.find_gallery(json_input["args"]["gallery_id"])
processGallery(gallery)
else:
processGalleries()
elif "processScene" == PLUGIN_ARGS:
skip_sync_tag_id = stash.find_tag("[Timestamp: Skip Sync]", create=True).get(
"id"
)
if "scene_id" in json_input["args"]:
scene = stash.find_scene(json_input["args"]["scene_id"])
processScene(scene)
else:
query = {
"stash_id_endpoint": {
"endpoint": "",
"modifier": "NOT_NULL",
"stash_id": "",
},
"has_markers": "false",
"tags": {
"depth": 0,
"excludes": [skip_sync_tag_id],
"modifier": "INCLUDES_ALL",
"value": [],
},
}
processAll(query)
elif "reprocessScene" == PLUGIN_ARGS:
skip_sync_tag_id = stash.find_tag("[Timestamp: Skip Sync]", create=True).get(
"id"
)
query = {
"url": {"modifier": "INCLUDES", "value": "https://timestamp.trade/scene/"},
"tags": {
"depth": 0,
"excludes": [skip_sync_tag_id],
"modifier": "INCLUDES_ALL",
"value": [],
},
}
processAll(query)
elif "processAll" == PLUGIN_ARGS:
skip_sync_tag_id = stash.find_tag("[Timestamp: Skip Sync]", create=True).get(
"id"
)
query = {
"stash_id_endpoint": {
"endpoint": "",
"modifier": "NOT_NULL",
"stash_id": "",
},
"tags": {
"depth": 0,
"excludes": [skip_sync_tag_id],
"modifier": "INCLUDES_ALL",
"value": [],
},
}
processAll(query)
elif "hookContext" in json_input["args"]:
_id = json_input["args"]["hookContext"]["id"]
_type = json_input["args"]["hookContext"]["type"]
if _type == "Scene.Update.Post" and not settings["disableSceneMarkersHook"]:
-scene = stash.find_scene(_id)
-processScene(scene)
+# scene = stash.find_scene(_id)
+# processScene(scene)
+stash.run_plugin_task("timestampTrade", "Sync", args={"scene_id": _id})
if _type == "Gallery.Update.Post" and not settings["disableGalleryLookupHook"]:
-gallery = stash.find_gallery(_id)
-processGallery(gallery)
+# gallery = stash.find_gallery(_id)
+# processGallery(gallery)
+stash.run_plugin_task(
+"timestampTrade", "Sync Gallery", args={"gallery_id": _id}
+)
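The task dispatch above repeats one shape: every query excludes the [Timestamp: Skip Sync] tag and then adds one distinguishing filter (a stash-box id, missing markers, or a timestamp.trade URL). A hypothetical helper, not part of the commit, makes that structure easier to see:

    def base_query(skip_tag_id, **extra):
        # every task skips scenes tagged [Timestamp: Skip Sync]
        q = {
            "tags": {
                "depth": 0,
                "excludes": [skip_tag_id],
                "modifier": "INCLUDES_ALL",
                "value": [],
            }
        }
        q.update(extra)
        return q

    # e.g. the processScene task: scenes with a stash id and no markers
    q = base_query(
        skip_sync_tag_id,
        stash_id_endpoint={"endpoint": "", "modifier": "NOT_NULL", "stash_id": ""},
        has_markers="false",
    )

The hooks follow the same indirection: instead of processing inline, they now enqueue the plugin's own Sync task, presumably so the Scene.Update.Post hook returns quickly and the follow-up work (which itself updates scenes) runs outside the hook.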

View File: Timestamp Trade plugin manifest (YAML)

@@ -1,6 +1,6 @@
name: Timestamp Trade
description: Sync Markers with timestamp.trade, a new database for sharing markers.
-version: 0.4
+version: 0.5
url: https://github.com/stashapp/CommunityScripts/
exec:
- python
@@ -25,6 +25,12 @@ settings:
disableGalleryLookupHook:
displayName: Disable the Gallery Lookup hook
type: BOOLEAN
addTimestampTradeUrl:
displayName: Add timestamp.trade url
type: BOOLEAN
createMarkers:
displayName: Add markers from timestamp.trade
type: BOOLEAN
hooks:
- name: Add Marker to Scene
@@ -45,10 +51,22 @@ tasks:
description: Submit movie information to timestamp.trade
defaultArgs:
mode: submitMovieScene
- name: "Submit Scenes with eroscripts.com url"
description: Submit scenes with an eroscripts.com forum post
defaultArgs:
mode: submitEroscriptScene
- name: "Sync"
description: Get markers for all scenes with a stashid
defaultArgs:
mode: processScene
- name: "Re-process Scene"
description: Reprocess scenes with a timestamp.trade url
defaultArgs:
mode: reprocessScene
- name: "Re-process All"
description: Reprocess all scenes with any stash-box id
defaultArgs:
mode: processAll
- name: "Submit Gallery"
description: Submit gallery info to timestamp.trade
defaultArgs: