Closed 8ullyMaguire closed 4 months ago
In stash, the duration is associated with a file rather than a scene. So instead of adding the scenes to stash and sorting them by duration there, I have to sort them without adding them to stash:
import requests
from typing import Dict, List, Optional, Union
import logging
from datetime import timedelta
# stash-box GraphQL endpoint (public StashDB instance).
STASHBOX_GRAPHQL_ENDPOINT = "https://stashdb.org/graphql"
# NOTE(review): placeholder credential — must be replaced with a real
# stash-box API key before running.
STASHBOX_API_KEY = "your-stash-box-api-key"
# Configure logging
# DEBUG level logs every query, its variables, and the full JSON response.
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s')
def execute_graphql_query(
    query: str,
    variables: Optional[Dict] = None,
    timeout: float = 30.0,
) -> Dict:
    """Execute a GraphQL query against the stash-box endpoint.

    Args:
        query: The GraphQL query document.
        variables: Optional mapping of query variables.
        timeout: Seconds to wait for the server. Without a timeout,
            ``requests.post`` can block indefinitely on a stalled connection.

    Returns:
        The ``"data"`` portion of the GraphQL response.

    Raises:
        requests.exceptions.RequestException: On network or HTTP errors
            (including non-2xx status via ``raise_for_status``).
        KeyError: If the response JSON carries no ``"data"`` key.
    """
    headers = {
        "Content-Type": "application/json",
        "ApiKey": STASHBOX_API_KEY,
    }
    payload: Dict = {"query": query}
    if variables:
        payload["variables"] = variables
    logging.debug(f"Executing GraphQL query: {query}")
    logging.debug(f"Variables: {variables}")
    try:
        response = requests.post(
            STASHBOX_GRAPHQL_ENDPOINT,
            json=payload,
            headers=headers,
            timeout=timeout,  # fix: never hang forever on a dead server
        )
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        logging.error(f"Error executing GraphQL query: {e}")
        raise
    # Parse the body once instead of once for the log and once for the return.
    body = response.json()
    logging.debug(f"Response: {body}")
    return body["data"]
def get_studio_scenes(studio_id: str) -> List[Dict]:
    """Fetch every scene belonging to a parent studio from stash-box.

    Pages through ``queryScenes`` 20 scenes at a time until the total
    count reported by the server has been collected.

    Args:
        studio_id: The stash-box ID of the parent studio.

    Returns:
        A list of scene dicts (``id``, ``release_date``, ``title``,
        ``duration``). May be partial if a page request fails mid-way —
        errors are logged and the scenes fetched so far are returned.
    """
    query = """
    query Scenes($input: SceneQueryInput!) {
        queryScenes(input: $input) {
            count
            scenes {
                id
                release_date
                title
                duration
            }
            __typename
        }
    }
    """
    page = 1
    all_scenes: List[Dict] = []  # accumulates scenes across all pages
    while True:
        variables = {
            "input": {
                "direction": "DESC",
                "page": page,
                "parentStudio": studio_id,
                "per_page": 20,
                "sort": "DATE",
            }
        }
        logging.debug(f"Fetching scenes for page {page}")
        try:
            result = execute_graphql_query(query, variables)
        except Exception as e:
            # Best-effort: return what we have so far rather than crash.
            logging.error(f"Error fetching scenes for page {page}: {e}")
            break
        scenes = result["queryScenes"]["scenes"]
        total_scenes = result["queryScenes"]["count"]
        logging.debug(f"Page {page}: Fetched {len(scenes)} scenes, total scenes: {total_scenes}")
        if not scenes:
            # Fix: guard against an infinite loop if the server returns an
            # empty page while `count` still exceeds the scenes fetched.
            break
        all_scenes.extend(scenes)
        # Stop once we have fetched everything the server reported.
        if len(all_scenes) >= total_scenes:
            break
        page += 1
    return all_scenes
def format_duration(duration: int) -> str:
    """Render a duration in seconds using timedelta's H:MM:SS string form."""
    delta = timedelta(seconds=duration)
    return f"{delta}"
def main() -> None:
    """Fetch all scenes for a studio and print them sorted by duration, longest first."""
    studio_id = "sample_studio_id"  # TODO: replace with a real stash-box studio ID
    # get_studio_scenes already returns a list; no defensive copy needed.
    scenes = get_studio_scenes(studio_id)
    if not scenes:
        logging.warning("No scenes fetched")
        return
    # Some scenes have a null duration; coerce to 0 so the sort never
    # compares None with int.
    scenes.sort(key=lambda x: x['duration'] or 0, reverse=True)
    for scene in scenes:
        print(
            # Fix: trailing comma so every field is ", "-separated
            # (print joins its arguments with a single space).
            f"Duration: {format_duration(scene['duration'] or 0)},",
            f"Release Date: {scene['release_date']}, "
            f"Title: {scene['title']}, Scene ID: {scene['id']}"
        )


if __name__ == "__main__":
    main()
Is your feature request related to a problem? Please describe. When automatically identifying a scene fails, I often struggle to identify the specific scene when I only know the studio or performer and duration.
Describe the solution you'd like I would like to be able to batch import scene metadata from stash-box into stash, specifically for a given studio or performer. This would allow me to easily sort and filter scenes by duration, making it simpler to identify the scene I'm looking for.
Describe alternatives you've considered I have considered manually searching for each scene individually, but this is time-consuming and inefficient.
Additional context This feature would significantly enhance my workflow by providing a more efficient way to manage and identify scenes in stash.
Here is a sample script to get all scenes given a studio id. Now I have to add all these scenes to stash with the given studio id.
This works, but if I add `duration` to the query I get an error. Here is the script I have so far: