from argparse import ArgumentParser
from time import sleep
from requests import ConnectionError, HTTPError, Timeout, exceptions, get, post

SERVER_URL = ""
API_KEY = ""

def set_manual_import_file_data(available_files):
    prepared_files = []
    # Construct payload
    for f in available_files:
        # Pull seriesId and episodeIds from the GET response
        episode_ids = [ep['id'] for ep in f.get('episodes', [])]
        series_id = f.get('series', {}).get('id', 0)  # fallback 0 if missing

        if series_id == 0 or not episode_ids:
            # Skip files that are impossible to "auto" import
            continue

        prepared_files.append({
            "path": f["path"],
            "folderName": f.get("folderName", ""),
            "seriesId": series_id,
            "episodeIds": episode_ids,
            "quality": f["quality"],
            "languages": f.get("languages", [{"id": 1, "name": "English"}]),
            "releaseGroup": f.get("releaseGroup", ""),
            "indexerFlags": f.get("indexerFlags", 0),
            "releaseType": f.get("releaseType", "singleEpisode"),
            "downloadId": f.get("downloadId", "")
        })
    return prepared_files

def get_all_queue_items():
    # pageSize=10000 is a blunt way to fetch the whole queue in one request
    # (the queue API is paginated); see get_all_queue_items_paged below for a
    # proper paging alternative
    queue_resp = get(f"{SERVER_URL}/api/v3/queue", params={"apikey": API_KEY, "pageSize": 10000})
    queue_resp.raise_for_status()
    return queue_resp.json()
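
# A hedged alternative for very large queues: page through the API properly
# instead of relying on one oversized request. This is a sketch assuming the
# response carries the usual page/pageSize/totalRecords fields alongside
# "records"; it has not been exercised against a live server.
def get_all_queue_items_paged(page_size=250):
    records = []
    page = 1
    while True:
        resp = get(f"{SERVER_URL}/api/v3/queue",
                   params={"apikey": API_KEY, "page": page, "pageSize": page_size})
        resp.raise_for_status()
        data = resp.json()
        records.extend(data.get("records", []))
        # Stop once everything is collected or the server returns an empty page
        if not data.get("records") or len(records) >= data.get("totalRecords", 0):
            break
        page += 1
    return {"records": records}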

def get_manual_import_data(download_id):
    get_url = f"{SERVER_URL}/api/v3/manualimport"
    get_params = {
        "downloadId": download_id,
        "filterExistingFiles": "false"
    }
    get_headers = {
        "Accept": "*/*",
        "Accept-Language": "en-US,en;q=0.7",
        "Connection": "keep-alive",
        "Referer": f"http://{SERVER_URL}/activity/queue",
        "Sec-GPC": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
        "X-Api-Key": API_KEY,
        "X-Requested-With": "XMLHttpRequest"
    }

    resp = get(get_url, headers=get_headers, params=get_params, verify=False)
    resp.raise_for_status()
    return resp.json()

def send_manual_import_command_sonarr(prepared_files):
    post_url = f"{SERVER_URL}/api/v3/command"
    post_headers = {
        "Accept": "application/json, text/javascript, */*; q=0.01",
        "Accept-Language": "en-US,en;q=0.7",
        "Connection": "keep-alive",
        "Content-Type": "application/json",
        "Referer": f"http://{SERVER_URL}/activity/queue",
        "Origin": f"http://{SERVER_URL}",
        "Sec-GPC": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
                      "AppleWebKit/537.36 (KHTML, like Gecko) Chrome/142.0.0.0 Safari/537.36",
        "X-Api-Key": API_KEY,
        "X-Requested-With": "XMLHttpRequest"
    }

    payload = {
        "name": "ManualImport",
        "files": prepared_files,
        "importMode": "auto"
    }

    return post(post_url, headers=post_headers, json=payload, verify=False)
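
# A 201 from /command only means the job was accepted; the import itself runs
# asynchronously. A hedged sketch for polling completion, assuming the POST
# response body carries an "id" and the command record exposes a "status"
# field; untested, and optional for this tool's fire-and-forget approach.
def wait_for_command(command_id, poll_seconds=2, max_polls=30):
    for _ in range(max_polls):
        resp = get(f"{SERVER_URL}/api/v3/command/{command_id}",
                   headers={"X-Api-Key": API_KEY}, verify=False)
        resp.raise_for_status()
        status = resp.json().get("status")
        if status in ("completed", "failed"):
            return status
        sleep(poll_seconds)
    return "timeout"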

def validate_servarr_connection():
    try:
        response = get(SERVER_URL, headers={"X-Api-Key": API_KEY}, timeout=5)
        response.raise_for_status()
        return True  # raise_for_status() already rejected non-2xx responses

    except HTTPError as e:
        print(f"HTTP error from {SERVER_URL}: {e.response.status_code} - {e.response.reason}")
    except ConnectionError:
        print(f"Failed to connect to the server {SERVER_URL}")
    except Timeout:
        print(f"Request to server timed out {SERVER_URL}")
    return False
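
# Hitting the base URL returns the web UI with a 200 even when the API key is
# wrong, so the check above proves reachability but not credentials. A hedged
# sketch of a stricter check against the system/status endpoint, which should
# reject an invalid key with a 401 (assumes the same v3 API path used
# elsewhere in this script):
def validate_api_key():
    resp = get(f"{SERVER_URL}/api/v3/system/status",
               headers={"X-Api-Key": API_KEY}, timeout=5)
    return resp.status_code == 200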

def scan_radarr_server():
    # Radarr support is not implemented yet (the tool currently only works
    # with Sonarr); this stub keeps the -server_type branch a no-op
    return
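
# A hedged sketch of what the Radarr payload builder could look like, on the
# assumption that Radarr's v3 manual-import flow mirrors Sonarr's with a
# movieId in place of seriesId/episodeIds. Field names are assumptions and
# this has not been tested against a live Radarr server.
def set_manual_import_file_data_radarr(available_files):
    prepared_files = []
    for f in available_files:
        movie_id = f.get("movie", {}).get("id", 0)
        if movie_id == 0:
            # Radarr couldn't match the file to a movie; skip it
            continue
        prepared_files.append({
            "path": f["path"],
            "folderName": f.get("folderName", ""),
            "movieId": movie_id,
            "quality": f["quality"],
            "languages": f.get("languages", [{"id": 1, "name": "English"}]),
            "releaseGroup": f.get("releaseGroup", ""),
            "downloadId": f.get("downloadId", "")
        })
    return prepared_files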

def scan_sonarr_server():
    # Catch everything at the top level so one bad queue item can't crash the polling loop
    try:
        print(f"Fetching all queue items for {SERVER_URL}...")
        queue = get_all_queue_items()
        print(f"Found {len(queue.get('records', []))} queue items")

        records = queue.get("records", [])
        incomplete_downloads_count = 0
        manually_imported_count = 0
        for item in records:
            if item.get("status") != "completed":
                # print(f"Skipping non-completed item: {item.get('title')}")
                incomplete_downloads_count += 1
                continue

            # Get available files for manual import from Sonarr
            download_id = item.get("downloadId")
            if not download_id:
                # Can't manual-import without a download to match against
                continue
            available_files = get_manual_import_data(download_id)

            if not available_files:
                print(f"No files available for manual import for downloadId {download_id}")
                continue

            print(f"Found {len(available_files)} file(s) available for import:")
            for f in available_files:
                print(f"  - {f['path']}")
                print(f"    Episodes: {len(f.get('episodes', []))} episodes, IDs: {f.get('episodeIds', [])}")
            print()

            prepared_files = set_manual_import_file_data(available_files)
            if not prepared_files:
                print("Nothing importable after filtering (missing series or episode IDs)")
                continue

            try:
                post_resp = send_manual_import_command_sonarr(prepared_files)
                print(f"POST status for downloadId {download_id}: {post_resp.status_code}")
                if post_resp.status_code == 201:
                    manually_imported_count += 1

            except exceptions.RequestException as e:
                print(f"✗ Network error processing {download_id}: {e}")
            except Exception as e:
                print(f"✗ Unexpected error processing {download_id}: {e}")
        print(f"Skipped {incomplete_downloads_count} items still downloading")
        print(f"\"Manually\" imported {manually_imported_count} items")
    except Exception as e:
        print(f"✗ Unexpected error while scanning the Sonarr queue: {e}")

class HelpfulArgParser(ArgumentParser):
    def error(self, message):
        print(f"error: {message}\n")
        self.print_help()
        raise SystemExit(2)

def main():
    # These globals are written exactly once, here, before anything reads them
    global SERVER_URL
    global API_KEY

    parser = HelpfulArgParser(
        description="A tool for automatically manually importing radarr/sonarr items stuck in the queue. currently only working with sonarr"
    )
    # Required flags with a single dash
    parser.add_argument(
        "-url",
        required=True,
        help="Server URL e.g. http://192.168.1.20:8989"
    )

    parser.add_argument(
        "-server_type",
        required=True,
        choices=["sonarr", "radarr"],
        help="\"sonarr\" or \"radarr\""
    )

    parser.add_argument(
        "-api_key",
        required=True,
        help="API key for the server"
    )

    parser.add_argument(
        "-refresh",
        required=False,
        default=10,
        type=int,
        help="Set refresh interval in seconds (default: 10)"
    )

    args = parser.parse_args()

    SERVER_URL = args.url
    API_KEY = args.api_key
    if not validate_servarr_connection():
        print("Failed to connect to the Radarr or Sonarr server. Exiting.")
        raise SystemExit(1)
    while True:
        if args.server_type == "sonarr":
            scan_sonarr_server()
        elif args.server_type == "radarr":
            scan_radarr_server()
        sleep(args.refresh)
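
# Example invocation (script name and values are placeholders):
#   python stuck_import_fixer.py -url http://192.168.1.20:8989 \
#       -server_type sonarr -api_key <key> -refresh 30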

if __name__ == "__main__":
    main()