import json
import os
import sys
import time
from b2sdk.v1 import InMemoryAccountInfo, B2Api

# Configuration
MAX_RETRIES = 3  # Maximum retry attempts per API call
CUSTOM_CDN_URL = "https://images.livetimelapse.com.au"  # Custom CDN base URL fronting the buckets
WAIT_TIME = 2  # Seconds to wait between bucket processing
ACCOUNTS_FILE = "account-data/accounts.json"  # JSON file holding B2 credentials

# Helper function to retry operations
def retry_operation(func, max_retries, *args, **kwargs):
    for attempt in range(1, max_retries + 1):
        try:
            return func(*args, **kwargs)
        except Exception as e:
            print(f"Attempt {attempt} failed: {e}")
            if attempt < max_retries:
                time.sleep(2 ** attempt)  # Exponential backoff
            else:
                print(f"Operation failed after {max_retries} attempts.")
                raise
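
# Note: retry_operation with MAX_RETRIES = 3 attempts a failing call up to
# three times, sleeping 2 s after the first failure and 4 s after the second,
# before the final exception is re-raised to the caller.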

# Load account credentials from a JSON file
def load_accounts(filename=ACCOUNTS_FILE):
    with open(filename, "r") as file:
        return json.load(file)
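
# The accounts file is expected to hold a JSON array of credential objects.
# A hypothetical example (the keys match what process_all_accounts reads below):
#
# [
#     {"account_id": "0012ab34cd56", "application_key": "K001exampleKey0001"},
#     {"account_id": "0098zy76xw54", "application_key": "K001exampleKey0002"}
# ]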

# Process files in a bucket
def process_bucket(bucket, b2_api):
    print(f"Fetching files for bucket: {bucket.name}")

    # Prepare file output
    output_file = f"{bucket.name}-files.json"
    if os.path.exists(output_file):
        print(f"File list for bucket {bucket.name} already exists, skipping.")
        return

    file_list = []
    try:
        for file_info, _ in bucket.ls(recursive=True):
            # Build the CDN URL; the custom domain serves <bucket>/<file>
            # directly, without B2's native /file/ path prefix
            cdn_url = f"{CUSTOM_CDN_URL}/{bucket.name}/{file_info.file_name}"
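            # For example (hypothetical names), a file "site-a/cam1/0001.jpg"
            # in bucket "timelapse" maps to
            # https://images.livetimelapse.com.au/timelapse/site-a/cam1/0001.jpg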

            # Collect metadata for this file
            file_entry = {
                "bucket_name": bucket.name,
                "file_name": file_info.file_name,
                "file_id": file_info.id_,
                "size": file_info.size,
                "upload_timestamp": file_info.upload_timestamp,
                "content_type": file_info.content_type,
                "download_url": b2_api.get_download_url_for_fileid(file_info.id_),
                "cdn_url": cdn_url,
            }
            file_list.append(file_entry)

    except Exception as e:
        print(f"Error fetching files for bucket {bucket.name}: {e}")
        return  # Don't save a partial list; the next run will retry this bucket

    # Save to JSON file in the current directory
    with open(output_file, "w") as json_file:
        json.dump(file_list, json_file, indent=4)
    print(f"File list saved for bucket {bucket.name}: {output_file}")

# Process all buckets on all accounts sequentially
def process_all_accounts(accounts):
    print("Starting to process all accounts and their buckets...")

    # Iterate through all accounts
    for account in accounts:
        account_id = account["account_id"]
        application_key = account["application_key"]

        print(f"Processing buckets for account ID: {account_id}")

        # Authenticate with B2 ("production" is the standard public B2 realm)
        account_info = InMemoryAccountInfo()
        b2_api = B2Api(account_info)
        retry_operation(b2_api.authorize_account, MAX_RETRIES, "production", account_id, application_key)

        # List all buckets in the account
        buckets = retry_operation(b2_api.list_buckets, MAX_RETRIES)

        # Process each bucket one by one
        for bucket in buckets:
            try:
                process_bucket(bucket, b2_api)
                print(f"Finished processing bucket: {bucket.name}")
                print(f"Waiting {WAIT_TIME} seconds before processing the next bucket...")
                time.sleep(WAIT_TIME)  # Wait before processing the next bucket
            except Exception as e:
                print(f"Error processing bucket {bucket.name}: {e}")

if __name__ == "__main__":
    # Ensure the accounts file exists before starting
    if not os.path.exists(ACCOUNTS_FILE):
        print(f"{ACCOUNTS_FILE} not found. Please create the file and try again.")
        sys.exit(1)

    # Load account details
    accounts = load_accounts()

    # Process all buckets on all accounts
    process_all_accounts(accounts)
