import json
import requests
import time
import sys
from concurrent.futures import ThreadPoolExecutor, as_completed

# ==============================
# CONFIG
# ==============================
# Input file can be overridden on the command line; falls back to the default.
INPUT_FILE = sys.argv[1] if len(sys.argv) > 1 else "grouped_result.json"
URL = "https://mygreenqube.com:4003/landRoute/editLandStatusByAI"

# NOTE(review): a hard-coded bearer token and tenant identifier in source
# control is a security risk -- move these to environment variables or a
# secrets store, and rotate this token since it has been committed.
HEADERS = {
    "Authorization": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJpZCI6IjVkZTQwZThkLWM4MDItNDdlZC1iN2Q0LWRkNGQ1MDNhMGJlOCIsImlhdCI6MTU4MzMyNTUxNCwiZXhwIjoxNTgzOTMwMzE0fQ.YZP4e5vC6U7-DqfW3MXOczQ-hPngfbukXRVpUNkdru8",
    "information": "paddy_carbon_pr_53158,1,190d19ce-e4f2-4f17-9445-83e46e0a4625",
    "Content-Type": "application/json"
}

BATCH_SIZE = 50    # records per API call
MAX_WORKERS = 5    # concurrent in-flight requests
MAX_RETRIES = 3    # attempts per batch before it is recorded as failed


# ==============================
# LOAD DATA
# ==============================
# Loaded at import time; main() reads this module-level dict.
with open(INPUT_FILE, "r", encoding="utf-8") as f:
    data = json.load(f)


# ==============================
# TRANSFORM FUNCTION
# ==============================
def transform_record(rel_id, item):
    """Convert one grouped-result entry into the API payload shape.

    Parameters:
        rel_id: land relationship id (the key from the input JSON).
        item: dict with optional "status", "missing" (list of field names)
              and "co_farmer_match_score" (assumed 0.0-1.0 -- TODO confirm).

    Returns:
        dict ready to be placed in the "lands" list of the PUT payload.
    """
    status = item.get("status")
    missing = item.get("missing", [])
    # `or 0` also guards against an explicit null score in the input,
    # which would otherwise crash on `int(None * 100)` below.
    score = item.get("co_farmer_match_score") or 0

    # Only an explicit "ACCEPTED" maps to APPROVED; anything else is rejected.
    ai_status = "APPROVED" if status == "ACCEPTED" else "REJECTED"

    # Comment summarizes which fields failed to match, if any.
    if not missing:
        comment = "All matched"
    else:
        comment = ", ".join(missing) + " missing"

    # Truncates (does not round) -- kept to preserve existing behavior.
    percentage = int(score * 100)

    return {
        "rel_id": rel_id,
        "ai_status": ai_status,
        "ai_status_comment": comment,
        # NOTE(review): "pecentage" looks like a typo, but the server may
        # expect this exact key -- do not rename without checking the API.
        "status_pecentage": percentage
    }


# ==============================
# CREATE BATCHES
# ==============================
def create_batches(data_list, batch_size):
    """Yield successive slices of *data_list*, each at most *batch_size* long."""
    total = len(data_list)
    start = 0
    while start < total:
        yield data_list[start:start + batch_size]
        start += batch_size


# ==============================
# API CALL WITH RETRY
# ==============================
def send_batch(batch):
    """PUT one batch of land records to the status endpoint, with retries.

    Parameters:
        batch: list of payload dicts produced by transform_record().

    Returns:
        (True, None) on HTTP 200, or (False, batch) after MAX_RETRIES
        failed attempts so the caller can persist the failed batch.
    """
    payload = {"lands": batch}

    for attempt in range(1, MAX_RETRIES + 1):
        try:
            response = requests.put(URL, headers=HEADERS, json=payload, timeout=30)

            if response.status_code == 200:
                print(f"✅ Batch success ({len(batch)} records)")
                return True, None

            print(f"⚠️ Retry {attempt}: {response.text}")

        # Only network/HTTP-layer failures are retryable; a programming
        # error (TypeError, KeyError, ...) should surface immediately.
        except requests.RequestException as e:
            print(f"⚠️ Retry {attempt} Error:", str(e))

        # Back off between attempts, but don't waste 2s after the last one.
        if attempt < MAX_RETRIES:
            time.sleep(2)

    return False, batch


# ==============================
# MAIN EXECUTION
# ==============================
def main():
    """Transform the input data, fan batches out to the API in parallel,
    and persist any batches that still fail after all retries."""

    records = [transform_record(rid, entry) for rid, entry in data.items()]
    batches = list(create_batches(records, BATCH_SIZE))

    print(f"🚀 Total Records: {len(records)}")
    print(f"📦 Total Batches: {len(batches)}")

    failed_batches = []

    # One worker per in-flight request; results are collected as they finish.
    with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
        pending = [executor.submit(send_batch, chunk) for chunk in batches]
        for fut in as_completed(pending):
            ok, leftover = fut.result()
            if not ok:
                failed_batches.append(leftover)

    # Guard clause: nothing failed, report success and stop.
    if not failed_batches:
        print("\n🎉 All batches processed successfully!")
        return

    # Dump the failed batches so the run can be resumed/replayed later.
    with open("failed_batches.json", "w", encoding="utf-8") as f:
        json.dump(failed_batches, f, indent=2)

    print(f"\n❌ Failed batches saved: failed_batches.json")


# ==============================
# RUN
# ==============================
# Standard entry guard: allows importing this module without starting the run.
if __name__ == "__main__":
    main()