def batch_cancel_requests(request_ids, max_concurrent=5):
    """Cancel multiple requests with rate limiting"""
    import concurrent.futures
    import time

    import requests

    def cancel_single_request(request_id):
        try:
            response = requests.delete(
                f"https://api.hitl.sh/v1/api/requests/{request_id}",
                headers={"Authorization": "Bearer your_api_key_here"}
            )
            if response.status_code == 200:
                data = response.json()["data"]
                return {
                    "request_id": request_id,
                    "status": "cancelled",
                    # Assumed field name: adjust "refunded" to whatever key the
                    # cancellation response actually uses for refund information
                    "refunded": data.get("refunded", False)
                }
            else:
                return {
                    "request_id": request_id,
                    "status": "error",
                    "error": response.json().get("msg", "Unknown error")
                }
        except Exception as e:
            return {
                "request_id": request_id,
                "status": "exception",
                "error": str(e)
            }
    results = []
    # Bound concurrency with a small thread pool to respect rate limits
    with concurrent.futures.ThreadPoolExecutor(max_workers=max_concurrent) as executor:
        # Submit all cancellation requests
        future_to_id = {
            executor.submit(cancel_single_request, req_id): req_id
            for req_id in request_ids
        }
        for future in concurrent.futures.as_completed(future_to_id):
            result = future.result()
            results.append(result)
            # Brief pause while draining results; max_workers is the main
            # control on how many requests run at once
            time.sleep(0.1)
    # Summarize results
    summary = {
        "total_requests": len(request_ids),
        "successfully_cancelled": sum(1 for r in results if r["status"] == "cancelled"),
        "errors": [r for r in results if r["status"] in ["error", "exception"]],
        "refunds_granted": sum(1 for r in results if r.get("refunded", False)),
        "details": results
    }
    return summary
# Usage
request_ids = ["65f1234567890abcdef12348", "65f1234567890abcdef12349", "65f1234567890abcdef12350"]
results = batch_cancel_requests(request_ids)
print(f"Cancelled {results['successfully_cancelled']}/{results['total_requests']} requests")
print(f"Refunds granted: {results['refunds_granted']}")
for error in results['errors']:
    print(f"Error cancelling {error['request_id']}: {error['error']}")