import requests
from concurrent.futures import ThreadPoolExecutor, as_completed
# Endpoint and request headers for the ZeroGPU "responses" API.
url = "https://api.zerogpu.ai/v1/responses"
headers = {
    "content-type": "application/json",
    # NOTE(review): placeholders — substitute real credentials (ideally read
    # from environment variables or a secrets store) before running.
    "x-api-key": "YOUR_API_KEY",
    "x-project-id": "YOUR_PROJECT_ID",
}
def one_request(content: str, model: str, timeout: float = 30.0) -> dict:
    """Send one synchronous request to the responses endpoint and return its JSON body.

    Args:
        content: The user message text to submit.
        model: Model identifier, e.g. "zlm-v1-summary-cloud".
        timeout: Seconds to wait for connect/read before aborting (default 30).
            Without an explicit timeout, ``requests.post`` can block forever on
            a stalled connection, wedging the worker thread that runs it.

    Returns:
        The parsed JSON response as a dict.

    Raises:
        requests.RequestException: On network failure, timeout, or a
            non-2xx HTTP status (via ``raise_for_status``).
    """
    payload = {
        "model": model,
        "input": [{"role": "user", "content": content}],
        "text": {"format": {"type": "text"}},
    }
    # `url` and `headers` are module-level constants defined above.
    r = requests.post(url, headers=headers, json=payload, timeout=timeout)
    r.raise_for_status()  # convert HTTP error statuses into exceptions
    return r.json()
texts = ["First text...", "Second text..."]  # your inputs
model = "zlm-v1-summary-cloud"  # or zlm-v1-iab-classify-cloud

# Fan the requests out across a small thread pool. Results are collected in
# completion order, which is nondeterministic — `results` is NOT aligned
# with `texts`.
results = []
with ThreadPoolExecutor(max_workers=5) as executor:
    # Map each future back to the input text that produced it so a failure
    # can be attributed to a specific input.
    futures = {executor.submit(one_request, t, model): t for t in texts}
    for future in as_completed(futures):
        text = futures[future]
        try:
            results.append(future.result())
        except requests.RequestException as e:
            # Log which input failed, then skip it (or add retry logic here).
            print(f"Failed for {text!r}: {e}")