in Utils/llm/api.py [0:0]
def request_google_ai_studio_data(system_prompt, messages, model):
    """Call the Google AI Studio generateContent endpoint and return the reply.

    Args:
        system_prompt: Text sent as the model's system instruction.
        messages: Iterable of dicts with ``role`` and ``content`` keys,
            one per conversation turn.
        model: Key into the module-level ``API`` registry; the entry is a
            callable returning a config dict that provides the request URL.

    Returns:
        dict with ``content`` (first candidate's text) and ``tokens``
        (prompt / candidate token counts from ``usageMetadata``).

    Raises:
        APIException: when the HTTP response status is not 2xx.
    """
    endpoint = API[model]()["url"]

    # Google's schema wraps each turn's text inside a "parts" list.
    turns = []
    for msg in messages:
        turns.append({"role": msg['role'], "parts": [{"text": msg['content']}]})

    request_body = {
        "contents": turns,
        "system_instruction": {"role": "user", "parts": [{"text": system_prompt}]},
        "generation_config": {
            "maxOutputTokens": 8192,
            # NOTE(review): ``temperature`` is not defined within this function —
            # presumably a module-level setting elsewhere in the file; confirm.
            "temperature": temperature,
            "responseMimeType": "text/plain"
        },
    }

    resp = requests.post(
        endpoint,
        headers={'Content-Type': 'application/json'},
        json=request_body,
        timeout=300,
    )
    if not resp.ok:
        raise APIException(resp.status_code, resp.content)

    parsed = resp.json()
    usage = parsed["usageMetadata"]
    return {
        'content': parsed["candidates"][0]["content"]["parts"][0]["text"],
        'tokens': {
            "input_tokens": usage["promptTokenCount"],
            "output_tokens": usage["candidatesTokenCount"],
        },
    }