File size: 991 Bytes
f96eb1f
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
import os

from openai import OpenAI
from dotenv import load_dotenv

# Pull environment variables from a local .env file, if one exists.
load_dotenv()

# Connection settings, each overridable via environment variables.
# NOTE(review): empty default api_key means a missing OPENAI_API_KEY only
# fails at request time, not at import time.
api_key = os.getenv("OPENAI_API_KEY","")
url = os.getenv("OPENAI_API_URL","https://api.openai.com/v1/")
model = os.getenv("OPENAI_API_MODEL","openai/gpt-oss-120b")
# Shared client used by run_openai below; base_url allows pointing at
# OpenAI-compatible gateways (the default model name suggests such a proxy).
client = OpenAI(
    api_key=api_key,
    base_url=url
)
def run_openai(prompt: str, sys_prompt: str) -> str:
    """Send a single chat-completion request and return the reply text.

    Args:
        prompt: The user message content.
        sys_prompt: The system message that frames the model's behavior.

    Returns:
        The assistant's reply text, or a diagnostic string of the form
        "LLM infer failed: <ExceptionName>" when the request raises.
        Never returns None: a content-less reply is normalized to "".
    """
    try:
        response = client.chat.completions.create(
            model=model,
            max_tokens=5000,
            temperature=1,
            presence_penalty=0,
            top_p=0.95,
            messages=[
                {
                    "role": "system",
                    "content": sys_prompt
                },
                {
                    "role": "user",
                    "content": prompt
                }
            ]
        )
    except Exception as e:
        # Deliberate best-effort contract: callers get an error string
        # instead of an exception. Fixed the "filed" -> "failed" typo.
        return f"LLM infer failed: {type(e).__name__}"
    # The SDK types message.content as Optional[str] (None for e.g. tool
    # calls); coalesce to "" so the declared `-> str` contract holds.
    return response.choices[0].message.content or ""