import requests
import json
import openai
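
# Relay a prompt back and forth between two local models: a GPT4All-style
# server exposing the OpenAI-compatible completions API and an Ollama server.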

# OpenAI-compatible local server configuration (e.g., GPT4All)
openai.api_base = "http://localhost:4891/v1"
openai.api_key = "not needed for a local LLM"
model_openai = "mistral-openorca"

# LLaMA configuration (Ollama server)
url_llama = "http://localhost:11434/api/generate"
headers_llama = {"Content-Type": "application/json"}

def get_response_from_openai(prompt):
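    # Uses the legacy module-level completions API (requires openai<1.0);
    # a sketch against the openai>=1.0 client follows this function.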
    response = openai.Completion.create(
        model=model_openai,
        prompt=prompt,
        max_tokens=150,  # cap the completion length
        temperature=0.38,
        top_p=0.95,
        n=1,
        echo=False,  # echoing the prompt would compound it into every cycle
        stream=False
    )
    return response.choices[0].text.strip()
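
# A minimal sketch of the same call against openai>=1.0, where the
# module-level Completion API was removed (assumption: the local server
# accepts the v1 client as well). Kept commented out because it cannot
# coexist with the openai<1.0 code above in one environment.
# from openai import OpenAI
# client = OpenAI(base_url="http://localhost:4891/v1", api_key="not-needed")
# response = client.completions.create(
#     model=model_openai,
#     prompt="Hello",
#     max_tokens=150,
#     temperature=0.38,
# )
# print(response.choices[0].text.strip())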

def get_response_from_llama(prompt):
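    # Ollama's /api/generate streams newline-delimited JSON objects by
    # default; the loop below concatenates their "response" fields.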
    payload = json.dumps({
        "model": "llama2",
        "prompt": prompt
    })
    response = requests.post(url_llama, data=payload, headers=headers_llama)
    if response.status_code == 200:
        full_response = ""
        for line in response.text.splitlines():
            try:
                line_data = json.loads(line)
                full_response += line_data.get("response", "")
            except json.JSONDecodeError as e:
                print(f"Eroare la decodarea liniei JSON: {e}")
        return full_response
    else:
        print("A apărut o eroare:", response.status_code)
        return None
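
# A minimal non-streaming variant: Ollama's /api/generate also accepts
# "stream": false, in which case it returns a single JSON object whose
# "response" field holds the full completion. The 60 s timeout is an
# added assumption, not part of the original script.
def get_response_from_llama_single(prompt):
    payload = json.dumps({"model": "llama2", "prompt": prompt, "stream": False})
    response = requests.post(url_llama, data=payload, headers=headers_llama, timeout=60)
    if response.status_code == 200:
        return response.json().get("response", "")
    print("Request failed with status:", response.status_code)
    return None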

def ask_question_interactive():
    question = input("Enter an initial question: ")
    num_cycles = int(input("How many cycles between the models? (enter a number): "))
    
    for cycle in range(1, num_cycles + 1):
        print("\nCycle", cycle)
        # Get the response from OpenAI
        openai_response = get_response_from_openai(question)
        print("Răspuns de la OpenAI:", openai_response)
        
        # Send the OpenAI response to LLaMA
        llama_response = get_response_from_llama(openai_response)
        if llama_response:
            print("Răspuns de la LLaMA:", llama_response)
            # Următorul prompt pentru OpenAI va fi răspunsul de la LLaMA
            question = llama_response

if __name__ == "__main__":
    ask_question_interactive()
