// BareGit
#include "llm_client.hpp"

#include <mw/http_client.hpp>
#include <mw/url.hpp>
#include <spdlog/spdlog.h>

/// Construct a client for an OpenAI-compatible chat endpoint.
/// Sink parameters (api_key, model, endpoint) are taken by value and moved.
/// When no session is supplied, a default mw::HTTPSession is created so the
/// client is always usable; injecting a session enables testing with a mock.
OpenAiClient::OpenAiClient(std::string api_key, std::string model,
                           std::string endpoint,
                           std::unique_ptr<mw::HTTPSessionInterface> session)
    : api_key_(std::move(api_key)), model_(std::move(model)),
      endpoint_(std::move(endpoint)), session_(std::move(session))
{
    // Fall back to a real HTTP session if the caller passed nullptr.
    if(!session_)
    {
        session_ = std::make_unique<mw::HTTPSession>();
    }
}

/// Parse an OpenAI chat-completions response body into an AssistantMessage.
///
/// Extracts `choices[0].message`: its optional text content and any tool
/// calls. Returns an error (never throws) when the body is not valid JSON or
/// lacks the expected structure.
///
/// @param response_body Raw JSON payload from the chat/completions endpoint.
/// @return The parsed assistant message, or a runtime error describing the
///         malformed input.
mw::E<AssistantMessage>
OpenAiClient::parseResponse(std::string_view response_body) const
{
    try
    {
        auto response_json = nlohmann::json::parse(response_body);
        if(!response_json.contains("choices") || response_json["choices"].empty())
        {
            return std::unexpected(mw::runtimeError(
                std::string("Invalid response format: missing choices. Body: ") + std::string(response_body)));
        }
        auto& choice = response_json["choices"][0]["message"];

        AssistantMessage assistant_msg;
        // "content" may be absent or JSON null (e.g. pure tool-call replies).
        if(choice.contains("content") && !choice["content"].is_null())
        {
            assistant_msg.content = choice["content"].get<std::string>();
        }

        // Guard with is_array(): a null or non-array "tool_calls" would
        // otherwise throw from inside the loop with a confusing message.
        if(choice.contains("tool_calls") && choice["tool_calls"].is_array())
        {
            for(const auto& tc : choice["tool_calls"])
            {
                ToolCall call;
                call.id = tc["id"].get<std::string>();
                call.name = tc["function"]["name"].get<std::string>();
                // The API delivers arguments as a JSON-encoded *string*, so
                // it needs a second parse to become structured data.
                call.arguments = nlohmann::json::parse(
                    tc["function"]["arguments"].get<std::string>());
                // Move: ToolCall holds strings and a json value; copying each
                // element here was pure waste.
                assistant_msg.tool_calls.push_back(std::move(call));
            }
        }

        return assistant_msg;
    }
    catch(const std::exception& e)
    {
        // Covers both malformed JSON and missing/mistyped fields accessed
        // above; nlohmann throws std::exception-derived errors for both.
        return std::unexpected(
            mw::runtimeError(std::string("JSON parse error: ") + e.what()));
    }
}

/// Send the conversation history to the chat/completions endpoint and return
/// the model's reply.
///
/// Builds the request JSON (model, serialized history, optional tool
/// schemas), POSTs it with bearer-token auth via the injected session, and
/// parses the response through parseResponse().
///
/// @param history                Messages to send, oldest first.
/// @param available_tools_schema JSON array of tool definitions; attached
///                               only when non-empty.
/// @return The assistant's message, or the transport/parse error.
Task<mw::E<Message>>
OpenAiClient::generateResponse(const std::vector<Message>& history,
                               const nlohmann::json& available_tools_schema)
{
    nlohmann::json request_body;
    request_body["model"] = model_;

    nlohmann::json messages = nlohmann::json::array();
    for(const auto& msg : history)
    {
        messages.push_back(toJson(msg));
    }
    // Move: assigning by value would deep-copy the whole serialized history.
    request_body["messages"] = std::move(messages);

    if(!available_tools_schema.empty() && available_tools_schema.is_array())
    {
        request_body["tools"] = available_tools_schema;
    }

    auto url_res = mw::URL::fromStr(endpoint_);
    if(!url_res.has_value())
    {
        // Include the offending value — a bare "invalid URL" error is
        // undiagnosable from logs.
        co_return std::unexpected(mw::runtimeError(
            "Invalid endpoint URL: " + endpoint_));
    }
    mw::URL request_url = std::move(url_res.value());
    request_url.appendPath("chat/completions");

    mw::HTTPRequest request(request_url.str());
    request.addHeader("Content-Type", "application/json");
    request.addHeader("Authorization", "Bearer " + api_key_);
    request.setPayload(request_body.dump());

    auto response = session_->post(request);
    if(!response.has_value())
    {
        spdlog::error("OpenAI request failed: {}",
                      mw::errorMsg(response.error()));
        co_return std::unexpected(response.error());
    }

    auto parsed = parseResponse(response.value()->payloadAsStr());
    if(!parsed.has_value())
    {
        co_return std::unexpected(parsed.error());
    }
    // Move the message out — it owns the content string and the tool-call
    // vector; copying it on every successful call was pure waste.
    co_return std::move(parsed).value();
}