import com.openai.client.OpenAIClient;
import com.openai.client.okhttp.OpenAIOkHttpClient;
import com.openai.core.http.StreamResponse;
import com.openai.models.chat.completions.ChatCompletion;
import com.openai.models.chat.completions.ChatCompletionChunk;
import com.openai.models.chat.completions.ChatCompletionCreateParams;

import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

import org.json.JSONArray;
import org.json.JSONObject;

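/**
 * uncloseai example client built on the official openai-java SDK.
 *
 * <p>Endpoints are discovered from the MODEL_ENDPOINT_1, MODEL_ENDPOINT_2 and
 * TTS_ENDPOINT_1 environment variables; each chat endpoint's /models route is
 * queried and the first advertised model is used for the examples below.
 *
 * <p>Assumes the openai-java SDK and org.json are available on the classpath
 * (provided by the project's build configuration).
 */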
public class uncloseai {
    public static void main(String[] args) {
        System.out.println("=== uncloseai. Java Client (Official OpenAI SDK) ===\n");

        // Discover endpoints from environment variables
        String modelEndpoint1 = System.getenv("MODEL_ENDPOINT_1");
        String modelEndpoint2 = System.getenv("MODEL_ENDPOINT_2");
        String ttsEndpoint1 = System.getenv("TTS_ENDPOINT_1");

        if (modelEndpoint1 == null || modelEndpoint2 == null || ttsEndpoint1 == null) {
            System.err.println("ERROR: No models discovered. Set environment variables:");
            System.err.println("  MODEL_ENDPOINT_1, MODEL_ENDPOINT_2, TTS_ENDPOINT_1");
            System.exit(1);
        }

        try {
            HttpClient httpClient = HttpClient.newHttpClient();

            // Discover models from endpoint 1
            System.out.println("Discovering models from " + modelEndpoint1 + "...");
            HttpRequest req1 = HttpRequest.newBuilder()
                .uri(URI.create(modelEndpoint1 + "/models"))
                .GET()
                .build();
            HttpResponse<String> res1 = httpClient.send(req1, HttpResponse.BodyHandlers.ofString());
            JSONObject json1 = new JSONObject(res1.body());
            JSONArray models1 = json1.getJSONArray("data");
            String model1Id = models1.getJSONObject(0).getString("id");
            System.out.println("Model 1: " + model1Id + "\n");

            // Discover models from endpoint 2
            System.out.println("Discovering models from " + modelEndpoint2 + "...");
            HttpRequest req2 = HttpRequest.newBuilder()
                .uri(URI.create(modelEndpoint2 + "/models"))
                .GET()
                .build();
            HttpResponse<String> res2 = httpClient.send(req2, HttpResponse.BodyHandlers.ofString());
            JSONObject json2 = new JSONObject(res2.body());
            JSONArray models2 = json2.getJSONArray("data");
            String model2Id = models2.getJSONObject(0).getString("id");
            System.out.println("Model 2: " + model2Id + "\n");

            // Non-streaming chat with Model 1
            System.out.println("=== Non-Streaming Chat (Model 1) ===");
            OpenAIClient client1 = OpenAIOkHttpClient.builder()
                .apiKey("dummy-key")
                .baseUrl(modelEndpoint1)
                .build();

            ChatCompletionCreateParams params1 = ChatCompletionCreateParams.builder()
                .model(model1Id)
                .addUserMessage("Give a Python Fizzbuzz solution in one line of code?")
                .temperature(0.5)
                .maxTokens(150L)
                .build();

            ChatCompletion response1 = client1.chat().completions().create(params1);
            System.out.println("Response: " + response1.choices().get(0).message().content().get() + "\n");

            // Streaming chat with Model 1
            System.out.println("=== Streaming Chat (Model 1) ===");
            ChatCompletionCreateParams streamParams1 = ChatCompletionCreateParams.builder()
                .model(model1Id)
                .addUserMessage("Explain quantum entanglement in one sentence.")
                .temperature(0.5)
                .maxTokens(150L)
                .build();

            System.out.print("Response: ");
            // try-with-resources ensures the underlying HTTP stream is closed when done
            try (StreamResponse<ChatCompletionChunk> stream1 =
                    client1.chat().completions().createStreaming(streamParams1)) {
                stream1.stream().forEach(chunk -> {
                    // Each chunk carries an incremental delta; print content as it arrives
                    if (!chunk.choices().isEmpty() && chunk.choices().get(0).delta().content().isPresent()) {
                        System.out.print(chunk.choices().get(0).delta().content().get());
                    }
                });
            }
            System.out.println("\n");

            // Non-streaming chat with Model 2
            System.out.println("=== Non-Streaming Chat (Model 2) ===");
            OpenAIClient client2 = OpenAIOkHttpClient.builder()
                .apiKey("dummy-key")
                .baseUrl(modelEndpoint2)
                .build();

            ChatCompletionCreateParams params2 = ChatCompletionCreateParams.builder()
                .model(model2Id)
                .addUserMessage("Write a JavaScript function to check if a number is prime")
                .temperature(0.5)
                .maxTokens(150L)
                .build();

            ChatCompletion response2 = client2.chat().completions().create(params2);
            System.out.println("Response: " + response2.choices().get(0).message().content().get() + "\n");

            // Streaming chat with Model 2
            System.out.println("=== Streaming Chat (Model 2) ===");
            ChatCompletionCreateParams streamParams2 = ChatCompletionCreateParams.builder()
                .model(model2Id)
                .addUserMessage("Give a Python Fizzbuzz solution in one line of code?")
                .temperature(0.5)
                .maxTokens(150L)
                .build();

            System.out.print("Response: ");
            // try-with-resources ensures the underlying HTTP stream is closed when done
            try (StreamResponse<ChatCompletionChunk> stream2 =
                    client2.chat().completions().createStreaming(streamParams2)) {
                stream2.stream().forEach(chunk -> {
                    // Each chunk carries an incremental delta; print content as it arrives
                    if (!chunk.choices().isEmpty() && chunk.choices().get(0).delta().content().isPresent()) {
                        System.out.print(chunk.choices().get(0).delta().content().get());
                    }
                });
            }
            System.out.println("\n");

            // TTS example
            System.out.println("=== TTS Speech Generation ===");
            System.out.println("[SKIPPED] TTS implementation needs audio API research\n");

            System.out.println("=== Examples Complete ===");

        } catch (IOException | InterruptedException e) {
            System.err.println("Error: " + e.getMessage());
            e.printStackTrace();
        }
    }
}
