// PUBLIC DOMAIN - NO LICENSE, NO WARRANTY
// Copyright 2025 TimeHexOn & foxhop & russell@unturf
// https://www.permacomputer.com

using System;
using System.ClientModel;
using System.IO;
using System.Net.Http;
using System.Text.Json;
using System.Threading.Tasks;
using OpenAI;
using OpenAI.Chat;
using OpenAI.Audio;

class uncloseai
{
    /// <summary>
    /// Entry point. Discovers a chat model from each of two OpenAI-compatible
    /// endpoints, runs a non-streaming and a streaming chat completion against
    /// each, then generates a TTS sample and writes it to speech.mp3.
    /// Requires the MODEL_ENDPOINT_1, MODEL_ENDPOINT_2, and TTS_ENDPOINT_1
    /// environment variables; exits with code 1 if any are missing.
    /// </summary>
    static async Task Main(string[] args)
    {
        Console.WriteLine("=== uncloseai. C# Client (Official OpenAI SDK) ===\n");

        // Discover endpoints from environment variables
        var modelEndpoint1 = Environment.GetEnvironmentVariable("MODEL_ENDPOINT_1");
        var modelEndpoint2 = Environment.GetEnvironmentVariable("MODEL_ENDPOINT_2");
        var ttsEndpoint1 = Environment.GetEnvironmentVariable("TTS_ENDPOINT_1");

        if (string.IsNullOrEmpty(modelEndpoint1) || string.IsNullOrEmpty(modelEndpoint2) || string.IsNullOrEmpty(ttsEndpoint1))
        {
            Console.WriteLine("ERROR: No models discovered. Set environment variables:");
            Console.WriteLine("  MODEL_ENDPOINT_1, MODEL_ENDPOINT_2, TTS_ENDPOINT_1");
            Environment.Exit(1);
        }

        // One shared HttpClient for all discovery calls (per-request clients
        // risk socket exhaustion).
        using var httpClient = new HttpClient();

        var model1Id = await DiscoverFirstModelIdAsync(httpClient, modelEndpoint1!);
        Console.WriteLine($"Model 1: {model1Id}\n");

        var model2Id = await DiscoverFirstModelIdAsync(httpClient, modelEndpoint2!);
        Console.WriteLine($"Model 2: {model2Id}\n");

        var client1 = CreateChatClient(model1Id, modelEndpoint1!);
        var client2 = CreateChatClient(model2Id, modelEndpoint2!);

        Console.WriteLine("=== Non-Streaming Chat (Model 1) ===");
        await RunChatAsync(client1, "Give a Python Fizzbuzz solution in one line of code?");

        Console.WriteLine("=== Streaming Chat (Model 1) ===");
        await RunStreamingChatAsync(client1, "Explain quantum entanglement in one sentence.");

        Console.WriteLine("=== Non-Streaming Chat (Model 2) ===");
        await RunChatAsync(client2, "Write a JavaScript function to check if a number is prime");

        Console.WriteLine("=== Streaming Chat (Model 2) ===");
        await RunStreamingChatAsync(client2, "Give a Python Fizzbuzz solution in one line of code?");

        // TTS example: synthesize speech and persist it as an MP3 file.
        Console.WriteLine("=== TTS Speech Generation ===");
        var ttsClient = new AudioClient(
            model: "tts-1",
            credential: new ApiKeyCredential("dummy-key"),
            options: new OpenAIClientOptions
            {
                Endpoint = new Uri(ttsEndpoint1!)
            }
        );

        var speech = await ttsClient.GenerateSpeechAsync(
            "I think so therefore, Today is a wonderful day to grow something people love!",
            GeneratedSpeechVoice.Alloy,
            new SpeechGenerationOptions
            {
                SpeedRatio = 0.9f
            }
        );

        await File.WriteAllBytesAsync("speech.mp3", speech.Value.ToArray());
        var fileInfo = new FileInfo("speech.mp3");
        Console.WriteLine($"[OK] Speech file created: speech.mp3 ({fileInfo.Length} bytes)\n");

        Console.WriteLine("=== Examples Complete ===");
    }

    /// <summary>
    /// Queries {endpoint}/models and returns the id of the first model listed.
    /// </summary>
    /// <param name="http">Shared HTTP client used for the request.</param>
    /// <param name="endpoint">Base URL of an OpenAI-compatible API.</param>
    /// <returns>The first model id reported by the endpoint.</returns>
    /// <exception cref="InvalidOperationException">The endpoint returned no model id.</exception>
    static async Task<string> DiscoverFirstModelIdAsync(HttpClient http, string endpoint)
    {
        Console.WriteLine($"Discovering models from {endpoint}...");
        var json = await http.GetStringAsync($"{endpoint}/models");
        // JsonDocument is IDisposable (pooled buffers) — dispose it; the
        // original code leaked both parsed documents.
        using var doc = JsonDocument.Parse(json);
        return doc.RootElement.GetProperty("data")[0].GetProperty("id").GetString()
            ?? throw new InvalidOperationException($"No model id returned by {endpoint}");
    }

    /// <summary>
    /// Builds a ChatClient for an OpenAI-compatible endpoint. The endpoint
    /// does not check credentials, so a placeholder API key is used.
    /// </summary>
    static ChatClient CreateChatClient(string modelId, string endpoint) =>
        new ChatClient(
            model: modelId,
            credential: new ApiKeyCredential("dummy-key"),
            options: new OpenAIClientOptions
            {
                Endpoint = new Uri(endpoint)
            }
        );

    /// <summary>
    /// Shared sampling settings for every chat example in this demo.
    /// </summary>
    static ChatCompletionOptions DefaultChatOptions() => new ChatCompletionOptions
    {
        Temperature = 0.5f,
        MaxOutputTokenCount = 150
    };

    /// <summary>
    /// Sends one prompt as a non-streaming chat completion and prints the reply.
    /// </summary>
    static async Task RunChatAsync(ChatClient client, string prompt)
    {
        var response = await client.CompleteChatAsync(
            new[] { new UserChatMessage(prompt) },
            DefaultChatOptions()
        );
        Console.WriteLine($"Response: {response.Value.Content[0].Text}\n");
    }

    /// <summary>
    /// Sends one prompt as a streaming chat completion, printing tokens as
    /// they arrive.
    /// </summary>
    static async Task RunStreamingChatAsync(ChatClient client, string prompt)
    {
        Console.Write("Response: ");
        await foreach (var update in client.CompleteChatStreamingAsync(
            new[] { new UserChatMessage(prompt) },
            DefaultChatOptions()
        ))
        {
            foreach (var contentPart in update.ContentUpdate)
            {
                Console.Write(contentPart.Text);
            }
        }
        Console.WriteLine("\n");
    }
}
