import { prisma } from "./prisma.js";
import { createId } from "./id.js";
import { decryptSecret } from "./crypto.js";

// Seed metadata for well-known AI run types, keyed by the ai_run_types.code
// column. ensureAiRunTypeExists uses these entries when lazily inserting a
// run-type row, and falls back to a humanized code for types not listed here.
const defaultAiRunTypes: Record<
  string,
  {
    name: string;
    description: string;
    billableDefault: boolean;
  }
> = {
  classification: {
    name: "Classification",
    description: "Classify content or route work using AI",
    billableDefault: true,
  },
  extraction: {
    name: "Extraction",
    description: "Extract structured information from documents or messages",
    billableDefault: true,
  },
  fact_consolidation: {
    name: "Fact Consolidation",
    description: "Consolidate extracted facts into a cleaner client knowledge set",
    billableDefault: true,
  },
  gap_analysis: {
    name: "Gap Analysis",
    description: "Analyze missing evidence, questions, or filing gaps",
    billableDefault: true,
  },
  form_fill: {
    name: "Form Fill",
    description: "Generate or fill legal forms from the available client evidence",
    billableDefault: true,
  },
  packet_assembly: {
    name: "Packet Assembly",
    description: "Assemble final filing packets or generated output bundles",
    billableDefault: true,
  },
  review_comment_reply: {
    name: "Review Comment Reply",
    description: "Draft an anchored reply for a document review comment",
    billableDefault: true,
  },
  custom_agent_prompt_design: {
    name: "Custom Agent Prompt Design",
    description: "Generate a suggested prompt for a workspace custom agent",
    billableDefault: true,
  },
  workflow_blueprint_analysis: {
    name: "Workflow Blueprint Analysis",
    description: "Analyze workflow templates and generate document checklists plus unique intake questions",
    billableDefault: true,
  },
  form_field_configuration_suggestion: {
    name: "Form Field Configuration Suggestion",
    description: "Suggest source hints or operational instructions for a form field configuration",
    billableDefault: true,
  },
};

// Turns a snake/kebab/space-separated run-type code into a Title Case label,
// e.g. "fact_consolidation" -> "Fact Consolidation". Empty input yields "".
function humanizeAiRunTypeCode(runType: string) {
  const rawParts = String(runType ?? "").trim().split(/[_\s-]+/);
  const words: string[] = [];

  for (const part of rawParts) {
    // Splitting an empty string produces [""], so skip blank fragments.
    if (!part) {
      continue;
    }
    words.push(part.charAt(0).toUpperCase() + part.slice(1));
  }

  return words.join(" ");
}

// Lazily inserts the ai_run_types row for a code if it does not exist yet,
// using the seeded defaults when the code is known and a humanized fallback
// otherwise. Blank codes are ignored. INSERT IGNORE keeps this idempotent.
async function ensureAiRunTypeExists(runType: string) {
  const code = String(runType ?? "").trim();

  if (!code) {
    return;
  }

  const preset = defaultAiRunTypes[code];
  const name = preset?.name || humanizeAiRunTypeCode(code) || "AI Run";
  const description =
    preset?.description || `Workspace AI run type for ${code.replace(/_/g, " ")}.`;
  const billableDefault = preset?.billableDefault ?? true;

  await prisma.$executeRaw`
    INSERT IGNORE INTO ai_run_types (code, name, description, billable_default)
    VALUES (
      ${code},
      ${name},
      ${description},
      ${billableDefault ? 1 : 0}
    )
  `;
}

// Loads the firm's AI settings plus the credential row the settings point at.
// Throws when no active credential is configured, or when the referenced
// credential row is missing or has been deactivated.
async function getActiveAiCredentialRecord(lawFirmId: string) {
  const settings = await prisma.lawFirmAiSetting.findUnique({
    where: { law_firm_id: lawFirmId },
  });

  const activeCredentialId = settings?.active_credential_id;
  if (!activeCredentialId) {
    throw new Error("No active AI credential configured for this law firm");
  }

  const credential = await prisma.lawFirmAiCredential.findUnique({
    where: { id: activeCredentialId },
  });

  if (!credential?.is_active) {
    throw new Error("Active AI credential is missing or inactive");
  }

  return { settings, credential };
}

// Resolves the firm's provider/model/credential id WITHOUT decrypting the
// API key — safe for metadata use (e.g. recording runs). Use
// getActiveAiRuntime when the key itself is needed.
export async function getActiveAiContext(lawFirmId: string) {
  const record = await getActiveAiCredentialRecord(lawFirmId);

  return {
    provider: record.settings.default_provider,
    model: record.settings.default_model,
    credentialId: record.credential.id,
  };
}

// Same as getActiveAiContext but also decrypts the stored API key so the
// result can be used to call the provider directly. Avoid logging the result.
export async function getActiveAiRuntime(lawFirmId: string) {
  const { settings, credential } = await getActiveAiCredentialRecord(lawFirmId);
  const apiKey = decryptSecret(Buffer.from(credential.encrypted_api_key));

  return {
    provider: settings.default_provider,
    model: settings.default_model,
    credentialId: credential.id,
    apiKey,
  };
}

/**
 * Runs a JSON-mode chat completion against the firm's active OpenAI
 * credential and returns the parsed JSON plus token usage.
 *
 * @param input.lawFirmId - Firm whose active AI credential/model to use.
 * @param input.systemPrompt - System message content.
 * @param input.userPrompt - User message content.
 * @param input.maxCompletionTokens - Completion-token cap (default 2000).
 * @returns Parsed JSON object, token usage counts, and the model name
 *   reported by the API (falling back to the configured model).
 * @throws When the provider is not "openai", the HTTP call fails, or the
 *   response is empty or not valid JSON.
 */
export async function runJsonChatCompletion(input: {
  lawFirmId: string;
  systemPrompt: string;
  userPrompt: string;
  maxCompletionTokens?: number;
}) {
  const runtime = await getActiveAiRuntime(input.lawFirmId);

  if (runtime.provider !== "openai") {
    throw new Error(`Unsupported AI provider: ${runtime.provider}`);
  }

  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${runtime.apiKey}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify({
      model: runtime.model,
      response_format: {
        type: "json_object",
      },
      // Deterministic output for structured extraction.
      temperature: 0,
      max_completion_tokens: input.maxCompletionTokens ?? 2000,
      messages: [
        {
          role: "system",
          content: input.systemPrompt,
        },
        {
          role: "user",
          content: input.userPrompt,
        },
      ],
    }),
  });

  // A non-JSON body (e.g. a gateway error page) becomes null, not a throw.
  const payload = await response.json().catch(() => null);

  if (!response.ok || !payload || typeof payload !== "object") {
    // Surface the provider's own error message when the body carries one.
    const message =
      payload &&
      typeof payload === "object" &&
      "error" in payload &&
      payload.error &&
      typeof payload.error === "object" &&
      "message" in payload.error
        ? String(payload.error.message ?? "OpenAI request failed")
        : "OpenAI request failed";

    throw new Error(message);
  }

  const choices = (payload as { choices?: Array<{ message?: { content?: unknown } }> }).choices;
  const content =
    Array.isArray(choices) && choices[0]?.message?.content ? choices[0].message?.content : null;

  if (typeof content !== "string" || !content.trim()) {
    throw new Error("OpenAI returned an empty response");
  }

  // Even in JSON mode the content can be unparseable (e.g. truncated by the
  // completion-token cap), so fail with a clear error rather than letting a
  // raw SyntaxError escape from JSON.parse.
  let json: Record<string, unknown>;
  try {
    json = JSON.parse(content) as Record<string, unknown>;
  } catch {
    throw new Error("OpenAI returned a response that is not valid JSON");
  }

  const usage = (
    payload as {
      usage?: { prompt_tokens?: number; completion_tokens?: number; total_tokens?: number };
    }
  ).usage;

  return {
    json,
    usage: {
      inputTokens: Number(usage?.prompt_tokens ?? 0),
      outputTokens: Number(usage?.completion_tokens ?? 0),
      totalTokens: Number(usage?.total_tokens ?? 0),
    },
    model: String((payload as { model?: string }).model ?? runtime.model),
  };
}

/**
 * Runs a free-form (plain text) chat completion against the firm's active
 * OpenAI credential, prepending the system prompt to the supplied
 * conversation history.
 *
 * @param input.lawFirmId - Firm whose active AI credential/model to use.
 * @param input.systemPrompt - System message content.
 * @param input.messages - Ordered user/assistant conversation turns.
 * @param input.maxCompletionTokens - Completion-token cap (default 1200).
 * @param input.temperature - Sampling temperature (default 0.2).
 * @returns Trimmed response text, token usage counts, and the model name
 *   reported by the API (falling back to the configured model).
 * @throws When the provider is not "openai", the HTTP call fails, or the
 *   response is empty.
 */
export async function runTextChatCompletion(input: {
  lawFirmId: string;
  systemPrompt: string;
  messages: Array<{
    role: "user" | "assistant";
    content: string;
  }>;
  maxCompletionTokens?: number;
  temperature?: number;
}) {
  const runtime = await getActiveAiRuntime(input.lawFirmId);

  if (runtime.provider !== "openai") {
    throw new Error(`Unsupported AI provider: ${runtime.provider}`);
  }

  const requestBody = {
    model: runtime.model,
    temperature: input.temperature ?? 0.2,
    max_completion_tokens: input.maxCompletionTokens ?? 1200,
    messages: [
      {
        role: "system",
        content: input.systemPrompt,
      },
      ...input.messages.map((message) => ({
        role: message.role,
        content: message.content,
      })),
    ],
  };

  const response = await fetch("https://api.openai.com/v1/chat/completions", {
    method: "POST",
    headers: {
      Authorization: `Bearer ${runtime.apiKey}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(requestBody),
  });

  // A non-JSON body (e.g. a gateway error page) becomes null, not a throw.
  const payload = await response.json().catch(() => null);

  if (!response.ok || !payload || typeof payload !== "object") {
    // Prefer the provider's own error message when the body carries one.
    let message = "OpenAI request failed";
    if (
      payload &&
      typeof payload === "object" &&
      "error" in payload &&
      payload.error &&
      typeof payload.error === "object" &&
      "message" in payload.error
    ) {
      message = String(payload.error.message ?? "OpenAI request failed");
    }
    throw new Error(message);
  }

  const choiceList = (payload as { choices?: Array<{ message?: { content?: unknown } }> }).choices;
  const firstContent =
    Array.isArray(choiceList) && choiceList[0]?.message?.content
      ? choiceList[0].message?.content
      : null;

  if (typeof firstContent !== "string" || !firstContent.trim()) {
    throw new Error("OpenAI returned an empty response");
  }

  const usage = (
    payload as {
      usage?: { prompt_tokens?: number; completion_tokens?: number; total_tokens?: number };
    }
  ).usage;

  return {
    text: firstContent.trim(),
    usage: {
      inputTokens: Number(usage?.prompt_tokens ?? 0),
      outputTokens: Number(usage?.completion_tokens ?? 0),
      totalTokens: Number(usage?.total_tokens ?? 0),
    },
    model: String((payload as { model?: string }).model ?? runtime.model),
  };
}

/**
 * Inserts an ai_runs row for the firm's active AI context and returns the
 * freshly inserted row's identifying columns.
 *
 * @param input.lawFirmId - Firm the run belongs to (resolves the AI context).
 * @param input.caseId - Optional case association.
 * @param input.clientId - Optional client association.
 * @param input.runType - ai_run_types code; the row is created on demand.
 * @param input.status - Initial status (defaults to "running").
 * @returns The inserted run's id, provider, model, and credential reference.
 */
export async function createAiRun(input: {
  lawFirmId: string;
  caseId?: string | null;
  clientId?: string | null;
  runType: string;
  status?: string;
}) {
  const aiContext = await getActiveAiContext(input.lawFirmId);
  const aiRunId = createId();

  // Satisfy the ai_run_type_code reference before inserting the run.
  await ensureAiRunTypeExists(input.runType);

  // $executeRaw only yields an affected-row count, so its result is dropped.
  await prisma.$executeRaw`
    INSERT INTO ai_runs (
      id, law_firm_id, case_id, client_id, ai_provider, ai_model,
      api_key_reference_id, ai_run_type_code, status, started_at, created_at
    ) VALUES (
      ${aiRunId},
      ${input.lawFirmId},
      ${input.caseId ?? null},
      ${input.clientId ?? null},
      ${aiContext.provider},
      ${aiContext.model},
      ${aiContext.credentialId},
      ${input.runType},
      ${input.status ?? "running"},
      NOW(),
      CURRENT_TIMESTAMP
    )
  `;

  // Read the row back so callers get the DB-persisted values.
  const [row] = await prisma.$queryRaw<
    Array<{
      id: string;
      ai_provider: string;
      ai_model: string;
      api_key_reference_id: string;
    }>
  >`
    SELECT id, ai_provider, ai_model, api_key_reference_id
    FROM ai_runs
    WHERE id = ${aiRunId}
    LIMIT 1
  `;

  return row;
}

/**
 * Marks an AI run finished: records the terminal status, token counts,
 * estimated cost, optional error message, and stamps completed_at.
 *
 * @param input.aiRunId - Id of the ai_runs row to update.
 * @param input.status - Terminal status value to store.
 * @param input.inputTokens - Prompt tokens consumed (defaults to 0).
 * @param input.outputTokens - Completion tokens produced (defaults to 0).
 * @param input.estimatedCost - Estimated spend for the run (defaults to 0).
 * @param input.errorMessage - Failure detail, or null to clear it.
 */
export async function finishAiRun(input: {
  aiRunId: string;
  status: string;
  inputTokens?: number;
  outputTokens?: number;
  estimatedCost?: number;
  errorMessage?: string | null;
}) {
  await prisma.$executeRaw`
    UPDATE ai_runs
    SET
      status = ${input.status},
      input_tokens = ${input.inputTokens ?? 0},
      output_tokens = ${input.outputTokens ?? 0},
      estimated_cost = ${input.estimatedCost ?? 0},
      error_message = ${input.errorMessage ?? null},
      completed_at = NOW()
    WHERE id = ${input.aiRunId}
  `;
}
