"use server"

import { LLMEngine, LLMPredictionFunctionParams } from "@/types"
import { defaultLLMEngineName, getLLMEngineFunction } from "./getLLMEngineFunction"

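/**
 * Map the LLM vendor selected in the UI to a concrete engine and run its prediction function.
 */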
export async function predict(params: LLMPredictionFunctionParams): Promise<string> {
  const { llmVendorConfig: { vendor } } = params
  // LLMVendor = what the user configures in the UI (e.g. a dropdown item called "default server")
  // LLMEngine = the actual engine to use (e.g. Hugging Face)
  const llmEngineName: LLMEngine =
    vendor === "ANTHROPIC" ? "ANTHROPIC" :
    vendor === "GROQ" ? "GROQ" :
    vendor === "OPENAI" ? "OPENAI" :
    defaultLLMEngineName

  const llmEngineFunction = getLLMEngineFunction(llmEngineName)

  // console.log("predict: using " + llmEngineName)
  const results = await llmEngineFunction(params)

  // console.log("predict: result: " + results)
  return results
}
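
// --- Illustration (not part of the original file) ----------------------------
// A minimal sketch of what the companion module "./getLLMEngineFunction" could
// look like, assuming a plain lookup-table dispatch. Only the exported names
// (defaultLLMEngineName, getLLMEngineFunction) and the engine names referenced
// by predict() above come from the source; the registry entries, the placeholder
// bodies and the "OPENAI" default are hypothetical assumptions.

import { LLMEngine, LLMPredictionFunctionParams } from "@/types"

// Shape of a per-engine prediction function, inferred from how predict() calls it.
type LLMEngineFunction = (params: LLMPredictionFunctionParams) => Promise<string>

// Hypothetical registry; the real project wires these entries to actual vendor calls.
const engines: Partial<Record<LLMEngine, LLMEngineFunction>> = {
  ANTHROPIC: async () => { throw new Error("placeholder: call Anthropic here") },
  GROQ: async () => { throw new Error("placeholder: call Groq here") },
  OPENAI: async () => { throw new Error("placeholder: call OpenAI here") },
}

// Assumed fallback engine; the real default may differ (the comment above hints at Hugging Face).
export const defaultLLMEngineName: LLMEngine = "OPENAI"

// Return the prediction function for the requested engine, falling back to the default.
export function getLLMEngineFunction(llmEngineName: LLMEngine): LLMEngineFunction {
  return engines[llmEngineName] ?? engines[defaultLLMEngineName]!
}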