import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.models.list();
console.log(response.data);
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.chat.completions({
  messages: [{ role: 'user', content: 'Write a one-sentence bedtime story about a unicorn.' }],
  model: 'llama3.2-3b',
});

console.log(response.choices[0].message.content);
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: 'https://studio.premai.io/api/v1/',
  apiKey: process.env['PREMAI_API_KEY'], // Your Prem AI API key
});

const response = await client.models.list();
console.log(response.data);
You can also replace the model name with the name of one of your fine-tuned models.
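For example, here is a minimal sketch of a chat completion against a fine-tuned model. The identifier "my-finetuned-model" is a placeholder, not a real model name; substitute the name of your own fine-tuned model.

import OpenAI from "openai";

const client = new OpenAI({
  baseURL: "https://studio.premai.io/api/v1/",
  apiKey: process.env.PREMAI_API_KEY,
});

// "my-finetuned-model" is a placeholder; use the actual name of your fine-tuned model
const response = await client.chat.completions.create({
  model: "my-finetuned-model",
  messages: [{ role: "user", content: "Write a one-sentence bedtime story about a unicorn." }],
});

console.log(response.choices[0].message.content);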
import OpenAI from "openai";const client = new OpenAI({ baseURL: "https://studio.premai.io/api/v1/", apiKey: process.env.PREMAI_API_KEY,});//Create a chat completionconst response = await client.chat.completions.create({ model: "llama3.2-3b", //Or any other model you want to use messages: [{ role: "user", content: "Write a one-sentence bedtime story about a unicorn." }]});console.log(response.choices[0].message.content);
import OpenAI from "openai";const client = new OpenAI({ baseURL: "https://studio.premai.io/api/v1/", apiKey: process.env.PREMAI_API_KEY,});//Create a chat completionconst response = await client.chat.completions.create({ model: "llama3.2-3b", //Or any other model you want to use messages: [{ role: "user", content: "Write a one-sentence bedtime story about a unicorn." }], stream: true,});for await (const chunk of response) { process.stdout.write(chunk.choices[0].delta.content);}