```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.models.list();
console.log(response.data);
```
This is useful for checking whether a model is currently running. If it is not, you can start it with the load method.
The model name can also be replaced with the name of one of your fine-tuned models.
```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.models.check_status({ model: 'llama3.2-3b' });
console.log(response);
```
The model name can also be replaced with the name of one of your fine-tuned models.
```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.models.load({ model: 'llama3.2-3b' }); // Or any other model you want to load up

const modelStatus = await client.models.check_status({ model: 'llama3.2-3b' });
console.log(modelStatus);
```
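If you want to block until the model is actually ready, you can combine the load and check_status calls in a small polling loop. The sketch below is only illustrative: the readiness check is a placeholder, since the exact fields returned by check_status are not shown here, so inspect the object returned in your workspace and test the appropriate field instead.

```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'],
});

const model = 'llama3.2-3b';

// Ask for the model to be loaded, then poll its status until it looks ready.
await client.models.load({ model });

let ready = false;
while (!ready) {
  const status = await client.models.check_status({ model });
  // Placeholder readiness check: replace with a test on the actual status field
  ready = JSON.stringify(status).toLowerCase().includes('running');
  if (!ready) {
    await new Promise((resolve) => setTimeout(resolve, 2000)); // wait 2 seconds before retrying
  }
}

console.log(`${model} is ready`);
```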
The model name can also be replaced with the name of one of your fine-tuned models.
```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.models.unload({ model: 'llama3.2-3b' }); // Or any other model you want to unload
console.log(response);
```
```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'], // This is the default and can be omitted
});

const response = await client.chat.completions({
  messages: [{ role: 'user', content: 'Write a one-sentence bedtime story about a unicorn.' }],
  model: 'llama3.2-3b',
});

console.log(response.choices[0].message.content);
```
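Putting these pieces together, a typical on-demand session loads a model, runs a completion, and unloads it again to free up resources. The sketch below only reuses the calls shown above; readiness polling and error handling are left out for brevity.

```typescript
import PremAI from 'premai';

const client = new PremAI({
  apiKey: process.env['PREMAI_API_KEY'],
});

const model = 'llama3.2-3b'; // Or any other model, including a fine-tuned one

await client.models.load({ model });
try {
  const response = await client.chat.completions({
    messages: [{ role: 'user', content: 'Write a one-sentence bedtime story about a unicorn.' }],
    model,
  });
  console.log(response.choices[0].message.content);
} finally {
  await client.models.unload({ model }); // Free the model once you are done
}
```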
```typescript
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: "https://studio.premai.io/api/v1/",
  apiKey: process.env['PREMAI_API_KEY'], // Your PremAI API key
});

const response = await client.models.list();
console.log(response.data);
```
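Assuming the response follows the standard OpenAI model-list format (as response.data above suggests), each entry exposes an id you can check against, for example to verify that a particular model is available before calling it:

```typescript
import OpenAI from 'openai';

const client = new OpenAI({
  baseURL: "https://studio.premai.io/api/v1/",
  apiKey: process.env.PREMAI_API_KEY,
});

// Check whether a specific model is available in your workspace
const models = await client.models.list();
const available = models.data.some((model) => model.id === 'llama3.2-3b');
console.log(available ? 'llama3.2-3b is available' : 'llama3.2-3b was not found');
```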
The model name can also be replaced with the name of one of your fine-tuned models.
import OpenAI from "openai";const client = new OpenAI({ baseURL: "https://studio.premai.io/api/v1/", apiKey: process.env.PREMAI_API_KEY,});//Create a chat completionconst response = await client.chat.completions.create({ model: "llama3.2-3b", //Or any other model you want to use messages: [{ role: "user", content: "Write a one-sentence bedtime story about a unicorn." }]});console.log(response.choices[0].message.content);
import OpenAI from "openai";const client = new OpenAI({ baseURL: "https://studio.premai.io/api/v1/", apiKey: process.env.PREMAI_API_KEY,});//Create a chat completionconst response = await client.chat.completions.create({ model: "llama3.2-3b", //Or any other model you want to use messages: [{ role: "user", content: "Write a one-sentence bedtime story about a unicorn." }], stream: true,});for await (const chunk of response) { process.stdout.write(chunk.choices[0].delta.content);}