TypeScript SDK
Using the NovaKit API with TypeScript and JavaScript
TypeScript SDK
Use NovaKit from TypeScript or JavaScript with the openai package for chat completions, or native fetch for full API access.
Installation
npm install openai
# or
pnpm add openai
Configuration
const API_KEY = process.env.NOVAKIT_API_KEY;
const BASE_URL = "https://www.novakit.ai/api/v1";
const headers = {
"Authorization": `Bearer ${API_KEY}`,
"Content-Type": "application/json",
};
Chat Completions
import OpenAI from "openai";
const client = new OpenAI({
apiKey: process.env.NOVAKIT_API_KEY,
baseURL: "https://www.novakit.ai/api/v1",
});
const response = await client.chat.completions.create({
model: "openai/gpt-4o-mini",
messages: [
{ role: "system", content: "You are a helpful assistant." },
{ role: "user", content: "Hello!" },
],
});
console.log(response.choices[0].message.content);
Using fetch
const response = await fetch(`${BASE_URL}/chat/completions`, {
method: "POST",
headers,
body: JSON.stringify({
messages: [
{ role: "system", content: "You are a helpful assistant." },
{ role: "user", content: "Hello!" },
],
model: "openai/gpt-4o-mini",
}),
});
const data = await response.json();
console.log(data.choices[0].message.content);
Streaming
import OpenAI from "openai";
const client = new OpenAI({
apiKey: process.env.NOVAKIT_API_KEY,
baseURL: "https://www.novakit.ai/api/v1",
});
const stream = await client.chat.completions.create({
model: "openai/gpt-4o-mini",
messages: [{ role: "user", content: "Write a short poem" }],
stream: true,
});
for await (const chunk of stream) {
const content = chunk.choices[0]?.delta?.content || "";
process.stdout.write(content);
}
AI Agents
Create and run autonomous agents with tools and memory.
Types
/** An agent record as returned by the /agents endpoints. */
interface Agent {
id: string;
name: string;
description?: string;
/** System prompt that defines the agent's behavior. */
system_prompt: string;
/** Model identifier, e.g. "anthropic/claude-3.5-sonnet". */
model: string;
/** Names of tools the agent may call, e.g. "web_search". */
tools: string[];
temperature: number;
/** Upper bound on reasoning/tool iterations per run. */
max_iterations: number;
/** Whether conversation memory persists across sessions. */
memory_enabled: boolean;
/** Creation timestamp (string; presumably ISO-8601 — confirm with API docs). */
created_at: string;
}
/** One server-sent event emitted while an agent run streams. */
interface AgentRunEvent {
/** Event kind: run lifecycle ("start" / "done" / "error") or an intermediate "step". */
type: "start" | "step" | "done" | "error";
run_id?: string;
/** Session identifier used for multi-turn continuity. */
session_id?: string;
/** Present on "step" events: one reasoning/tool iteration. */
step?: {
step_number: number;
type: "thinking" | "tool_call" | "tool_result" | "response";
content?: string;
tool?: string;
tool_input?: Record<string, unknown>;
tool_output?: unknown;
};
/** Final answer text — presumably set on "done" events; confirm with API docs. */
output?: string;
/** Credits consumed by the run (logged on "done" in the examples below). */
credits_used?: number;
/** Error message (on "error" events). */
error?: string;
}
Create an Agent
/**
 * Create a new agent via POST /agents.
 *
 * @param config - Agent settings. `tools` defaults to ["web_search"],
 *   `model` to "anthropic/claude-3.5-sonnet", `memory_enabled` to false.
 * @returns The created Agent record from the API response.
 * @throws Error when the HTTP response status is not OK.
 */
async function createAgent(config: {
  name: string;
  system_prompt: string;
  tools?: string[];
  model?: string;
  memory_enabled?: boolean;
}): Promise<Agent> {
  const response = await fetch(`${BASE_URL}/agents`, {
    method: "POST",
    headers,
    body: JSON.stringify({
      name: config.name,
      system_prompt: config.system_prompt,
      // ?? only fills in null/undefined (|| would also clobber deliberate
      // falsy values such as memory_enabled: false passed explicitly)
      tools: config.tools ?? ["web_search"],
      model: config.model ?? "anthropic/claude-3.5-sonnet",
      memory_enabled: config.memory_enabled ?? false,
    }),
  });
  // Surface HTTP failures instead of parsing an error body as a success
  if (!response.ok) {
    throw new Error(`API error: ${response.status}`);
  }
  const data = await response.json();
  return data.agent;
}
// Usage
const agent = await createAgent({
name: "Research Assistant",
system_prompt: "You help with research tasks. Search the web and cite sources.",
tools: ["web_search", "web_fetch", "create_document"],
memory_enabled: true,
});
console.log(`Created agent: ${agent.id}`);
Run Agent with Streaming
/**
 * Run an agent and stream its server-sent events, logging progress.
 *
 * @param agentId - Agent to run.
 * @param input - User input for this turn.
 * @param sessionId - Optional session id for multi-turn continuity.
 * @param onEvent - Optional callback invoked for every parsed event.
 * @returns The final "done" event, or null if the stream ended without one.
 * @throws Error on HTTP failure or when the response has no body.
 */
async function runAgent(
  agentId: string,
  input: string,
  sessionId?: string,
  onEvent?: (event: AgentRunEvent) => void
): Promise<AgentRunEvent | null> {
  const response = await fetch(`${BASE_URL}/agents/${agentId}/run`, {
    method: "POST",
    headers,
    body: JSON.stringify({
      input,
      session_id: sessionId,
    }),
  });
  if (!response.ok) {
    throw new Error(`API error: ${response.status}`);
  }
  const reader = response.body?.getReader();
  if (!reader) {
    throw new Error("No response body");
  }
  const decoder = new TextDecoder();
  let finalEvent: AgentRunEvent | null = null;
  // Holds a partial line when an SSE event is split across network chunks.
  let buffer = "";

  const handleEvent = (event: AgentRunEvent): void => {
    onEvent?.(event);
    switch (event.type) {
      case "start":
        console.log(`Run started: ${event.run_id}`);
        break;
      case "step": {
        const step = event.step!;
        if (step.type === "thinking") {
          console.log(`[Thinking] ${step.content?.slice(0, 100)}...`);
        } else if (step.type === "tool_call") {
          console.log(`[Tool] Calling ${step.tool}...`);
        } else if (step.type === "response") {
          console.log(`\n[Response]\n${step.content}`);
        }
        break;
      }
      case "done":
        console.log(`\nCompleted! Credits: ${event.credits_used}`);
        finalEvent = event;
        break;
      case "error":
        console.error(`Error: ${event.error}`);
        break;
    }
  };

  while (true) {
    const { done, value } = await reader.read();
    if (done) break;
    // stream: true keeps multi-byte UTF-8 characters split across chunks intact
    buffer += decoder.decode(value, { stream: true });
    // Parse only complete lines; the trailing remainder stays buffered so a
    // JSON event split across two chunks is reassembled before JSON.parse.
    const lines = buffer.split("\n");
    buffer = lines.pop() ?? "";
    for (const line of lines) {
      if (!line.startsWith("data: ")) continue;
      handleEvent(JSON.parse(line.slice(6)) as AgentRunEvent);
    }
  }
  // Flush any final event that arrived without a trailing newline.
  const tail = buffer + decoder.decode();
  if (tail.startsWith("data: ")) {
    handleEvent(JSON.parse(tail.slice(6)) as AgentRunEvent);
  }
  return finalEvent;
}
// Usage
const result = await runAgent(
agent.id,
"Research the latest developments in quantum computing"
);
Multi-turn Conversation
import { randomUUID } from "crypto";
// Create a session for conversation continuity
const sessionId = randomUUID();
// First turn
await runAgent(agent.id, "Search for AI news from today", sessionId);
// Second turn - references previous context
await runAgent(agent.id, "Tell me more about the first result", sessionId);
// Third turn
await runAgent(agent.id, "Create a summary document", sessionId);
Upload Knowledge Base
/**
 * Upload a knowledge file to an agent, then poll until ingestion finishes.
 *
 * @param agentId - Agent to attach the file to.
 * @param file - File to upload as multipart form data.
 * @throws Error on HTTP failure or when processing ends with "failed".
 */
async function uploadKnowledge(agentId: string, file: File): Promise<void> {
  const formData = new FormData();
  formData.append("file", file);
  const response = await fetch(`${BASE_URL}/agents/${agentId}/knowledge`, {
    method: "POST",
    // No Content-Type here on purpose: the runtime must set the
    // multipart boundary itself for FormData bodies.
    headers: {
      Authorization: `Bearer ${API_KEY}`,
    },
    body: formData,
  });
  if (!response.ok) {
    throw new Error(`API error: ${response.status}`);
  }
  const { file: fileInfo } = await response.json();
  console.log(`Uploaded: ${fileInfo.id}, Status: ${fileInfo.status}`);
  // Poll every 2s until server-side processing completes or fails.
  while (true) {
    const statusResponse = await fetch(
      `${BASE_URL}/agents/${agentId}/knowledge/${fileInfo.id}`,
      { headers }
    );
    if (!statusResponse.ok) {
      throw new Error(`API error: ${statusResponse.status}`);
    }
    const { file: status } = await statusResponse.json();
    if (status.status === "completed") {
      console.log("Knowledge file ready!");
      break;
    } else if (status.status === "failed") {
      throw new Error(`Failed: ${status.error_message}`);
    }
    await new Promise((r) => setTimeout(r, 2000));
  }
}
Image Generation
/** Response from POST /images/generations. */
interface ImageResponse {
/** Creation timestamp (numeric; presumably Unix epoch — confirm with API docs). */
created: number;
/** Generated images with direct URLs and pixel dimensions. */
data: Array<{ url: string; width: number; height: number }>;
/** Credit accounting for this generation. */
usage: { credits_used: number };
}
const response = await fetch(`${BASE_URL}/images/generations`, {
method: "POST",
headers,
body: JSON.stringify({
prompt: "A beautiful sunset over the ocean",
model: "fal-ai/flux/dev",
size: "1024x1024",
}),
});
const data: ImageResponse = await response.json();
console.log(`Image URL: ${data.data[0].url}`);
Video Generation (Async)
/** Status payload for an async generation job (GET /jobs/{id}). */
interface JobResponse {
id: string;
status: "pending" | "processing" | "completed" | "failed";
/** Percent complete, when the backend reports it. */
progress?: number;
/** Result payload; present once status is "completed". */
outputData?: {
video: { url: string };
};
/** Failure description; present when status is "failed". */
errorMessage?: string;
}
// Start async job
const createResponse = await fetch(
`${BASE_URL}/videos/generations?async=true`,
{
method: "POST",
headers,
body: JSON.stringify({
prompt: "A timelapse of clouds moving over mountains",
duration_seconds: 5,
}),
}
);
const { id: jobId } = await createResponse.json();
console.log(`Job started: ${jobId}`);
// Poll for completion
/**
 * Poll an async job every 5 seconds until it completes or fails.
 *
 * @param jobId - Job identifier returned by the async create call.
 * @returns The final JobResponse once status is "completed".
 * @throws Error on HTTP failure or when the job reports "failed".
 */
const pollJob = async (jobId: string): Promise<JobResponse> => {
  while (true) {
    const response = await fetch(`${BASE_URL}/jobs/${jobId}?poll=true`, {
      headers,
    });
    // Fail fast on transport/auth errors rather than mis-parsing an error body
    if (!response.ok) {
      throw new Error(`API error: ${response.status}`);
    }
    const status: JobResponse = await response.json();
    console.log(`Status: ${status.status}, Progress: ${status.progress || 0}%`);
    if (status.status === "completed") {
      return status;
    } else if (status.status === "failed") {
      throw new Error(status.errorMessage);
    }
    // Still pending/processing — wait before the next poll.
    await new Promise((resolve) => setTimeout(resolve, 5000));
  }
};
const result = await pollJob(jobId);
console.log(`Video URL: ${result.outputData?.video.url}`);
Type Definitions
// Request types
/** A single message in a chat conversation. */
interface ChatMessage {
role: "system" | "user" | "assistant";
content: string;
/** Optional image inputs; format (URLs vs base64) not shown here — confirm with API docs. */
images?: string[];
}
/** Body for POST /chat/completions. */
interface ChatRequest {
messages: ChatMessage[];
/** Model identifier, e.g. "openai/gpt-4o-mini". */
model?: string;
temperature?: number;
max_tokens?: number;
/** When true, the response is streamed as server-sent events. */
stream?: boolean;
}
/** Body for POST /images/generations. */
interface ImageRequest {
prompt: string;
/** Model identifier, e.g. "fal-ai/flux/dev". */
model?: string;
size?: "512x512" | "1024x1024" | "1792x1024" | "1024x1792";
/** Number of images to generate. */
n?: number;
style?: "vivid" | "natural";
upscale?: "2x" | "4x";
negative_prompt?: string;
/** Seed for reproducible generations. */
seed?: number;
}
/** Body for POST /agents; mirrors the Agent fields that are settable at creation. */
interface AgentRequest {
name: string;
system_prompt: string;
tools?: string[];
model?: string;
temperature?: number;
max_iterations?: number;
memory_enabled?: boolean;
}
// Response types
/** Non-streaming chat completion response. */
interface ChatResponse {
id: string;
/** One entry per requested completion. */
choices: Array<{
index: number;
message: { role: string; content: string };
finish_reason: string;
}>;
/** Token accounting plus NovaKit-specific credit usage. */
usage: {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
credits_used: number;
};
}
Helper Class
/**
 * Minimal NovaKit API client covering chat, image, and agent endpoints.
 * The key comes from the constructor argument or the NOVAKIT_API_KEY
 * environment variable.
 */
class NovaKitClient {
  private apiKey: string;
  private baseUrl: string;

  constructor(apiKey?: string) {
    this.apiKey = apiKey || process.env.NOVAKIT_API_KEY || "";
    this.baseUrl = "https://www.novakit.ai/api/v1";
  }

  /** Auth + JSON headers shared by every JSON endpoint. */
  private get headers() {
    return {
      Authorization: `Bearer ${this.apiKey}`,
      "Content-Type": "application/json",
    };
  }

  /**
   * Send a chat completion request.
   * @throws Error when the HTTP response status is not OK.
   */
  async chat(messages: ChatMessage[], options: Partial<ChatRequest> = {}) {
    const response = await fetch(`${this.baseUrl}/chat/completions`, {
      method: "POST",
      headers: this.headers,
      body: JSON.stringify({ messages, ...options }),
    });
    if (!response.ok) {
      throw new Error(`API error: ${response.status}`);
    }
    return response.json() as Promise<ChatResponse>;
  }

  /**
   * Generate an image from a text prompt.
   * @throws Error when the HTTP response status is not OK.
   */
  async generateImage(prompt: string, options: Partial<ImageRequest> = {}) {
    const response = await fetch(`${this.baseUrl}/images/generations`, {
      method: "POST",
      headers: this.headers,
      body: JSON.stringify({ prompt, ...options }),
    });
    if (!response.ok) {
      throw new Error(`API error: ${response.status}`);
    }
    return response.json();
  }

  /**
   * Create an agent and return the created record.
   * @throws Error when the HTTP response status is not OK.
   */
  async createAgent(config: AgentRequest): Promise<Agent> {
    const response = await fetch(`${this.baseUrl}/agents`, {
      method: "POST",
      headers: this.headers,
      body: JSON.stringify(config),
    });
    if (!response.ok) {
      throw new Error(`API error: ${response.status}`);
    }
    const data = await response.json();
    return data.agent;
  }

  /**
   * Run an agent and yield each server-sent event as it arrives.
   * Partial lines are buffered across network chunks so a JSON event split
   * between two reads is reassembled before parsing (splitting each chunk
   * independently would throw on the fragment).
   * @throws Error on HTTP failure or when the response has no body.
   */
  async *runAgentStream(
    agentId: string,
    input: string,
    sessionId?: string
  ): AsyncGenerator<AgentRunEvent> {
    const response = await fetch(`${this.baseUrl}/agents/${agentId}/run`, {
      method: "POST",
      headers: this.headers,
      body: JSON.stringify({ input, session_id: sessionId }),
    });
    if (!response.ok) {
      throw new Error(`API error: ${response.status}`);
    }
    const reader = response.body?.getReader();
    if (!reader) throw new Error("No response body");
    const decoder = new TextDecoder();
    let buffer = "";
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      // stream: true keeps multi-byte characters split across chunks intact
      buffer += decoder.decode(value, { stream: true });
      const lines = buffer.split("\n");
      buffer = lines.pop() ?? ""; // keep the trailing partial line buffered
      for (const line of lines) {
        if (line.startsWith("data: ")) {
          yield JSON.parse(line.slice(6)) as AgentRunEvent;
        }
      }
    }
    // Flush a final event that arrived without a trailing newline.
    const tail = buffer + decoder.decode();
    if (tail.startsWith("data: ")) {
      yield JSON.parse(tail.slice(6)) as AgentRunEvent;
    }
  }
}
// Usage
const client = new NovaKitClient();
// Chat
const response = await client.chat([{ role: "user", content: "Hello!" }]);
console.log(response.choices[0].message.content);
// Create and run agent
const agent = await client.createAgent({
name: "Helper",
system_prompt: "You are helpful.",
tools: ["web_search"],
});
for await (const event of client.runAgentStream(agent.id, "What's new in AI?")) {
console.log(event);
}
For production use, consider adding retry logic, better error handling, and request queuing to stay within rate limits.