Installation
Install the Trainly SDK using your preferred package manager:
npm install @trainly/react
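With yarn or pnpm, the equivalent commands are:
yarn add @trainly/react
pnpm add @trainly/react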
Quick Start
import { TrainlyClient } from "@trainly/react";
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc123",
});
const response = await trainly.query({
question: "What are the key findings?",
});
console.log(response.answer);
Initialization
Basic Configuration
import { TrainlyClient } from "@trainly/react";
const trainly = new TrainlyClient({
apiKey: process.env.TRAINLY_API_KEY,
chatId: process.env.TRAINLY_CHAT_ID,
});
Advanced Configuration
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc123",
// API Configuration
baseUrl: "https://api.trainlyai.com",
timeout: 30000, // Request timeout in milliseconds
// Retry Configuration
maxRetries: 3,
retryDelay: 1000,
retryBackoff: 2, // Exponential backoff multiplier
// Default Query Options
defaultModel: "gpt-4o-mini",
defaultTemperature: 0.7,
defaultMaxTokens: 1000,
// Logging
debug: false,
logger: console,
});
Querying Documents
Basic Query
const response = await trainly.query({
question: "What is the main conclusion?",
});
console.log("Answer:", response.answer);
console.log("Model used:", response.model);
console.log("Tokens:", response.usage.total_tokens);
// Access citations
response.context.forEach((chunk, index) => {
console.log(`[${index}] ${chunk.chunk_text.substring(0, 100)}...`);
console.log(` Relevance: ${(chunk.score * 100).toFixed(1)}%`);
});
Advanced Query Options
const response = await trainly.query({
question: "Explain the methodology in detail",
// Model Selection
model: "gpt-4o", // or 'claude-3-opus', 'gpt-4-turbo', etc.
temperature: 0.3, // Lower = more focused, Higher = more creative
maxTokens: 2000, // Maximum response length
// Custom Instructions
customPrompt:
"You are a research analyst. Provide detailed technical explanations with citations.",
// Scope Filtering
scopeFilters: {
project_id: "proj_123",
version: "2.0",
category: "research",
},
});
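Scope filters are designed to pair with the scopeValues attached at upload time (see File Management below), so a query can be limited to a matching subset of documents. A minimal sketch, with an illustrative file path and project ID:
// Upload with scope values (illustrative path and IDs)
await trainly.uploadFile({
  file: "./q3_report.pdf",
  scopeValues: { project_id: "proj_123" },
});
// Later, restrict retrieval to that project's documents
const scoped = await trainly.query({
  question: "Summarize the Q3 findings",
  scopeFilters: { project_id: "proj_123" },
});
console.log(scoped.answer);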
TypeScript Types
import type { QueryOptions, QueryResponse, ChunkScore } from "@trainly/react";
const options: QueryOptions = {
question: "What are the results?",
model: "gpt-4o-mini",
temperature: 0.7,
};
const response: QueryResponse = await trainly.query(options);
const answer: string = response.answer;
const citations: ChunkScore[] = response.context;
const tokenCount: number = response.usage.total_tokens;
Streaming Responses
Basic Streaming
// Get real-time streaming response
const stream = await trainly.queryStream({
question: "Summarize all the key points",
});
for await (const chunk of stream) {
if (chunk.type === "content") {
process.stdout.write(chunk.data);
} else if (chunk.type === "context") {
console.log("Citations:", chunk.data.length);
} else if (chunk.type === "end") {
console.log("\n\nComplete!");
}
}
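Each streamed chunk carries a type and, for most chunk types, a data payload. The SDK's exported typings are authoritative; the following is only a sketch inferred from the examples in this section:
import type { ChunkScore } from "@trainly/react";
// Hedged sketch of the stream chunk shape, inferred from the examples above
type StreamChunk =
  | { type: "content"; data: string } // incremental answer text
  | { type: "context"; data: ChunkScore[] } // citations for the answer
  | { type: "error"; data: string } // error message
  | { type: "end" }; // stream finished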
React Streaming Component
import { useMemo, useState } from "react";
import { TrainlyClient } from "@trainly/react";
function StreamingChat() {
const [answer, setAnswer] = useState("");
const [loading, setLoading] = useState(false);
const [question, setQuestion] = useState("");
// Memoize the client so a new instance is not created on every render
// Note: NEXT_PUBLIC_ variables are exposed to the browser; for production, proxy through a server route (see Best Practices)
const trainly = useMemo(
() =>
new TrainlyClient({
apiKey: process.env.NEXT_PUBLIC_TRAINLY_API_KEY!,
chatId: process.env.NEXT_PUBLIC_TRAINLY_CHAT_ID!,
}),
[],
);
async function handleSubmit(e: React.FormEvent) {
e.preventDefault();
setLoading(true);
setAnswer("");
try {
const stream = await trainly.queryStream({ question });
for await (const chunk of stream) {
if (chunk.type === "content") {
setAnswer((prev) => prev + chunk.data);
} else if (chunk.type === "end") {
setLoading(false);
} else if (chunk.type === "error") {
console.error("Stream error:", chunk.data);
setLoading(false);
}
}
} catch (error) {
console.error("Query failed:", error);
setLoading(false);
}
}
return (
<div>
<form onSubmit={handleSubmit}>
<input
value={question}
onChange={(e) => setQuestion(e.target.value)}
placeholder="Ask a question..."
disabled={loading}
/>
<button type="submit" disabled={loading}>
{loading ? "Streaming..." : "Ask"}
</button>
</form>
{answer && (
<div className="answer">
<h3>Answer:</h3>
<p>{answer}</p>
</div>
)}
</div>
);
}
Node.js Streaming Example
import { TrainlyClient } from "@trainly/react";
import { createInterface } from "readline";
const trainly = new TrainlyClient({
apiKey: process.env.TRAINLY_API_KEY!,
chatId: process.env.TRAINLY_CHAT_ID!,
});
const rl = createInterface({
input: process.stdin,
output: process.stdout,
});
rl.question("Ask a question: ", async (question) => {
console.log("\nAnswer:");
const stream = await trainly.queryStream({ question });
for await (const chunk of stream) {
if (chunk.type === "content") {
process.stdout.write(chunk.data);
} else if (chunk.type === "end") {
console.log("\n\nDone!");
rl.close();
}
}
});
File Management
Upload Files
import { readFileSync } from "fs";
// Upload from file path (Node.js)
const result = await trainly.uploadFile({
file: "./research_paper.pdf",
scopeValues: {
project_id: "proj_123",
category: "research",
version: 1.0,
},
});
console.log("Uploaded:", result.filename);
console.log("File ID:", result.file_id);
console.log("Size:", result.size_bytes);
// Upload from Buffer (Node.js)
const fileBuffer = readFileSync("./document.pdf");
const result2 = await trainly.uploadFile({
file: fileBuffer,
filename: "document.pdf",
scopeValues: { category: "docs" },
});
// Upload from Blob (Browser)
const fileInput = document.querySelector('input[type="file"]');
const file = fileInput.files[0];
const result3 = await trainly.uploadFile({
file: file,
scopeValues: { user_id: currentUser.id },
});
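In a React app, the Blob upload above can be wired to a file input's change handler. A minimal sketch (handleFileChange and currentUser are illustrative, not part of the SDK):
async function handleFileChange(e: React.ChangeEvent<HTMLInputElement>) {
  const file = e.target.files?.[0];
  if (!file) return;
  const result = await trainly.uploadFile({
    file,
    scopeValues: { user_id: currentUser.id },
  });
  console.log("Uploaded:", result.filename, "status:", result.processing_status);
}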
Upload Text Content
// Upload raw text without a file
const result = await trainly.uploadText({
content: "This is my document content. It contains important information...",
name: "My Notes.txt",
scopeValues: {
type: "notes",
date: "2024-01-15",
},
});
console.log("Text uploaded as:", result.filename);
Bulk Upload
import { glob } from "glob";
import { readFileSync } from "fs";
// Find all PDF files
const pdfFiles = await glob("./documents/**/*.pdf");
const results = await trainly.uploadBulk({
files: pdfFiles.map((path) => ({
file: path,
scopeValues: {
source: "research",
uploadDate: new Date().toISOString(),
},
})),
onProgress: (completed, total) => {
console.log(`Uploaded ${completed}/${total} files`);
},
});
console.log(`Success: ${results.successful_uploads}/${results.total_files}`);
// Check for failures
results.results.forEach((result) => {
if (!result.success) {
console.error(`Failed to upload ${result.filename}: ${result.error}`);
}
});
List Files
const files = await trainly.listFiles();
console.log(`Total files: ${files.total_files}`);
console.log(`Total size: ${formatBytes(files.total_size_bytes)}`);
files.files.forEach((file) => {
console.log(`
File: ${file.filename}
ID: ${file.file_id}
Size: ${formatBytes(file.size_bytes)}
Chunks: ${file.chunk_count}
Uploaded: ${new Date(file.upload_date).toLocaleDateString()}
`);
});
// Helper function
function formatBytes(bytes: number): string {
if (bytes === 0) return "0 Bytes";
const k = 1024;
const sizes = ["Bytes", "KB", "MB", "GB"];
const i = Math.floor(Math.log(bytes) / Math.log(k));
return Math.round((bytes / Math.pow(k, i)) * 100) / 100 + " " + sizes[i];
}
Delete Files
// Delete a specific file
const result = await trainly.deleteFile("v1_user_xyz_doc_123");
console.log(`Deleted: ${result.filename}`);
console.log(`Freed: ${formatBytes(result.size_bytes_freed)}`);
console.log(`Chunks removed: ${result.chunks_deleted}`);
// Delete multiple files
const fileIds = ["file_1", "file_2", "file_3"];
for (const fileId of fileIds) {
try {
await trainly.deleteFile(fileId);
console.log(`✓ Deleted ${fileId}`);
} catch (error) {
console.error(`✗ Failed to delete ${fileId}:`, error.message);
}
// Add delay to avoid rate limiting
await new Promise((resolve) => setTimeout(resolve, 100));
}
V1 OAuth Authentication
Setup V1 Client
import { TrainlyV1Client } from "@trainly/react";
// User authenticates with your OAuth provider (Clerk, Auth0, etc.)
const userOAuthToken = await getOAuthToken(); // Your implementation
// Initialize V1 client
const trainly = new TrainlyV1Client({
userToken: userOAuthToken,
appId: "app_your_app_id",
});
// Now you can query user's private data
const response = await trainly.query({
messages: [{ role: "user", content: "What documents do I have?" }],
responseTokens: 200,
});
console.log(response.answer);
console.log("Privacy guarantee:", response.privacy_guarantee);
Upload with V1 Auth
// Upload file to user's private subchat
const uploadResult = await trainly.uploadFile({
file: "./user_document.pdf",
scopeValues: {
playlist_id: "playlist_123",
genre: "rock",
},
});
console.log("Uploaded to user subchat:", uploadResult.chat_id);
console.log("User maintains full privacy:", uploadResult.privacy_guarantee);
List User’s Files
// Get all files in user's private subchat
const files = await trainly.listFiles();
files.files.forEach((file) => {
console.log(`${file.filename} - ${formatBytes(file.size_bytes)}`);
});
Delete User’s File
// Delete from user's private subchat
const result = await trainly.deleteFile(fileId);
console.log(`Deleted: ${result.filename}`);
console.log(`Space freed: ${formatBytes(result.size_bytes_freed)}`);
Error Handling
Basic Error Handling
import {
TrainlyError,
RateLimitError,
AuthenticationError,
ValidationError,
} from "@trainly/react";
try {
const response = await trainly.query({
question: "What is the conclusion?",
});
console.log(response.answer);
} catch (error) {
if (error instanceof RateLimitError) {
console.error("Rate limit exceeded. Retry after:", error.retryAfter);
// Wait and retry
await new Promise((resolve) =>
setTimeout(resolve, error.retryAfter * 1000),
);
} else if (error instanceof AuthenticationError) {
console.error("Authentication failed:", error.message);
// Redirect to login or refresh token
} else if (error instanceof ValidationError) {
console.error("Invalid request:", error.message);
console.error("Details:", error.details);
} else if (error instanceof TrainlyError) {
console.error("Trainly API error:", error.message);
console.error("Status:", error.status);
} else {
console.error("Unexpected error:", error);
}
}
Automatic Retry Logic
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc123",
// Automatic retry configuration
maxRetries: 3,
retryDelay: 1000, // Initial delay in ms
retryBackoff: 2, // Exponential backoff multiplier
// Custom retry logic
shouldRetry: (error, attempt) => {
// Retry on rate limits and server errors
if (error.status === 429 || error.status >= 500) {
console.log(`Retry attempt ${attempt}...`);
return true;
}
return false;
},
// Custom retry delay
getRetryDelay: (attempt, error) => {
if (error.status === 429 && error.retryAfter) {
return error.retryAfter * 1000;
}
return Math.min(1000 * Math.pow(2, attempt), 30000);
},
});
Error Recovery Patterns
async function queryWithRetry(
question: string,
maxAttempts: number = 3,
): Promise<QueryResponse> {
let lastError: Error;
for (let attempt = 1; attempt <= maxAttempts; attempt++) {
try {
return await trainly.query({ question });
} catch (error) {
lastError = error;
if (error instanceof RateLimitError) {
const delay = error.retryAfter * 1000 || 60000;
console.log(`Rate limited. Waiting ${delay}ms...`);
await new Promise((resolve) => setTimeout(resolve, delay));
continue;
}
if (attempt < maxAttempts && error.status >= 500) {
const delay = Math.pow(2, attempt) * 1000;
console.log(`Server error. Retrying in ${delay}ms...`);
await new Promise((resolve) => setTimeout(resolve, delay));
continue;
}
throw error;
}
}
throw lastError!;
}
// Usage
try {
const response = await queryWithRetry("What are the findings?");
console.log(response.answer);
} catch (error) {
console.error("All retry attempts failed:", error);
}
Advanced Features
Custom Headers
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc123",
// Add custom headers to all requests
headers: {
"X-Custom-Header": "value",
"X-App-Version": "1.0.0",
},
});
// Or per request
const response = await trainly.query({
question: "What is the result?",
headers: {
"X-Request-ID": generateRequestId(),
},
});
Request Interceptors
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc123",
// Intercept requests
onRequest: (config) => {
console.log("Request:", config.method, config.url);
return config;
},
// Intercept responses
onResponse: (response) => {
console.log("Response:", response.status, response.data);
return response;
},
// Intercept errors
onError: (error) => {
console.error("Error:", error.message);
// Track errors in your monitoring system
trackError(error);
throw error;
},
});
Rate Limiting
import { RateLimiter } from "@trainly/react";
// Create rate limiter (60 requests per minute)
const limiter = new RateLimiter({
maxRequests: 60,
windowMs: 60000,
});
async function rateLimitedQuery(question: string) {
// Wait if necessary
await limiter.waitForSlot();
// Make request
return await trainly.query({ question });
}
// Use it
const response = await rateLimitedQuery("What is the conclusion?");
Caching
import { LRUCache } from "lru-cache";
const queryCache = new LRUCache<string, QueryResponse>({
max: 100, // Cache up to 100 queries
ttl: 1000 * 60 * 5, // 5 minutes TTL
});
async function cachedQuery(question: string): Promise<QueryResponse> {
// Check cache
const cached = queryCache.get(question);
if (cached) {
console.log("Cache hit!");
return cached;
}
// Query API
const response = await trainly.query({ question });
// Store in cache
queryCache.set(question, response);
return response;
}
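The sketch above keys the cache on the question alone. If your queries also vary by model, temperature, or scope filters, include those options in the key so different configurations do not collide. One way to build such a key:
function cacheKey(options: QueryOptions): string {
  // Serialize the fields that affect the answer; JSON.stringify is sufficient for flat options
  return JSON.stringify({
    question: options.question,
    model: options.model,
    temperature: options.temperature,
    scopeFilters: options.scopeFilters,
  });
}
async function cachedQueryWithOptions(options: QueryOptions): Promise<QueryResponse> {
  const key = cacheKey(options);
  const cached = queryCache.get(key);
  if (cached) return cached;
  const response = await trainly.query(options);
  queryCache.set(key, response);
  return response;
}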
Framework Integration
Next.js App Router
// app/api/query/route.ts
import { TrainlyClient } from "@trainly/react";
import { NextRequest, NextResponse } from "next/server";
const trainly = new TrainlyClient({
apiKey: process.env.TRAINLY_API_KEY!,
chatId: process.env.TRAINLY_CHAT_ID!,
});
export async function POST(request: NextRequest) {
try {
const { question } = await request.json();
const response = await trainly.query({ question });
return NextResponse.json(response);
} catch (error) {
return NextResponse.json(
{ error: error.message },
{ status: error.status || 500 },
);
}
}
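To stream tokens through an App Router route rather than buffering the whole answer, the SDK stream can be piped into a web ReadableStream. A sketch, assuming queryStream yields chunks shaped like the streaming examples above (the route path is illustrative):
// app/api/query-stream/route.ts
import { TrainlyClient } from "@trainly/react";
import { NextRequest } from "next/server";
const trainly = new TrainlyClient({
  apiKey: process.env.TRAINLY_API_KEY!,
  chatId: process.env.TRAINLY_CHAT_ID!,
});
export async function POST(request: NextRequest) {
  const { question } = await request.json();
  const stream = await trainly.queryStream({ question });
  const encoder = new TextEncoder();
  const body = new ReadableStream<Uint8Array>({
    async start(controller) {
      try {
        // Forward only the incremental answer text to the client
        for await (const chunk of stream) {
          if (chunk.type === "content") {
            controller.enqueue(encoder.encode(chunk.data));
          }
        }
      } finally {
        controller.close();
      }
    },
  });
  return new Response(body, {
    headers: { "Content-Type": "text/plain; charset=utf-8" },
  });
}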
Next.js Pages Router
// pages/api/query.ts
import type { NextApiRequest, NextApiResponse } from "next";
import { TrainlyClient } from "@trainly/react";
const trainly = new TrainlyClient({
apiKey: process.env.TRAINLY_API_KEY!,
chatId: process.env.TRAINLY_CHAT_ID!,
});
export default async function handler(
req: NextApiRequest,
res: NextApiResponse,
) {
if (req.method !== "POST") {
return res.status(405).json({ error: "Method not allowed" });
}
try {
const { question } = req.body;
const response = await trainly.query({ question });
return res.status(200).json(response);
} catch (error) {
return res.status(error.status || 500).json({
error: error.message,
});
}
}
Express.js
import express from "express";
import { TrainlyClient } from "@trainly/react";
const app = express();
app.use(express.json());
const trainly = new TrainlyClient({
apiKey: process.env.TRAINLY_API_KEY!,
chatId: process.env.TRAINLY_CHAT_ID!,
});
app.post("/api/query", async (req, res) => {
try {
const { question } = req.body;
const response = await trainly.query({ question });
res.json(response);
} catch (error) {
res.status(error.status || 500).json({
error: error.message,
});
}
});
app.listen(3000, () => {
console.log("Server running on port 3000");
});
Browser (Vanilla JS)
<!DOCTYPE html>
<html>
<head>
<title>Trainly Chat</title>
<script type="module">
import { TrainlyClient } from "https://cdn.jsdelivr.net/npm/@trainly/react/dist/index.mjs";
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc123",
});
async function askQuestion() {
const question = document.getElementById("question").value;
const answerDiv = document.getElementById("answer");
answerDiv.textContent = "Loading...";
try {
const response = await trainly.query({ question });
answerDiv.textContent = response.answer;
} catch (error) {
answerDiv.textContent = "Error: " + error.message;
}
}
window.askQuestion = askQuestion;
</script>
</head>
<body>
<input type="text" id="question" placeholder="Ask a question..." />
<button onclick="askQuestion()">Ask</button>
<div id="answer"></div>
</body>
</html>
Best Practices
Security
Never expose API keys in client-side code
// ❌ BAD - API key in browser
const trainly = new TrainlyClient({
apiKey: 'tk_your_api_key', // Exposed to users!
chatId: 'chat_abc123'
});
// ✅ GOOD - API key on server
// Frontend calls your API
const response = await fetch('/api/query', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({ question })
});
// Your API uses the SDK
const trainly = new TrainlyClient({
apiKey: process.env.TRAINLY_API_KEY,
chatId: process.env.TRAINLY_CHAT_ID
});
Performance
Cache responses when possible.
Use streaming for long responses: it provides better UX for answers that take time to generate.
const cache = new Map();
async function getCachedResponse(question: string) {
if (cache.has(question)) {
return cache.get(question);
}
const response = await trainly.query({ question });
cache.set(question, response);
return response;
}
Error Handling
Always handle errors gracefully
try {
const response = await trainly.query({ question });
return response;
} catch (error) {
if (error instanceof RateLimitError) {
// Wait and retry
await new Promise((resolve) => setTimeout(resolve, error.retryAfter * 1000));
return trainly.query({ question });
}
// Log error for monitoring
console.error('Query failed:', error);
// Show user-friendly message
throw new Error('Unable to process your question. Please try again.');
}
TypeScript Types Reference
// Client Configuration
interface TrainlyClientConfig {
apiKey: string;
chatId: string;
baseUrl?: string;
timeout?: number;
maxRetries?: number;
retryDelay?: number;
retryBackoff?: number;
defaultModel?: string;
defaultTemperature?: number;
defaultMaxTokens?: number;
debug?: boolean;
logger?: Console;
}
// Query Options
interface QueryOptions {
question: string;
model?: string;
temperature?: number;
maxTokens?: number;
customPrompt?: string;
scopeFilters?: Record<string, string | number | boolean>;
headers?: Record<string, string>;
}
// Query Response
interface QueryResponse {
answer: string;
context: ChunkScore[];
chat_id: string;
model: string;
usage: {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
};
}
// Chunk Score
interface ChunkScore {
chunk_id: string;
chunk_text: string;
score: number;
}
// Upload Options
interface UploadOptions {
file: string | Buffer | Blob;
filename?: string;
scopeValues?: Record<string, string | number | boolean>;
}
// Upload Response
interface UploadResponse {
success: boolean;
filename: string;
file_id: string;
chat_id: string;
size_bytes: number;
processing_status: string;
}
// File Info
interface FileInfo {
file_id: string;
filename: string;
upload_date: string;
size_bytes: number;
chunk_count: number;
}
// List Files Response
interface ListFilesResponse {
success: boolean;
files: FileInfo[];
total_files: number;
total_size_bytes: number;
}
// Delete Response
interface DeleteResponse {
success: boolean;
message: string;
file_id: string;
filename: string;
chunks_deleted: number;
size_bytes_freed: number;
}
Migration Guide
From REST API to SDK
// Before (REST API)
const response = await fetch(
"https://api.trainlyai.com/v1/chat_abc/answer_question",
{
method: "POST",
headers: {
Authorization: "Bearer tk_your_api_key",
"Content-Type": "application/json",
},
body: JSON.stringify({
question: "What is the result?",
}),
},
);
const data = await response.json();
// After (SDK)
const trainly = new TrainlyClient({
apiKey: "tk_your_api_key",
chatId: "chat_abc",
});
const response = await trainly.query({
question: "What is the result?",
});
The SDK handles authentication, retries, and error handling automatically!