import { Adaline } from '@adaline/client';
import { Gateway } from '@adaline/gateway';
import { OpenAI } from '@adaline/openai';
// Adaline SDK client; debug logging enabled.
const adaline = new Adaline({ debug: true });
// Gateway that routes chat-completion calls to provider models (used in handleChat).
const gateway = new Gateway();
// OpenAI provider, used to construct chat models from the deployment's config.
const openaiProvider = new OpenAI();
// Initialize deployment controller
// Fetches the latest deployment of the prompt for the given environment and
// keeps it refreshed in the background, so later `.get()` calls are cached.
// NOTE(review): refreshInterval presumed to be seconds — confirm against the
// @adaline/client docs. Top-level await: this file must load as an ES module.
const deploymentController = await adaline.initLatestDeployment({
promptId: 'chatbot-prompt',
deploymentEnvironmentId: 'environment_abc123',
refreshInterval: 60
});
// Initialize monitor
// Buffers traces/spans and flushes them periodically or when the buffer fills.
// NOTE(review): flushInterval presumed seconds and maxBufferSize presumed an
// entry count — confirm against the @adaline/client docs.
const monitor = adaline.initMonitor({
projectId: 'chatbot-project',
flushInterval: 5,
maxBufferSize: 100
});
// Handle chat request
/**
 * Handles one chat turn: reads the cached deployment, calls the LLM through
 * the Adaline Gateway, and records a trace + span for observability.
 *
 * @param userId  End-user id; used as the trace session id and logged as an attribute.
 * @param message User text, appended after the deployment's configured prompt messages.
 * @returns The text value of the model's first reply content part.
 * @throws Rethrows any gateway/provider error after marking the span and trace
 *         as failed; also throws if the response contains no text content.
 */
async function handleChat(userId: string, message: string): Promise<string> {
  // Get cached deployment (no API call — the controller refreshes it in the background).
  const deployment = await deploymentController.get();

  // Create trace for this conversation turn.
  const trace = monitor.logTrace({
    name: 'Chat Turn',
    sessionId: userId,
    tags: ['chat', 'production'],
    attributes: { userId, messageLength: message.length }
  });

  // Span covering the LLM call.
  // NOTE(review): runEvaluation semantics assumed from the flag name — confirm in SDK docs.
  const span = trace.logSpan({
    name: 'LLM Completion',
    promptId: deployment.promptId,
    deploymentId: deployment.id,
    runEvaluation: true,
    tags: ['llm', deployment.prompt.config.providerName]
  });

  try {
    // Build the provider model from the deployment's config.
    const model = openaiProvider.chatModel({
      modelName: deployment.prompt.config.model,
      apiKey: process.env.OPENAI_API_KEY!
    });

    // Call the LLM via the Adaline Gateway, appending the user message after
    // the deployment's prompt messages.
    const gatewayResponse = await gateway.completeChat({
      model,
      config: deployment.prompt.config.settings,
      messages: [
        ...deployment.prompt.messages,
        {
          role: 'user',
          content: [{ modality: 'text', value: message }]
        }
      ],
      tools: deployment.prompt.tools
    });

    // FIX: guard the deep index chain. The original unconditionally read
    // `messages[0].content[0].value`, which either threw a raw TypeError or
    // silently returned `undefined` (while still logging the span as
    // "success") when the response had no messages or no content parts
    // (e.g. a tool-call-only reply). Throwing here routes the problem through
    // the existing failure path below.
    const reply = gatewayResponse.response.messages[0]?.content?.[0]?.value;
    if (reply === undefined) {
      throw new Error('LLM response contained no text content');
    }

    // Record success with the raw provider request/response for debugging.
    span.update({
      status: 'success',
      content: {
        type: 'Model',
        provider: deployment.prompt.config.providerName,
        model: deployment.prompt.config.model,
        input: JSON.stringify(gatewayResponse.provider.request),
        output: JSON.stringify(gatewayResponse.provider.response)
      },
      attributes: {
        cached: gatewayResponse.cached,
      }
    });
    trace.update({ status: 'success' });
    return reply;
  } catch (error) {
    // Mark both span and trace failed; preserve the original error for the caller.
    span.update({
      status: 'failure',
      attributes: {
        error: error instanceof Error ? error.message : String(error)
      }
    });
    trace.update({ status: 'failure' });
    throw error;
  } finally {
    // Always close the telemetry objects, on success or failure.
    span.end();
    trace.end();
  }
}
// Graceful shutdown
// Graceful shutdown: flush buffered telemetry, then stop background work.
process.on('SIGTERM', async () => {
  try {
    // Push any buffered traces/spans before tearing down.
    await monitor.flush();
  } catch (error: unknown) {
    // FIX: the original let a flush rejection escape the async listener
    // (unhandled rejection) AND skip both stop() calls, leaving background
    // timers running. A failed final flush is best-effort: log and continue.
    console.error('monitor flush failed during shutdown:', error);
  } finally {
    // Always stop the monitor and the deployment refresh loop so their
    // timers cannot keep the process alive.
    monitor.stop();
    deploymentController.stop();
  }
});