Ollama Integration:
- Add providerConfig.js for managing AI provider settings
- Add toolConverter.js to convert between Claude and Ollama formats
- Add ollama.js API handler with function calling support
- Update diagramAI.ts with Ollama models (llama3.1, mistral, qwen2.5)
- Route requests to appropriate provider based on selected model
- Use 127.0.0.1 to avoid IPv6 resolution issues

New modify_connection Tool:
- Add modify_connection tool to change connection labels and colors
- Support finding connections by label or by from/to entities
- Add chatModifyConnection event handler in diagramManager
- Clarify in tool descriptions that empty string removes labels

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
179 lines · 6.9 KiB · JavaScript
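For context, here is a hypothetical client call showing the Claude-format body the handler below reads from req.body; the field names match what the route destructures, but the endpoint path, session id, and message values are illustrative assumptions, not taken from the repo:

// Hypothetical usage sketch: the mount path and all values are placeholders.
const response = await fetch("/api/ollama/v1/messages", {
  method: "POST",
  headers: { "Content-Type": "application/json" },
  body: JSON.stringify({
    sessionId: "session-123",        // optional: enables entity context + history merge
    model: "llama3.1",               // one of the Ollama models added to diagramAI.ts
    max_tokens: 1024,                // forwarded to Ollama as options.num_predict
    system: "You are a diagram assistant.",
    tools: [],                       // Claude-format tool definitions (e.g. modify_connection)
    messages: [{ role: "user", content: "Change the API-to-DB connection label to 'reads'" }]
  })
});
const claudeFormatReply = await response.json(); // response is converted back to Claude format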
import { Router } from "express";
import { getSession, addMessage, getConversationForAPI } from "../services/sessionStore.js";
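// The Ollama base URL is resolved from provider config (127.0.0.1 rather than
// "localhost", to avoid IPv6 resolution issues).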
import { getOllamaUrl } from "../services/providerConfig.js";
import {
  claudeToolsToOllama,
  claudeMessagesToOllama,
  ollamaResponseToClaude
} from "../services/toolConverter.js";

const router = Router();

/**
 * Build entity context string for the system prompt
 */
function buildEntityContext(entities) {
  if (!entities || entities.length === 0) {
    return "\n\nThe diagram is currently empty.";
  }

  const entityList = entities.map(e => {
    const shape = e.template?.replace('#', '').replace('-template', '') || 'unknown';
    const pos = e.position || { x: 0, y: 0, z: 0 };
    return `- ${e.text || '(no label)'} (${shape}, ${e.color || 'unknown'}) at (${pos.x?.toFixed(1)}, ${pos.y?.toFixed(1)}, ${pos.z?.toFixed(1)})`;
  }).join('\n');

  return `\n\n## Current Diagram State\nThe diagram currently contains ${entities.length} entities:\n${entityList}`;
}

/**
 * Handle Ollama chat requests
 * Accepts Claude-format requests and converts them to Ollama format
 */
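// Catch-all POST handler: "/*path" is a named wildcard, so any subpath under
// this router's mount point is handled by the same logic.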
router.post("/*path", async (req, res) => {
  const requestStart = Date.now();
  console.log(`[Ollama API] ========== REQUEST START ==========`);

  const ollamaUrl = getOllamaUrl();
  console.log(`[Ollama API] Using Ollama at: ${ollamaUrl}`);

  // Extract request body (Claude format)
  const { sessionId, model, max_tokens, system, tools, messages } = req.body;

  console.log(`[Ollama API] Session ID: ${sessionId || 'none'}`);
  console.log(`[Ollama API] Model: ${model}`);
  console.log(`[Ollama API] Messages count: ${messages?.length || 0}`);

  // Build system prompt with entity context
  let systemPrompt = system || '';

  if (sessionId) {
    const session = getSession(sessionId);
    if (session) {
      console.log(`[Ollama API] Session found: ${session.entities.length} entities, ${session.conversationHistory.length} messages in history`);

      // Inject entity context into system prompt
      const entityContext = buildEntityContext(session.entities);
      console.log(`[Ollama API] Entity context added (${entityContext.length} chars)`);
      systemPrompt += entityContext;

      // Get conversation history and merge with current messages
      const historyMessages = getConversationForAPI(sessionId);
      if (historyMessages.length > 0 && messages) {
        const currentContent = messages[messages.length - 1]?.content;
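        // Avoid resending the newest message: drop any history entry whose
        // content matches it before prepending the history to `messages`.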
        const filteredHistory = historyMessages.filter(msg => msg.content !== currentContent);
        messages.unshift(...filteredHistory);
        console.log(`[Ollama API] Merged ${filteredHistory.length} history messages`);
      }
    } else {
      console.log(`[Ollama API] WARNING: Session ${sessionId} not found`);
    }
  }

  // Convert to Ollama format
  const ollamaMessages = claudeMessagesToOllama(messages || [], systemPrompt);
  const ollamaTools = claudeToolsToOllama(tools);

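  // Build the Ollama /api/chat request: Claude's max_tokens maps onto
  // options.num_predict, and streaming is disabled so the full reply can be
  // converted back to Claude format in a single pass.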
  const ollamaRequest = {
    model: model,
    messages: ollamaMessages,
    stream: false,
    options: {
      num_predict: max_tokens || 1024
    }
  };

  // Only add tools if there are any
  if (ollamaTools.length > 0) {
    ollamaRequest.tools = ollamaTools;
  }

  console.log(`[Ollama API] Converted to Ollama format: ${ollamaMessages.length} messages, ${ollamaTools.length} tools`);

  try {
    console.log(`[Ollama API] Sending request to Ollama...`);
    const fetchStart = Date.now();

    const response = await fetch(`${ollamaUrl}/api/chat`, {
      method: "POST",
      headers: {
        "Content-Type": "application/json"
      },
      body: JSON.stringify(ollamaRequest)
    });

    const fetchDuration = Date.now() - fetchStart;
    console.log(`[Ollama API] Response received in ${fetchDuration}ms, status: ${response.status}`);

    if (!response.ok) {
      const errorText = await response.text();
      console.error(`[Ollama API] Error response:`, errorText);
      return res.status(response.status).json({
        error: `Ollama API error: ${response.status}`,
        details: errorText
      });
    }

    const ollamaData = await response.json();
    console.log(`[Ollama API] Response parsed. Done: ${ollamaData.done}, model: ${ollamaData.model}`);

    // Convert response back to Claude format
    const claudeResponse = ollamaResponseToClaude(ollamaData);
    console.log(`[Ollama API] Converted to Claude format. Stop reason: ${claudeResponse.stop_reason}, content blocks: ${claudeResponse.content.length}`);

    // Store messages to session if applicable
    if (sessionId && claudeResponse.content) {
      const session = getSession(sessionId);
      if (session) {
        // Store the user message if it was new
        const userMessage = messages?.[messages.length - 1];
        if (userMessage && userMessage.role === 'user' && typeof userMessage.content === 'string') {
          addMessage(sessionId, {
            role: 'user',
            content: userMessage.content
          });
          console.log(`[Ollama API] Stored user message to session`);
        }

        // Store the assistant response (text only)
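        // Only text blocks are persisted; tool_use blocks in the reply are not
        // written to the conversation history.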
        const assistantContent = claudeResponse.content
          .filter(c => c.type === 'text')
          .map(c => c.text)
          .join('\n');

        if (assistantContent) {
          addMessage(sessionId, {
            role: 'assistant',
            content: assistantContent
          });
          console.log(`[Ollama API] Stored assistant response to session (${assistantContent.length} chars)`);
        }
      }
    }

    const totalDuration = Date.now() - requestStart;
    console.log(`[Ollama API] ========== REQUEST COMPLETE (${totalDuration}ms) ==========`);
    res.json(claudeResponse);

  } catch (error) {
    const totalDuration = Date.now() - requestStart;
    console.error(`[Ollama API] ========== REQUEST FAILED (${totalDuration}ms) ==========`);
    console.error(`[Ollama API] Error:`, error);

    // Check if it's a connection error
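    // Node's built-in fetch (undici) wraps connection failures, exposing the
    // underlying socket error on error.cause.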
    if (error.cause?.code === 'ECONNREFUSED') {
      return res.status(503).json({
        error: "Ollama is not running",
        details: `Could not connect to Ollama at ${ollamaUrl}. Make sure Ollama is installed and running.`
      });
    }

    res.status(500).json({
      error: "Failed to proxy request to Ollama",
      details: error.message
    });
  }
});

export default router;