feat: Adds support for openai-compatible servers replacing the ollama based api #1
7 changed files with 208 additions and 25 deletions
5
.env.example
Normal file
5
.env.example
Normal file
|
|
@ -0,0 +1,5 @@
|
||||||
|
# LiteLLM Proxy Settings
|
||||||
|
OPENAI_API_BASE_URL=http://127.0.0.1:4000/v1
|
||||||
|
OPENAI_API_KEY=MY_SECRET_KEY
|
||||||
|
DEFAULT_MODEL=openai/qwen3-30b-a3b-mlx
|
||||||
|
PORT=5000
|
||||||
|
|
@ -1,2 +1,6 @@
|
||||||
export const
|
export const
|
||||||
port = 5000;
|
port = 5000;
|
||||||
|
|
||||||
|
// OpenAI configuration
|
||||||
|
export const
|
||||||
|
openaiApiKey = "your-api-key-here";
|
||||||
60
lib/openai.js
Normal file
60
lib/openai.js
Normal file
|
|
@ -0,0 +1,60 @@
|
||||||
|
import { OpenAI } from 'openai';
|
||||||
|
|
||||||
|
// Build an OpenAI SDK client bound to the given credentials and endpoint.
// `baseURL` is what lets this talk to any OpenAI-compatible server
// (LiteLLM proxy, local inference server) instead of api.openai.com.
const createOpenAIClient = (apiKey, baseURL) => new OpenAI({ apiKey, baseURL });
|
||||||
|
|
||||||
|
// Fetch the list of models exposed by the OpenAI-compatible server.
//
// @param {object} [options]
// @param {string} [options.apiKey]  API key; defaults to OPENAI_API_KEY.
// @param {string} [options.baseURL] Server base URL; defaults to
//                                   OPENAI_API_BASE_URL or the local proxy.
// @returns {Promise<{models: Array<{id: string}>, error?: string}>}
//          Never rejects: on failure it returns a one-entry fallback list
//          plus the error message so callers can always render something.
export const fetchModels = async ({
  apiKey = process.env.OPENAI_API_KEY,
  baseURL = process.env.OPENAI_API_BASE_URL || 'http://127.0.0.1:4000/v1'
} = {}) => {
  // `= {}` above: without it, calling fetchModels() with no argument
  // throws a TypeError while destructuring undefined.
  const openai = createOpenAIClient(apiKey, baseURL);

  try {
    const response = await openai.models.list();
    // Depending on the SDK/API version, the model list is either nested
    // under `data` or is the response itself.
    const models = Array.isArray(response.data) ? response.data : response;
    return {
      models: models
    };
  } catch (error) {
    console.error('Error fetching models:', error);
    // Degrade gracefully: surface a default model so the UI stays usable.
    return {
      models: [
        { id: 'openai/qwen3-30b-a3b-mlx' }
      ],
      error: error.message
    };
  }
};
|
||||||
|
|
||||||
|
// Default export: run a single-turn chat completion against the configured
// OpenAI-compatible server and resolve with the generated text.
// Resolves to `{ text }`; rejects if the API call fails (no catch here —
// callers handle errors).
export default async ({
  apiKey = process.env.OPENAI_API_KEY,
  baseURL = process.env.OPENAI_API_BASE_URL || 'http://127.0.0.1:4000/v1',
  model = process.env.DEFAULT_MODEL || 'openai/qwen3-30b-a3b-mlx',
  prompt
}) => {
  const client = createOpenAIClient(apiKey, baseURL);

  const completion = await client.chat.completions.create({
    model,
    temperature: 0.7,
    messages: [{ role: 'user', content: prompt }]
  });

  const [firstChoice] = completion.choices;
  return { text: firstChoice.message.content };
};
||||||
49
main.js
49
main.js
|
|
@ -1,12 +1,16 @@
|
||||||
import { createReadStream } from 'node:fs';
|
import { createReadStream } from 'node:fs';
|
||||||
|
import { config } from 'dotenv';
|
||||||
|
|
||||||
|
// Load environment variables from .env file
|
||||||
|
config();
|
||||||
|
|
||||||
import createFastify from 'fastify';
|
import createFastify from 'fastify';
|
||||||
|
|
||||||
import promptableTodoList from './src/promptable-todo-list.js';
|
import promptableTodoList from './src/promptable-todo-list.js';
|
||||||
|
import { fetchModels } from './lib/openai.js';
|
||||||
|
|
||||||
import {
|
// Get port from environment variable, default to 5000
|
||||||
port
|
const port = process.env.PORT || 5000;
|
||||||
} from './config.js';
|
|
||||||
|
|
||||||
const fastify = createFastify();
|
const fastify = createFastify();
|
||||||
|
|
||||||
|
|
@ -39,5 +43,42 @@ fastify.post(
|
||||||
}
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
|
// Endpoint to fetch available models from the configured server.
// Responds with { models, currentModel, baseURL } (plus `error` if the
// upstream fetch degraded to the fallback list); responds 500 only when
// there is nothing at all to show.
fastify.get(
  '/models',
  async (
    request,
    reply
  ) => {
    try {
      // NOTE(review): taking apiKey/baseURL from the query string lets any
      // client redirect this server to an arbitrary host and puts secrets
      // into access logs — confirm this is intentional before exposing.
      const { models, error } = await fetchModels({
        apiKey: request.query.apiKey,
        baseURL: request.query.baseURL
      });

      // fetchModels degrades gracefully: on failure it still returns a
      // fallback model list alongside `error`. Returning 500 whenever
      // `error` is set would throw that fallback away, so only fail when
      // the list is empty too.
      if (error && (!models || models.length === 0)) {
        return reply
          .code(500)
          .send({
            error
          });
      }

      return reply.send({
        models,
        error,
        currentModel: process.env.DEFAULT_MODEL || 'openai/qwen3-30b-a3b-mlx',
        baseURL: process.env.OPENAI_API_BASE_URL || 'http://127.0.0.1:4000/v1'
      });
    }
    catch (error) {
      // Send only the message: serializing the whole error object
      // (previously via Object.getOwnPropertyNames) leaked stack traces
      // and internals to the client.
      return reply
        .code(500)
        .send({
          error: error.message
        });
    }
  }
);
|
||||||
|
|
||||||
await fastify.listen({ port });
|
await fastify.listen({ port });
|
||||||
console.log(`Listening to localhost:${port}`);
|
console.log(`Listening to localhost:${port}`);
|
||||||
|
|
|
||||||
|
|
@ -8,13 +8,14 @@
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ai": "^4.3.10",
|
"ai": "^4.3.10",
|
||||||
"dedent": "^1.5.3",
|
"dedent": "^1.5.3",
|
||||||
|
"dotenv": "^16.4.1",
|
||||||
"fastify": "^5.3.2",
|
"fastify": "^5.3.2",
|
||||||
"happy-dom": "^17.4.4",
|
"happy-dom": "^17.4.4",
|
||||||
"joi": "^17.13.3",
|
"joi": "^17.13.3",
|
||||||
"marked": "^15.0.11",
|
"marked": "^15.0.11",
|
||||||
"ollama-ai-provider": "^1.2.0"
|
"openai": "^4.28.0"
|
||||||
},
|
},
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"start": "bun ./main.js"
|
"start": "bun ./main.js"
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
@ -62,10 +62,18 @@
|
||||||
column-gap: 0.5rem;
|
column-gap: 0.5rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
.app__status {
|
||||||
|
font-size: 0.8rem;
|
||||||
|
background-color: rgba(0, 0, 0, 0.05);
|
||||||
|
padding: 0.5rem;
|
||||||
|
border-radius: 0.25rem;
|
||||||
|
margin-bottom: 1rem;
|
||||||
|
}
|
||||||
|
|
||||||
.main__prompt {
|
.main__prompt {
|
||||||
margin-top: auto;
|
margin-top: auto;
|
||||||
display: grid;
|
display: grid;
|
||||||
grid-template-columns: auto 1fr 100px;
|
grid-template-columns: 1fr auto;
|
||||||
column-gap: 0.5rem;
|
column-gap: 0.5rem;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
@ -114,8 +122,6 @@
|
||||||
titleElement.setAttribute('value', task.title);
|
titleElement.setAttribute('value', task.title);
|
||||||
formElement.appendChild(titleElement);
|
formElement.appendChild(titleElement);
|
||||||
|
|
||||||
// TODO up/down
|
|
||||||
|
|
||||||
const removeElement = createSimpleElement('button.secondary');
|
const removeElement = createSimpleElement('button.secondary');
|
||||||
removeElement.textContent = '🗑️';
|
removeElement.textContent = '🗑️';
|
||||||
removeElement.addEventListener('click', () => {
|
removeElement.addEventListener('click', () => {
|
||||||
|
|
@ -124,7 +130,6 @@
|
||||||
});
|
});
|
||||||
formElement.appendChild(removeElement);
|
formElement.appendChild(removeElement);
|
||||||
|
|
||||||
// TODO replace form 'submit' event w/ field 'input' event
|
|
||||||
formElement.addEventListener('submit', event => {
|
formElement.addEventListener('submit', event => {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
task.isDone = isDoneElement.checked;
|
task.isDone = isDoneElement.checked;
|
||||||
|
|
@ -136,9 +141,64 @@
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
// Query /models and populate the status line, the current-model label and
// the model dropdown. On any failure, log it and show an error in the label.
const fetchAvailableModels = async () => {
  // Build one <option> for the dropdown.
  const buildOption = (id, isSelected) => {
    const option = document.createElement('option');
    option.value = id;
    option.textContent = id;
    option.selected = isSelected;
    return option;
  };

  try {
    const { data } = await axios.get('/models');
    const currentModelElement = document.getElementById('current-model');
    const modelDropdownElement = document.getElementById('model-dropdown');
    const statusElement = document.querySelector('.app__status strong:first-child');

    // The status text is the text node right after <strong>Status:</strong>.
    if (data.baseURL) {
      statusElement.nextSibling.textContent = ` Using API at ${data.baseURL}`;
    }

    currentModelElement.textContent = data.currentModel;

    // Rebuild the dropdown from scratch.
    modelDropdownElement.innerHTML = '';

    if (data.models && data.models.length > 0) {
      for (const model of data.models) {
        modelDropdownElement.appendChild(
          buildOption(model.id, model.id === data.currentModel)
        );
      }
    } else {
      // No models reported: fall back to a single entry for the current model.
      modelDropdownElement.appendChild(buildOption(data.currentModel, true));
    }
  } catch (error) {
    console.error('Error fetching models:', error);
    document.getElementById('current-model').textContent = 'Error loading models';
  }
};
|
||||||
|
|
||||||
document.addEventListener('DOMContentLoaded', async () => {
|
document.addEventListener('DOMContentLoaded', async () => {
|
||||||
reloadTasks();
|
reloadTasks();
|
||||||
|
|
||||||
|
// Fetch available models
|
||||||
|
await fetchAvailableModels();
|
||||||
|
|
||||||
|
// Add event listener for model dropdown
|
||||||
|
document.getElementById('model-dropdown').addEventListener('change', async (event) => {
|
||||||
|
const selectedModel = event.target.value;
|
||||||
|
document.getElementById('current-model').textContent = selectedModel;
|
||||||
|
|
||||||
|
// Store the selected model in localStorage for future use
|
||||||
|
localStorage.setItem('selectedModel', selectedModel);
|
||||||
|
});
|
||||||
|
|
||||||
document.querySelector('.main__new-task').addEventListener('submit', event => {
|
document.querySelector('.main__new-task').addEventListener('submit', event => {
|
||||||
event.preventDefault();
|
event.preventDefault();
|
||||||
const
|
const
|
||||||
|
|
@ -158,7 +218,6 @@
|
||||||
const
|
const
|
||||||
promptFieldsetElement = document.querySelector('.main__prompt__fieldset'),
|
promptFieldsetElement = document.querySelector('.main__prompt__fieldset'),
|
||||||
promptSubmitElement = document.querySelector('.main__prompt__submit'),
|
promptSubmitElement = document.querySelector('.main__prompt__submit'),
|
||||||
promptModelElement = document.querySelector('.main__prompt__model'),
|
|
||||||
promptTextElement = document.querySelector('.main__prompt__text');
|
promptTextElement = document.querySelector('.main__prompt__text');
|
||||||
|
|
||||||
document.querySelector('.main__prompt').addEventListener('submit', async event => {
|
document.querySelector('.main__prompt').addEventListener('submit', async event => {
|
||||||
|
|
@ -172,10 +231,11 @@
|
||||||
finalState
|
finalState
|
||||||
}
|
}
|
||||||
} = await axios.post('/', {
|
} = await axios.post('/', {
|
||||||
baseURL: 'http://localhost:11434/api',
|
// API key and base URL are set from environment variables on the server
|
||||||
model: promptModelElement.value,
|
|
||||||
initialState: tasks,
|
initialState: tasks,
|
||||||
instructions: promptTextElement.value
|
instructions: promptTextElement.value,
|
||||||
|
// Use the selected model if available
|
||||||
|
model: localStorage.getItem('selectedModel') || undefined
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
catch(error){
|
catch(error){
|
||||||
|
|
@ -194,9 +254,6 @@
|
||||||
promptSubmitElement.removeAttribute('aria-busy');
|
promptSubmitElement.removeAttribute('aria-busy');
|
||||||
promptFieldsetElement.disabled = false;
|
promptFieldsetElement.disabled = false;
|
||||||
});
|
});
|
||||||
|
|
||||||
promptModelElement.value = window.localStorage.getItem('model') || '';
|
|
||||||
promptModelElement.addEventListener('input', () => window.localStorage.setItem('model', promptModelElement.value));
|
|
||||||
});
|
});
|
||||||
</script>
|
</script>
|
||||||
</head>
|
</head>
|
||||||
|
|
@ -210,6 +267,15 @@
|
||||||
</nav>
|
</nav>
|
||||||
</header>
|
</header>
|
||||||
<main class="app__main">
|
<main class="app__main">
|
||||||
|
<div class="app__status">
|
||||||
|
<strong>Status:</strong> Connecting to API...
|
||||||
|
<div class="model-selector" style="margin-top: 0.5rem;">
|
||||||
|
<strong>Model:</strong> <span id="current-model">Loading...</span>
|
||||||
|
<label for="model-dropdown"></label><select id="model-dropdown" style="margin-left: 0.5rem; padding: 0.25rem; font-size: 0.8rem;">
|
||||||
|
<option value="">Loading models...</option>
|
||||||
|
</select>
|
||||||
|
</div>
|
||||||
|
</div>
|
||||||
<ul class="main__tasks"></ul>
|
<ul class="main__tasks"></ul>
|
||||||
<form class="main__new-task">
|
<form class="main__new-task">
|
||||||
<label style="display: contents"><input class="main__new-task__is-done" type="checkbox"></label>
|
<label style="display: contents"><input class="main__new-task__is-done" type="checkbox"></label>
|
||||||
|
|
@ -217,11 +283,10 @@
|
||||||
</form>
|
</form>
|
||||||
<form class="main__prompt">
|
<form class="main__prompt">
|
||||||
<fieldset class="main__prompt__fieldset" style="display: contents">
|
<fieldset class="main__prompt__fieldset" style="display: contents">
|
||||||
<label style="display: contents"><input class="main__prompt__model" type="text" placeholder="Model ID" required></label>
|
<label style="display: contents"><input class="main__prompt__text" type="text" placeholder="Enter your instruction for the to-do list..." required></label>
|
||||||
<label style="display: contents"><input class="main__prompt__text" type="text" placeholder="Prompt..." required></label>
|
|
||||||
<button class="main__prompt__submit" type="submit"><span class="main__prompt__submit__label">Submit</span></button>
|
<button class="main__prompt__submit" type="submit"><span class="main__prompt__submit__label">Submit</span></button>
|
||||||
</fieldset>
|
</fieldset>
|
||||||
</form>
|
</form>
|
||||||
</main>
|
</main>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
|
||||||
|
|
@ -3,16 +3,18 @@ import { marked } from 'marked';
|
||||||
import { Window } from 'happy-dom';
|
import { Window } from 'happy-dom';
|
||||||
import Joi from 'joi';
|
import Joi from 'joi';
|
||||||
|
|
||||||
import ollama from '../lib/ollama.js';
|
import openai from '../lib/openai.js';
|
||||||
|
|
||||||
export default async ({
|
export default async ({
|
||||||
baseURL,
|
apiKey = process.env.OPENAI_API_KEY,
|
||||||
model,
|
baseURL = process.env.OPENAI_API_BASE_URL,
|
||||||
|
model = process.env.DEFAULT_MODEL,
|
||||||
initialState,
|
initialState,
|
||||||
instructions
|
instructions
|
||||||
}) => {
|
}) => {
|
||||||
const
|
const
|
||||||
{ text } = await ollama({
|
{ text } = await openai({
|
||||||
|
apiKey,
|
||||||
baseURL,
|
baseURL,
|
||||||
model,
|
model,
|
||||||
prompt: dedent `
|
prompt: dedent `
|
||||||
|
|
@ -28,6 +30,11 @@ export default async ({
|
||||||
${instructions}
|
${instructions}
|
||||||
\`\`\`
|
\`\`\`
|
||||||
Update your JSON value accordingly and output it alone within a Markdown JSON code block.
|
Update your JSON value accordingly and output it alone within a Markdown JSON code block.
|
||||||
|
- Feel free to change the list as necessary, breaking up items, moving them around, etc.
|
||||||
|
- Do not output anything else than the JSON code block.
|
||||||
|
- It is important that the JSON code block is valid JSON.
|
||||||
|
- The list should reflect what the user wants, so when they say something, think about the nuance of the list.
|
||||||
|
- Do not remove any item, only update or add them.
|
||||||
`
|
`
|
||||||
}),
|
}),
|
||||||
{ document } = new Window();
|
{ document } = new Window();
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue