Add optional OpenAI auth headers
parent 35aadfac5a
commit 1680ad6c30
8 changed files with 112 additions and 7 deletions
@@ -35,6 +35,7 @@ There are currently no automated tests for this project. If you add tests in the

Sortana targets the `/v1/completions` API. The endpoint value stored in settings is a base URL; the full request URL is constructed by appending `/v1/completions` (adding a slash when needed) and defaulting to `https://` if no scheme is provided.
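For illustration, here is a minimal sketch of that URL normalization; the helper name `buildCompletionsUrl` is hypothetical and only mirrors the behavior described above, it is not Sortana's actual code.

```js
// Sketch only: build the request URL from the stored base endpoint.
function buildCompletionsUrl(endpoint) {
  let base = endpoint.trim();
  if (!/^[a-z]+:\/\//i.test(base)) {
    base = "https://" + base; // default to https:// when no scheme is given
  }
  if (!base.endsWith("/")) {
    base += "/"; // add a slash when needed
  }
  return base + "v1/completions";
}

// buildCompletionsUrl("api.example.com") -> "https://api.example.com/v1/completions"
```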
The options page can query `/v1/models` from the same base URL to populate the Model dropdown; selecting **None** omits the `model` field from the request payload.
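As a rough sketch of how that query could look (the `{ data: [{ id }] }` response shape follows the common OpenAI-style listing; this is an assumption, not a copy of the options-page code, which appears later in this diff as `fetchModels`):

```js
// Sketch: list model IDs from an OpenAI-compatible /v1/models endpoint.
async function listModels(baseUrl) {
  const response = await fetch(baseUrl.replace(/\/+$/, "") + "/v1/models");
  if (!response.ok) throw new Error(`HTTP ${response.status}`);
  const data = await response.json();
  return (data.data || []).map(model => model.id); // array of model id strings
}
```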
Advanced options allow an optional API key plus `OpenAI-Organization` and `OpenAI-Project` headers; these headers are only sent when values are provided.
Responses are expected to include a JSON object with `match` (or `matched`) plus a short `reason` string; the parser extracts the last JSON object in the response text and ignores any surrounding commentary.
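A simplified sketch of that extraction step (plain brace matching only; the add-on's actual parser may handle edge cases such as braces inside strings differently):

```js
// Sketch: return the last {...} JSON object embedded in the model's reply.
function extractLastJsonObject(text) {
  const end = text.lastIndexOf("}");
  if (end === -1) return null;
  let depth = 0;
  for (let i = end; i >= 0; i--) {
    if (text[i] === "}") depth++;
    if (text[i] === "{" && --depth === 0) {
      try {
        return JSON.parse(text.slice(i, end + 1));
      } catch {
        return null;
      }
    }
  }
  return null;
}

// extractLastJsonObject('Sure thing! {"match": true, "reason": "newsletter"}')
// -> { match: true, reason: "newsletter" }
```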
## Documentation
@@ -15,6 +15,7 @@ expecting a `match` (or `matched`) boolean plus a `reason` string.

- **Configurable endpoint** – set the classification service base URL on the options page.
- **Model selection** – load available models from the endpoint and choose one (or omit the model field).
- **Optional OpenAI auth headers** – provide an API key plus optional organization/project headers when needed.
- **Prompt templates** – choose between OpenAI/ChatML, Qwen, Mistral, Harmony (gpt-oss), or provide your own custom template.
- **Custom system prompts** – tailor the instructions sent to the model for more precise results.
- **Persistent result caching** – classification results and reasoning are saved to disk so messages aren't re-evaluated across restarts.
@@ -82,6 +83,8 @@ Sortana is implemented entirely with standard WebExtension scripts—no custom e
1. Open the add-on's options and set the base URL of your classification service
   (Sortana will append `/v1/completions`). Use the Model dropdown to load
   `/v1/models` and select a model or choose **None** to omit the `model` field.
   Advanced settings include optional API key, organization, and project headers
   for OpenAI-hosted endpoints.
2. Use the **Classification Rules** section to add a criterion and optional
   actions such as tagging, moving, copying, forwarding, replying,
   deleting or archiving a message when it matches. Drag rules to
@@ -484,7 +484,7 @@ async function clearCacheForMessages(idsInput) {
}

try {
  const store = await storage.local.get(["endpoint", "model", "templateName", "customTemplate", "customSystemPrompt", "aiParams", "debugLogging", "htmlToMarkdown", "stripUrlParams", "altTextImages", "collapseWhitespace", "tokenReduction", "aiRules", "theme", "showDebugTab"]);
  const store = await storage.local.get(["endpoint", "model", "apiKey", "openaiOrganization", "openaiProject", "templateName", "customTemplate", "customSystemPrompt", "aiParams", "debugLogging", "htmlToMarkdown", "stripUrlParams", "altTextImages", "collapseWhitespace", "tokenReduction", "aiRules", "theme", "showDebugTab"]);
  logger.setDebug(store.debugLogging);
  await AiClassifier.setConfig(store);
  userTheme = store.theme || 'auto';
@@ -514,10 +514,13 @@ async function clearCacheForMessages(idsInput) {
    aiRules = normalizeRules(newRules);
    logger.aiLog("aiRules updated from storage change", { debug: true }, aiRules);
  }
  if (changes.endpoint || changes.model || changes.templateName || changes.customTemplate || changes.customSystemPrompt || changes.aiParams || changes.debugLogging) {
  if (changes.endpoint || changes.model || changes.apiKey || changes.openaiOrganization || changes.openaiProject || changes.templateName || changes.customTemplate || changes.customSystemPrompt || changes.aiParams || changes.debugLogging) {
    const config = {};
    if (changes.endpoint) config.endpoint = changes.endpoint.newValue;
    if (changes.model) config.model = changes.model.newValue;
    if (changes.apiKey) config.apiKey = changes.apiKey.newValue;
    if (changes.openaiOrganization) config.openaiOrganization = changes.openaiOrganization.newValue;
    if (changes.openaiProject) config.openaiProject = changes.openaiProject.newValue;
    if (changes.templateName) config.templateName = changes.templateName.newValue;
    if (changes.customTemplate) config.customTemplate = changes.customTemplate.newValue;
    if (changes.customSystemPrompt) config.customSystemPrompt = changes.customSystemPrompt.newValue;
@@ -1,7 +1,7 @@
{
  "manifest_version": 2,
  "name": "Sortana",
  "version": "2.4.0",
  "version": "2.4.1",
  "default_locale": "en-US",
  "applications": {
    "gecko": {
@@ -40,6 +40,9 @@ let gTemplateText = "";

let gAiParams = Object.assign({}, DEFAULT_AI_PARAMS);
let gModel = "";
let gApiKey = "";
let gOpenaiOrganization = "";
let gOpenaiProject = "";

let gCache = new Map();
let gCacheLoaded = false;
@@ -223,6 +226,15 @@ async function setConfig(config = {}) {
  if (typeof config.model === "string") {
    gModel = config.model.trim();
  }
  if (typeof config.apiKey === "string") {
    gApiKey = config.apiKey.trim();
  }
  if (typeof config.openaiOrganization === "string") {
    gOpenaiOrganization = config.openaiOrganization.trim();
  }
  if (typeof config.openaiProject === "string") {
    gOpenaiProject = config.openaiProject.trim();
  }
  if (typeof config.debugLogging === "boolean") {
    setDebug(config.debugLogging);
  }
@@ -241,6 +253,20 @@ async function setConfig(config = {}) {
  aiLog(`[AiClassifier] Template set to ${gTemplateName}`, {debug: true});
}

function buildAuthHeaders() {
  const headers = {};
  if (gApiKey) {
    headers.Authorization = `Bearer ${gApiKey}`;
  }
  if (gOpenaiOrganization) {
    headers["OpenAI-Organization"] = gOpenaiOrganization;
  }
  if (gOpenaiProject) {
    headers["OpenAI-Project"] = gOpenaiProject;
  }
  return headers;
}

function buildSystemPrompt() {
  return SYSTEM_PREFIX + (gCustomSystemPrompt || DEFAULT_CUSTOM_SYSTEM_PROMPT) + SYSTEM_SUFFIX;
}
@@ -453,7 +479,7 @@ async function classifyText(text, criterion, cacheKey = null) {
  try {
    const response = await fetch(gEndpoint, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      headers: { "Content-Type": "application/json", ...buildAuthHeaders() },
      body: payload,
    });
@@ -4,6 +4,9 @@ const KEY_GROUPS = {
  settings: [
    'endpoint',
    'model',
    'apiKey',
    'openaiOrganization',
    'openaiProject',
    'templateName',
    'customTemplate',
    'customSystemPrompt',
@@ -141,6 +141,32 @@
</div>

<div id="advanced-options" class="mt-4 is-hidden">
  <div class="field">
    <label class="label" for="api-key">API key</label>
    <div class="field has-addons">
      <div class="control is-expanded">
        <input class="input" type="password" id="api-key" placeholder="sk-...">
      </div>
      <div class="control">
        <button class="button" id="toggle-api-key" type="button">Show</button>
      </div>
    </div>
    <p class="help">Leave blank for unauthenticated endpoints.</p>
  </div>
  <div class="field">
    <label class="label" for="openai-organization">OpenAI Organization</label>
    <div class="control">
      <input class="input" type="text" id="openai-organization" placeholder="org-...">
    </div>
    <p class="help">Optional header for OpenAI-hosted endpoints.</p>
  </div>
  <div class="field">
    <label class="label" for="openai-project">OpenAI Project</label>
    <div class="control">
      <input class="input" type="text" id="openai-project" placeholder="proj_...">
    </div>
    <p class="help">Optional header for OpenAI-hosted endpoints.</p>
  </div>
  <div class="field">
    <label class="checkbox">
      <input type="checkbox" id="debug-logging"> Enable debug logging
@@ -11,6 +11,9 @@ document.addEventListener('DOMContentLoaded', async () => {
    'customTemplate',
    'customSystemPrompt',
    'model',
    'apiKey',
    'openaiOrganization',
    'openaiProject',
    'aiParams',
    'debugLogging',
    'htmlToMarkdown',
@@ -140,6 +143,23 @@ document.addEventListener('DOMContentLoaded', async () => {
    modelSelect.value = hasSelected ? selectedModel : '';
  }

  function buildAuthHeaders() {
    const headers = {};
    const apiKey = apiKeyInput?.value.trim();
    if (apiKey) {
      headers.Authorization = `Bearer ${apiKey}`;
    }
    const organization = openaiOrgInput?.value.trim();
    if (organization) {
      headers["OpenAI-Organization"] = organization;
    }
    const project = openaiProjectInput?.value.trim();
    if (project) {
      headers["OpenAI-Project"] = project;
    }
    return headers;
  }

  async function fetchModels(preferredModel = '') {
    if (!modelSelect || !refreshModelsBtn) return;
    const modelsUrl = AiClassifier.buildModelsUrl(endpointInput.value);
@@ -153,7 +173,7 @@ document.addEventListener('DOMContentLoaded', async () => {
    setModelHelp('Loading models...');

    try {
      const response = await fetch(modelsUrl, { method: 'GET' });
      const response = await fetch(modelsUrl, { method: 'GET', headers: buildAuthHeaders() });
      if (!response.ok) {
        throw new Error(`HTTP ${response.status}`);
      }
@@ -215,6 +235,26 @@ document.addEventListener('DOMContentLoaded', async () => {
    advancedBox.classList.toggle('is-hidden');
  });

  const apiKeyInput = document.getElementById('api-key');
  const apiKeyToggle = document.getElementById('toggle-api-key');
  const openaiOrgInput = document.getElementById('openai-organization');
  const openaiProjectInput = document.getElementById('openai-project');
  if (apiKeyInput) {
    apiKeyInput.value = typeof defaults.apiKey === 'string' ? defaults.apiKey : '';
  }
  if (openaiOrgInput) {
    openaiOrgInput.value = typeof defaults.openaiOrganization === 'string' ? defaults.openaiOrganization : '';
  }
  if (openaiProjectInput) {
    openaiProjectInput.value = typeof defaults.openaiProject === 'string' ? defaults.openaiProject : '';
  }
  apiKeyToggle?.addEventListener('click', () => {
    if (!apiKeyInput) return;
    const show = apiKeyInput.type === 'password';
    apiKeyInput.type = show ? 'text' : 'password';
    apiKeyToggle.textContent = show ? 'Hide' : 'Show';
  });

  const debugToggle = document.getElementById('debug-logging');
  debugToggle.checked = defaults.debugLogging === true;
@@ -999,6 +1039,9 @@ document.addEventListener('DOMContentLoaded', async () => {
  document.getElementById('save').addEventListener('click', async () => {
    const endpoint = endpointInput.value.trim();
    const model = modelSelect?.value || '';
    const apiKey = apiKeyInput?.value.trim() || '';
    const openaiOrganization = openaiOrgInput?.value.trim() || '';
    const openaiProject = openaiProjectInput?.value.trim() || '';
    const templateName = templateSelect.value;
    const customTemplateText = customTemplate.value;
    const customSystemPrompt = systemBox.value;
@@ -1064,10 +1107,10 @@ document.addEventListener('DOMContentLoaded', async () => {
    const tokenReduction = tokenReductionToggle.checked;
    const showDebugTab = debugTabToggle.checked;
    const theme = themeSelect.value;
    await storage.local.set({ endpoint, model, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging, htmlToMarkdown, stripUrlParams, altTextImages, collapseWhitespace, tokenReduction, aiRules: rules, theme, showDebugTab });
    await storage.local.set({ endpoint, model, apiKey, openaiOrganization, openaiProject, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging, htmlToMarkdown, stripUrlParams, altTextImages, collapseWhitespace, tokenReduction, aiRules: rules, theme, showDebugTab });
    await applyTheme(theme);
    try {
      await AiClassifier.setConfig({ endpoint, model, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging });
      await AiClassifier.setConfig({ endpoint, model, apiKey, openaiOrganization, openaiProject, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging });
      logger.setDebug(debugLogging);
    } catch (e) {
      logger.aiLog('[options] failed to apply config', {level: 'error'}, e);