From 32e79a13d54f064f20fdae1bd48b663a774e9801 Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Tue, 8 Jul 2025 17:34:26 -0500 Subject: [PATCH 01/50] Add shared defaults for AI parameters --- modules/AiClassifier.js | 15 ++------------- modules/defaultParams.js | 16 ++++++++++++++++ options/options.js | 14 +------------- 3 files changed, 19 insertions(+), 26 deletions(-) create mode 100644 modules/defaultParams.js diff --git a/modules/AiClassifier.js b/modules/AiClassifier.js index 3c526f8..7e757dd 100644 --- a/modules/AiClassifier.js +++ b/modules/AiClassifier.js @@ -1,5 +1,6 @@ "use strict"; import { aiLog, setDebug } from "../logger.js"; +import { DEFAULT_AI_PARAMS } from "./defaultParams.js"; const storage = (globalThis.messenger ?? globalThis.browser).storage; @@ -33,19 +34,7 @@ let gCustomTemplate = ""; let gCustomSystemPrompt = DEFAULT_CUSTOM_SYSTEM_PROMPT; let gTemplateText = ""; -let gAiParams = { - max_tokens: 4096, - temperature: 0.6, - top_p: 0.95, - seed: -1, - repetition_penalty: 1.0, - top_k: 20, - min_p: 0, - presence_penalty: 0, - frequency_penalty: 0, - typical_p: 1, - tfs: 1, -}; +let gAiParams = Object.assign({}, DEFAULT_AI_PARAMS); let gCache = new Map(); let gCacheLoaded = false; diff --git a/modules/defaultParams.js b/modules/defaultParams.js new file mode 100644 index 0000000..a8afe53 --- /dev/null +++ b/modules/defaultParams.js @@ -0,0 +1,16 @@ +"use strict"; + +export const DEFAULT_AI_PARAMS = { + max_tokens: 4096, + temperature: 0.6, + top_p: 0.95, + seed: -1, + repetition_penalty: 1.0, + top_k: 20, + min_p: 0, + presence_penalty: 0, + frequency_penalty: 0, + typical_p: 1, + tfs: 1, +}; + diff --git a/options/options.js b/options/options.js index 5132807..b465998 100644 --- a/options/options.js +++ b/options/options.js @@ -4,6 +4,7 @@ document.addEventListener('DOMContentLoaded', async () => { const AiClassifier = await import(browser.runtime.getURL('modules/AiClassifier.js')); const dataTransfer = await import(browser.runtime.getURL('options/dataTransfer.js')); const { detectSystemTheme } = await import(browser.runtime.getURL('modules/themeUtils.js')); + const { DEFAULT_AI_PARAMS } = await import(browser.runtime.getURL('modules/defaultParams.js')); const defaults = await storage.local.get([ 'endpoint', 'templateName', @@ -66,19 +67,6 @@ document.addEventListener('DOMContentLoaded', async () => { markDirty(); await applyTheme(themeSelect.value); }); - const DEFAULT_AI_PARAMS = { - max_tokens: 4096, - temperature: 0.6, - top_p: 0.95, - seed: -1, - repetition_penalty: 1.0, - top_k: 20, - min_p: 0, - presence_penalty: 0, - frequency_penalty: 0, - typical_p: 1, - tfs: 1 - }; document.getElementById('endpoint').value = defaults.endpoint || 'http://127.0.0.1:5000/v1/completions'; const templates = { From f3e20f8941da5b8e343c6b2d69ff898cc3984ea2 Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Tue, 8 Jul 2025 17:53:35 -0500 Subject: [PATCH 02/50] Remove unused sync classifier API --- modules/AiClassifier.js | 72 +++-------------------------------------- 1 file changed, 4 insertions(+), 68 deletions(-) diff --git a/modules/AiClassifier.js b/modules/AiClassifier.js index 7e757dd..b35cb2c 100644 --- a/modules/AiClassifier.js +++ b/modules/AiClassifier.js @@ -61,10 +61,6 @@ async function sha256Hex(str) { return sha256HexSync(str); } -function buildCacheKeySync(id, criterion) { - return sha256HexSync(`${id}|${criterion}`); -} - async function resolveHeaderId(id) { if (typeof id === "number" && typeof messenger?.messages?.get === "function") { try { @@ -82,7 +78,7 @@ async 
function resolveHeaderId(id) { async function buildCacheKey(id, criterion) { const resolvedId = await resolveHeaderId(id); if (Services) { - return buildCacheKeySync(resolvedId, criterion); + return sha256HexSync(`${resolvedId}|${criterion}`); } return sha256Hex(`${resolvedId}|${criterion}`); } @@ -122,16 +118,6 @@ async function loadCache() { gCacheLoaded = true; } -function loadCacheSync() { - if (!gCacheLoaded) { - if (!Services?.tm?.spinEventLoopUntil) { - throw new Error("loadCacheSync requires Services"); - } - let done = false; - loadCache().finally(() => { done = true; }); - Services.tm.spinEventLoopUntil(() => done); - } -} async function saveCache(updatedKey, updatedValue) { if (typeof updatedKey !== "undefined") { @@ -222,11 +208,7 @@ function buildPrompt(body, criterion) { function getCachedResult(cacheKey) { if (!gCacheLoaded) { - if (Services?.tm?.spinEventLoopUntil) { - loadCacheSync(); - } else { - return null; - } + return null; } if (cacheKey && gCache.has(cacheKey)) { aiLog(`[AiClassifier] Cache hit for key: ${cacheKey}`, {debug: true}); @@ -238,11 +220,7 @@ function getCachedResult(cacheKey) { function getReason(cacheKey) { if (!gCacheLoaded) { - if (Services?.tm?.spinEventLoopUntil) { - loadCacheSync(); - } else { - return null; - } + return null; } const entry = gCache.get(cacheKey); return cacheKey && entry ? entry.reason || null : null; @@ -319,48 +297,6 @@ async function getCacheSize() { return gCache.size; } -function classifyTextSync(text, criterion, cacheKey = null) { - if (!Services?.tm?.spinEventLoopUntil) { - throw new Error("classifyTextSync requires Services"); - } - const cached = getCachedResult(cacheKey); - if (cached !== null) { - return cached; - } - - const payload = buildPayload(text, criterion); - - aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true}); - - let result; - let done = false; - (async () => { - try { - const response = await fetch(gEndpoint, { - method: "POST", - headers: { "Content-Type": "application/json" }, - body: payload, - }); - if (response.ok) { - const json = await response.json(); - aiLog(`[AiClassifier] Received response:`, {debug: true}, json); - result = parseMatch(json); - cacheEntry(cacheKey, result.matched, result.reason); - result = result.matched; - } else { - aiLog(`HTTP status ${response.status}`, {level: 'warn'}); - result = false; - } - } catch (e) { - aiLog(`HTTP request failed`, {level: 'error'}, e); - result = false; - } finally { - done = true; - } - })(); - Services.tm.spinEventLoopUntil(() => done); - return result; -} async function classifyText(text, criterion, cacheKey = null) { if (!gCacheLoaded) { @@ -403,4 +339,4 @@ async function init() { await loadCache(); } -export { classifyText, classifyTextSync, setConfig, removeCacheEntries, clearCache, getReason, getCachedResult, buildCacheKey, buildCacheKeySync, getCacheSize, init }; +export { classifyText, setConfig, removeCacheEntries, clearCache, getReason, getCachedResult, buildCacheKey, getCacheSize, init }; From 1b7fa3d5eeac2379287e38082ad6bd72453025c3 Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Tue, 8 Jul 2025 19:45:57 -0500 Subject: [PATCH 03/50] Refactor message processing --- background.js | 133 +++++++++++++++++++++++++------------------------- 1 file changed, 67 insertions(+), 66 deletions(-) diff --git a/background.js b/background.js index 5d4de8d..e8741dd 100644 --- a/background.js +++ b/background.js @@ -169,10 +169,75 @@ function buildEmailText(full) { const headers = Object.entries(full.headers || {}) 
.map(([k, v]) => `${k}: ${v.join(' ')}`) .join('\n'); - const attachInfo = `Attachments: ${attachments.length}` + (attachments.length ? "\n" + attachments.map(a => ` - ${a}`).join('\n') : ""); + const attachInfo = `Attachments: ${attachments.length}` + + (attachments.length ? "\n" + attachments.map(a => ` - ${a}`).join('\n') : ""); const combined = `${headers}\n${attachInfo}\n\n${bodyParts.join('\n')}`.trim(); return sanitizeString(combined); } + +function updateTimingStats(elapsed) { + const t = timingStats; + t.count += 1; + t.total += elapsed; + t.last = elapsed; + const delta = elapsed - t.mean; + t.mean += delta / t.count; + t.m2 += delta * (elapsed - t.mean); +} + +async function processMessage(id) { + processing = true; + currentStart = Date.now(); + queuedCount--; + updateActionIcon(); + try { + const full = await messenger.messages.getFull(id); + const text = buildEmailText(full); + let currentTags = []; + try { + const hdr = await messenger.messages.get(id); + currentTags = Array.isArray(hdr.tags) ? [...hdr.tags] : []; + } catch (e) { + currentTags = []; + } + + for (const rule of aiRules) { + const cacheKey = await AiClassifier.buildCacheKey(id, rule.criterion); + const matched = await AiClassifier.classifyText(text, rule.criterion, cacheKey); + if (matched) { + for (const act of (rule.actions || [])) { + if (act.type === 'tag' && act.tagKey) { + if (!currentTags.includes(act.tagKey)) { + currentTags.push(act.tagKey); + await messenger.messages.update(id, { tags: currentTags }); + } + } else if (act.type === 'move' && act.folder) { + await messenger.messages.move([id], act.folder); + } else if (act.type === 'junk') { + await messenger.messages.update(id, { junk: !!act.junk }); + } + } + if (rule.stopProcessing) { + break; + } + } + } + processing = false; + const elapsed = Date.now() - currentStart; + currentStart = 0; + updateTimingStats(elapsed); + await storage.local.set({ classifyStats: timingStats }); + showTransientIcon(ICONS.circle); + } catch (e) { + processing = false; + const elapsed = Date.now() - currentStart; + currentStart = 0; + updateTimingStats(elapsed); + await storage.local.set({ classifyStats: timingStats }); + logger.aiLog("failed to apply AI rules", { level: 'error' }, e); + showTransientIcon(ICONS.average); + } +} async function applyAiRules(idsInput) { const ids = Array.isArray(idsInput) ? idsInput : [idsInput]; if (!ids.length) return queue; @@ -186,71 +251,7 @@ async function applyAiRules(idsInput) { const id = msg?.id ?? msg; queuedCount++; updateActionIcon(); - queue = queue.then(async () => { - processing = true; - currentStart = Date.now(); - queuedCount--; - updateActionIcon(); - try { - const full = await messenger.messages.getFull(id); - const text = buildEmailText(full); - let currentTags = []; - try { - const hdr = await messenger.messages.get(id); - currentTags = Array.isArray(hdr.tags) ? 
[...hdr.tags] : []; - } catch (e) { - currentTags = []; - } - - for (const rule of aiRules) { - const cacheKey = await AiClassifier.buildCacheKey(id, rule.criterion); - const matched = await AiClassifier.classifyText(text, rule.criterion, cacheKey); - if (matched) { - for (const act of (rule.actions || [])) { - if (act.type === 'tag' && act.tagKey) { - if (!currentTags.includes(act.tagKey)) { - currentTags.push(act.tagKey); - await messenger.messages.update(id, { tags: currentTags }); - } - } else if (act.type === 'move' && act.folder) { - await messenger.messages.move([id], act.folder); - } else if (act.type === 'junk') { - await messenger.messages.update(id, { junk: !!act.junk }); - } - } - if (rule.stopProcessing) { - break; - } - } - } - processing = false; - const elapsed = Date.now() - currentStart; - currentStart = 0; - const t = timingStats; - t.count += 1; - t.total += elapsed; - t.last = elapsed; - const delta = elapsed - t.mean; - t.mean += delta / t.count; - t.m2 += delta * (elapsed - t.mean); - await storage.local.set({ classifyStats: t }); - showTransientIcon(ICONS.circle); - } catch (e) { - processing = false; - const elapsed = Date.now() - currentStart; - currentStart = 0; - const t = timingStats; - t.count += 1; - t.total += elapsed; - t.last = elapsed; - const delta = elapsed - t.mean; - t.mean += delta / t.count; - t.m2 += delta * (elapsed - t.mean); - await storage.local.set({ classifyStats: t }); - logger.aiLog("failed to apply AI rules", { level: 'error' }, e); - showTransientIcon(ICONS.average); - } - }); + queue = queue.then(() => processMessage(id)); } return queue; From 044e7df07df09efbd56f6461543eabf275f67ab7 Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Tue, 8 Jul 2025 21:59:25 -0500 Subject: [PATCH 04/50] Update documentation --- AGENTS.md | 9 +++++---- README.md | 6 +++--- 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index c51f40c..9b94461 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -5,8 +5,9 @@ This file provides guidelines for codex agents contributing to the Sortana proje ## Repository Overview - `background.js`: Handles startup tasks and coordinates message passing within the extension. -- `modules/`: Contains reusable JavaScript modules such as `AiClassifier.js`. -- `options/`: The options page HTML, JavaScript and Bulma CSS. +- `modules/`: Contains reusable JavaScript modules such as `AiClassifier.js`, + `defaultParams.js` and `themeUtils.js`. +- `options/`: The options page HTML, JavaScript and bundled Bulma CSS (v1.0.3). - `details.html` and `details.js`: View AI reasoning and clear cache for a message. - `resources/`: Images and other static files. - `prompt_templates/`: Prompt template files for the AI service. @@ -41,7 +42,7 @@ Additional documentation exists outside this repository. - Thunderbird Add-on Store Policies - [Third Party Library Usage](https://extensionworkshop.com/documentation/publish/third-party-library-usage/) - Third Party Libraries - - [Bulma.css](https://github.com/jgthms/bulma) + - [Bulma.css v1.0.3](https://github.com/jgthms/bulma/blob/1.0.3/css/bulma.css) - Issue tracker: [Thunderbird tracker on Bugzilla](https://bugzilla.mozilla.org/describecomponents.cgi?product=Thunderbird) @@ -71,5 +72,5 @@ time the add-on loads after an update. Toolbar and menu icons reside under `resources/img` and are provided in 16, 32 and 64 pixel variants. When changing these icons, pass a dictionary mapping the sizes to the paths in `browserAction.setIcon` or `messageDisplayAction.setIcon`. 
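A call following that convention might look like the sketch below; it is illustrative only, and the `circle-*.png` paths stand in for whichever icon is being changed.

```js
// Sketch: swap the toolbar icon, supplying one path per available size.
// The circle-*.png paths are placeholders for the icon actually in use.
await messenger.browserAction.setIcon({
    path: {
        16: "resources/img/circle-16.png",
        32: "resources/img/circle-32.png",
        64: "resources/img/circle-64.png"
    }
});
```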
-Use `resources/svg/svg2img.ps1` to regenerate PNGs from the SVG sources. +Use `resources/svg2img.ps1` to regenerate PNGs from the SVG sources. diff --git a/README.md b/README.md index 4447c9b..3393a6b 100644 --- a/README.md +++ b/README.md @@ -59,12 +59,12 @@ Sortana is implemented entirely with standard WebExtension scripts—no custom e 1. Ensure PowerShell is available (for Windows) or adapt the script for other environments. -2. Ensure the Bulma stylesheet (v1.0.4) is saved as `options/bulma.css`. You can - download it from . +2. The Bulma stylesheet (v1.0.3) is already included as `options/bulma.css`. 3. Run `powershell ./build-xpi.ps1` from the repository root. The script reads the version from `manifest.json` and creates an XPI in the `release` folder. 4. Install the generated XPI in Thunderbird via the Add-ons Manager. During development you can also load the directory as a temporary add-on. +5. To regenerate PNG icons from the SVG sources, run `resources/svg2img.ps1`. ## Usage @@ -124,7 +124,7 @@ requires disclosure of third party libraries that are included in the add-on. Ev the disclosure is only required for add-on review, they'll be listed here as well. Sortana uses the following third party libraries: -- [Bulma.css v1.0.4](https://github.com/jgthms/bulma/blob/1.0.4/css/bulma.css) +- [Bulma.css v1.0.3](https://github.com/jgthms/bulma/blob/1.0.3/css/bulma.css) - MIT License - [turndown v7.2.0](https://github.com/mixmark-io/turndown/tree/v7.2.0) - MIT License From 52583cebc16510cbba58758e45f08f69f0bf6429 Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Wed, 9 Jul 2025 00:38:56 -0500 Subject: [PATCH 05/50] Update README.md --- README.md | 1 - 1 file changed, 1 deletion(-) diff --git a/README.md b/README.md index 3393a6b..2244d49 100644 --- a/README.md +++ b/README.md @@ -115,7 +115,6 @@ Sortana requests the following Thunderbird permissions: - `accountsRead` – list accounts and folders for move actions. - `menus` – add context menu commands. - `tabs` – open new tabs and query the active tab. -- Host permissions (`*://*/*`) – allow network requests to your configured classification service. 
## Thunderbird Add-on Store Disclosures From ea8888f05764c343dd3303b4c37e652ad7cc965f Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Wed, 9 Jul 2025 03:24:33 -0500 Subject: [PATCH 06/50] Update ai-filter.sln --- ai-filter.sln | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/ai-filter.sln b/ai-filter.sln index f41f23f..57705eb 100644 --- a/ai-filter.sln +++ b/ai-filter.sln @@ -56,9 +56,15 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "img", "img", "{F266602F-175 resources\img\average-16.png = resources\img\average-16.png resources\img\average-32.png = resources\img\average-32.png resources\img\average-64.png = resources\img\average-64.png + resources\img\check-16.png = resources\img\check-16.png + resources\img\check-32.png = resources\img\check-32.png + resources\img\check-64.png = resources\img\check-64.png resources\img\circle-16.png = resources\img\circle-16.png resources\img\circle-32.png = resources\img\circle-32.png resources\img\circle-64.png = resources\img\circle-64.png + resources\img\circledots-16.png = resources\img\circledots-16.png + resources\img\circledots-32.png = resources\img\circledots-32.png + resources\img\circledots-64.png = resources\img\circledots-64.png resources\img\clipboarddata-16.png = resources\img\clipboarddata-16.png resources\img\clipboarddata-32.png = resources\img\clipboarddata-32.png resources\img\clipboarddata-64.png = resources\img\clipboarddata-64.png @@ -95,6 +101,9 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "img", "img", "{F266602F-175 resources\img\upload-16.png = resources\img\upload-16.png resources\img\upload-32.png = resources\img\upload-32.png resources\img\upload-64.png = resources\img\upload-64.png + resources\img\x-16.png = resources\img\x-16.png + resources\img\x-32.png = resources\img\x-32.png + resources\img\x-64.png = resources\img\x-64.png EndProjectSection EndProject Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "js", "js", "{21D2A42C-3F85-465C-9141-C106AFD92B68}" @@ -102,6 +111,24 @@ Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "js", "js", "{21D2A42C-3F85- resources\js\turndown.js = resources\js\turndown.js EndProjectSection EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "svg", "svg", "{D4E9C905-4884-488E-B763-5BD39049C1B1}" + ProjectSection(SolutionItems) = preProject + resources\svg\average.svg = resources\svg\average.svg + resources\svg\check.svg = resources\svg\check.svg + resources\svg\circle.svg = resources\svg\circle.svg + resources\svg\circledots.svg = resources\svg\circledots.svg + resources\svg\clipboarddata.svg = resources\svg\clipboarddata.svg + resources\svg\download.svg = resources\svg\download.svg + resources\svg\eye.svg = resources\svg\eye.svg + resources\svg\flag.svg = resources\svg\flag.svg + resources\svg\gear.svg = resources\svg\gear.svg + resources\svg\reply.svg = resources\svg\reply.svg + resources\svg\settings.svg = resources\svg\settings.svg + resources\svg\trash.svg = resources\svg\trash.svg + resources\svg\upload.svg = resources\svg\upload.svg + resources\svg\x.svg = resources\svg\x.svg + EndProjectSection +EndProject Global GlobalSection(SolutionProperties) = preSolution HideSolutionNode = FALSE @@ -115,5 +142,6 @@ Global {68A87938-5C2B-49F5-8AAA-8A34FBBFD854} = {BCC6E6D2-343B-4C48-854D-5FE3BBC3CB70} {F266602F-1755-4A95-A11B-6C90C701C5BF} = {68A87938-5C2B-49F5-8AAA-8A34FBBFD854} {21D2A42C-3F85-465C-9141-C106AFD92B68} = {68A87938-5C2B-49F5-8AAA-8A34FBBFD854} + {D4E9C905-4884-488E-B763-5BD39049C1B1} = 
{68A87938-5C2B-49F5-8AAA-8A34FBBFD854} EndGlobalSection EndGlobal From 3c87950dfb7d072a7f0c98ae2b2d71d1a329b45f Mon Sep 17 00:00:00 2001 From: Jordan Wages Date: Tue, 15 Jul 2025 20:57:57 -0500 Subject: [PATCH 07/50] Update maintenance stats --- options/options.html | 2 ++ options/options.js | 18 +++++++++++++++--- 2 files changed, 17 insertions(+), 3 deletions(-) diff --git a/options/options.html b/options/options.html index 58cfe37..57f407c 100644 --- a/options/options.html +++ b/options/options.html @@ -236,6 +236,8 @@ Last run time--:--:-- Average run time--:--:-- Total run time--:--:-- + Messages per hour0 + Messages per day0 + + +

diff --git a/options/options.js b/options/options.js index 860c944..5e46857 100644 --- a/options/options.js +++ b/options/options.js @@ -10,6 +10,7 @@ document.addEventListener('DOMContentLoaded', async () => { 'templateName', 'customTemplate', 'customSystemPrompt', + 'model', 'aiParams', 'debugLogging', 'htmlToMarkdown', @@ -100,6 +101,88 @@ document.addEventListener('DOMContentLoaded', async () => { endpointInput.addEventListener('input', updateEndpointPreview); updateEndpointPreview(); + const modelSelect = document.getElementById('model-select'); + const refreshModelsBtn = document.getElementById('refresh-models'); + const modelHelp = document.getElementById('model-help'); + const storedModel = typeof defaults.model === 'string' ? defaults.model : ''; + + function setModelHelp(message = '', isError = false) { + if (!modelHelp) return; + modelHelp.textContent = message; + modelHelp.classList.toggle('is-danger', isError); + } + + function populateModelOptions(models = [], selectedModel = '') { + if (!modelSelect) return; + const modelIds = Array.isArray(models) ? models.filter(Boolean) : []; + modelSelect.innerHTML = ''; + + const noneOpt = document.createElement('option'); + noneOpt.value = ''; + noneOpt.textContent = 'None (omit model)'; + modelSelect.appendChild(noneOpt); + + if (selectedModel && !modelIds.includes(selectedModel)) { + const storedOpt = document.createElement('option'); + storedOpt.value = selectedModel; + storedOpt.textContent = `Stored: ${selectedModel}`; + modelSelect.appendChild(storedOpt); + } + + for (const id of modelIds) { + const opt = document.createElement('option'); + opt.value = id; + opt.textContent = id; + modelSelect.appendChild(opt); + } + + const hasSelected = [...modelSelect.options].some(opt => opt.value === selectedModel); + modelSelect.value = hasSelected ? selectedModel : ''; + } + + async function fetchModels(preferredModel = '') { + if (!modelSelect || !refreshModelsBtn) return; + const modelsUrl = AiClassifier.buildModelsUrl(endpointInput.value); + if (!modelsUrl) { + setModelHelp('Set a valid endpoint to load models.', true); + populateModelOptions([], preferredModel || modelSelect.value); + return; + } + + refreshModelsBtn.disabled = true; + setModelHelp('Loading models...'); + + try { + const response = await fetch(modelsUrl, { method: 'GET' }); + if (!response.ok) { + throw new Error(`HTTP ${response.status}`); + } + const data = await response.json(); + let models = []; + if (Array.isArray(data?.data)) { + models = data.data.map(model => model?.id ?? model?.name ?? model?.model ?? '').filter(Boolean); + } else if (Array.isArray(data?.models)) { + models = data.models.map(model => model?.id ?? model?.name ?? model?.model ?? '').filter(Boolean); + } else if (Array.isArray(data)) { + models = data.map(model => model?.id ?? model?.name ?? model?.model ?? model).filter(Boolean); + } + models = [...new Set(models)]; + populateModelOptions(models, preferredModel || modelSelect.value); + setModelHelp(models.length ? `Loaded ${models.length} model${models.length === 1 ? '' : 's'}.` : 'No models returned.'); + } catch (e) { + logger.aiLog('[options] failed to load models', { level: 'warn' }, e); + setModelHelp('Failed to load models. 
Check the endpoint and network.', true); + populateModelOptions([], preferredModel || modelSelect.value); + } finally { + refreshModelsBtn.disabled = false; + } + } + + populateModelOptions([], storedModel); + refreshModelsBtn?.addEventListener('click', () => { + fetchModels(modelSelect.value); + }); + const templates = { openai: browser.i18n.getMessage('template.openai'), qwen: browser.i18n.getMessage('template.qwen'), @@ -276,6 +359,7 @@ document.addEventListener('DOMContentLoaded', async () => { await loadErrors(); updateDiffDisplay(); + await fetchModels(storedModel); [htmlToggle, stripUrlToggle, altTextToggle, collapseWhitespaceToggle, tokenReductionToggle].forEach(toggle => { toggle.addEventListener('change', () => { @@ -914,6 +998,7 @@ document.addEventListener('DOMContentLoaded', async () => { document.getElementById('save').addEventListener('click', async () => { const endpoint = endpointInput.value.trim(); + const model = modelSelect?.value || ''; const templateName = templateSelect.value; const customTemplateText = customTemplate.value; const customSystemPrompt = systemBox.value; @@ -979,10 +1064,10 @@ document.addEventListener('DOMContentLoaded', async () => { const tokenReduction = tokenReductionToggle.checked; const showDebugTab = debugTabToggle.checked; const theme = themeSelect.value; - await storage.local.set({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging, htmlToMarkdown, stripUrlParams, altTextImages, collapseWhitespace, tokenReduction, aiRules: rules, theme, showDebugTab }); + await storage.local.set({ endpoint, model, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging, htmlToMarkdown, stripUrlParams, altTextImages, collapseWhitespace, tokenReduction, aiRules: rules, theme, showDebugTab }); await applyTheme(theme); try { - await AiClassifier.setConfig({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging }); + await AiClassifier.setConfig({ endpoint, model, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging }); logger.setDebug(debugLogging); } catch (e) { logger.aiLog('[options] failed to apply config', {level: 'error'}, e); From 1680ad6c3085a11a36dcb138103f1787367d64d1 Mon Sep 17 00:00:00 2001 From: wagesj45 Date: Fri, 30 Jan 2026 02:54:19 -0600 Subject: [PATCH 49/50] Add optional OpenAI auth headers --- AGENTS.md | 1 + README.md | 3 +++ background.js | 7 ++++-- manifest.json | 2 +- modules/AiClassifier.js | 28 ++++++++++++++++++++++- options/dataTransfer.js | 3 +++ options/options.html | 26 ++++++++++++++++++++++ options/options.js | 49 ++++++++++++++++++++++++++++++++++++++--- 8 files changed, 112 insertions(+), 7 deletions(-) diff --git a/AGENTS.md b/AGENTS.md index f2ea2d9..aece578 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -35,6 +35,7 @@ There are currently no automated tests for this project. If you add tests in the Sortana targets the `/v1/completions` API. The endpoint value stored in settings is a base URL; the full request URL is constructed by appending `/v1/completions` (adding a slash when needed) and defaulting to `https://` if no scheme is provided. The options page can query `/v1/models` from the same base URL to populate the Model dropdown; selecting **None** omits the `model` field from the request payload. 
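As a rough sketch of that URL handling (the helper name and exact checks here are assumptions for illustration, not Sortana's actual implementation):

```js
// Hypothetical sketch of the base-URL normalization described above.
function buildCompletionsUrl(rawBase) {
    let base = (rawBase || "").trim();
    if (!base) return "";
    if (!/^[a-z][a-z0-9+.-]*:\/\//i.test(base)) {
        base = "https://" + base;                 // default to https:// when no scheme is given
    }
    return base.replace(/\/+$/, "") + "/v1/completions"; // append the path without doubling the slash
}

// buildCompletionsUrl("127.0.0.1:5000") === "https://127.0.0.1:5000/v1/completions"
```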
+Advanced options allow an optional API key plus `OpenAI-Organization` and `OpenAI-Project` headers; these headers are only sent when values are provided. Responses are expected to include a JSON object with `match` (or `matched`) plus a short `reason` string; the parser extracts the last JSON object in the response text and ignores any surrounding commentary. ## Documentation diff --git a/README.md b/README.md index 957e5e0..5b908ea 100644 --- a/README.md +++ b/README.md @@ -15,6 +15,7 @@ expecting a `match` (or `matched`) boolean plus a `reason` string. - **Configurable endpoint** – set the classification service base URL on the options page. - **Model selection** – load available models from the endpoint and choose one (or omit the model field). +- **Optional OpenAI auth headers** – provide an API key plus optional organization/project headers when needed. - **Prompt templates** – choose between OpenAI/ChatML, Qwen, Mistral, Harmony (gpt-oss), or provide your own custom template. - **Custom system prompts** – tailor the instructions sent to the model for more precise results. - **Persistent result caching** – classification results and reasoning are saved to disk so messages aren't re-evaluated across restarts. @@ -82,6 +83,8 @@ Sortana is implemented entirely with standard WebExtension scripts—no custom e 1. Open the add-on's options and set the base URL of your classification service (Sortana will append `/v1/completions`). Use the Model dropdown to load `/v1/models` and select a model or choose **None** to omit the `model` field. + Advanced settings include optional API key, organization, and project headers + for OpenAI-hosted endpoints. 2. Use the **Classification Rules** section to add a criterion and optional actions such as tagging, moving, copying, forwarding, replying, deleting or archiving a message when it matches. 
Drag rules to diff --git a/background.js b/background.js index aef8cbb..827dec8 100644 --- a/background.js +++ b/background.js @@ -484,7 +484,7 @@ async function clearCacheForMessages(idsInput) { } try { - const store = await storage.local.get(["endpoint", "model", "templateName", "customTemplate", "customSystemPrompt", "aiParams", "debugLogging", "htmlToMarkdown", "stripUrlParams", "altTextImages", "collapseWhitespace", "tokenReduction", "aiRules", "theme", "showDebugTab"]); + const store = await storage.local.get(["endpoint", "model", "apiKey", "openaiOrganization", "openaiProject", "templateName", "customTemplate", "customSystemPrompt", "aiParams", "debugLogging", "htmlToMarkdown", "stripUrlParams", "altTextImages", "collapseWhitespace", "tokenReduction", "aiRules", "theme", "showDebugTab"]); logger.setDebug(store.debugLogging); await AiClassifier.setConfig(store); userTheme = store.theme || 'auto'; @@ -514,10 +514,13 @@ async function clearCacheForMessages(idsInput) { aiRules = normalizeRules(newRules); logger.aiLog("aiRules updated from storage change", { debug: true }, aiRules); } - if (changes.endpoint || changes.model || changes.templateName || changes.customTemplate || changes.customSystemPrompt || changes.aiParams || changes.debugLogging) { + if (changes.endpoint || changes.model || changes.apiKey || changes.openaiOrganization || changes.openaiProject || changes.templateName || changes.customTemplate || changes.customSystemPrompt || changes.aiParams || changes.debugLogging) { const config = {}; if (changes.endpoint) config.endpoint = changes.endpoint.newValue; if (changes.model) config.model = changes.model.newValue; + if (changes.apiKey) config.apiKey = changes.apiKey.newValue; + if (changes.openaiOrganization) config.openaiOrganization = changes.openaiOrganization.newValue; + if (changes.openaiProject) config.openaiProject = changes.openaiProject.newValue; if (changes.templateName) config.templateName = changes.templateName.newValue; if (changes.customTemplate) config.customTemplate = changes.customTemplate.newValue; if (changes.customSystemPrompt) config.customSystemPrompt = changes.customSystemPrompt.newValue; diff --git a/manifest.json b/manifest.json index a18e7cd..81baae5 100644 --- a/manifest.json +++ b/manifest.json @@ -1,7 +1,7 @@ { "manifest_version": 2, "name": "Sortana", - "version": "2.4.0", + "version": "2.4.1", "default_locale": "en-US", "applications": { "gecko": { diff --git a/modules/AiClassifier.js b/modules/AiClassifier.js index cb68382..b4c0907 100644 --- a/modules/AiClassifier.js +++ b/modules/AiClassifier.js @@ -40,6 +40,9 @@ let gTemplateText = ""; let gAiParams = Object.assign({}, DEFAULT_AI_PARAMS); let gModel = ""; +let gApiKey = ""; +let gOpenaiOrganization = ""; +let gOpenaiProject = ""; let gCache = new Map(); let gCacheLoaded = false; @@ -223,6 +226,15 @@ async function setConfig(config = {}) { if (typeof config.model === "string") { gModel = config.model.trim(); } + if (typeof config.apiKey === "string") { + gApiKey = config.apiKey.trim(); + } + if (typeof config.openaiOrganization === "string") { + gOpenaiOrganization = config.openaiOrganization.trim(); + } + if (typeof config.openaiProject === "string") { + gOpenaiProject = config.openaiProject.trim(); + } if (typeof config.debugLogging === "boolean") { setDebug(config.debugLogging); } @@ -241,6 +253,20 @@ async function setConfig(config = {}) { aiLog(`[AiClassifier] Template set to ${gTemplateName}`, {debug: true}); } +function buildAuthHeaders() { + const headers = {}; + if (gApiKey) { + 
headers.Authorization = `Bearer ${gApiKey}`; + } + if (gOpenaiOrganization) { + headers["OpenAI-Organization"] = gOpenaiOrganization; + } + if (gOpenaiProject) { + headers["OpenAI-Project"] = gOpenaiProject; + } + return headers; +} + function buildSystemPrompt() { return SYSTEM_PREFIX + (gCustomSystemPrompt || DEFAULT_CUSTOM_SYSTEM_PROMPT) + SYSTEM_SUFFIX; } @@ -453,7 +479,7 @@ async function classifyText(text, criterion, cacheKey = null) { try { const response = await fetch(gEndpoint, { method: "POST", - headers: { "Content-Type": "application/json" }, + headers: { "Content-Type": "application/json", ...buildAuthHeaders() }, body: payload, }); diff --git a/options/dataTransfer.js b/options/dataTransfer.js index fdf096f..393b533 100644 --- a/options/dataTransfer.js +++ b/options/dataTransfer.js @@ -4,6 +4,9 @@ const KEY_GROUPS = { settings: [ 'endpoint', 'model', + 'apiKey', + 'openaiOrganization', + 'openaiProject', 'templateName', 'customTemplate', 'customSystemPrompt', diff --git a/options/options.html b/options/options.html index 4c1fd79..2a1431e 100644 --- a/options/options.html +++ b/options/options.html @@ -141,6 +141,32 @@
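Taken together, the request assembled by this patch ends up with headers along the lines of the sketch below; it simply restates `buildAuthHeaders()` plus the `fetch` call shown above, with each optional header sent only when its value is non-empty.

```js
// Illustrative recap of the classifier request after this patch.
const headers = { "Content-Type": "application/json" };
if (gApiKey) headers.Authorization = `Bearer ${gApiKey}`;
if (gOpenaiOrganization) headers["OpenAI-Organization"] = gOpenaiOrganization;
if (gOpenaiProject) headers["OpenAI-Project"] = gOpenaiProject;

const response = await fetch(gEndpoint, {
    method: "POST",
    headers,
    body: payload
});
```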