diff --git a/background.js b/background.js
index 7ae1e8a..c28a265 100644
--- a/background.js
+++ b/background.js
@@ -35,7 +35,7 @@ let logger;
 })();
 
 // Listen for messages from UI/devtools
-browser.runtime.onMessage.addListener((msg) => {
+browser.runtime.onMessage.addListener(async (msg) => {
   logger.aiLog("onMessage received", {debug: true}, msg);
 
   if (msg?.type === "aiFilter:test") {
@@ -45,7 +45,7 @@ browser.runtime.onMessage.addListener((msg) => {
 
     try {
       logger.aiLog("Calling browser.aiFilter.classify()", {debug: true});
-      const result = browser.aiFilter.classify(text, criterion);
+      const result = await browser.aiFilter.classify(text, criterion);
       logger.aiLog("classify() returned", {debug: true}, result);
       return { match: result };
     }
diff --git a/experiment/api.js b/experiment/api.js
index c225779..55a0b8b 100644
--- a/experiment/api.js
+++ b/experiment/api.js
@@ -70,10 +70,10 @@ var aiFilter = class extends ExtensionCommon.ExtensionAPI {
           aiLog("[api] failed to apply config", {level: 'error'}, err);
         }
       },
-      classify: (text, criterion) => {
+      classify: async (text, criterion) => {
         aiLog("[api] classify() called", {debug: true}, text, criterion);
         try {
-          return AiClassifier.classifyTextSync(text, criterion);
+          return await AiClassifier.classifyText(text, criterion);
         } catch (err) {
           aiLog("[api] error in classify()", {level: 'error'}, err);
 
diff --git a/modules/AiClassifier.jsm b/modules/AiClassifier.jsm
index eb399c4..204bf7a 100644
--- a/modules/AiClassifier.jsm
+++ b/modules/AiClassifier.jsm
@@ -158,17 +158,47 @@ function buildPrompt(body, criterion) {
   return template.replace(/{{\s*(\w+)\s*}}/g, (m, key) => data[key] || "");
 }
 
-function classifyTextSync(text, criterion, cacheKey = null) {
+function getCachedResult(cacheKey) {
   loadCache();
   if (cacheKey && gCache.has(cacheKey)) {
     aiLog(`[AiClassifier] Cache hit for key: ${cacheKey}`, {debug: true});
     return gCache.get(cacheKey);
   }
+  return null;
+}
 
+function buildPayload(text, criterion) {
   let payloadObj = Object.assign({ prompt: buildPrompt(text, criterion) }, gAiParams);
-  let payload = JSON.stringify(payloadObj);
+  return JSON.stringify(payloadObj);
+}
+
+function parseMatch(result) {
+  const rawText = result.choices?.[0]?.text || "";
+  const thinkText = rawText.match(/[\s\S]*?<\/think>/gi)?.join('') || '';
+  aiLog('[AiClassifier] ⮡ Reasoning:', {debug: true}, thinkText);
+  const cleanedText = rawText.replace(/[\s\S]*?<\/think>/gi, "").trim();
+  aiLog('[AiClassifier] ⮡ Cleaned Response Text:', {debug: true}, cleanedText);
+  const obj = JSON.parse(cleanedText);
+  return obj.matched === true || obj.match === true;
+}
+
+function cacheResult(cacheKey, matched) {
+  if (cacheKey) {
+    aiLog(`[AiClassifier] Caching entry '${cacheKey}' → ${matched}`, {debug: true});
+    gCache.set(cacheKey, matched);
+    saveCache(cacheKey, matched);
+  }
+}
+
+function classifyTextSync(text, criterion, cacheKey = null) {
+  const cached = getCachedResult(cacheKey);
+  if (cached !== null) {
+    return cached;
+  }
+
+  const payload = buildPayload(text, criterion);
 
   aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true});
 
 
 
@@ -179,23 +209,13 @@ function classifyTextSync(text, criterion, cacheKey = null) {
     xhr.setRequestHeader("Content-Type", "application/json");
     xhr.send(payload);
 
-    if (xhr.status < 200 || xhr.status >= 300) {
-      aiLog(`HTTP status ${xhr.status}`, {level: 'warn'});
-    } else {
+    if (xhr.status >= 200 && xhr.status < 300) {
       const result = JSON.parse(xhr.responseText);
       aiLog(`[AiClassifier] Received response:`, {debug: true}, result);
-      const rawText = result.choices?.[0]?.text || "";
-      const thinkText = rawText.match(/[\s\S]*?<\/think>/gi)?.join('') || '';
-      aiLog('[AiClassifier] ⮡ Reasoning:', {debug: true}, thinkText);
-      const cleanedText = rawText.replace(/[\s\S]*?<\/think>/gi, "").trim();
-      aiLog('[AiClassifier] ⮡ Cleaned Response Text:', {debug: true}, cleanedText);
-      const obj = JSON.parse(cleanedText);
-      matched = obj.matched === true || obj.match === true;
-      if (cacheKey) {
-        aiLog(`[AiClassifier] Caching entry '${cacheKey}' → ${matched}`, {debug: true});
-        gCache.set(cacheKey, matched);
-        saveCache(cacheKey, matched);
-      }
+      matched = parseMatch(result);
+      cacheResult(cacheKey, matched);
+    } else {
+      aiLog(`HTTP status ${xhr.status}`, {level: 'warn'});
+    }
   } catch (e) {
     aiLog(`HTTP request failed`, {level: 'error'}, e);
 
@@ -205,7 +225,36 @@
 }
 
 async function classifyText(text, criterion, cacheKey = null) {
-  return classifyTextSync(text, criterion, cacheKey);
+  const cached = getCachedResult(cacheKey);
+  if (cached !== null) {
+    return cached;
+  }
+
+  const payload = buildPayload(text, criterion);
+
+  aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true});
+
+  try {
+    const response = await fetch(gEndpoint, {
+      method: "POST",
+      headers: { "Content-Type": "application/json" },
+      body: payload,
+    });
+
+    if (!response.ok) {
+      aiLog(`HTTP status ${response.status}`, {level: 'warn'});
+      return false;
+    }
+
+    const result = await response.json();
+    aiLog(`[AiClassifier] Received response:`, {debug: true}, result);
+    const matched = parseMatch(result);
+    cacheResult(cacheKey, matched);
+    return matched;
+  } catch (e) {
+    aiLog(`HTTP request failed`, {level: 'error'}, e);
+    return false;
+  }
 }
 
 var AiClassifier = { classifyText, classifyTextSync, setConfig };