Refactor classifier to share code
This commit is contained in:
parent
032093d8b1
commit
312de63b2e
3 changed files with 71 additions and 22 deletions
|
@ -35,7 +35,7 @@ let logger;
|
||||||
})();
|
})();
|
||||||
|
|
||||||
// Listen for messages from UI/devtools
|
// Listen for messages from UI/devtools
|
||||||
browser.runtime.onMessage.addListener((msg) => {
|
browser.runtime.onMessage.addListener(async (msg) => {
|
||||||
logger.aiLog("onMessage received", {debug: true}, msg);
|
logger.aiLog("onMessage received", {debug: true}, msg);
|
||||||
|
|
||||||
if (msg?.type === "aiFilter:test") {
|
if (msg?.type === "aiFilter:test") {
|
||||||
|
@ -45,7 +45,7 @@ browser.runtime.onMessage.addListener((msg) => {
|
||||||
|
|
||||||
try {
|
try {
|
||||||
logger.aiLog("Calling browser.aiFilter.classify()", {debug: true});
|
logger.aiLog("Calling browser.aiFilter.classify()", {debug: true});
|
||||||
const result = browser.aiFilter.classify(text, criterion);
|
const result = await browser.aiFilter.classify(text, criterion);
|
||||||
logger.aiLog("classify() returned", {debug: true}, result);
|
logger.aiLog("classify() returned", {debug: true}, result);
|
||||||
return { match: result };
|
return { match: result };
|
||||||
}
|
}
|
||||||
|
|
|
@ -70,10 +70,10 @@ var aiFilter = class extends ExtensionCommon.ExtensionAPI {
|
||||||
aiLog("[api] failed to apply config", {level: 'error'}, err);
|
aiLog("[api] failed to apply config", {level: 'error'}, err);
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
classify: (text, criterion) => {
|
classify: async (text, criterion) => {
|
||||||
aiLog("[api] classify() called", {debug: true}, text, criterion);
|
aiLog("[api] classify() called", {debug: true}, text, criterion);
|
||||||
try {
|
try {
|
||||||
return AiClassifier.classifyTextSync(text, criterion);
|
return await AiClassifier.classifyText(text, criterion);
|
||||||
}
|
}
|
||||||
catch (err) {
|
catch (err) {
|
||||||
aiLog("[api] error in classify()", {level: 'error'}, err);
|
aiLog("[api] error in classify()", {level: 'error'}, err);
|
||||||
|
|
|
@ -158,17 +158,47 @@ function buildPrompt(body, criterion) {
|
||||||
return template.replace(/{{\s*(\w+)\s*}}/g, (m, key) => data[key] || "");
|
return template.replace(/{{\s*(\w+)\s*}}/g, (m, key) => data[key] || "");
|
||||||
}
|
}
|
||||||
|
|
||||||
// Look up a previously stored classification verdict.
// Returns the cached boolean for `cacheKey`, or null when no key was
// given or the key has no entry (callers test `!== null`, so a cached
// `false` is still honored).
function getCachedResult(cacheKey) {
  loadCache();
  if (!cacheKey || !gCache.has(cacheKey)) {
    return null;
  }
  aiLog(`[AiClassifier] Cache hit for key: ${cacheKey}`, {debug: true});
  return gCache.get(cacheKey);
}
||||||
|
|
||||||
|
// Serialize the request body for the classification endpoint: the
// rendered prompt merged with the globally configured AI parameters
// (keys in gAiParams take precedence over `prompt` if they collide).
function buildPayload(text, criterion) {
  const payloadObj = {
    prompt: buildPrompt(text, criterion),
    ...gAiParams,
  };
  return JSON.stringify(payloadObj);
}
||||||
|
|
||||||
|
// Extract the boolean verdict from a completion-style API response.
// Any <think>…</think> reasoning blocks are logged and stripped, then
// the remaining text is parsed as JSON and `matched`/`match` is read.
// Throws if the cleaned text is not valid JSON (callers catch this).
function parseMatch(result) {
  const rawText = result.choices?.[0]?.text || "";

  const thinkBlocks = rawText.match(/<think>[\s\S]*?<\/think>/gi);
  aiLog('[AiClassifier] ⮡ Reasoning:', {debug: true}, thinkBlocks?.join('') || '');

  const cleanedText = rawText.replace(/<think>[\s\S]*?<\/think>/gi, "").trim();
  aiLog('[AiClassifier] ⮡ Cleaned Response Text:', {debug: true}, cleanedText);

  const verdict = JSON.parse(cleanedText);
  return verdict.matched === true || verdict.match === true;
}
||||||
|
|
||||||
|
// Persist a classification verdict under `cacheKey`, both in the
// in-memory map and on disk. No-op when no key was supplied.
function cacheResult(cacheKey, matched) {
  if (!cacheKey) {
    return;
  }
  aiLog(`[AiClassifier] Caching entry '${cacheKey}' → ${matched}`, {debug: true});
  gCache.set(cacheKey, matched);
  saveCache(cacheKey, matched);
}
||||||
|
|
||||||
|
function classifyTextSync(text, criterion, cacheKey = null) {
|
||||||
|
const cached = getCachedResult(cacheKey);
|
||||||
|
if (cached !== null) {
|
||||||
|
return cached;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = buildPayload(text, criterion);
|
||||||
|
|
||||||
aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true});
|
aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true});
|
||||||
|
|
||||||
|
@ -179,23 +209,13 @@ function classifyTextSync(text, criterion, cacheKey = null) {
|
||||||
xhr.setRequestHeader("Content-Type", "application/json");
|
xhr.setRequestHeader("Content-Type", "application/json");
|
||||||
xhr.send(payload);
|
xhr.send(payload);
|
||||||
|
|
||||||
if (xhr.status < 200 || xhr.status >= 300) {
|
if (xhr.status >= 200 && xhr.status < 300) {
|
||||||
aiLog(`HTTP status ${xhr.status}`, {level: 'warn'});
|
|
||||||
} else {
|
|
||||||
const result = JSON.parse(xhr.responseText);
|
const result = JSON.parse(xhr.responseText);
|
||||||
aiLog(`[AiClassifier] Received response:`, {debug: true}, result);
|
aiLog(`[AiClassifier] Received response:`, {debug: true}, result);
|
||||||
const rawText = result.choices?.[0]?.text || "";
|
matched = parseMatch(result);
|
||||||
const thinkText = rawText.match(/<think>[\s\S]*?<\/think>/gi)?.join('') || '';
|
cacheResult(cacheKey, matched);
|
||||||
aiLog('[AiClassifier] ⮡ Reasoning:', {debug: true}, thinkText);
|
} else {
|
||||||
const cleanedText = rawText.replace(/<think>[\s\S]*?<\/think>/gi, "").trim();
|
aiLog(`HTTP status ${xhr.status}`, {level: 'warn'});
|
||||||
aiLog('[AiClassifier] ⮡ Cleaned Response Text:', {debug: true}, cleanedText);
|
|
||||||
const obj = JSON.parse(cleanedText);
|
|
||||||
matched = obj.matched === true || obj.match === true;
|
|
||||||
if (cacheKey) {
|
|
||||||
aiLog(`[AiClassifier] Caching entry '${cacheKey}' → ${matched}`, {debug: true});
|
|
||||||
gCache.set(cacheKey, matched);
|
|
||||||
saveCache(cacheKey, matched);
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
aiLog(`HTTP request failed`, {level: 'error'}, e);
|
aiLog(`HTTP request failed`, {level: 'error'}, e);
|
||||||
|
@ -205,7 +225,36 @@ function classifyTextSync(text, criterion, cacheKey = null) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// Classify `text` against `criterion` via the configured AI endpoint.
// Consults the result cache first and stores fresh verdicts back into
// it. Resolves to a boolean match verdict; resolves to false on a
// non-2xx HTTP status or any request/parse failure (logged, not thrown).
async function classifyText(text, criterion, cacheKey = null) {
  const cached = getCachedResult(cacheKey);
  if (cached !== null) {
    return cached;
  }

  const body = buildPayload(text, criterion);
  aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true});

  try {
    const response = await fetch(gEndpoint, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body,
    });

    if (!response.ok) {
      aiLog(`HTTP status ${response.status}`, {level: 'warn'});
      return false;
    }

    const data = await response.json();
    aiLog(`[AiClassifier] Received response:`, {debug: true}, data);

    const matched = parseMatch(data);
    cacheResult(cacheKey, matched);
    return matched;
  } catch (err) {
    aiLog(`HTTP request failed`, {level: 'error'}, err);
    return false;
  }
}
|
||||||
|
|
||||||
// Public interface of this module.
var AiClassifier = {
  classifyText,
  classifyTextSync,
  setConfig,
};
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue