Convert AiClassifier to ES module
parent a7d2aca60f
commit 83166c8c4f
5 changed files with 69 additions and 77 deletions
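In short: the classifier drops the JSM conventions (EXPORTED_SYMBOLS plus a single exported object) in favour of standard ES module syntax, the profile-file cache moves to browser.storage.local, and the synchronous XHR calls become fetch(), wrapped in an event-loop spin where a blocking API is still needed. The core of the module change, condensed from the hunks below:

    // Before: modules/AiClassifier.jsm (JSM)
    var EXPORTED_SYMBOLS = ["AiClassifier"];
    var AiClassifier = { classifyText, classifyTextSync, setConfig };

    // After: modules/AiClassifier.js (ES module)
    export { classifyText, classifyTextSync, setConfig };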
@@ -17,12 +17,8 @@ let AiClassifier;
     logger = await import(browser.runtime.getURL("logger.js"));
     logger.aiLog("background.js loaded – ready to classify", {debug: true});
     try {
-        if (typeof ChromeUtils !== "undefined") {
-            ({ AiClassifier } = ChromeUtils.import("resource://aifilter/modules/AiClassifier.jsm"));
-            logger.aiLog("AiClassifier imported", {debug: true});
-        } else {
-            logger.aiLog("ChromeUtils is undefined, skipping AiClassifier import", {level: 'warn'});
-        }
+        AiClassifier = await import(browser.runtime.getURL('modules/AiClassifier.js'));
+        logger.aiLog("AiClassifier imported", {debug: true});
     } catch (e) {
         logger.aiLog("failed to import AiClassifier", {level: 'error'}, e);
     }
@@ -36,7 +36,7 @@ var aiFilter = class extends ExtensionCommon.ExtensionAPI {
     setDebug = loggerMod.setDebug;

     // Now that the resource URL is registered, import the classifier
-    ({ AiClassifier } = ChromeUtils.import("resource://aifilter/modules/AiClassifier.jsm"));
+    AiClassifier = ChromeUtils.importESModule("resource://aifilter/modules/AiClassifier.js");
     aiLog("[api] onStartup()", {debug: true});

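Worth noting for the experiment code: ChromeUtils.import() returned an exports object that was destructured, while ChromeUtils.importESModule() returns the module namespace, so callers now reach the classifier's functions through that namespace. A minimal sketch, using the names from this commit:

    // JSM: destructure the single exported object.
    // ({ AiClassifier } = ChromeUtils.import("resource://aifilter/modules/AiClassifier.jsm"));

    // ES module: the namespace itself carries the named exports
    // (classifyText, classifyTextSync, setConfig).
    AiClassifier = ChromeUtils.importESModule("resource://aifilter/modules/AiClassifier.js");
    AiClassifier.setConfig({ endpoint, templateName });  // illustrative call through the namespace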
@@ -1,10 +1,6 @@
 "use strict";
-var { Services } = globalThis || ChromeUtils.importESModule("resource://gre/modules/Services.sys.mjs");
-var { NetUtil } = ChromeUtils.importESModule("resource://gre/modules/NetUtil.sys.mjs");
-var { FileUtils } = ChromeUtils.importESModule("resource://gre/modules/FileUtils.sys.mjs");
-var { aiLog, setDebug } = ChromeUtils.import("resource://aifilter/modules/logger.jsm");
-
-var EXPORTED_SYMBOLS = ["AiClassifier"];
+import { aiLog, setDebug } from "../logger.js";
+const { Services } = globalThis || ChromeUtils.importESModule("resource://gre/modules/Services.sys.mjs");

 const SYSTEM_PREFIX = `You are an email-classification assistant.
 Read the email below and the classification criterion provided by the user.
@@ -41,36 +37,22 @@ let gAiParams = {

 let gCache = new Map();
 let gCacheLoaded = false;
-let gCacheFile;

-function ensureCacheFile() {
-    if (!gCacheFile) {
-        gCacheFile = Services.dirsvc.get("ProfD", Ci.nsIFile);
-        gCacheFile.append("aifilter_cache.json");
-    }
-}
-
-function loadCache() {
+async function loadCache() {
     if (gCacheLoaded) {
         return;
     }
-    ensureCacheFile();
-    aiLog(`[AiClassifier] Loading cache from ${gCacheFile.path}`, {debug: true});
+    aiLog(`[AiClassifier] Loading cache`, {debug: true});
     try {
-        if (gCacheFile.exists()) {
-            let stream = Cc["@mozilla.org/network/file-input-stream;1"].createInstance(Ci.nsIFileInputStream);
-            stream.init(gCacheFile, -1, 0, 0);
-            let data = NetUtil.readInputStreamToString(stream, stream.available());
-            stream.close();
-            aiLog(`[AiClassifier] Cache file contents: ${data}`, {debug: true});
-            let obj = JSON.parse(data);
-            for (let [k, v] of Object.entries(obj)) {
+        const { aiCache } = await browser.storage.local.get("aiCache");
+        if (aiCache) {
+            for (let [k, v] of Object.entries(aiCache)) {
                 aiLog(`[AiClassifier] ⮡ Loaded entry '${k}' → ${v}`, {debug: true});
                 gCache.set(k, v);
             }
             aiLog(`[AiClassifier] Loaded ${gCache.size} cache entries`, {debug: true});
         } else {
-            aiLog(`[AiClassifier] Cache file does not exist`, {debug: true});
+            aiLog(`[AiClassifier] Cache is empty`, {debug: true});
         }
     } catch (e) {
         aiLog(`Failed to load cache`, {level: 'error'}, e);
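The cache therefore moves from aifilter_cache.json in the profile directory (ProfD) into the extension's storage area under a single aiCache key. Assuming the shape written by saveCache() below (cache key → boolean verdict), the stored record looks roughly like this and can be inspected or cleared with the ordinary storage API:

    // Hypothetical storage contents after a few classifications:
    // { aiCache: { "<cache key>": true, "<another key>": false } }
    const { aiCache } = await browser.storage.local.get("aiCache");  // read the whole cache
    await browser.storage.local.remove("aiCache");                   // clear it to force re-classification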
@@ -78,36 +60,33 @@ function loadCache() {
     gCacheLoaded = true;
 }

-function saveCache(updatedKey, updatedValue) {
-    ensureCacheFile();
-    aiLog(`[AiClassifier] Saving cache to ${gCacheFile.path}`, {debug: true});
+function loadCacheSync() {
+    if (!gCacheLoaded) {
+        let done = false;
+        loadCache().finally(() => { done = true; });
+        Services.tm.spinEventLoopUntil(() => done);
+    }
+}
+
+async function saveCache(updatedKey, updatedValue) {
     if (typeof updatedKey !== "undefined") {
         aiLog(`[AiClassifier] ⮡ Persisting entry '${updatedKey}' → ${updatedValue}`, {debug: true});
     }
     try {
-        let obj = Object.fromEntries(gCache);
-        let data = JSON.stringify(obj);
-        let stream = Cc["@mozilla.org/network/file-output-stream;1"].createInstance(Ci.nsIFileOutputStream);
-        stream.init(gCacheFile,
-                    FileUtils.MODE_WRONLY | FileUtils.MODE_CREATE | FileUtils.MODE_TRUNCATE,
-                    FileUtils.PERMS_FILE,
-                    0);
-        stream.write(data, data.length);
-        stream.close();
+        await browser.storage.local.set({ aiCache: Object.fromEntries(gCache) });
     } catch (e) {
         aiLog(`Failed to save cache`, {level: 'error'}, e);
     }
 }

-function loadTemplate(name) {
+async function loadTemplate(name) {
     try {
-        let url = `resource://aifilter/prompt_templates/${name}.txt`;
-        let xhr = new XMLHttpRequest();
-        xhr.open("GET", url, false);
-        xhr.overrideMimeType("text/plain");
-        xhr.send();
-        if (xhr.status === 0 || xhr.status === 200) {
-            return xhr.responseText;
+        const url = typeof browser !== "undefined" && browser.runtime?.getURL
+            ? browser.runtime.getURL(`prompt_templates/${name}.txt`)
+            : `resource://aifilter/prompt_templates/${name}.txt`;
+        const res = await fetch(url);
+        if (res.ok) {
+            return await res.text();
         }
     } catch (e) {
         aiLog(`Failed to load template '${name}':`, {level: 'error'}, e);
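loadCacheSync() (and loadTemplateSync() further down) keep a synchronous surface on top of the now-async storage and fetch code by spinning the event loop until the underlying promise settles. The bridge pattern used throughout this commit boils down to:

    // Sketch of the sync-over-async bridge; only works in privileged code where
    // Services.tm is available, and it blocks the caller until the promise settles.
    function waitFor(promise) {  // illustrative helper, not part of the patch
        let done = false;
        let value;
        promise.then(v => { value = v; }).catch(() => {}).finally(() => { done = true; });
        Services.tm.spinEventLoopUntil(() => done);
        return value;
    }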
@@ -115,6 +94,14 @@ function loadTemplate(name) {
     return "";
 }

+function loadTemplateSync(name) {
+    let text = "";
+    let done = false;
+    loadTemplate(name).then(t => { text = t; }).catch(() => {}).finally(() => { done = true; });
+    Services.tm.spinEventLoopUntil(() => done);
+    return text;
+}
+
 function setConfig(config = {}) {
     if (config.endpoint) {
         gEndpoint = config.endpoint;
@@ -138,7 +125,7 @@ function setConfig(config = {}) {
     if (typeof config.debugLogging === "boolean") {
         setDebug(config.debugLogging);
     }
-    gTemplateText = gTemplateName === "custom" ? gCustomTemplate : loadTemplate(gTemplateName);
+    gTemplateText = gTemplateName === "custom" ? gCustomTemplate : loadTemplateSync(gTemplateName);
     aiLog(`[AiClassifier] Endpoint set to ${gEndpoint}`, {debug: true});
     aiLog(`[AiClassifier] Template set to ${gTemplateName}`, {debug: true});
 }
@@ -154,12 +141,12 @@ function buildPrompt(body, criterion) {
         email: body,
         query: criterion,
     };
-    let template = gTemplateText || loadTemplate(gTemplateName);
+    let template = gTemplateText || loadTemplateSync(gTemplateName);
     return template.replace(/{{\s*(\w+)\s*}}/g, (m, key) => data[key] || "");
 }

 function getCachedResult(cacheKey) {
-    loadCache();
+    loadCacheSync();
     if (cacheKey && gCache.has(cacheKey)) {
         aiLog(`[AiClassifier] Cache hit for key: ${cacheKey}`, {debug: true});
         return gCache.get(cacheKey);
@@ -202,26 +189,33 @@ function classifyTextSync(text, criterion, cacheKey = null) {

     aiLog(`[AiClassifier] Sending classification request to ${gEndpoint}`, {debug: true});

-    let matched = false;
+    let result;
+    let done = false;
+    (async () => {
     try {
-        let xhr = new XMLHttpRequest();
-        xhr.open("POST", gEndpoint, false);
-        xhr.setRequestHeader("Content-Type", "application/json");
-        xhr.send(payload);
-
-        if (xhr.status >= 200 && xhr.status < 300) {
-            const result = JSON.parse(xhr.responseText);
-            aiLog(`[AiClassifier] Received response:`, {debug: true}, result);
-            matched = parseMatch(result);
-            cacheResult(cacheKey, matched);
+        const response = await fetch(gEndpoint, {
+            method: "POST",
+            headers: { "Content-Type": "application/json" },
+            body: payload,
+        });
+        if (response.ok) {
+            const json = await response.json();
+            aiLog(`[AiClassifier] Received response:`, {debug: true}, json);
+            result = parseMatch(json);
+            cacheResult(cacheKey, result);
         } else {
-            aiLog(`HTTP status ${xhr.status}`, {level: 'warn'});
+            aiLog(`HTTP status ${response.status}`, {level: 'warn'});
+            result = false;
         }
     } catch (e) {
         aiLog(`HTTP request failed`, {level: 'error'}, e);
+        result = false;
+    } finally {
+        done = true;
     }
-
-    return matched;
+    })();
+    Services.tm.spinEventLoopUntil(() => done);
+    return result;
 }

 async function classifyText(text, criterion, cacheKey = null) {
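classifyTextSync() keeps its blocking signature by spinning the event loop around the fetch, while the async classifyText() remains available; call sites that can await should prefer the async form. Illustrative usage (argument names taken from the function signatures above):

    // Where the caller must stay synchronous:
    let matched = AiClassifier.classifyTextSync(text, criterion, cacheKey);

    // Where an await is possible:
    let matchedAsync = await AiClassifier.classifyText(text, criterion, cacheKey);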
@@ -257,4 +251,4 @@ async function classifyText(text, criterion, cacheKey = null) {
     }
 }

-var AiClassifier = { classifyText, classifyTextSync, setConfig };
+export { classifyText, classifyTextSync, setConfig };
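With named exports in place, the same file now serves both halves of the add-on: WebExtension pages load it with a dynamic import(), and the privileged experiment loads it with ChromeUtils.importESModule(), as the surrounding hunks show. Side by side:

    // WebExtension context (background script, options page):
    AiClassifier = await import(browser.runtime.getURL('modules/AiClassifier.js'));

    // Privileged experiment context:
    AiClassifier = ChromeUtils.importESModule("resource://aifilter/modules/AiClassifier.js");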
@@ -3,7 +3,7 @@ var { ExtensionParent } = ChromeUtils.importESModule("resource://gre/modules/Ext
 var { MailServices } = ChromeUtils.importESModule("resource:///modules/MailServices.sys.mjs");
 var { Services } = globalThis || ChromeUtils.importESModule("resource://gre/modules/Services.sys.mjs");
 var { aiLog } = ChromeUtils.import("resource://aifilter/modules/logger.jsm");
-var { AiClassifier } = ChromeUtils.import("resource://aifilter/modules/AiClassifier.jsm");
+var AiClassifier = ChromeUtils.importESModule("resource://aifilter/modules/AiClassifier.js");
 var { getPlainText } = ChromeUtils.import("resource://aifilter/modules/messageUtils.jsm");

 function sha256Hex(str) {
@@ -1,5 +1,6 @@
 document.addEventListener('DOMContentLoaded', async () => {
     const logger = await import(browser.runtime.getURL('logger.js'));
+    const AiClassifier = await import(browser.runtime.getURL('modules/AiClassifier.js'));
     const defaults = await browser.storage.local.get([
         'endpoint',
         'templateName',
@@ -88,6 +89,7 @@ document.addEventListener('DOMContentLoaded', async () => {
     await browser.storage.local.set({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging });
     try {
         await browser.aiFilter.initConfig({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging });
+        AiClassifier.setConfig({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging });
         logger.setDebug(debugLogging);
     } catch (e) {
         logger.aiLog('[options] failed to apply config', {level: 'error'}, e);