Merge pull request #37 from wagesj45/codex/evaluate-long-term-cache-storage-options

Fix Services import in ExpressionSearchFilter
Jordan Wages 2025-06-25 23:17:57 -05:00 committed by GitHub
commit a149b6bc8b
4 changed files with 11 additions and 7 deletions

View file

@@ -10,6 +10,8 @@
"use strict";
+const storage = (globalThis.messenger ?? browser).storage;
let logger;
let AiClassifier;
let aiRules = [];
@@ -42,7 +44,7 @@ async function applyAiRules(idsInput) {
if (!ids.length) return queue;
if (!aiRules.length) {
-const { aiRules: stored } = await browser.storage.local.get("aiRules");
+const { aiRules: stored } = await storage.local.get("aiRules");
aiRules = Array.isArray(stored) ? stored.map(r => {
if (r.actions) return r;
const actions = [];
@@ -101,7 +103,7 @@ async function applyAiRules(idsInput) {
}
try {
-const store = await browser.storage.local.get(["endpoint", "templateName", "customTemplate", "customSystemPrompt", "aiParams", "debugLogging", "aiRules"]);
+const store = await storage.local.get(["endpoint", "templateName", "customTemplate", "customSystemPrompt", "aiParams", "debugLogging", "aiRules"]);
logger.setDebug(store.debugLogging);
await AiClassifier.setConfig(store);
aiRules = Array.isArray(store.aiRules) ? store.aiRules.map(r => {
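Note on the pattern introduced above: `(globalThis.messenger ?? browser).storage` resolves the WebExtension storage area whether the script runs under Thunderbird's `messenger` namespace or the generic `browser` namespace. A minimal standalone sketch of that lookup, assuming only the standard `storage.local` API (the `getStorage` and `readAiRules` helper names are illustrative, not part of this patch):

// Sketch of the namespace-tolerant storage lookup used above.
// `getStorage` is a hypothetical helper, not code from this patch.
function getStorage() {
  const api = globalThis.messenger ?? globalThis.browser;
  if (!api?.storage?.local) {
    throw new Error("WebExtension storage API is not available in this context");
  }
  return api.storage;
}

// Example usage: read a key and fall back to a safe default.
async function readAiRules() {
  const { aiRules } = await getStorage().local.get("aiRules");
  return Array.isArray(aiRules) ? aiRules : [];
}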

View file

@@ -1,6 +1,8 @@
"use strict";
import { aiLog, setDebug } from "../logger.js";
+const storage = (globalThis.messenger ?? globalThis.browser).storage;
let Services;
try {
if (typeof globalThis !== "undefined" && globalThis.Services) {
@@ -54,7 +56,7 @@ async function loadCache() {
}
aiLog(`[AiClassifier] Loading cache`, {debug: true});
try {
-const { aiCache } = await browser.storage.local.get("aiCache");
+const { aiCache } = await storage.local.get("aiCache");
if (aiCache) {
for (let [k, v] of Object.entries(aiCache)) {
aiLog(`[AiClassifier] ⮡ Loaded entry '${k}' → ${v}`, {debug: true});
@@ -86,7 +88,7 @@ async function saveCache(updatedKey, updatedValue) {
aiLog(`[AiClassifier] ⮡ Persisting entry '${updatedKey}' → ${updatedValue}`, {debug: true});
}
try {
-await browser.storage.local.set({ aiCache: Object.fromEntries(gCache) });
+await storage.local.set({ aiCache: Object.fromEntries(gCache) });
} catch (e) {
aiLog(`Failed to save cache`, {level: 'error'}, e);
}
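For context, loadCache and saveCache round-trip an in-memory Map through the `aiCache` key in `storage.local`. A minimal sketch of that serialize/restore shape, assuming the same key and a standard `storage.local` area (the helper names are illustrative, not part of this patch):

// Sketch of persisting a Map via storage.local, mirroring the
// Object.fromEntries / Object.entries round-trip used above.
const cache = new Map();

async function persistCache(storageArea) {
  // Maps are not JSON-serializable, so convert to a plain object first.
  await storageArea.local.set({ aiCache: Object.fromEntries(cache) });
}

async function restoreCache(storageArea) {
  const { aiCache } = await storageArea.local.get("aiCache");
  for (const [key, value] of Object.entries(aiCache ?? {})) {
    cache.set(key, value);
  }
}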

View file

@@ -1,7 +1,6 @@
"use strict";
var { ExtensionParent } = ChromeUtils.importESModule("resource://gre/modules/ExtensionParent.sys.mjs");
var { MailServices } = ChromeUtils.importESModule("resource:///modules/MailServices.sys.mjs");
-var { Services } = globalThis || ChromeUtils.importESModule("resource://gre/modules/Services.sys.mjs");
var { aiLog } = ChromeUtils.import("resource://aifilter/modules/logger.jsm");
var AiClassifier = ChromeUtils.importESModule("resource://aifilter/modules/AiClassifier.js");
var { getPlainText } = ChromeUtils.import("resource://aifilter/modules/messageUtils.jsm");
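On the Services change in this file: current Thunderbird exposes `Services` as a global in chrome-privileged modules, so the explicit import can be dropped (that reading of the fix is an assumption, not stated in the patch). A sketch of the remaining module loading, reusing the resource URLs from the lines above:

// Sketch of chrome-privileged module loading as used in this file.
// ESM system modules load via importESModule, legacy JSMs via import.
var { MailServices } = ChromeUtils.importESModule("resource:///modules/MailServices.sys.mjs");
var { aiLog } = ChromeUtils.import("resource://aifilter/modules/logger.jsm");

// No Services import: chrome-privileged scopes provide Services as a
// global (assumption behind removing the explicit import above).
aiLog(`[ExpressionSearchFilter] app version ${Services.appinfo.version}`, { debug: true });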

View file

@@ -1,7 +1,8 @@
document.addEventListener('DOMContentLoaded', async () => {
+const storage = (globalThis.messenger ?? browser).storage;
const logger = await import(browser.runtime.getURL('logger.js'));
const AiClassifier = await import(browser.runtime.getURL('modules/AiClassifier.js'));
-const defaults = await browser.storage.local.get([
+const defaults = await storage.local.get([
'endpoint',
'templateName',
'customTemplate',
@@ -269,7 +270,7 @@ document.addEventListener('DOMContentLoaded', async () => {
});
return { criterion, actions };
}).filter(r => r.criterion);
-await browser.storage.local.set({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging, aiRules: rules });
+await storage.local.set({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging, aiRules: rules });
try {
await AiClassifier.setConfig({ endpoint, templateName, customTemplate: customTemplateText, customSystemPrompt, aiParams: aiParamsSave, debugLogging });
logger.setDebug(debugLogging);
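The save handler here persists the options and then immediately applies them to the running classifier. A condensed sketch of that save-then-apply flow, assuming the `storage`, `AiClassifier.setConfig`, and `logger.setDebug` interfaces shown above (the `saveAndApply` wrapper is illustrative, not part of this patch):

// Sketch of the save-then-apply flow used by the options page.
async function saveAndApply(settings, storage, AiClassifier, logger) {
  // Persist first so other extension pages can read the values later.
  await storage.local.set(settings);
  try {
    // Push the new configuration to the live classifier and logger.
    await AiClassifier.setConfig(settings);
    logger.setDebug(settings.debugLogging);
  } catch (e) {
    console.error("Failed to apply updated settings", e);
  }
}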