baa-conductor

git clone 

commit
6b819bf
parent
499bd69
author
im_wower
date
2026-03-28 16:06:31 +0800 CST
fix: suppress stale ChatGPT final-message relay
3 files changed,  +352, -6
M plugins/baa-firefox/controller.js
+82, -1
  1@@ -24,7 +24,8 @@ const CONTROLLER_STORAGE_KEYS = {
  2   credentialFingerprintByPlatform: "baaFirefox.credentialFingerprintByPlatform",
  3   accountByPlatform: "baaFirefox.accountByPlatform",
  4   geminiSendTemplate: "baaFirefox.geminiSendTemplate",
  5-  claudeState: "baaFirefox.claudeState"
  6+  claudeState: "baaFirefox.claudeState",
  7+  finalMessageRelayCache: "baaFirefox.finalMessageRelayCache"
  8 };
  9 
 10 const DEFAULT_LOCAL_API_BASE = "http://100.71.210.78:4317";
 11@@ -44,6 +45,7 @@ const CONTROL_RETRY_LOG_INTERVAL = 60_000;
 12 const TRACKED_TAB_REFRESH_DELAY = 150;
 13 const SHELL_RUNTIME_HEALTHCHECK_INTERVAL = 30_000;
 14 const CONTROL_STATUS_BODY_LIMIT = 12_000;
 15+const FINAL_MESSAGE_RELAY_CACHE_LIMIT = 20;
 16 const CONTENT_SCRIPT_INJECTION_FILES = ["delivery-adapters.js", "content-script.js"];
 17 const PAGE_INTERCEPTOR_INJECTION_FILES = ["page-interceptor.js"];
 18 const WS_RECONNECT_DELAY = 3_000;
 19@@ -1261,6 +1263,51 @@ function createFinalMessageRelayObserver(platform) {
 20   return FINAL_MESSAGE_HELPERS?.createRelayState(platform) || null;
 21 }
 22 
 23+function normalizeRecentRelayKeys(value) {
 24+  const source = Array.isArray(value) ? value : [];
 25+  const seen = new Set();
 26+  const normalized = [];
 27+
 28+  for (const entry of source) {
 29+    const key = trimToNull(entry);
 30+    if (!key || seen.has(key)) {
 31+      continue;
 32+    }
 33+
 34+    seen.add(key);
 35+    normalized.push(key);
 36+  }
 37+
 38+  return normalized.slice(-FINAL_MESSAGE_RELAY_CACHE_LIMIT);
 39+}
 40+
 41+function serializeFinalMessageRelayCache() {
 42+  return createPlatformMap((platform) => {
 43+    const observer = state.finalMessageRelayObservers[platform];
 44+    return observer ? normalizeRecentRelayKeys(observer.recentRelayKeys) : [];
 45+  });
 46+}
 47+
 48+function restoreFinalMessageRelayCache(raw) {
 49+  const source = hasPlatformShape(raw) ? raw : {};
 50+
 51+  for (const platform of PLATFORM_ORDER) {
 52+    const observer = state.finalMessageRelayObservers[platform];
 53+    if (!observer) {
 54+      continue;
 55+    }
 56+
 57+    observer.activeStream = null;
 58+    observer.recentRelayKeys = normalizeRecentRelayKeys(source[platform]);
 59+  }
 60+}
 61+
 62+async function persistFinalMessageRelayCache() {
 63+  await browser.storage.local.set({
 64+    [CONTROLLER_STORAGE_KEYS.finalMessageRelayCache]: serializeFinalMessageRelayCache()
 65+  });
 66+}
 67+
 68 function cloneHeaderMap(value) {
 69   return value && typeof value === "object" && !Array.isArray(value) ? { ...value } : {};
 70 }
 71@@ -3118,6 +3165,7 @@ async function persistState() {
 72     [CONTROLLER_STORAGE_KEYS.credentialFingerprintByPlatform]: state.credentialFingerprint,
 73     [CONTROLLER_STORAGE_KEYS.accountByPlatform]: state.account,
 74     [CONTROLLER_STORAGE_KEYS.geminiSendTemplate]: state.geminiSendTemplate,
 75+    [CONTROLLER_STORAGE_KEYS.finalMessageRelayCache]: serializeFinalMessageRelayCache(),
 76     [CONTROLLER_STORAGE_KEYS.claudeState]: {
 77       ...cloneClaudeState(state.claudeState),
 78       busy: false,
 79@@ -5341,6 +5389,21 @@ function getObservedPagePlatform(sender, fallbackPlatform = null) {
 80   return detectPlatformFromUrl(senderUrl) || fallbackPlatform || null;
 81 }
 82 
 83+function getObservedPageConversationId(context, pageControl) {
 84+  return trimToNull(context?.conversationId) || trimToNull(pageControl?.conversationId) || null;
 85+}
 86+
 87+function isObservedFinalMessageStale(relay, context, pageControl) {
 88+  const relayConversationId = trimToNull(relay?.payload?.conversation_id);
 89+  const currentConversationId = getObservedPageConversationId(context, pageControl);
 90+
 91+  if (!relayConversationId || !currentConversationId) {
 92+    return false;
 93+  }
 94+
 95+  return relayConversationId !== currentConversationId;
 96+}
 97+
 98 function relayObservedFinalMessage(platform, relay, source = "page_observed", context = null) {
 99   const observer = state.finalMessageRelayObservers[platform];
100   if (!observer || !relay?.payload) return false;
101@@ -5349,6 +5412,7 @@ function relayObservedFinalMessage(platform, relay, source = "page_observed", co
102 
103   if (pageControl.paused) {
104     FINAL_MESSAGE_HELPERS?.rememberRelay(observer, relay);
105+    persistFinalMessageRelayCache().catch(() => {});
106     addLog(
107       "info",
108       `${platformLabel(platform)} 最终消息已抑制:页面 #${context.tabId}${pageControl.conversationId ? ` conversation=${pageControl.conversationId}` : ""} 处于暂停状态`,
109@@ -5357,12 +5421,25 @@ function relayObservedFinalMessage(platform, relay, source = "page_observed", co
110     return false;
111   }
112 
113+  if (context && isObservedFinalMessageStale(relay, context, pageControl)) {
114+    const currentConversationId = getObservedPageConversationId(context, pageControl);
115+    FINAL_MESSAGE_HELPERS?.rememberRelay(observer, relay);
116+    persistFinalMessageRelayCache().catch(() => {});
117+    addLog(
118+      "info",
119+      `${platformLabel(platform)} 最终消息已抑制:页面 #${context.tabId} 当前 conversation=${currentConversationId},relay conversation=${relay.payload.conversation_id || "-"},判定为 stale replay`,
120+      false
121+    );
122+    return false;
123+  }
124+
125   if (!wsSend(relay.payload)) {
126     addLog("warn", `${platformLabel(platform)} 最终消息未能转发(WS 未连接)`, false);
127     return false;
128   }
129 
130   FINAL_MESSAGE_HELPERS?.rememberRelay(observer, relay);
131+  persistFinalMessageRelayCache().catch(() => {});
132   addLog(
133     "info",
134     `${platformLabel(platform)} 最终消息已转发 assistant=${relay.payload.assistant_message_id} source=${source}`,
135@@ -6554,6 +6631,7 @@ async function init() {
136     saved[CONTROLLER_STORAGE_KEYS.accountByPlatform]
137   );
138   state.geminiSendTemplate = saved[CONTROLLER_STORAGE_KEYS.geminiSendTemplate] || null;
139+  restoreFinalMessageRelayCache(saved[CONTROLLER_STORAGE_KEYS.finalMessageRelayCache]);
140   state.claudeState = loadClaudeState(saved[CONTROLLER_STORAGE_KEYS.claudeState]);
141   state.controllerRuntime = loadControllerRuntimeState(saved[CONTROLLER_STORAGE_KEYS.controllerRuntime]);
142   if (needsStatusReset) {
143@@ -6649,11 +6727,14 @@ function exposeControllerTestApi() {
144     handlePageBridgeReady,
145     handlePageNetwork,
146     handlePageSse,
147+    persistFinalMessageRelayCache,
148     reinjectAllOpenPlatformTabs,
149     reinjectPlatformTabs,
150+    restoreFinalMessageRelayCache,
151     runDeliveryAction,
152     runPageControlAction,
153     runPluginManagementAction,
154+    serializeFinalMessageRelayCache,
155     setDesiredTabState,
156     syncPageControlFromContext,
157     state
M plugins/baa-firefox/final-message.js
+112, -4
  1@@ -214,6 +214,42 @@
  2     return normalizeMessageText(fragments.join("\n"));
  3   }
  4 
  /**
   * Canonicalize a candidate JSON path by dropping a single leading dot.
   * Nullish or falsy input yields the empty string.
   * @param {unknown} path
   * @returns {string}
   */
  function normalizeCandidatePath(path) {
    const text = String(path || "");
    return text.startsWith(".") ? text.slice(1) : text;
  }
  8+
  /**
   * True when a candidate path points into ChatGPT history/mapping containers
   * (segments such as "mapping", "messages", "nodes", ...), i.e. text that is
   * likely an OLD turn rather than the turn currently being produced.
   * @param {unknown} path
   * @returns {boolean}
   */
  function isHistoricalChatgptPath(path) {
    // Inlined path normalization: coerce nullish to "" and drop one leading dot.
    const normalized = String(path || "").replace(/^\./u, "");
    const historicalSegment =
      /(?:^|\.)(?:history|items|linear_conversation|mapping|message_map|message_nodes|messages|nodes|entries|turns)(?:$|[.[\]])/iu;
    return historicalSegment.test(normalized);
  }
 13+
 14+  function scoreChatgptCandidatePath(path) {
 15+    const normalizedPath = normalizeCandidatePath(path);
 16+    if (!normalizedPath) {
 17+      return 0;
 18+    }
 19+
 20+    let score = 0;
 21+
 22+    if (normalizedPath === "message") {
 23+      score += 280;
 24+    }
 25+
 26+    if (normalizedPath.endsWith(".message")) {
 27+      score += 140;
 28+    }
 29+
 30+    if (/(?:^|\.)(?:assistant|completion|current|message|output|response)(?:$|[.[\]])/iu.test(normalizedPath)) {
 31+      score += isHistoricalChatgptPath(normalizedPath) ? 0 : 120;
 32+    }
 33+
 34+    if (isHistoricalChatgptPath(normalizedPath)) {
 35+      score -= 260;
 36+    }
 37+
 38+    return score;
 39+  }
 40+
 41   function buildChatgptCandidate(message, envelope, path, context) {
 42     if (!isRecord(message)) return null;
 43 
 44@@ -252,7 +288,7 @@
 45       || trimToNull(envelope?.messageId)
 46       || null;
 47 
 48-    let score = rawText.length;
 49+    let score = rawText.length + scoreChatgptCandidatePath(path);
 50     if (assistantMessageId) score += 120;
 51     if (conversationId) score += 80;
 52     if (terminal) score += 160;
 53@@ -261,6 +297,7 @@
 54     return {
 55       assistantMessageId,
 56       conversationId,
 57+      path: normalizeCandidatePath(path),
 58       rawText,
 59       score
 60     };
 61@@ -317,18 +354,89 @@
 62       }
 63     }
 64 
 65-    return candidates.sort((left, right) =>
 66-      (right.score - left.score) || (right.rawText.length - left.rawText.length)
 67-    )[0] || null;
 68+    return candidates.sort((left, right) => compareCandidates(left, right))[0] || null;
 69+  }
 70+
 71+  function compareCandidates(current, next) {
 72+    const currentScore = Number(current?.score) || 0;
 73+    const nextScore = Number(next?.score) || 0;
 74+    if (nextScore !== currentScore) {
 75+      return nextScore - currentScore;
 76+    }
 77+
 78+    const currentTextLength = (current?.rawText || "").length;
 79+    const nextTextLength = (next?.rawText || "").length;
 80+    if (nextTextLength !== currentTextLength) {
 81+      return nextTextLength - currentTextLength;
 82+    }
 83+
 84+    const currentAssistant = trimToNull(current?.assistantMessageId) ? 1 : 0;
 85+    const nextAssistant = trimToNull(next?.assistantMessageId) ? 1 : 0;
 86+    if (nextAssistant !== currentAssistant) {
 87+      return nextAssistant - currentAssistant;
 88+    }
 89+
 90+    const currentConversation = trimToNull(current?.conversationId) ? 1 : 0;
 91+    const nextConversation = trimToNull(next?.conversationId) ? 1 : 0;
 92+    return nextConversation - currentConversation;
 93+  }
 94+
 95+  function pickPreferredCandidate(current, next) {
 96+    if (!current) return next ? { ...next } : null;
 97+    if (!next) return { ...current };
 98+    return compareCandidates(current, next) > 0 ? { ...next } : { ...current };
 99+  }
100+
101+  function candidateTextsOverlap(current, next) {
102+    const currentText = normalizeMessageText(current?.rawText);
103+    const nextText = normalizeMessageText(next?.rawText);
104+    if (!currentText || !nextText) {
105+      return false;
106+    }
107+
108+    return currentText === nextText || currentText.includes(nextText) || nextText.includes(currentText);
109+  }
110+
111+  function shouldMergeCandidatePair(current, next) {
112+    if (!current || !next) {
113+      return true;
114+    }
115+
116+    const currentAssistant = trimToNull(current?.assistantMessageId);
117+    const nextAssistant = trimToNull(next?.assistantMessageId);
118+    if (currentAssistant && nextAssistant) {
119+      return currentAssistant === nextAssistant;
120+    }
121+
122+    const currentConversation = trimToNull(current?.conversationId);
123+    const nextConversation = trimToNull(next?.conversationId);
124+    if (currentConversation && nextConversation && currentConversation !== nextConversation) {
125+      return false;
126+    }
127+
128+    if (candidateTextsOverlap(current, next)) {
129+      return true;
130+    }
131+
132+    if (!current?.rawText || !next?.rawText) {
133+      return Boolean(currentAssistant || nextAssistant || (currentConversation && nextConversation));
134+    }
135+
136+    return false;
137   }
138 
139   function mergeCandidates(current, next) {
140     if (!next) return current;
141     if (!current) return { ...next };
142 
143+    if (!shouldMergeCandidatePair(current, next)) {
144+      return pickPreferredCandidate(current, next);
145+    }
146+
147     return {
148       assistantMessageId: next.assistantMessageId || current.assistantMessageId || null,
149       conversationId: next.conversationId || current.conversationId || null,
150+      path: next.path || current.path || "",
151       rawText:
152         (next.rawText && next.rawText.length >= (current.rawText || "").length)
153           ? next.rawText
M tests/browser/browser-control-e2e-smoke.test.mjs
+158, -1
  1@@ -258,7 +258,7 @@ function createControllerHarness(options = {}) {
  2   }
  3 
  4   let nextTabId = Math.max(0, ...tabs.keys()) + 1;
  5-  const storage = {};
  6+  const storage = options.storage || {};
  7   const sentMessages = [];
  8   const ws = options.ws || {
  9     readyState: 1,
 10@@ -849,6 +849,65 @@ test("final message relay observer waits for ChatGPT stream completion and suppr
 11   assert.equal(duplicateRelay, null);
 12 });
 13 
// Regression test: a ChatGPT response body can embed the full historical
// "mapping" alongside the root-level "message". The relay observer must pick
// the root message even when a historical turn has longer text.
test("final message relay observer prefers the latest ChatGPT root message over historical mapping entries", () => {
  const relayState = createRelayState("chatgpt");
  const pageUrl = "https://chatgpt.com/c/conv-chatgpt-new-turn";
  const url = "https://chatgpt.com/backend-api/conversation";

  const relay = observeNetwork(
    relayState,
    {
      reqBody: JSON.stringify({
        conversation_id: "conv-chatgpt-new-turn"
      }),
      resBody: JSON.stringify({
        conversation_id: "conv-chatgpt-new-turn",
        // Historical turn nested under "mapping": deliberately longer text so
        // a naive length-based score would (wrongly) prefer it.
        mapping: {
          old_turn: {
            message: {
              id: "msg-chatgpt-old-turn",
              author: {
                role: "assistant"
              },
              status: "finished_successfully",
              end_turn: true,
              content: {
                content_type: "text",
                parts: ["old historical answer with many many extra words that should not win"]
              }
            }
          }
        },
        // Root-level message: the turn just produced; this one must win.
        message: {
          id: "msg-chatgpt-new-turn",
          author: {
            role: "assistant"
          },
          status: "finished_successfully",
          end_turn: true,
          content: {
            content_type: "text",
            parts: ["new ChatGPT turn answer"]
          }
        }
      }),
      source: "page",
      url
    },
    {
      observedAt: 1_710_000_002_500,
      pageUrl
    }
  );

  // The relay payload must carry the root message's ids and text.
  assert.ok(relay);
  assert.equal(relay.payload.type, "browser.final_message");
  assert.equal(relay.payload.platform, "chatgpt");
  assert.equal(relay.payload.conversation_id, "conv-chatgpt-new-turn");
  assert.equal(relay.payload.assistant_message_id, "msg-chatgpt-new-turn");
  assert.equal(relay.payload.raw_text, "new ChatGPT turn answer");
});
 72+
 73 test("final message relay observer extracts Gemini final text only after stream completion", () => {
 74   const relayState = createRelayState("gemini");
 75   const pageUrl = "https://gemini.google.com/app/conv-gemini-smoke";
 76@@ -1178,6 +1237,104 @@ test("controller page-level pause suppresses only the paused page relay and resu
 77   assert.equal(resumedRelay.conversation_id, "conv-page-paused");
 78 });
 79 
// Regression test: an SSE replay for an OLD conversation must be suppressed
// when the sender tab's URL already points at a DIFFERENT conversation, while
// a replay matching the tab's current conversation is still forwarded.
test("controller suppresses stale ChatGPT replay when the tab already points at a different conversation", () => {
  const harness = createControllerHarness({
    finalMessageHelpers
  });
  // The tab URL fixes the page's current conversation to "conv-page-current".
  const sender = {
    tab: {
      id: 43,
      title: "Current ChatGPT Page",
      url: "https://chatgpt.com/c/conv-page-current"
    }
  };

  // First SSE carries "conv-page-old" — a stale replay relative to the tab.
  harness.hooks.handlePageSse(
    {
      chunk: 'data: {"conversation_id":"conv-page-old","message":{"id":"msg-page-old","author":{"role":"assistant"},"status":"finished_successfully","end_turn":true,"content":{"content_type":"text","parts":["stale replay answer"]}}}',
      done: true,
      platform: "chatgpt",
      reqBody: JSON.stringify({
        conversation_id: "conv-page-old"
      }),
      url: "https://chatgpt.com/backend-api/conversation"
    },
    sender
  );

  // Nothing should have been relayed for the stale conversation.
  assert.equal(
    harness.sentMessages.filter((message) => message.type === "browser.final_message").length,
    0
  );

  // Second SSE matches the tab's current conversation and must go through.
  harness.hooks.handlePageSse(
    {
      chunk: 'data: {"conversation_id":"conv-page-current","message":{"id":"msg-page-current","author":{"role":"assistant"},"status":"finished_successfully","end_turn":true,"content":{"content_type":"text","parts":["current page answer"]}}}',
      done: true,
      platform: "chatgpt",
      reqBody: JSON.stringify({
        conversation_id: "conv-page-current"
      }),
      url: "https://chatgpt.com/backend-api/conversation"
    },
    sender
  );

  const currentRelay = harness.sentMessages.find((message) =>
    message.type === "browser.final_message" && message.assistant_message_id === "msg-page-current"
  );
  assert.ok(currentRelay);
  assert.equal(currentRelay.conversation_id, "conv-page-current");
});
129+
// Regression test: the relay dedupe cache must survive a controller reload.
// A shared `storage` object simulates browser.storage.local persisting across
// two harness instances; the second instance must suppress the exact replay
// the first instance already forwarded.
test("controller restores recent final-message relay cache after reload and suppresses ChatGPT replay", async () => {
  const storage = {};
  const sender = {
    tab: {
      id: 44,
      title: "Reloaded ChatGPT Page",
      url: "https://chatgpt.com/c/conv-page-cache"
    }
  };
  // The identical SSE payload is delivered to both harness instances.
  const replayData = {
    chunk: 'data: {"conversation_id":"conv-page-cache","message":{"id":"msg-page-cache","author":{"role":"assistant"},"status":"finished_successfully","end_turn":true,"content":{"content_type":"text","parts":["cached replay answer"]}}}',
    done: true,
    platform: "chatgpt",
    reqBody: JSON.stringify({
      conversation_id: "conv-page-cache"
    }),
    url: "https://chatgpt.com/backend-api/conversation"
  };
  const storageKey = "baaFirefox.finalMessageRelayCache";

  const firstHarness = createControllerHarness({
    finalMessageHelpers,
    storage
  });
  firstHarness.hooks.handlePageSse(replayData, sender);

  // First delivery is relayed exactly once.
  assert.equal(
    firstHarness.sentMessages.filter((message) => message.type === "browser.final_message").length,
    1
  );

  // Persisting must write a non-empty chatgpt key list into shared storage.
  await firstHarness.hooks.persistFinalMessageRelayCache();
  assert.ok(Array.isArray(storage[storageKey]?.chatgpt));
  assert.ok(storage[storageKey].chatgpt.length > 0);

  // "Reload": a fresh harness restores the cache from the same storage.
  const secondHarness = createControllerHarness({
    finalMessageHelpers,
    storage
  });
  secondHarness.hooks.restoreFinalMessageRelayCache(storage[storageKey]);
  secondHarness.hooks.handlePageSse(replayData, sender);

  // The restored dedupe cache suppresses the duplicate relay entirely.
  assert.equal(
    secondHarness.sentMessages.filter((message) => message.type === "browser.final_message").length,
    0
  );
});
177+
178 test("controller blocks delivery bridge when the target page conversation is paused", async () => {
179   const harness = createControllerHarness();
180   const sender = {