diff --git a/README.md b/README.md
index dd08701..9b66a1c 100644
--- a/README.md
+++ b/README.md
@@ -113,6 +113,23 @@ rep+ is a lightweight Chrome DevTools extension inspired by Burp Suite's Repeate
 This combo makes rep+ handy for bug bounty hunters and vulnerability researchers who want Burp-like iteration without the heavyweight UI. Install the extension, open DevTools, head to the rep+ panel, and start hacking. 😎
 
+### Local Model (Ollama) Setup
+If you use a local model (e.g., Ollama), you must allow Chrome extensions to call it; otherwise you'll see 403/CORS errors.
+
+1. Stop any running Ollama instance.
+2. Start Ollama with CORS enabled (pick one):
+   - Allow only Chrome extensions:
+     ```bash
+     OLLAMA_ORIGINS="chrome-extension://*" ollama serve
+     ```
+   - Allow everything (easier for local dev):
+     ```bash
+     OLLAMA_ORIGINS="*" ollama serve
+     ```
+3. Verify your model exists (e.g., `gemma3:4b`) with `ollama list`.
+4. Reload the extension and try again. If you still see a 403, check the Ollama logs for details.
+
+
 ## Permissions & Privacy
 - **Optional**: `webRequest` + `<all_urls>` only when you enable multi-tab capture.
 - **Data**: Stored locally; no tracking/analytics.
diff --git a/background.js b/background.js
index 3e4b825..9b4e48c 100644
--- a/background.js
+++ b/background.js
@@ -13,14 +13,271 @@ chrome.runtime.onConnect.addListener((port) => {
     ports.delete(port);
   });
 
-  // Listen for messages from panel (e.g. to toggle capture)
+  // Listen for messages from panel (e.g. to toggle capture, local model requests)
   port.onMessage.addListener((msg) => {
+    console.log('Background: Received port message:', msg.type);
    if (msg.type === 'ping') {
+      console.log('Background: Responding to ping');
      port.postMessage({ type: 'pong' });
+    } else if (msg.type === 'local-model-request') {
+      // Handle local model request via port
+      const requestId = msg.requestId || `local-${Date.now()}-${Math.random()}`;
+      console.log('Background: Received local model request', requestId, 'URL:', msg.url, 'Body:', JSON.stringify(msg.body).substring(0, 100));
+
+      // Best-effort guard; a Port exposes no synchronous "connected" flag, so real disconnects surface via onDisconnect and the try/catch around postMessage below
+      if (!port) {
+        console.error('Background: Port already disconnected');
+        return;
+      }
+
+      // Keep the MV3 service worker alive during the request; a no-op timer does not reset the idle timer, so ping a cheap extension API instead
+      const keepAlive = setInterval(() => {
+        chrome.runtime.getPlatformInfo(() => {});
+      }, 20000);
+
+      // Proxy the request to localhost
+      // Note: Service workers need host_permissions for localhost in MV3
+      const requestBody = {
+        model: msg.body.model,
+        prompt: msg.body.prompt,
+        stream: msg.body.stream !== undefined ? msg.body.stream : true
+      };
+
+      console.log('Background: Sending fetch request to', msg.url, 'with body:', JSON.stringify(requestBody).substring(0, 200));
+
+      // Try to match curl's request format exactly
+      fetch(msg.url, {
+        method: 'POST',
+        headers: {
+          'Content-Type': 'application/json',
+          'Accept': 'application/json'
+        },
+        body: JSON.stringify(requestBody),
+        // Omit credentials and the Referer header so they can't trip Ollama's origin checks
+        credentials: 'omit',
+        referrerPolicy: 'no-referrer'
+      })
+      .then(response => {
+        console.log('Background: Fetch response status', response.status);
+        // Log response headers for debugging
+        const responseHeaders = {};
+        response.headers.forEach((value, key) => {
+          responseHeaders[key] = value;
+        });
+        console.log('Background: Response headers:', responseHeaders);
+
+        if (!response.ok) {
+          return response.text().then(text => {
+            console.error('Background: Fetch failed with status', response.status, 'Response body length:', text?.length || 0, 'Response body:', text || '(empty)');
+            // Provide more helpful error message
+            let errorMsg = `Request failed with status ${response.status}`;
+            if (text && text.trim()) {
+              try {
+                const errorData = JSON.parse(text);
+                errorMsg = errorData.error || errorData.message || errorMsg;
+              } catch (e) {
+                errorMsg = text.length > 200 ? text.substring(0, 200) + '...' : text;
+              }
+            } else if (response.status === 403) {
+              errorMsg = '403 Forbidden: Ollama is blocking the request. ' +
+                'This might be due to CORS or security settings. ' +
+                'Try restarting Ollama with: OLLAMA_ORIGINS="*" ollama serve ' +
+                'Or check Ollama configuration for access restrictions.';
+            }
+            throw new Error(errorMsg);
+          });
+        }
+        return response.body;
+      })
+      .then(body => {
+        if (!body) {
+          throw new Error('No response body received');
+        }
+
+        // Stream the response back via this specific port
+        const reader = body.getReader();
+        const decoder = new TextDecoder();
+        let hasError = false;
+
+        function readChunk() {
+          if (hasError) return;
+
+          reader.read().then(({ done, value }) => {
+            if (done) {
+              // Send final message
+              clearInterval(keepAlive);
+              try {
+                port.postMessage({
+                  type: 'local-model-stream-done',
+                  requestId: requestId
+                });
+                console.log('Background: Sent stream-done for', requestId);
+              } catch (e) {
+                console.error('Background: Error sending stream-done', e);
+                hasError = true;
+              }
+              return;
+            }
+
+            const chunk = decoder.decode(value, { stream: true });
+            // Send chunk message
+            try {
+              port.postMessage({
+                type: 'local-model-stream-chunk',
+                chunk: chunk,
+                requestId: requestId
+              });
+            } catch (e) {
+              console.error('Background: Port disconnected during streaming', e);
+              hasError = true;
+              reader.cancel().catch(() => {});
+              return;
+            }
+
+            // Continue reading
+            readChunk();
+          }).catch(error => {
+            clearInterval(keepAlive);
+            console.error('Background: Error reading chunk', error);
+            hasError = true;
+            try {
+              port.postMessage({
+                type: 'local-model-stream-error',
+                error: error.message,
+                requestId: requestId
+              });
+            } catch (e) {
+              console.error('Background: Error sending error message', e);
+            }
+          });
+        }
+
+        readChunk();
+      })
+      .catch(error => {
+        clearInterval(keepAlive);
+        console.error('Background: Fetch error', error, error.stack);
+        let errorMessage = error.message || 'Failed to fetch from local model API';
+
+        // Provide helpful error message for CORS issues
+        if (errorMessage.includes('CORS') || errorMessage.includes('Failed to fetch')) {
+          errorMessage = 'CORS error: Ollama needs to allow CORS. ' +
+            'Start Ollama with: OLLAMA_ORIGINS="chrome-extension://*" ollama serve ' +
+            'Or configure your Ollama server to send CORS headers. ' +
+            'Original error: ' + errorMessage;
+        }
+
+        try {
+          port.postMessage({
+            type: 'local-model-error',
+            error: errorMessage,
+            requestId: requestId
+          });
+        } catch (e) {
+          console.error('Background: Port disconnected, cannot send error', e);
+        }
+      });
+    }
   });
 });
 
+// Handle local model API requests sent as one-off messages (fallback to the port-based path above)
+chrome.runtime.onMessage.addListener((request, sender, sendResponse) => {
+  if (request.type === 'local-model-request') {
+    const requestId = request.requestId || `local-${Date.now()}-${Math.random()}`;
+
+    // Proxy the request to localhost; the manifest's host_permissions, not the service worker itself, grant the cross-origin access
+    fetch(request.url, {
+      method: 'POST',
+      headers: {
+        'Content-Type': 'application/json'
+      },
+      body: JSON.stringify(request.body)
+    })
+    .then(response => {
+      if (!response.ok) {
+        return response.text().then(text => {
+          throw new Error(text || 'Request failed');
+        });
+      }
+      return response.body;
+    })
+    .then(body => {
+      // Stream the response back via port connections (for DevTools panels)
+      const reader = body.getReader();
+      const decoder = new TextDecoder();
+
+      function readChunk() {
+        reader.read().then(({ done, value }) => {
+          if (done) {
+            // Send final message to all connected ports
+            ports.forEach(port => {
+              try {
+                port.postMessage({
+                  type: 'local-model-stream-done',
+                  requestId: requestId
+                });
+              } catch (e) {
+                // Port might be disconnected, remove it
+                ports.delete(port);
+              }
+            });
+            return;
+          }
+
+          const chunk = decoder.decode(value, { stream: true });
+          // Send chunk message to all connected ports
+          ports.forEach(port => {
+            try {
+              port.postMessage({
+                type: 'local-model-stream-chunk',
+                chunk: chunk,
+                requestId: requestId
+              });
+            } catch (e) {
+              // Port might be disconnected, remove it
+              ports.delete(port);
+            }
+          });
+
+          // Continue reading
+          readChunk();
+        }).catch(error => {
+          ports.forEach(port => {
+            try {
+              port.postMessage({
+                type: 'local-model-stream-error',
+                error: error.message,
+                requestId: requestId
+              });
+            } catch (e) {
+              ports.delete(port);
+            }
+          });
+        });
+      }
+
+      readChunk();
+    })
+    .catch(error => {
+      ports.forEach(port => {
+        try {
+          port.postMessage({
+            type: 'local-model-error',
+            error: error.message,
+            requestId: requestId
+          });
+        } catch (e) {
+          ports.delete(port);
+        }
+      });
+    });
+
+    // Return true to indicate we'll send responses asynchronously
+    return true;
+  }
+});
+
 // Helper to process request body
 function parseRequestBody(requestBody) {
   if (!requestBody) return null;
diff --git a/js/features/ai/core.js b/js/features/ai/core.js
index 5c23b94..7408038 100644
--- a/js/features/ai/core.js
+++ b/js/features/ai/core.js
@@ -11,6 +11,14 @@ export function getAISettings() {
     };
   }
 
+  if (provider === 'local') {
+    return {
+      provider: 'local',
+      apiKey: localStorage.getItem('local_api_url') || 'http://localhost:11434/api/generate',
+      model: localStorage.getItem('local_model') || ''
+    };
+  }
+
   return {
     provider: 'anthropic',
     apiKey: localStorage.getItem('anthropic_api_key') || '',
@@ -24,6 +32,9 @@ export function saveAISettings(provider, apiKey, model) {
   if (provider === 'gemini') {
     localStorage.setItem('gemini_api_key', apiKey);
     localStorage.setItem('gemini_model', model);
+  } else if (provider === 'local') {
+    localStorage.setItem('local_api_url', apiKey); // apiKey is actually the URL for local
+    localStorage.setItem('local_model', model);
   } else {
     localStorage.setItem('anthropic_api_key', apiKey);
     localStorage.setItem('anthropic_model', model);
@@ -34,6 +45,9 @@ export async function streamExplanation(apiKey, model, request, onUpdate, provid
   if (provider === 'gemini') {
     return streamExplanationFromGemini(apiKey, model, request, onUpdate);
   }
+  if (provider === 'local') {
+    return streamExplanationFromLocal(apiKey, model, request, onUpdate);
+  }
   return streamExplanationFromClaude(apiKey, model, request, onUpdate);
 }
 
@@ -41,6 +55,9 @@ export async function streamExplanationWithSystem(apiKey, model, systemPrompt, u
   if (provider === 'gemini') {
     return streamExplanationFromGeminiWithSystem(apiKey, model, systemPrompt, userPrompt, onUpdate);
   }
+  if (provider === 'local') {
+    return streamExplanationFromLocalWithSystem(apiKey, model, systemPrompt, userPrompt, onUpdate);
+  }
   return streamExplanationFromClaudeWithSystem(apiKey, model, systemPrompt, userPrompt, onUpdate);
 }
 
@@ -282,3 +299,327 @@ export async function streamExplanationFromGeminiWithSystem(apiKey, model, syste
   return fullText;
 }
 
+// Shared port connection for local model requests
+let sharedPort = null;
+let portListeners = new Map(); // Map of requestId -> listener function
+
+function getOrCreatePort() {
+  // Check if existing port is still valid
+  if (sharedPort) {
+    // Try to check if port is still connected by checking lastError
+    // Note: lastError is only set after an operation, so we can't check it here
+    // Instead, we'll rely on the onDisconnect handler
+    return sharedPort;
+  }
+
+  // Create new port connection
+  try {
+    sharedPort = chrome.runtime.connect({ name: "rep-panel" });
+    console.log('Created new port connection for local model requests');
+
+    // Set up shared message listener
+    sharedPort.onMessage.addListener((msg) => {
+      console.log('Port received message:', msg.type, msg.requestId);
+      if (msg.type && msg.requestId && portListeners.has(msg.requestId)) {
+        const listener = portListeners.get(msg.requestId);
+        listener(msg);
+      } else {
+        console.warn('Port message ignored - no listener for requestId:', msg.requestId);
+      }
+    });
+
+    // Handle disconnection
+    sharedPort.onDisconnect.addListener(() => {
+      console.warn('Port disconnected, clearing shared port. Error:', chrome.runtime.lastError?.message);
+      const wasConnected = sharedPort !== null;
+      sharedPort = null;
+
+      // Reject all pending requests only if we had an active connection
+      if (wasConnected) {
+        portListeners.forEach((listener, requestId) => {
+          try {
+            listener({
+              type: 'local-model-error',
+              error: 'Connection to background script lost. Service worker may have been terminated.',
+              requestId: requestId
+            });
+          } catch (e) {
+            console.error('Error notifying listener:', e);
+          }
+        });
+        portListeners.clear();
+      }
+    });
+
+    return sharedPort;
+  } catch (e) {
+    console.error('Failed to create port connection:', e);
+    throw new Error('Failed to connect to background script: ' + e.message);
+  }
+}
+
+export async function streamExplanationFromLocal(apiUrl, model, request, onUpdate) {
Be concise but thorough."; + const prompt = `${systemPrompt}\n\nExplain this HTTP request:\n\n${request}`; + + // Use background service worker to proxy the request (bypasses CORS) + return new Promise((resolve, reject) => { + let fullText = ''; + let buffer = ''; + const requestId = `local-${Date.now()}-${Math.random()}`; + let isResolved = false; + + // Get or create shared port connection + let port; + try { + port = getOrCreatePort(); + } catch (e) { + reject(e); + return; + } + + const portMessageListener = (msg) => { + // Only process messages for this request + if (!msg.type || msg.requestId !== requestId) return; + + if (msg.type === 'local-model-stream-chunk') { + buffer += msg.chunk; + const lines = buffer.split('\n'); + buffer = lines.pop() || ''; // Keep incomplete line in buffer + + for (const line of lines) { + if (!line.trim()) continue; + + try { + const data = JSON.parse(line); + // Ollama format: { "response": "text", "done": false } + if (data.response) { + fullText += data.response; + onUpdate(fullText); + } + // If done, break + if (data.done) { + if (!isResolved) { + isResolved = true; + portListeners.delete(requestId); + resolve(fullText); + } + return; + } + } catch (e) { + // Ignore parse errors for incomplete chunks + } + } + } else if (msg.type === 'local-model-stream-done') { + // Process any remaining buffer + if (buffer.trim()) { + try { + const data = JSON.parse(buffer); + if (data.response) { + fullText += data.response; + onUpdate(fullText); + } + } catch (e) { + // Ignore parse errors + } + } + if (!isResolved) { + isResolved = true; + portListeners.delete(requestId); + resolve(fullText); + } + } else if (msg.type === 'local-model-stream-error' || msg.type === 'local-model-error') { + if (!isResolved) { + isResolved = true; + portListeners.delete(requestId); + reject(new Error(msg.error || 'Failed to communicate with local model API')); + } + } + }; + + // Register listener for this request + portListeners.set(requestId, portMessageListener); + + // Set a timeout to detect if the request is stuck (60 seconds) + const timeout = setTimeout(() => { + if (!isResolved) { + console.warn('Local model request timeout after 60s, requestId:', requestId); + } + }, 60000); + + // Wrap the listener to clear timeout on completion + const wrappedListener = (msg) => { + if (msg.type === 'local-model-stream-done' || msg.type === 'local-model-error' || msg.type === 'local-model-stream-error') { + clearTimeout(timeout); + } + portMessageListener(msg); + }; + + // Update the listener in the map + portListeners.set(requestId, wrappedListener); + + // Send request to background script via port + try { + const message = { + type: 'local-model-request', + requestId: requestId, + url: apiUrl, + body: { + model: model, + prompt: prompt, + stream: true + } + }; + + port.postMessage(message); + console.log('Sent local model request:', requestId, 'URL:', apiUrl, 'Model:', model); + + // Verify port is still connected after sending + if (chrome.runtime.lastError) { + clearTimeout(timeout); + if (!isResolved) { + isResolved = true; + portListeners.delete(requestId); + reject(new Error('Port error after sending: ' + chrome.runtime.lastError.message)); + } + } + } catch (e) { + clearTimeout(timeout); + if (!isResolved) { + isResolved = true; + portListeners.delete(requestId); + reject(new Error('Failed to send request to background script: ' + e.message)); + } + } + }); +} + +export async function streamExplanationFromLocalWithSystem(apiUrl, model, systemPrompt, userPrompt, onUpdate) { + 
+  // Combine system prompt and user prompt for local models that don't support system messages
+  const prompt = `${systemPrompt}\n\n${userPrompt}`;
+
+  // Use the background service worker to proxy the request (its localhost host_permissions avoid CORS)
+  return new Promise((resolve, reject) => {
+    let fullText = '';
+    let buffer = '';
+    const requestId = `local-${Date.now()}-${Math.random()}`;
+    let isResolved = false;
+
+    // Get or create shared port connection
+    let port;
+    try {
+      port = getOrCreatePort();
+    } catch (e) {
+      reject(e);
+      return;
+    }
+
+    const portMessageListener = (msg) => {
+      // Only process messages for this request
+      if (!msg.type || msg.requestId !== requestId) return;
+
+      if (msg.type === 'local-model-stream-chunk') {
+        buffer += msg.chunk;
+        const lines = buffer.split('\n');
+        buffer = lines.pop() || ''; // Keep incomplete line in buffer
+
+        for (const line of lines) {
+          if (!line.trim()) continue;
+
+          try {
+            const data = JSON.parse(line);
+            // Ollama format: { "response": "text", "done": false }
+            if (data.response) {
+              fullText += data.response;
+              onUpdate(fullText);
+            }
+            // If done, resolve and stop reading
+            if (data.done) {
+              if (!isResolved) {
+                isResolved = true;
+                portListeners.delete(requestId);
+                resolve(fullText);
+              }
+              return;
+            }
+          } catch (e) {
+            // Ignore parse errors for incomplete chunks
+          }
+        }
+      } else if (msg.type === 'local-model-stream-done') {
+        // Process any remaining buffer
+        if (buffer.trim()) {
+          try {
+            const data = JSON.parse(buffer);
+            if (data.response) {
+              fullText += data.response;
+              onUpdate(fullText);
+            }
+          } catch (e) {
+            // Ignore parse errors
+          }
+        }
+        if (!isResolved) {
+          isResolved = true;
+          portListeners.delete(requestId);
+          resolve(fullText);
+        }
+      } else if (msg.type === 'local-model-stream-error' || msg.type === 'local-model-error') {
+        if (!isResolved) {
+          isResolved = true;
+          portListeners.delete(requestId);
+          reject(new Error(msg.error || 'Failed to communicate with local model API'));
+        }
+      }
+    };
+
+    // Register listener for this request
+    portListeners.set(requestId, portMessageListener);
+
+    // Set a timeout to detect if the request is stuck (60 seconds)
+    const timeout = setTimeout(() => {
+      if (!isResolved) {
+        console.warn('Local model request timeout after 60s, requestId:', requestId);
+      }
+    }, 60000);
+
+    // Send request to background script via port
+    try {
+      const message = {
+        type: 'local-model-request',
+        requestId: requestId,
+        url: apiUrl,
+        body: {
+          model: model,
+          prompt: prompt,
+          stream: true
+        }
+      };
+
+      port.postMessage(message);
+      console.log('Sent local model request (with system):', requestId);
+
+      // Wrap the listener to clear timeout on completion
+      const wrappedListener = (msg) => {
+        if (msg.type === 'local-model-stream-done' || msg.type === 'local-model-error' || msg.type === 'local-model-stream-error') {
+          clearTimeout(timeout);
+        }
+        portMessageListener(msg);
+      };
+
+      // Update the listener in the map
+      portListeners.set(requestId, wrappedListener);
+    } catch (e) {
+      clearTimeout(timeout);
+      if (!isResolved) {
+        isResolved = true;
+        portListeners.delete(requestId);
+        reject(new Error('Failed to send request to background script: ' + e.message));
+      }
+    }
+  });
+}
diff --git a/js/features/ai/explain.js b/js/features/ai/explain.js
index ce2e20c..a218a56 100644
--- a/js/features/ai/explain.js
+++ b/js/features/ai/explain.js
@@ -11,9 +11,19 @@ import { getAISettings, streamExplanation } from './core.js';
  */
 export async function handleAIExplanation(promptPrefix, content, explanationModal, explanationContent, settingsModal, onTextUpdate) {
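+  // Note: for the 'local' provider, getAISettings() returns the endpoint URL in
+  // the apiKey field, so the model name is the only extra setting validated below.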
   const { provider, apiKey, model } = getAISettings();
-  if (!apiKey) {
-    const providerName = provider === 'gemini' ? 'Gemini' : 'Anthropic';
-    alert(`Please configure your ${providerName} API Key in Settings first.`);
+  if (!apiKey || (provider === 'local' && !model)) {
+    let providerName = 'Anthropic';
+    if (provider === 'gemini') {
+      providerName = 'Gemini';
+    } else if (provider === 'local') {
+      providerName = 'Local Model';
+    }
+    const message = provider === 'local'
+      ? 'Please configure your Local Model URL and Model Name in Settings first.'
+      : `Please configure your ${providerName} API Key in Settings first.`;
+    alert(message);
     settingsModal.style.display = 'block';
     return;
   }
diff --git a/js/features/ai/index.js b/js/features/ai/index.js
index 598b281..6905044 100644
--- a/js/features/ai/index.js
+++ b/js/features/ai/index.js
@@ -86,7 +86,9 @@
   streamExplanationFromClaude,
   streamExplanationFromClaudeWithSystem,
   streamExplanationFromGemini,
-  streamExplanationFromGeminiWithSystem
+  streamExplanationFromGeminiWithSystem,
+  streamExplanationFromLocal,
+  streamExplanationFromLocalWithSystem
 } from './core.js';
 export { handleAIExplanation } from './explain.js';
 export { handleAttackSurfaceAnalysis } from './suggestions.js';
@@ -102,6 +104,9 @@ export function setupAIFeatures(elements) {
   const geminiModelSelect = document.getElementById('gemini-model');
   const anthropicSettings = document.getElementById('anthropic-settings');
   const geminiSettings = document.getElementById('gemini-settings');
+  const localSettings = document.getElementById('local-settings');
+  const localApiUrlInput = document.getElementById('local-api-url');
+  const localModelInput = document.getElementById('local-model');
   const aiMenuBtn = document.getElementById('ai-menu-btn');
   const aiMenuDropdown = document.getElementById('ai-menu-dropdown');
   const explainBtn = document.getElementById('explain-btn');
@@ -118,12 +123,16 @@ export function setupAIFeatures(elements) {
   if (aiProviderSelect) {
     aiProviderSelect.addEventListener('change', () => {
       const provider = aiProviderSelect.value;
+      anthropicSettings.style.display = 'none';
+      geminiSettings.style.display = 'none';
+      localSettings.style.display = 'none';
+
       if (provider === 'gemini') {
-        anthropicSettings.style.display = 'none';
         geminiSettings.style.display = 'block';
+      } else if (provider === 'local') {
+        localSettings.style.display = 'block';
       } else {
         anthropicSettings.style.display = 'block';
-        geminiSettings.style.display = 'none';
       }
     });
   }
@@ -134,16 +143,22 @@ export function setupAIFeatures(elements) {
     if (aiProviderSelect) aiProviderSelect.value = provider;
 
+    anthropicSettings.style.display = 'none';
+    geminiSettings.style.display = 'none';
+    localSettings.style.display = 'none';
+
     if (provider === 'gemini') {
       geminiApiKeyInput.value = apiKey;
       if (geminiModelSelect) geminiModelSelect.value = model;
-      anthropicSettings.style.display = 'none';
       geminiSettings.style.display = 'block';
+    } else if (provider === 'local') {
+      if (localApiUrlInput) localApiUrlInput.value = apiKey; // apiKey is actually the URL for local
+      if (localModelInput) localModelInput.value = model;
+      localSettings.style.display = 'block';
     } else {
       anthropicApiKeyInput.value = apiKey;
       if (anthropicModelSelect) anthropicModelSelect.value = model;
       anthropicSettings.style.display = 'block';
-      geminiSettings.style.display = 'none';
     }
 
     settingsModal.style.display = 'block';
@@ -158,17 +173,23 @@ export function setupAIFeatures(elements) {
     if (provider === 'gemini') {
       key = geminiApiKeyInput.value.trim();
       model = geminiModelSelect ? geminiModelSelect.value : 'gemini-flash-latest';
+    } else if (provider === 'local') {
+      key = localApiUrlInput ? localApiUrlInput.value.trim() : 'http://localhost:11434/api/generate';
+      model = localModelInput ? localModelInput.value.trim() : '';
     } else {
       key = anthropicApiKeyInput.value.trim();
       model = anthropicModelSelect ? anthropicModelSelect.value : 'claude-3-5-sonnet-20241022';
     }
 
-    if (key) {
+    if (key && (provider !== 'local' || model)) {
       saveAISettings(provider, key, model);
+      alert('Settings saved!');
+      settingsModal.style.display = 'none';
+    } else if (provider === 'local' && !model) {
+      alert('Please enter a model name for the local provider.');
+    } else {
+      alert('Please enter required settings.');
     }
-
-    alert('Settings saved!');
-    settingsModal.style.display = 'none';
   });
 }
diff --git a/js/features/ai/suggestions.js b/js/features/ai/suggestions.js
index 218cda8..57250db 100644
--- a/js/features/ai/suggestions.js
+++ b/js/features/ai/suggestions.js
@@ -20,9 +20,17 @@ export async function handleAttackSurfaceAnalysis(
   onTextUpdate
 ) {
   const { provider, apiKey, model } = getAISettings();
-  if (!apiKey) {
-    const providerName = provider === 'gemini' ? 'Gemini' : 'Anthropic';
-    alert(`Please configure your ${providerName} API Key in Settings first.`);
+  if (!apiKey || (provider === 'local' && !model)) {
+    let providerName = 'Anthropic';
+    if (provider === 'gemini') {
+      providerName = 'Gemini';
+    } else if (provider === 'local') {
+      providerName = 'Local Model';
+    }
+    const message = provider === 'local'
+      ? 'Please configure your Local Model URL and Model Name in Settings first.'
+      : `Please configure your ${providerName} API Key in Settings first.`;
+    alert(message);
     settingsModal.style.display = 'block';
     return;
   }
diff --git a/js/features/extractors/ui.js b/js/features/extractors/ui.js
index 34e15b9..6d5aa8b 100644
--- a/js/features/extractors/ui.js
+++ b/js/features/extractors/ui.js
@@ -122,7 +122,11 @@ export function initExtractorUI() {
     // Get all resources
     const resources = await new Promise((resolve) => {
-      chrome.devtools.inspectedWindow.getResources((res) => resolve(res));
+      if (chrome.devtools && chrome.devtools.inspectedWindow) {
+        chrome.devtools.inspectedWindow.getResources((res) => resolve(res));
+      } else {
+        resolve([]);
+      }
     });
 
     const jsFiles = resources.filter(r => r.type === 'script' || r.url.endsWith('.js') || r.url.endsWith('.map'));
diff --git a/js/network/capture.js b/js/network/capture.js
index 51351da..aac9157 100644
--- a/js/network/capture.js
+++ b/js/network/capture.js
@@ -3,11 +3,13 @@ export function setupNetworkListener(onRequestCaptured) {
   // Get the current page URL once at setup
   let currentPageUrl = '';
-  chrome.devtools.inspectedWindow.eval('window.location.href', (result, isException) => {
-    if (!isException && result) {
-      currentPageUrl = result;
-    }
-  });
+  if (chrome.devtools && chrome.devtools.inspectedWindow) {
+    chrome.devtools.inspectedWindow.eval('window.location.href', (result, isException) => {
+      if (!isException && result) {
+        currentPageUrl = result;
+      }
+    });
+  }
 
   // Update page URL when navigation occurs
   chrome.devtools.network.onNavigated.addListener((url) => {
diff --git a/js/network/multi-tab.js b/js/network/multi-tab.js
index 13de6c8..93f43e1 100644
--- a/js/network/multi-tab.js
+++ b/js/network/multi-tab.js
@@ -34,7 +34,7 @@ export function initMultiTabCapture() {
     const req = msg.data;
 
     // Skip requests from the current inspected tab (handled by setupNetworkListener)
-    if (req.tabId === chrome.devtools.inspectedWindow.tabId) return;
+    if (chrome.devtools && chrome.devtools.inspectedWindow && req.tabId === chrome.devtools.inspectedWindow.tabId) return;
 
     // Convert to HAR-like format
     const harEntry = {
diff --git a/manifest.json b/manifest.json
index f04efb9..97bee9c 100644
--- a/manifest.json
+++ b/manifest.json
@@ -3,14 +3,13 @@
   "name": "rep+",
   "version": "1.0",
   "description": "rep+ - Capture, modify, and replay HTTP requests in Chrome DevTools with AI-powered security analysis.",
-  "permissions": [
-    "activeTab",
-    "storage"
-  ],
   "optional_permissions": [
     "webRequest"
   ],
-  "host_permissions": [],
+  "host_permissions": [
+    "http://localhost/*",
+    "http://127.0.0.1/*"
+  ],
   "optional_host_permissions": [
     "<all_urls>"
   ],
diff --git a/panel.html b/panel.html
index 203c02b..66eba15 100644
--- a/panel.html
+++ b/panel.html
@@ -457,6 +457,7 @@
           <h2>Settings</h2>
           <select id="ai-provider">
             <option value="anthropic">Anthropic</option>
             <option value="gemini">Gemini</option>
+            <option value="local">Local Model (Ollama)</option>
           </select>
           <div id="anthropic-settings">
@@ -490,6 +491,18 @@
             </select>
           </div>
 
+          <div id="local-settings" style="display: none;">
+            <label for="local-api-url">Local API URL</label>
+            <input type="text" id="local-api-url" placeholder="http://localhost:11434/api/generate">
+            <p class="hint">Default Ollama endpoint; change the host or port if yours differs.</p>
+
+            <label for="local-model">Model Name</label>
+            <input type="text" id="local-model" placeholder="gemma3:4b">
+            <p class="hint">Must match a model name from <code>ollama list</code>.</p>
+
+            <p class="hint">Requires Ollama started with OLLAMA_ORIGINS set; see the README.</p>
+
+          </div>
           <button>Save Settings</button>
         </div>
       </div>