diff --git a/.gitignore b/.gitignore
index 213affdb..5e08bfbd 100644
--- a/.gitignore
+++ b/.gitignore
@@ -47,6 +47,9 @@ codeql-development-mcp-server.code-workspace
 # The 'codeql test run` command generates `.testproj` test database directories
 *.testproj
 
+# CodeQL CLI diagnostic files generated during query runs
+**/diagnostic/cli-diagnostics-*.json
+
 # Prevent accidentally committing integration test output files in root directory
 # These should only be in client/integration-tests/primitives/tools/*/after/ directories
 /evaluator-log.json
diff --git a/extensions/vscode/esbuild.config.js b/extensions/vscode/esbuild.config.js
index 3a0682a9..bdeeab4f 100644
--- a/extensions/vscode/esbuild.config.js
+++ b/extensions/vscode/esbuild.config.js
@@ -52,6 +52,22 @@ const testSuiteConfig = {
   },
 };
 
+// Extended integration tests — standalone (no vscode API dependency).
+// Built separately so they can run via `node` without the Extension Host.
+const extendedTestConfig = {
+  ...shared,
+  entryPoints: [
+    'test/extended/run-extended-tests.ts',
+  ],
+  outdir: 'dist/test/extended',
+  outfile: undefined,
+  outExtension: { '.js': '.cjs' },
+  external: [], // No externals — fully self-contained
+  logOverride: {
+    'require-resolve-not-external': 'silent',
+  },
+};
+
 const isWatch = process.argv.includes('--watch');
 
 if (isWatch) {
@@ -67,6 +83,10 @@ if (isWatch) {
     await build(testSuiteConfig);
     console.log('✅ Test suite build completed successfully');
     console.log(`📦 Generated: dist/test/suite/*.cjs`);
+
+    await build(extendedTestConfig);
+    console.log('✅ Extended test build completed successfully');
+    console.log(`📦 Generated: dist/test/extended/*.cjs`);
   } catch (error) {
     console.error('❌ Build failed:', error);
     process.exit(1);
diff --git a/extensions/vscode/eslint.config.mjs b/extensions/vscode/eslint.config.mjs
index 5a698bd2..16264526 100644
--- a/extensions/vscode/eslint.config.mjs
+++ b/extensions/vscode/eslint.config.mjs
@@ -46,9 +46,10 @@ export default [
sourceType: 'module', parser: typescript.parser, globals: { - process: 'readonly', - console: 'readonly', Buffer: 'readonly', + console: 'readonly', + fetch: 'readonly', + process: 'readonly', __dirname: 'readonly', __filename: 'readonly', }, diff --git a/extensions/vscode/package.json b/extensions/vscode/package.json index 371edaea..2b17de11 100644 --- a/extensions/vscode/package.json +++ b/extensions/vscode/package.json @@ -155,6 +155,7 @@ "test": "npm run test:coverage && npm run test:integration", "test:coverage": "vitest --run --coverage", "test:integration": "npm run download:vscode && vscode-test", + "test:integration:extended": "npm run bundle && node dist/test/extended/run-extended-tests.cjs", "test:integration:label": "vscode-test --label", "test:watch": "vitest --watch", "vscode:prepublish": "npm run clean && npm run lint && npm run bundle && npm run bundle:server", diff --git a/extensions/vscode/test/extended/download-databases.ts b/extensions/vscode/test/extended/download-databases.ts new file mode 100644 index 00000000..5b05230b --- /dev/null +++ b/extensions/vscode/test/extended/download-databases.ts @@ -0,0 +1,258 @@ +/** + * Download CodeQL databases using the GitHub REST API. + * + * When running inside the VS Code Extension Development Host, this uses + * the VS Code GitHub authentication session (same auth as vscode-codeql). + * When running standalone, it falls back to the GH_TOKEN env var. + * + * Downloads are cached on disk and reused if less than 24 hours old. 
+ */
+
+import { createWriteStream, existsSync, mkdirSync, readFileSync, readdirSync, statSync } from 'fs';
+import { join } from 'path';
+import { execFileSync } from 'child_process';
+import { homedir } from 'os';
+import { pipeline } from 'stream/promises';
+
+const MAX_AGE_MS = 24 * 60 * 60 * 1000; // 24 hours
+
+export interface RepoConfig {
+  callGraphFromTo?: { sourceFunction: string; targetFunction: string };
+  language: string;
+  owner: string;
+  repo: string;
+}
+
+/**
+ * Get a GitHub token. Tries VS Code auth session first, then GH_TOKEN env var,
+ * then `gh auth token` CLI.
+ */
+async function getGitHubToken(): Promise<string | undefined> {
+  // Try VS Code authentication (when running in Extension Host)
+  try {
+    const vscode = await import('vscode');
+    const session = await vscode.authentication.getSession('github', ['repo'], { createIfNone: false });
+    if (session?.accessToken) {
+      console.log('   🔑 Using VS Code GitHub authentication');
+      return session.accessToken;
+    }
+  } catch {
+    // Not in VS Code — fall through
+  }
+
+  // Try GH_TOKEN env var
+  if (process.env.GH_TOKEN) {
+    console.log('   🔑 Using GH_TOKEN environment variable');
+    return process.env.GH_TOKEN;
+  }
+
+  // Try `gh auth token` CLI
+  try {
+    const { execFileSync } = await import('child_process');
+    const token = execFileSync('gh', ['auth', 'token'], { encoding: 'utf8', timeout: 5000 }).trim();
+    if (token) {
+      console.log('   🔑 Using GitHub CLI (gh auth token)');
+      return token;
+    }
+  } catch {
+    // gh CLI not available or not authenticated
+  }
+
+  return undefined;
+}
+
+/**
+ * Download a CodeQL database for a repository via GitHub REST API.
+ * Returns the path to the extracted database, or null if download failed.
+ */
+async function downloadDatabase(
+  repo: RepoConfig,
+  databaseDir: string,
+  token: string,
+): Promise<string | null> {
+  const { language, owner, repo: repoName } = repo;
+  const repoDir = join(databaseDir, owner, repoName);
+  const dbDir = join(repoDir, language);
+  const zipPath = join(repoDir, `${language}.zip`);
+  const markerFile = join(dbDir, 'codeql-database.yml');
+
+  // Check cache
+  if (existsSync(markerFile)) {
+    try {
+      const mtime = statSync(markerFile).mtimeMs;
+      if (Date.now() - mtime < MAX_AGE_MS) {
+        console.log(`   ✓ Cached: ${owner}/${repoName} (${language})`);
+        return dbDir;
+      }
+    } catch {
+      // Fall through to download
+    }
+  }
+
+  mkdirSync(repoDir, { recursive: true });
+
+  const url = `https://api.github.com/repos/${encodeURIComponent(owner)}/${encodeURIComponent(repoName)}/code-scanning/codeql/databases/${encodeURIComponent(language)}`;
+  console.log(`   ⬇ Downloading: ${owner}/${repoName} (${language})...`);
+
+  try {
+    const response = await fetch(url, {
+      headers: {
+        Accept: 'application/zip',
+        Authorization: `Bearer ${token}`,
+        'User-Agent': 'codeql-development-mcp-server-extended-tests',
+      },
+    });
+
+    if (!response.ok) {
+      console.error(`   ✗ Download failed: ${response.status} ${response.statusText}`);
+      return null;
+    }
+
+    if (!response.body) {
+      console.error(`   ✗ Empty response body`);
+      return null;
+    }
+
+    // Stream to zip file
+    const dest = createWriteStream(zipPath);
+    // @ts-expect-error — ReadableStream → NodeJS.ReadableStream interop
+    await pipeline(response.body, dest);
+
+    // Extract
+    console.log(`   📦 Extracting: ${owner}/${repoName} (${language})...`);
+    mkdirSync(dbDir, { recursive: true });
+    execFileSync('unzip', ['-o', '-q', zipPath, '-d', dbDir]);
+
+    // Flatten if single nested directory (zip often has one top-level dir)
+    const entries = readdirSync(dbDir);
+    if (entries.length === 1 && !existsSync(join(dbDir, 'codeql-database.yml'))) {
+      const nested = join(dbDir, entries[0]);
+      if (existsSync(join(nested, 'codeql-database.yml'))) {
+        // Copy all contents up, then remove the nested directory
+        execFileSync('bash', ['-c', `cp -a "${nested}"/. "${dbDir}/" && rm -rf "${nested}"`]);
+      }
+    }
+
+    if (!existsSync(markerFile)) {
+      console.error(`   ✗ Extraction failed: ${markerFile} not found`);
+      return null;
+    }
+
+    // Clean up zip
+    try { const { unlinkSync } = await import('fs'); unlinkSync(zipPath); } catch { /* best effort */ }
+
+    console.log(`   ✓ Ready: ${owner}/${repoName} (${language})`);
+    return dbDir;
+  } catch (err) {
+    console.error(`   ✗ Error downloading ${owner}/${repoName}: ${err}`);
+    return null;
+  }
+}
+
+/**
+ * Get the default vscode-codeql global storage paths (platform-dependent).
+ */
+function getVscodeCodeqlStoragePaths(): string[] {
+  const home = homedir();
+  const candidates = [
+    join(home, 'Library', 'Application Support', 'Code', 'User', 'globalStorage', 'GitHub.vscode-codeql'),
+    join(home, 'Library', 'Application Support', 'Code', 'User', 'globalStorage', 'github.vscode-codeql'),
+    join(home, '.config', 'Code', 'User', 'globalStorage', 'GitHub.vscode-codeql'),
+    join(home, '.config', 'Code', 'User', 'globalStorage', 'github.vscode-codeql'),
+  ];
+  return candidates.filter(p => existsSync(p));
+}
+
+/**
+ * Scan directories for CodeQL databases (by codeql-database.yml marker).
+ */
+function scanForDatabases(dir: string, found: Map<string, { language: string; path: string }>, depth: number): void {
+  if (depth > 4) return;
+  const markerPath = join(dir, 'codeql-database.yml');
+  if (existsSync(markerPath)) {
+    try {
+      const yml = readFileSync(markerPath, 'utf8');
+      const langMatch = yml.match(/primaryLanguage:\s*(\S+)/);
+      found.set(dir, { language: langMatch?.[1] ?? 'unknown', path: dir });
+    } catch { /* skip */ }
+    return;
+  }
+  try {
+    for (const entry of readdirSync(dir)) {
+      if (entry.startsWith('.') || entry === 'node_modules') continue;
+      const full = join(dir, entry);
+      try { if (statSync(full).isDirectory()) scanForDatabases(full, found, depth + 1); } catch { /* skip */ }
+    }
+  } catch { /* skip */ }
+}
+
+/**
+ * Discover and/or download databases for the requested repos.
+ * Returns a map of "owner/repo" → database path.
+ */
+export async function resolveAllDatabases(
+  repos: RepoConfig[],
+  additionalDirs: string[],
+): Promise<{ databases: Map<string, string>; missing: RepoConfig[] }> {
+  const databases = new Map<string, string>();
+  const missing: RepoConfig[] = [];
+
+  // First: discover existing databases on disk
+  const searchDirs = [...additionalDirs, ...getVscodeCodeqlStoragePaths()];
+  const envDirs = process.env.CODEQL_DATABASES_BASE_DIRS;
+  if (envDirs) searchDirs.push(...envDirs.split(':').filter(Boolean));
+
+  console.log(`   Searching ${searchDirs.length} directories for existing databases...`);
+  const existing = new Map<string, { language: string; path: string }>();
+  for (const dir of searchDirs) {
+    if (existsSync(dir)) scanForDatabases(dir, existing, 0);
+  }
+  console.log(`   Found ${existing.size} existing database(s) on disk`);
+
+  // Match existing databases to requested repos
+  for (const repo of repos) {
+    let found = false;
+    for (const [dbPath, info] of existing) {
+      if (info.language === repo.language) {
+        const pathLower = dbPath.toLowerCase();
+        if (pathLower.includes(repo.repo.toLowerCase()) || pathLower.includes(repo.owner.toLowerCase())) {
+          databases.set(`${repo.owner}/${repo.repo}`, dbPath);
+          found = true;
+          console.log(`   ✓ Found: ${repo.owner}/${repo.repo} → ${dbPath}`);
+          break;
+        }
+      }
+    }
+    if (!found) missing.push(repo);
+  }
+
+  // Second: try to download missing databases
+  if (missing.length > 0) {
+    const token = await getGitHubToken();
+    if (token) {
+      console.log(`\n   ⬇ Attempting to download ${missing.length} missing database(s)...`);
+      const downloadDir = additionalDirs[0] || join(homedir(), '.codeql-mcp-test-databases');
+      mkdirSync(downloadDir, { recursive: true });
+
+      const stillMissing: RepoConfig[] = [];
+      for (const repo of missing) {
+        const dbPath = await downloadDatabase(repo, downloadDir, token);
+        if (dbPath) {
+          databases.set(`${repo.owner}/${repo.repo}`, dbPath);
+        } else {
+          stillMissing.push(repo);
+        }
+      }
+      return { databases, missing: stillMissing };
+    } else {
+      console.log(`\n   ⚠️ No GitHub token available for downloading missing databases.`);
+      console.log(`   💡 Options to provide databases:`);
+      console.log(`      1. Open VS Code, use "CodeQL: Download Database from GitHub"`);
+      console.log(`      2. Set GH_TOKEN env var for automatic download`);
+      console.log(`      3. Set CODEQL_DATABASES_BASE_DIRS to point to existing databases`);
+    }
+  }
+
+  return { databases, missing };
+}
diff --git a/extensions/vscode/test/extended/repos.json b/extensions/vscode/test/extended/repos.json
new file mode 100644
index 00000000..a297f6ae
--- /dev/null
+++ b/extensions/vscode/test/extended/repos.json
@@ -0,0 +1,37 @@
+{
+  "description": "Repositories for extended MCP integration testing with CallGraphFromTo source/target function pairs.",
+  "repositories": [
+    {
+      "owner": "gin-gonic",
+      "repo": "gin",
+      "language": "go",
+      "callGraphFromTo": { "sourceFunction": "handleHTTPRequest", "targetFunction": "ServeHTTP" }
+    },
+    {
+      "owner": "expressjs",
+      "repo": "express",
+      "language": "javascript",
+      "callGraphFromTo": { "sourceFunction": "json", "targetFunction": "send" }
+    },
+    {
+      "owner": "checkstyle",
+      "repo": "checkstyle",
+      "language": "java",
+      "callGraphFromTo": { "sourceFunction": "process", "targetFunction": "log" }
+    },
+    {
+      "owner": "PyCQA",
+      "repo": "flake8",
+      "language": "python",
+      "callGraphFromTo": { "sourceFunction": "run", "targetFunction": "report" }
+    }
+  ],
+  "settings": {
+    "databaseDir": ".tmp/extended-test-databases",
+    "fixtureSearchDirs": [
+      "test/fixtures/single-folder-workspace/codeql-storage/databases",
+      "test/fixtures/multi-root-workspace/folder-a/codeql-storage/databases"
+    ],
+    "timeoutMs": 600000
+  }
+}
diff --git a/extensions/vscode/test/extended/run-extended-tests.ts b/extensions/vscode/test/extended/run-extended-tests.ts
new file mode 100644
index 00000000..5d27e7f9
--- /dev/null
+++ b/extensions/vscode/test/extended/run-extended-tests.ts
@@ -0,0 +1,537 @@
+/**
+ * Extended Integration Test Runner
+ *
+ * Downloads real CodeQL databases from GitHub (via GH_TOKEN), spawns the
+ * MCP server with annotation/cache tools enabled, and runs multi-scenario
+ * validation against real-world codebases.
+ *
+ * Usage:
+ *   GH_TOKEN=ghp_... npx tsx test/extended/run-extended-tests.ts
+ *
+ * Or via npm script:
+ *   GH_TOKEN=ghp_... npm run test:integration:extended
+ *
+ * Override config:
+ *   EXTENDED_TEST_CONFIG=/path/to/custom-repos.json npm run test:integration:extended
+ */
+
+import { existsSync, mkdirSync, readFileSync, writeFileSync } from 'fs';
+import { join, resolve } from 'path';
+import { Client } from '@modelcontextprotocol/sdk/client/index.js';
+import { StdioClientTransport } from '@modelcontextprotocol/sdk/client/stdio.js';
+import { resolveAllDatabases, type RepoConfig } from './download-databases.js';
+
+// ---------------------------------------------------------------------------
+// Configuration
+// ---------------------------------------------------------------------------
+
+interface ExtendedTestConfig {
+  description?: string;
+  repositories: RepoConfig[];
+  settings: {
+    databaseDir: string;
+    fixtureSearchDirs?: string[];
+    timeoutMs: number;
+  };
+}
+
+function loadConfig(): ExtendedTestConfig {
+  if (process.env.EXTENDED_TEST_CONFIG) {
+    const raw = readFileSync(process.env.EXTENDED_TEST_CONFIG, 'utf8');
+    return JSON.parse(raw) as ExtendedTestConfig;
+  }
+  // Resolve repos.json from the source tree (not dist/).
+  // __dirname is either test/extended/ (source) or dist/test/extended/ (compiled).
+  // Walk up to find the extension root (contains package.json), then use test/extended/repos.json.
+  let dir = __dirname;
+  for (let i = 0; i < 5; i++) {
+    if (existsSync(join(dir, 'package.json'))) {
+      const configPath = join(dir, 'test', 'extended', 'repos.json');
+      if (existsSync(configPath)) {
+        return JSON.parse(readFileSync(configPath, 'utf8')) as ExtendedTestConfig;
+      }
+    }
+    dir = resolve(dir, '..');
+  }
+  throw new Error('repos.json not found. Set EXTENDED_TEST_CONFIG or run from the extension directory.');
+}
+
+// ---------------------------------------------------------------------------
+// MCP Server connection
+// ---------------------------------------------------------------------------
+
+function resolveServerPath(): string {
+  // Walk up from __dirname to find the monorepo root (has server/dist/)
+  let dir = __dirname;
+  for (let i = 0; i < 6; i++) {
+    const candidate = join(dir, 'server', 'dist', 'codeql-development-mcp-server.js');
+    if (existsSync(candidate)) return candidate;
+    dir = resolve(dir, '..');
+  }
+  throw new Error('MCP server not found. Run "npm run build -w server" first.');
+}
+
+/**
+ * Collected server stderr log lines.
+ */
+const serverLogLines: string[] = [];
+
+async function connectToServer(
+  databaseDir: string,
+  logDir: string,
+): Promise<{ client: Client; transport: StdioClientTransport }> {
+  const serverPath = resolveServerPath();
+
+  const env: Record<string, string> = {
+    ...(process.env as Record<string, string>),
+    CODEQL_DATABASES_BASE_DIRS: databaseDir,
+    DEBUG: 'true',
+    ENABLE_ANNOTATION_TOOLS: 'true',
+    MONITORING_STORAGE_LOCATION: join(logDir, 'monitoring'),
+    TRANSPORT_MODE: 'stdio',
+  };
+
+  const transport = new StdioClientTransport({
+    args: [serverPath],
+    command: 'node',
+    env,
+    stderr: 'pipe',
+  });
+
+  // Capture server stderr for the test report
+  // The transport exposes the child process after start()
+  const origStart = transport.start.bind(transport);
+  transport.start = async function (...args: Parameters<typeof origStart>) {
+    const result = await origStart(...args);
+    // Access the spawned process stderr via the transport's internal process
+    const proc = (transport as unknown as { _process?: { stderr?: { on: (e: string, cb: (d: Buffer) => void) => void } } })._process;
+    if (proc?.stderr) {
+      proc.stderr.on('data', (chunk: Buffer) => {
+        const text = chunk.toString();
+        serverLogLines.push(text);
+        // Also echo to our stderr so the user can see live output
+        process.stderr.write(text);
+      });
+    }
+    return result;
+  };
+
+  const client = new Client({ name: 'extended-test', version: '1.0.0' });
+  await client.connect(transport);
+  return { client, transport };
+}
+
+// ---------------------------------------------------------------------------
+// Test utilities
+// ---------------------------------------------------------------------------
+
+let passed = 0;
+let failed = 0;
+
+function assert(condition: boolean, message: string): void {
+  if (!condition) {
+    failed++;
+    console.error(`   ✗ ${message}`);
+    throw new Error(message);
+  }
+  passed++;
+  console.log(`   ✓ ${message}`);
+}
+
+function callToolText(result: { content: unknown[]; isError?: boolean }): string {
+  const arr = result.content as Array<{ text?: string; type: string }>;
+  return arr[0]?.text ?? '';
+}
+
+// ---------------------------------------------------------------------------
+// Scenarios
+// ---------------------------------------------------------------------------
+
+async function scenario1_databaseDiscovery(
+  client: Client,
+  repos: RepoConfig[],
+): Promise<void> {
+  console.log('\n📋 Scenario 1: Database Discovery & Registration');
+
+  const result = await client.callTool({ arguments: {}, name: 'list_codeql_databases' });
+  assert(!result.isError, 'list_codeql_databases succeeds');
+  const text = callToolText(result);
+
+  for (const repo of repos) {
+    assert(text.includes(repo.language), `Database for ${repo.owner}/${repo.repo} listed with language ${repo.language}`);
+  }
+}
+
+async function scenario2_toolsQueryExecution(
+  client: Client,
+  databases: Map<string, string>,
+  repos: RepoConfig[],
+): Promise<void> {
+  console.log('\n📋 Scenario 2: Tools Query Execution + Auto-Caching');
+
+  for (const repo of repos) {
+    const key = `${repo.owner}/${repo.repo}`;
+    const dbPath = databases.get(key);
+    if (!dbPath) {
+      console.log(`   ⏭ Skipping ${key} — no database`);
+      continue;
+    }
+
+    // Run CallGraphFromTo if source/target functions are defined
+    if (repo.callGraphFromTo) {
+      const { sourceFunction, targetFunction } = repo.callGraphFromTo;
+      console.log(`   🔍 CallGraphFromTo on ${key}: ${sourceFunction} → ${targetFunction}`);
+
+      const result = await client.callTool({
+        arguments: {
+          database: dbPath,
+          format: 'sarif-latest',
+          queryLanguage: repo.language,
+          queryName: 'CallGraphFromTo',
+          sourceFunction,
+          targetFunction,
+        },
+        name: 'codeql_query_run',
+      });
+
+      const text = callToolText(result);
+      if (result.isError || text.includes('not found in pack')) {
+        console.log(`   ⚠️ CallGraphFromTo not available for ${repo.language} — ${text.substring(0, 100)}`);
+      } else if (text.includes('Query failed') || text.includes('extensional predicate')) {
+        console.log(`   ✗ Bug: External predicates not passed correctly for ${key}`);
+        failed++;
+      } else {
+        assert(text.length > 0, `CallGraphFromTo for ${key} produces output`);
+
+        // Verify results were cached
+        const lookupResult = await client.callTool({
+          arguments: { language: repo.language, queryName: 'CallGraphFromTo' },
+          name: 'query_results_cache_lookup',
+        });
+        assert(!lookupResult.isError, `Cache lookup for ${key} succeeds`);
+        const lookupText = callToolText(lookupResult);
+        if (lookupText.includes('"cached": true') || lookupText.includes('"cached":true')) {
+          console.log(`   ✓ Results cached for ${key}`);
+        } else {
+          console.log(`   ⚠️ Results not cached for ${key} (may be expected if query had no results)`);
+        }
+      }
+    }
+  }
+}
+
+async function scenario3_cacheRetrievalSubset(client: Client): Promise<void> {
+  console.log('\n📋 Scenario 3: Cache Retrieval & Subset Selection');
+
+  // List all cached entries
+  const lookupResult = await client.callTool({
+    arguments: {},
+    name: 'query_results_cache_lookup',
+  });
+  const lookupText = callToolText(lookupResult);
+
+  if (lookupText.includes('"cached": false') || lookupText.includes('"cached":false')) {
+    console.log('   ⏭ No cached results available — skipping subset tests');
+    return;
+  }
+
+  // Extract a cache key from the lookup result
+  const keyMatch = lookupText.match(/"cacheKey":\s*"([^"]+)"/);
+  if (!keyMatch) {
+    console.log('   ⏭ Could not extract cache key — skipping subset tests');
+    return;
+  }
+  const cacheKey = keyMatch[1];
+
+  // Retrieve with maxLines limit
+  const subsetResult = await client.callTool({
+    arguments: { cacheKey, maxLines: 20 },
+    name: 'query_results_cache_retrieve',
+  });
+  assert(!subsetResult.isError, 'query_results_cache_retrieve succeeds with maxLines');
+  const subsetText = callToolText(subsetResult);
+  assert(subsetText.includes('totalLines') || subsetText.includes('totalResults') || subsetText.includes('No cached result'), 'Subset response contains totals or graceful message');
+
+  // Retrieve with grep
+  const grepResult = await client.callTool({
+    arguments: { cacheKey, grep: 'function', maxLines: 10 },
+    name: 'query_results_cache_retrieve',
+  });
+  assert(!grepResult.isError, 'query_results_cache_retrieve succeeds with grep');
+}
+
+async function scenario4_crossDatabaseComparison(
+  client: Client,
+  _repos: RepoConfig[],
+): Promise<void> {
+  console.log('\n📋 Scenario 4: Cross-Database Query Comparison');
+
+  const compareResult = await client.callTool({
+    arguments: { queryName: 'CallGraphFrom' },
+    name: 'query_results_cache_compare',
+  });
+  assert(!compareResult.isError, 'query_results_cache_compare succeeds');
+  const compareText = callToolText(compareResult);
+  assert(compareText.includes('queryName') || compareText.includes('No cached results'), 'Compare returns query info or empty message');
+}
+
+async function scenario5_auditWorkflow(
+  client: Client,
+  repos: RepoConfig[],
+): Promise<void> {
+  console.log('\n📋 Scenario 5: Audit Workflow (Multi-Repo Finding Triage)');
+
+  // Store synthetic findings for two repos
+  for (const repo of repos.slice(0, 2)) {
+    const storeResult = await client.callTool({
+      arguments: {
+        findings: [
+          { description: `Test finding in ${repo.repo}`, line: 10, sourceLocation: 'src/main.ts', sourceType: 'RemoteFlowSource' },
+        ],
+        owner: repo.owner,
+        repo: repo.repo,
+      },
+      name: 'audit_store_findings',
+    });
+    assert(!storeResult.isError, `audit_store_findings succeeds for ${repo.owner}/${repo.repo}`);
+  }
+
+  // List findings for first repo
+  const repo0 = repos[0];
+  const listResult = await client.callTool({
+    arguments: { owner: repo0.owner, repo: repo0.repo },
+    name: 'audit_list_findings',
+  });
+  assert(!listResult.isError, 'audit_list_findings succeeds');
+  const listText = callToolText(listResult);
+  assert(listText.includes('src/main.ts'), 'Finding location appears in listing');
+
+  // Add triage notes
+  const notesResult = await client.callTool({
+    arguments: {
+      line: 10,
+      notes: 'Extended test: false positive — input validated upstream',
+      owner: repo0.owner,
+      repo: repo0.repo,
+      sourceLocation: 'src/main.ts',
+    },
+    name: 'audit_add_notes',
+  });
+  assert(!notesResult.isError, 'audit_add_notes succeeds');
+
+  // Search across all repos
+  const searchResult = await client.callTool({
+    arguments: { query: 'false positive' },
+    name: 'annotation_search',
+  });
+  assert(!searchResult.isError, 'annotation_search succeeds');
+  const searchText = callToolText(searchResult);
+  assert(searchText.includes('validated upstream'), 'Triage note found via search');
+
+  // Clear first repo only
+  const clearResult = await client.callTool({
+    arguments: { owner: repo0.owner, repo: repo0.repo },
+    name: 'audit_clear_repo',
+  });
+  assert(!clearResult.isError, 'audit_clear_repo succeeds');
+
+  // Verify second repo still has findings
+  if (repos.length >= 2) {
+    const repo1 = repos[1];
+    const list2Result = await client.callTool({
+      arguments: { owner: repo1.owner, repo: repo1.repo },
+      name: 'audit_list_findings',
+    });
+    const list2Text = callToolText(list2Result);
+    assert(list2Text.includes('src/main.ts'), 'Second repo findings preserved after clearing first');
+  }
+
+  // Final cleanup
+  for (const repo of repos.slice(0, 2)) {
+    await client.callTool({
+      arguments: { owner: repo.owner, repo: repo.repo },
+      name: 'audit_clear_repo',
+    });
+  }
+}
+
+async function scenario6_promptsAndResources(client: Client): Promise<void> {
+  console.log('\n📋 Scenario 6: Prompts & Resources Validation');
+
+  // List prompts
+  const promptsResponse = await client.listPrompts();
+  assert(promptsResponse.prompts.length > 0, `Server provides ${promptsResponse.prompts.length} prompts`);
+
+  const promptNames = promptsResponse.prompts.map(p => p.name);
+  assert(promptNames.includes('tools_query_workflow'), 'tools_query_workflow prompt available');
+  assert(promptNames.includes('test_driven_development'), 'test_driven_development prompt available');
+
+  // List resources
+  const resourcesResponse = await client.listResources();
+  assert(resourcesResponse.resources.length > 0, `Server provides ${resourcesResponse.resources.length} resources`);
+
+  const resourceUris = resourcesResponse.resources.map(r => r.uri);
+  assert(resourceUris.some(u => u.includes('server/overview')), 'Server overview resource available');
+
+  // Read a resource
+  const overviewResource = resourcesResponse.resources.find(r => r.uri.includes('server/overview'));
+  if (overviewResource) {
+    const content = await client.readResource({ uri: overviewResource.uri });
+    assert(content.contents.length > 0, 'Server overview resource has content');
+  }
+}
+
+async function scenario7_cacheCleanup(client: Client): Promise<void> {
+  console.log('\n📋 Scenario 7: Cache Cleanup');
+
+  const clearResult = await client.callTool({
+    arguments: { all: true },
+    name: 'query_results_cache_clear',
+  });
+  assert(!clearResult.isError, 'query_results_cache_clear (all) succeeds');
+
+  // Verify empty
+  const lookupResult = await client.callTool({
+    arguments: { queryName: 'CallGraphFrom' },
+    name: 'query_results_cache_lookup',
+  });
+  const text = callToolText(lookupResult);
+  assert(text.includes('false') || text.includes('count": 0') || text.includes('"count":0'), 'Cache is empty after clear');
+}
+
+// ---------------------------------------------------------------------------
+// Main
+// ---------------------------------------------------------------------------
+
+async function main(): Promise<void> {
+  console.log('═══════════════════════════════════════════════════════════');
+  console.log('🧪 CodeQL MCP Server — Extended Integration Tests');
+  console.log('═══════════════════════════════════════════════════════════');
+
+  const config = loadConfig();
+  console.log(`\n📂 Config: ${config.repositories.length} repositories`);
+  for (const r of config.repositories) {
+    console.log(`   • ${r.owner}/${r.repo} (${r.language})`);
+  }
+
+  // Resolve extension root by walking up from __dirname
+  let extensionRoot = __dirname;
+  for (let i = 0; i < 5; i++) {
+    if (existsSync(join(extensionRoot, 'package.json')) &&
+        existsSync(join(extensionRoot, 'test', 'extended', 'repos.json'))) {
+      break;
+    }
+    extensionRoot = resolve(extensionRoot, '..');
+  }
+  const logDir = resolve(extensionRoot, '.tmp', 'extended-test-logs');
+  mkdirSync(logDir, { recursive: true });
+
+  // Additional database dirs from config
+  const additionalDirs: string[] = [];
+  if (config.settings.databaseDir) {
+    additionalDirs.push(resolve(extensionRoot, config.settings.databaseDir));
+  }
+  // Add fixture search dirs (relative to extension root)
+  if (config.settings.fixtureSearchDirs) {
+    for (const dir of config.settings.fixtureSearchDirs) {
+      additionalDirs.push(resolve(extensionRoot, dir));
+    }
+  }
+
+  // Phase 1: Discover and/or download databases
+  console.log('\n📥 Phase 1: Resolving CodeQL databases...');
+  const { databases, missing } = await resolveAllDatabases(config.repositories, additionalDirs);
+
+  if (missing.length > 0) {
+    console.log(`\n⚠️ ${missing.length} database(s) not found:`);
+    for (const m of missing) {
+      console.log(`   • ${m.owner}/${m.repo} (${m.language})`);
+    }
+    console.log('\n💡 To download missing databases, use the GitHub.vscode-codeql extension:');
+    console.log('   1. Open VS Code with the CodeQL extension installed');
+    console.log('   2. Run "CodeQL: Download Database from GitHub" from the command palette');
+    console.log('   3. Enter the repository name (e.g., expressjs/express)');
+    console.log('   4. Or set CODEQL_DATABASES_BASE_DIRS to point to your databases');
+  }
+
+  if (databases.size === 0) {
+    console.log('\n✗ No databases found. Cannot run extended tests.');
+    console.log('   Download at least one database with the vscode-codeql extension first.');
+    process.exit(1);
+  }
+
+  console.log(`\n✓ ${databases.size} database(s) ready`);
+
+  // Build colon-delimited database dirs for the MCP server
+  const databaseDirs = [...new Set(Array.from(databases.values()).map(p => resolve(p, '..')))].join(':');
+
+  // Phase 2: Connect to MCP server
+  console.log('\n🔌 Phase 2: Connecting to MCP server...');
+  let client: Client;
+  let transport: StdioClientTransport;
+  try {
+    ({ client, transport } = await connectToServer(databaseDirs, logDir));
+    console.log('✓ Connected to MCP server');
+  } catch (err) {
+    console.error(`\n✗ Server connection failed: ${err}`);
+    process.exit(1);
+  }
+
+  // Phase 3: Run scenarios
+  console.log('\n🧪 Phase 3: Running test scenarios...');
+
+  const scenarios = [
+    () => scenario1_databaseDiscovery(client, config.repositories),
+    () => scenario2_toolsQueryExecution(client, databases, config.repositories),
+    () => scenario3_cacheRetrievalSubset(client),
+    () => scenario4_crossDatabaseComparison(client, config.repositories),
+    () => scenario5_auditWorkflow(client, config.repositories),
+    () => scenario6_promptsAndResources(client),
+    () => scenario7_cacheCleanup(client),
+  ];
+
+  for (const scenario of scenarios) {
+    try {
+      await scenario();
+    } catch (err) {
+      console.error(`   ✗ Scenario failed: ${err instanceof Error ? err.message : err}`);
+    }
+  }
+
+  // Cleanup
+  try { await client.close(); } catch { /* best-effort */ }
+  try { await transport.close(); } catch { /* best-effort */ }
+
+  // Write test report with server logs
+  const timestamp = new Date().toISOString().replace(/[:.]/g, '-');
+  const reportPath = join(logDir, `report-${timestamp}.txt`);
+  const serverLogPath = join(logDir, `server-${timestamp}.log`);
+
+  const reportLines = [
+    `CodeQL MCP Server — Extended Integration Test Report`,
+    `Date: ${new Date().toISOString()}`,
+    `Config: ${config.repositories.length} repositories`,
+    ...config.repositories.map(r => `   • ${r.owner}/${r.repo} (${r.language})`),
+    ``,
+    `Results: ${passed} passed, ${failed} failed`,
+    ``,
+    `Server log: ${serverLogPath}`,
+  ];
+  writeFileSync(reportPath, reportLines.join('\n') + '\n');
+  writeFileSync(serverLogPath, serverLogLines.join(''));
+
+  console.log(`\n📄 Report: ${reportPath}`);
+  console.log(`📋 Server log: ${serverLogPath} (${serverLogLines.length} chunks)`);
+
+  // Summary
+  console.log('\n═══════════════════════════════════════════════════════════');
+  console.log(`📊 Results: ${passed} passed, ${failed} failed`);
+  console.log('═══════════════════════════════════════════════════════════');
+
+  process.exit(failed > 0 ? 1 : 0);
+}
+
+main().catch((err) => {
+  console.error('Fatal error:', err);
+  process.exit(1);
+});