diff --git a/changelog.md b/changelog.md index 1d9fcee..cfaf556 100644 --- a/changelog.md +++ b/changelog.md @@ -1,5 +1,7 @@ # Changelog +## [0.4.6] - 17/04/2026 +- Scans center ## [0.4.5] - 13/04/2026 - Selected commits review diff --git a/package-lock.json b/package-lock.json index d80f979..d32796a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "codeant-cli", - "version": "0.4.4", + "version": "0.4.5", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "codeant-cli", - "version": "0.4.4", + "version": "0.4.5", "license": "MIT", "dependencies": { "@gitbeaker/rest": "^43.8.0", diff --git a/package.json b/package.json index d0c029e..f5c71bc 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "codeant-cli", - "version": "0.4.5", + "version": "0.4.6", "description": "Code review CLI tool", "type": "module", "bin": { @@ -27,7 +27,13 @@ ".": "./src/reviewHeadless.js", "./review": "./src/reviewHeadless.js", "./push-protection": "./src/utils/installPushProtectionHook.js", - "./config": "./src/utils/config.js" + "./config": "./src/utils/config.js", + "./scans/connection": "./src/scans/connectionHandler.js", + "./scans/list-repos": "./src/scans/listRepos.js", + "./scans/scan-history": "./src/scans/getScanHistory.js", + "./scans/fetch-results": "./src/scans/fetchScanResults.js", + "./scans/fetch-advanced-results": "./src/scans/fetchAdvancedScanResults.js", + "./scans/dismissed-alerts": "./src/scans/fetchDismissedAlerts.js" }, "files": [ "src" diff --git a/scans.md b/scans.md new file mode 100644 index 0000000..b8229ef --- /dev/null +++ b/scans.md @@ -0,0 +1,219 @@ +# `codeant scans` + +Fetch and explore scan results from CodeAnt. + +```bash +codeant scans [options] +``` + +--- + +## Subcommands + +### `scans orgs` + +List authenticated organizations. + +```bash +codeant scans orgs +``` + +--- + +### `scans repos` + +List repositories for an organization. 
+ +```bash +codeant scans repos [options] +``` + +**Options:** + +| Option | Description | +|--------|-------------| +| `--org ` | Organization name (auto-picked when only one is authenticated) | + +**Examples:** + +```bash +# List repos (auto-selects org if only one) +codeant scans repos + +# List repos for a specific org +codeant scans repos --org my-org +``` + +--- + +### `scans history` + +Show scan history for a repository. + +```bash +codeant scans history --repo [options] +``` + +**Options:** + +| Option | Description | +|--------|-------------| +| `--repo ` | **(required)** Repository in `owner/repo` format | +| `--branch ` | Filter by branch name | +| `--since ` | Show scans since ISO date (e.g. `2024-01-01`) | +| `--limit ` | Max results (default: `20`) | + +**Examples:** + +```bash +# Show last 20 scans for a repo +codeant scans history --repo acme/backend + +# Filter to a specific branch +codeant scans history --repo acme/backend --branch main + +# Show scans since a date +codeant scans history --repo acme/backend --since 2024-06-01 + +# Show up to 50 results +codeant scans history --repo acme/backend --limit 50 +``` + +--- + +### `scans get` + +Show scan metadata and a severity/category summary. Does not include individual findings. 
+ +```bash +codeant scans get --repo [options] +``` + +**Options:** + +| Option | Description | +|--------|-------------| +| `--repo ` | **(required)** Repository in `owner/repo` format | +| `--scan ` | Specific commit SHA to use | +| `--branch ` | Resolve latest scan on this branch | +| `--types ` | Comma-separated scan types (default: `all`) | +| `--quiet` | Suppress progress output | + +**Examples:** + +```bash +# Get latest scan summary for a repo +codeant scans get --repo acme/backend + +# Get scan for a specific commit +codeant scans get --repo acme/backend --scan abc1234 + +# Get latest scan on a branch +codeant scans get --repo acme/backend --branch main + +# Only include SAST and secrets types +codeant scans get --repo acme/backend --types sast,secrets + +# Suppress progress output +codeant scans get --repo acme/backend --quiet +``` + +--- + +### `scans results` + +Fetch full scan findings for a repository. + +```bash +codeant scans results --repo [options] +``` + +**Options:** + +| Option | Description | +|--------|-------------| +| `--repo ` | **(required)** Repository in `owner/repo` format | +| `--scan ` | Specific commit SHA to use | +| `--branch ` | Resolve latest scan on this branch | +| `--types ` | Comma-separated types: `sast`, `sca`, `secrets`, `iac`, `dead_code`, `sbom`, `anti_patterns`, `docstring`, `complex_functions`, `all` (default: `all`) | +| `--severity ` | Filter by severity (e.g. 
`critical,high`) | +| `--path ` | Filter by file path glob | +| `--check ` | Filter by check ID or name (regex) | +| `--include-dismissed` | Include dismissed findings (excluded by default) | +| `--format ` | Output format: `json`, `sarif`, `csv`, `md`, `table` (default: `json`) | +| `--output ` | Write output to file instead of stdout | +| `--fields ` | Project findings to a subset of fields (comma-separated) | +| `--limit ` | Max findings per page (default: `100`) | +| `--offset ` | Pagination offset (default: `0`) | +| `--fail-fast` | Exit `3` on first category fetch failure | +| `--no-color` | Disable ANSI color (auto-disabled when not a TTY) | +| `--quiet` | Suppress progress output on stderr | + +**Examples:** + +```bash +# Fetch all findings (JSON) +codeant scans results --repo acme/backend + +# Fetch only critical and high severity findings +codeant scans results --repo acme/backend --severity critical,high + +# Fetch SAST findings only +codeant scans results --repo acme/backend --types sast + +# Filter to a specific file path +codeant scans results --repo acme/backend --path 'src/**/*.ts' + +# Filter by check name using regex +codeant scans results --repo acme/backend --check 'sql-injection' + +# Output as a Markdown table +codeant scans results --repo acme/backend --format md + +# Output as SARIF to a file +codeant scans results --repo acme/backend --format sarif --output results.sarif + +# Include dismissed findings +codeant scans results --repo acme/backend --include-dismissed + +# Paginate through results +codeant scans results --repo acme/backend --limit 50 --offset 100 + +# Project only specific fields +codeant scans results --repo acme/backend --fields id,severity,message,path +``` + +**Exit codes:** + +| Code | Meaning | +|------|---------| +| `0` | Success | +| `1` | General error | +| `3` | Category fetch failure (with `--fail-fast`) | + +--- + +### `scans dismissed` + +List dismissed alerts for a repository. 
+ +```bash +codeant scans dismissed --repo [options] +``` + +**Options:** + +| Option | Description | +|--------|-------------| +| `--repo ` | **(required)** Repository in `owner/repo` format | +| `--analysis-type ` | Analysis type: `security` or `secrets` (default: `security`) | + +**Examples:** + +```bash +# List dismissed security alerts +codeant scans dismissed --repo acme/backend + +# List dismissed secrets alerts +codeant scans dismissed --repo acme/backend --analysis-type secrets +``` diff --git a/src/commands/scans/dismissed.js b/src/commands/scans/dismissed.js new file mode 100644 index 0000000..d9751d3 --- /dev/null +++ b/src/commands/scans/dismissed.js @@ -0,0 +1,26 @@ +import { fetchDismissedAlerts } from '../../scans/fetchDismissedAlerts.js'; + +/** + * codeant scans dismissed --repo [--analysis-type security|secrets] + */ +export async function runDismissed({ repo, analysisType = 'security' } = {}) { + if (!repo) { + const err = new Error('--repo is required'); + err.exitCode = 1; + throw err; + } + + const result = await fetchDismissedAlerts(repo, analysisType); + if (!result.success) { + const err = new Error(result.error || 'Failed to fetch dismissed alerts'); + err.exitCode = 1; + throw err; + } + + return { + repo, + analysis_type: analysisType, + total: result.dismissedAlerts.length, + dismissed_alerts: result.dismissedAlerts, + }; +} diff --git a/src/commands/scans/formatters/csv.js b/src/commands/scans/formatters/csv.js new file mode 100644 index 0000000..c1f2043 --- /dev/null +++ b/src/commands/scans/formatters/csv.js @@ -0,0 +1,24 @@ +const HEADERS = ['id', 'category', 'severity', 'file_path', 'line_number', 'check_id', 'check_name', 'message', 'cwe', 'cve', 'dismissed']; + +function csvCell(val) { + if (val === null || val === undefined) return ''; + const s = String(val); + if (s.includes(',') || s.includes('"') || s.includes('\n')) { + return '"' + s.replace(/"/g, '""') + '"'; + } + return s; +} + +export default { + name: 'csv', + mime: 
'text/csv', + extension: '.csv', + render(envelope) { + const { findings = [] } = envelope; + const rows = [HEADERS.join(',')]; + for (const f of findings) { + rows.push(HEADERS.map((h) => csvCell(f[h])).join(',')); + } + return rows.join('\n'); + }, +}; diff --git a/src/commands/scans/formatters/index.js b/src/commands/scans/formatters/index.js new file mode 100644 index 0000000..e71ee0e --- /dev/null +++ b/src/commands/scans/formatters/index.js @@ -0,0 +1,12 @@ +/** + * Formatter registry. + * Contract: { name, mime, extension, render(envelope) β†’ string } + * Add a new format = drop a file in formatters/ + one line here. + */ +import json from './json.js'; +import sarif from './sarif.js'; +import csv from './csv.js'; +import md from './md.js'; +import table from './table.js'; + +export const FORMATTERS = { json, sarif, csv, md, table }; diff --git a/src/commands/scans/formatters/json.js b/src/commands/scans/formatters/json.js new file mode 100644 index 0000000..687a5c2 --- /dev/null +++ b/src/commands/scans/formatters/json.js @@ -0,0 +1,8 @@ +export default { + name: 'json', + mime: 'application/json', + extension: '.json', + render(envelope) { + return JSON.stringify(envelope, null, 2); + }, +}; diff --git a/src/commands/scans/formatters/md.js b/src/commands/scans/formatters/md.js new file mode 100644 index 0000000..2db2b2f --- /dev/null +++ b/src/commands/scans/formatters/md.js @@ -0,0 +1,63 @@ +const SEV_EMOJI = { critical: 'πŸ”΄', high: '🟠', medium: '🟑', low: 'πŸ”΅', info: 'βšͺ', unknown: '⚫' }; + +export default { + name: 'md', + mime: 'text/markdown', + extension: '.md', + render(envelope) { + const { findings = [], repo, scan, summary, generated_at, errors = [] } = envelope; + const lines = []; + + lines.push(`# CodeAnt Scan Results`); + lines.push(''); + lines.push(`**Repo:** \`${repo}\``); + if (scan) { + lines.push(`**Branch:** \`${scan.branch || 'unknown'}\` `); + lines.push(`**Commit:** \`${scan.commit_id || 'unknown'}\` `); + } + 
lines.push(`**Generated:** ${generated_at}`); + lines.push(''); + + // Summary + lines.push('## Summary'); + lines.push(''); + lines.push(`| Severity | Count |`); + lines.push(`|----------|-------|`); + for (const [sev, count] of Object.entries(summary.by_severity || {})) { + if (count > 0) { + lines.push(`| ${SEV_EMOJI[sev] || ''} ${sev} | ${count} |`); + } + } + lines.push(''); + + if (errors.length > 0) { + lines.push('## Errors'); + lines.push(''); + for (const e of errors) { + lines.push(`- **${e.category}**: ${e.error}`); + } + lines.push(''); + } + + if (findings.length === 0) { + lines.push('*No findings.*'); + return lines.join('\n'); + } + + lines.push('## Findings'); + lines.push(''); + lines.push('| Severity | Category | File | Line | Check | Message |'); + lines.push('|----------|----------|------|------|-------|---------|'); + + for (const f of findings) { + const sev = `${SEV_EMOJI[f.severity] || ''} ${f.severity}`; + const file = f.file_path; + const line = f.line_number; + const check = f.check_id || f.check_name || ''; + const msg = (f.message || '').replace(/\|/g, '\\|').slice(0, 120); + lines.push(`| ${sev} | ${f.category} | \`${file}\` | ${line} | \`${check}\` | ${msg} |`); + } + + return lines.join('\n'); + }, +}; diff --git a/src/commands/scans/formatters/sarif.js b/src/commands/scans/formatters/sarif.js new file mode 100644 index 0000000..581ed8a --- /dev/null +++ b/src/commands/scans/formatters/sarif.js @@ -0,0 +1,105 @@ +/** + * SARIF 2.1.0 formatter. 
+ * Spec: https://docs.oasis-open.org/sarif/sarif/v2.1.0/sarif-v2.1.0.html + */ + +const SEV_TO_SARIF = { + critical: 'error', + high: 'error', + medium: 'warning', + low: 'note', + info: 'none', + unknown: 'none', +}; + +export default { + name: 'sarif', + mime: 'application/sarif+json', + extension: '.sarif', + render(envelope) { + const { findings = [], repo, scan, tool_version } = envelope; + + // Build per-category rule sets + const ruleMap = new Map(); + for (const f of findings) { + const key = `${f.category}/${f.check_id || f.check_name}`; + if (!ruleMap.has(key)) { + ruleMap.set(key, { + id: key, + name: f.check_name || f.check_id || 'issue', + shortDescription: { text: f.check_name || f.check_id || 'issue' }, + properties: { category: f.category, severity: f.severity }, + }); + } + } + + const rules = [...ruleMap.values()]; + + const results = findings.map((f) => { + const ruleId = `${f.category}/${f.check_id || f.check_name}`; + const result = { + ruleId, + level: SEV_TO_SARIF[f.severity] ?? 'warning', + message: { text: f.message || f.check_name || 'issue' }, + locations: [ + { + physicalLocation: { + artifactLocation: { uri: f.file_path, uriBaseId: '%SRCROOT%' }, + region: { + startLine: f.line_number || 1, + endLine: (f.line_range && f.line_range.length > 1) + ? 
f.line_range[f.line_range.length - 1] + : f.line_number || 1, + }, + }, + }, + ], + properties: { + findingId: f.id, + category: f.category, + severity: f.severity, + dismissed: f.dismissed, + }, + }; + + if (f.cwe) result.taxa = [{ toolComponent: { name: 'CWE' }, id: f.cwe }]; + if (f.package) result.properties.package = f.package; + + return result; + }); + + const sarif = { + $schema: 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json', + version: '2.1.0', + runs: [ + { + tool: { + driver: { + name: 'codeant-cli', + version: tool_version || '0.0.0', + informationUri: 'https://www.codeant.ai', + rules, + }, + }, + versionControlProvenance: scan + ? [ + { + repositoryUri: repo, + revisionId: scan.commit_id, + branch: scan.branch || undefined, + }, + ] + : undefined, + results, + properties: { + repo, + generatedAt: envelope.generated_at, + schemaVersion: envelope.schema_version, + }, + }, + ], + }; + + return JSON.stringify(sarif, null, 2); + }, +}; diff --git a/src/commands/scans/formatters/table.js b/src/commands/scans/formatters/table.js new file mode 100644 index 0000000..9ed5ab1 --- /dev/null +++ b/src/commands/scans/formatters/table.js @@ -0,0 +1,83 @@ +// Terminal ASCII table β€” no external dependencies + +let noColor = false; +export function setNoColor(v) { noColor = v; } + +const SEV_COLOR = { + critical: '\x1b[35m', // magenta + high: '\x1b[31m', // red + medium: '\x1b[33m', // yellow + low: '\x1b[34m', // blue + info: '\x1b[37m', // white + unknown: '\x1b[90m', // dark gray +}; +const RESET = '\x1b[0m'; + +function color(text, code) { + if (noColor || !process.stdout.isTTY) return text; + return `${code}${text}${RESET}`; +} + +function pad(str, len) { + const s = String(str ?? ''); + return s.length >= len ? 
s.slice(0, len) : s + ' '.repeat(len - s.length); +} + +const COLS = [ + { key: 'severity', label: 'SEVERITY', width: 10 }, + { key: 'category', label: 'CATEGORY', width: 17 }, + { key: 'file_path', label: 'FILE', width: 40 }, + { key: 'line_number', label: 'LINE', width: 6 }, + { key: 'check_id', label: 'CHECK', width: 20 }, + { key: 'message', label: 'MESSAGE', width: 60 }, +]; + +export default { + name: 'table', + mime: 'text/plain', + extension: '.txt', + render(envelope) { + const { findings = [], summary, errors = [] } = envelope; + const lines = []; + + // Header + const sep = COLS.map((c) => '-'.repeat(c.width)).join('-+-'); + const header = COLS.map((c) => pad(c.label, c.width)).join(' | '); + lines.push(sep); + lines.push(header); + lines.push(sep); + + for (const f of findings) { + const sev = f.severity; + const cols = COLS.map((c) => { + let val = String(f[c.key] ?? ''); + if (c.key === 'file_path' && val.length > c.width) { + val = '…' + val.slice(-(c.width - 1)); + } + val = pad(val, c.width); + if (c.key === 'severity') { + val = color(val, SEV_COLOR[sev] ?? ''); + } + return val; + }); + lines.push(cols.join(' | ')); + } + + lines.push(sep); + lines.push(`Total: ${summary?.total ?? findings.length} ` + + Object.entries(summary?.by_severity ?? {}) + .filter(([, n]) => n > 0) + .map(([s, n]) => color(`${s}:${n}`, SEV_COLOR[s] ?? 
'')) + .join(' ') + ); + + if (errors.length > 0) { + lines.push(''); + for (const e of errors) { + lines.push(color(`[error] ${e.category}: ${e.error}`, '\x1b[31m')); + } + } + + return lines.join('\n'); + }, +}; diff --git a/src/commands/scans/get.js b/src/commands/scans/get.js new file mode 100644 index 0000000..08cf570 --- /dev/null +++ b/src/commands/scans/get.js @@ -0,0 +1,64 @@ +import { resolveScan } from './lib/resolveScan.js'; +import { parseTypes } from './lib/categories.js'; +import { normalizeIssue, normalizeSeverity } from './lib/normalize.js'; +import { progress } from './lib/log.js'; + +const SEV_KEYS = ['critical', 'high', 'medium', 'low', 'info', 'unknown']; + +/** + * codeant scans get --repo [--scan ] [--branch ] [--types ] + * Returns metadata + summary (severity/category counts). No findings array. + */ +export async function runGet({ repo, scan, branch, types } = {}) { + if (!repo) { + const err = new Error('--repo is required'); + err.exitCode = 1; + throw err; + } + + progress(`resolving scan for ${repo}…`); + const scanMeta = await resolveScan({ repo, scan, branch }); + + const categories = parseTypes(types); + progress(`fetching ${categories.map((c) => c.key).join(', ')}…`); + + const settled = await Promise.allSettled( + categories.map((c) => c.fetcher(repo, scanMeta.commit_id)) + ); + + const bySeverity = Object.fromEntries(SEV_KEYS.map((k) => [k, 0])); + const byCategory = {}; + const errors = []; + let total = 0; + + for (let i = 0; i < settled.length; i++) { + const cat = categories[i]; + const s = settled[i]; + + if (s.status === 'rejected' || !s.value?.success) { + const msg = s.status === 'rejected' ? 
s.reason?.message : s.value?.error; + errors.push({ category: cat.key, error: msg || 'unknown error' }); + byCategory[cat.key] = 0; + continue; + } + + const issues = s.value.issues || []; + byCategory[cat.key] = issues.length; + total += issues.length; + + for (const issue of issues) { + const normalized = normalizeIssue(issue, cat.key); + if (normalized) { + bySeverity[normalized.severity] = (bySeverity[normalized.severity] ?? 0) + 1; + } + } + } + + return { + repo, + scan: scanMeta, + categories: categories.map((c) => c.key), + summary: { total, by_severity: bySeverity, by_category: byCategory }, + errors, + }; +} diff --git a/src/commands/scans/history.js b/src/commands/scans/history.js new file mode 100644 index 0000000..990ca1e --- /dev/null +++ b/src/commands/scans/history.js @@ -0,0 +1,43 @@ +import { getScanHistory } from '../../scans/getScanHistory.js'; + +/** + * codeant scans history --repo [--branch ] [--since ] [--limit ] + */ +export async function runHistory({ repo, branch, since, limit = 20 } = {}) { + if (!repo) { + const err = new Error('--repo is required'); + err.exitCode = 1; + throw err; + } + + const result = await getScanHistory(repo); + if (!result.success) { + const err = new Error(result.error || 'Failed to fetch scan history'); + err.exitCode = 1; + throw err; + } + + let history = [...(result.scanHistory || [])].sort( + (a, b) => new Date(b.timestamp) - new Date(a.timestamp) + ); + + if (branch) { + history = history.filter((s) => s.branch === branch); + } + + if (since) { + const sinceDate = new Date(since); + if (isNaN(sinceDate)) { + const err = new Error(`Invalid --since date: ${since}`); + err.exitCode = 1; + throw err; + } + history = history.filter((s) => new Date(s.timestamp) >= sinceDate); + } + + if (limit > 0) { + history = history.slice(0, limit); + } + + return { repo, branch: branch || null, total: history.length, scans: history }; +} diff --git a/src/commands/scans/index.js b/src/commands/scans/index.js new file mode 
100644 index 0000000..a0c4c9f --- /dev/null +++ b/src/commands/scans/index.js @@ -0,0 +1,121 @@ +import { runOrgs } from './orgs.js'; +import { runRepos } from './repos.js'; +import { runHistory } from './history.js'; +import { runGet } from './get.js'; +import { runResults } from './results.js'; +import { runDismissed } from './dismissed.js'; +import { setQuiet, setNoColor } from './lib/log.js'; +import { setNoColor as tableSetNoColor } from './formatters/table.js'; + +/** + * Register all `codeant scans ` subcommands. + * + * @param {import('commander').Command} program + * @param {{ runCmd: Function }} helpers + */ +export default function registerScansCommands(program, { runCmd }) { + const scans = program.command('scans').description('Fetch and explore scan results'); + + // ── orgs ─────────────────────────────────────────────────────────────────── + scans + .command('orgs') + .description('List authenticated organizations') + .action(() => runCmd(() => runOrgs())); + + // ── repos ────────────────────────────────────────────────────────────────── + scans + .command('repos') + .description('List repositories') + .option('--org ', 'Organization name (auto-picked when only one is authenticated)') + .action((opts) => runCmd(() => runRepos({ org: opts.org }))); + + // ── history ──────────────────────────────────────────────────────────────── + scans + .command('history') + .description('Show scan history for a repository') + .requiredOption('--repo ', 'Repository (owner/repo)') + .option('--branch ', 'Filter by branch name') + .option('--since ', 'Show scans since ISO date') + .option('--limit ', 'Max results (default: 20)', parseInt, 20) + .action((opts) => + runCmd(() => runHistory({ repo: opts.repo, branch: opts.branch, since: opts.since, limit: opts.limit })) + ); + + // ── get ──────────────────────────────────────────────────────────────────── + scans + .command('get') + .description('Scan metadata + severity/category summary (no findings)') + 
.requiredOption('--repo ', 'Repository (owner/repo)') + .option('--scan ', 'Specific commit SHA to use') + .option('--branch ', 'Resolve latest scan on this branch') + .option('--types ', 'Comma-separated scan types (default: all)', 'all') + .option('--quiet', 'Suppress progress output') + .action((opts) => { + setQuiet(opts.quiet); + runCmd(() => runGet({ repo: opts.repo, scan: opts.scan, branch: opts.branch, types: opts.types })); + }); + + // ── results ──────────────────────────────────────────────────────────────── + scans + .command('results') + .description('Fetch full scan findings') + .requiredOption('--repo ', 'Repository (owner/repo)') + .option('--scan ', 'Specific commit SHA to use') + .option('--branch ', 'Resolve latest scan on this branch') + .option( + '--types ', + 'Comma-separated types: sast,sca,secrets,iac,dead_code,sbom,anti_patterns,docstring,complex_functions,all', + 'all' + ) + .option('--severity ', 'Filter by severity (e.g. critical,high)') + .option('--path ', 'Filter by file path glob') + .option('--check ', 'Filter by check ID or name (regex)') + .option('--include-dismissed', 'Include dismissed findings (excluded by default)') + .option('--format ', 'Output format: json|sarif|csv|md|table (default: json)', 'json') + .option('--output ', 'Write output to file instead of stdout') + .option('--fields ', 'Project findings to subset of fields (comma-separated)') + .option('--limit ', 'Max findings per page (default: 100)', parseInt, 100) + .option('--offset ', 'Pagination offset (default: 0)', parseInt, 0) + .option('--fail-fast', 'Exit 3 on first category fetch failure') + .option('--no-color', 'Disable ANSI color (auto-disabled when not a TTY)') + .option('--quiet', 'Suppress progress output on stderr') + .action(async (opts) => { + setQuiet(opts.quiet); + if (opts.noColor) { + setNoColor(true); + tableSetNoColor(true); + } + + try { + await runResults({ + repo: opts.repo, + scan: opts.scan, + branch: opts.branch, + types: opts.types, + 
severity: opts.severity, + path: opts.path, + check: opts.check, + includeDismissed: opts.includeDismissed || false, + format: opts.format, + output: opts.output, + fields: opts.fields, + limit: opts.limit, + offset: opts.offset, + failFast: opts.failFast || false, + }); + } catch (err) { + process.stderr.write(JSON.stringify({ error: err.message }) + '\n'); + process.exit(err.exitCode ?? 1); + } + }); + + // ── dismissed ────────────────────────────────────────────────────────────── + scans + .command('dismissed') + .description('List dismissed alerts for a repository') + .requiredOption('--repo ', 'Repository (owner/repo)') + .option('--analysis-type ', 'Analysis type: security|secrets (default: security)', 'security') + .action((opts) => + runCmd(() => runDismissed({ repo: opts.repo, analysisType: opts.analysisType })) + ); +} diff --git a/src/commands/scans/lib/categories.js b/src/commands/scans/lib/categories.js new file mode 100644 index 0000000..d9eaf11 --- /dev/null +++ b/src/commands/scans/lib/categories.js @@ -0,0 +1,51 @@ +import { + fetchSastResults, + fetchAntiPatternsResults, + fetchDocstringResults, + fetchComplexFunctionsResults, +} from '../../../scans/fetchScanResults.js'; +import { + fetchScaResults, + fetchSbomResults, + fetchSecretsResults, + fetchIacResults, + fetchDeadCodeResults, +} from '../../../scans/fetchAdvancedScanResults.js'; + +/** + * Category registry. Add one row to support a new scan type throughout the CLI. 
+ * kind: 'code' | 'package' | 'inventory' | 'secret' | 'config' + */ +export const CATEGORIES = { + sast: { fetcher: fetchSastResults, kind: 'code' }, + anti_patterns: { fetcher: fetchAntiPatternsResults, kind: 'code' }, + docstring: { fetcher: fetchDocstringResults, kind: 'code' }, + complex_functions: { fetcher: fetchComplexFunctionsResults, kind: 'code' }, + sca: { fetcher: fetchScaResults, kind: 'package' }, + sbom: { fetcher: fetchSbomResults, kind: 'inventory' }, + secrets: { fetcher: fetchSecretsResults, kind: 'secret' }, + iac: { fetcher: fetchIacResults, kind: 'config' }, + dead_code: { fetcher: fetchDeadCodeResults, kind: 'code' }, +}; + +/** + * Parse comma-separated --types value. 'all' expands to every key. + * Returns array of { key, fetcher, kind }. + * Throws with exit code 1 on unknown type name. + */ +export function parseTypes(typesStr) { + const keys = Object.keys(CATEGORIES); + const raw = typesStr ? typesStr.split(',').map((s) => s.trim()).filter(Boolean) : ['all']; + + const expanded = raw.includes('all') ? keys : raw; + + const unknown = expanded.filter((k) => !CATEGORIES[k]); + if (unknown.length > 0) { + const err = new Error(`Unknown type(s): ${unknown.join(', ')}. Valid: ${keys.join(', ')}`); + err.exitCode = 1; + err.detail = { error: `Unknown type(s): ${unknown.join(', ')}`, valid: keys }; + throw err; + } + + return expanded.map((k) => ({ key: k, ...CATEGORIES[k] })); +} diff --git a/src/commands/scans/lib/dismissMatch.js b/src/commands/scans/lib/dismissMatch.js new file mode 100644 index 0000000..ce8a4d7 --- /dev/null +++ b/src/commands/scans/lib/dismissMatch.js @@ -0,0 +1,32 @@ +/** + * Determine if a NormalizedFinding matches any entry in dismissedAlerts. 
+ * + * Dismiss key format: "file_path||::||context_code_block_or_line||::||test_id_or_type" + * + * @param {object} finding - NormalizedFinding + * @param {Array} dismissedAlerts - from fetchDismissedAlerts() + * @returns {object|null} matching dismiss entry, or null + */ +export function findDismissMatch(finding, dismissedAlerts) { + if (!dismissedAlerts || dismissedAlerts.length === 0) return null; + + for (const d of dismissedAlerts) { + // File path must match (tail-match to handle prefix stripping differences) + const fp = finding.file_path ?? ''; + const dfp = d.file_path ?? ''; + if (dfp && fp && !fp.endsWith(dfp) && !dfp.endsWith(fp) && fp !== dfp) continue; + + // test_id / type must match check_id when present + if (d.test_id && finding.check_id && d.test_id !== finding.check_id) continue; + + // Line number match when available (secrets format uses line_number as part1) + if (d.line_number && finding.line_number && d.line_number !== finding.line_number) continue; + + return d; + } + return null; +} + +export function isDismissed(finding, dismissedAlerts) { + return findDismissMatch(finding, dismissedAlerts) !== null; +} diff --git a/src/commands/scans/lib/emit.js b/src/commands/scans/lib/emit.js new file mode 100644 index 0000000..3a2b4fb --- /dev/null +++ b/src/commands/scans/lib/emit.js @@ -0,0 +1,25 @@ +import fs from 'fs'; +import path from 'path'; + +/** + * Emit rendered content. + * - If outputPath is set: write bytes to file, print JSON envelope to stdout. + * - Otherwise: write to stdout. 
+ * + * @param {string} content - rendered string + * @param {string|null} outputPath + * @param {number} findingsCount - for the file envelope + */ +export function emit(content, outputPath, findingsCount = 0) { + if (outputPath) { + const resolved = path.resolve(outputPath); + fs.writeFileSync(resolved, content, 'utf8'); + const bytes = Buffer.byteLength(content, 'utf8'); + process.stdout.write( + JSON.stringify({ output: resolved, bytes, findings: findingsCount }, null, 2) + '\n' + ); + } else { + process.stdout.write(content); + if (!content.endsWith('\n')) process.stdout.write('\n'); + } +} diff --git a/src/commands/scans/lib/filters.js b/src/commands/scans/lib/filters.js new file mode 100644 index 0000000..e4a70df --- /dev/null +++ b/src/commands/scans/lib/filters.js @@ -0,0 +1,72 @@ +import { minimatch } from 'minimatch'; +import { isDismissed, findDismissMatch } from './dismissMatch.js'; +import { normalizeSeverity } from './normalize.js'; + +const SEV_RANK = { critical: 5, high: 4, medium: 3, low: 2, info: 1, unknown: 0 }; + +/** + * Apply all filters to findings in-place (returns new array). + * + * @param {Array} findings - NormalizedFinding[] + * @param {object} opts + * @param {string[]|null} opts.severity - allowed severity levels (e.g. 
['critical','high']) + * @param {string|null} opts.pathGlob - minimatch glob for file_path + * @param {string|null} opts.checkRegex - regex applied to check_id + check_name + * @param {Array} opts.dismissedAlerts - from fetchDismissedAlerts() + * @param {boolean} opts.includeDismissed + * @returns {Array} filtered NormalizedFinding[] (dismissed field annotated) + */ +export function applyFilters(findings, { + severity = null, + pathGlob = null, + checkRegex = null, + dismissedAlerts = [], + includeDismissed = false, +} = {}) { + // Pre-compile regex β€” strip Python/PCRE inline flag (?i) and fold into JS flag + let checkRe = null; + if (checkRegex) { + try { + let pattern = checkRegex; + if (pattern.startsWith('(?i)')) pattern = pattern.slice(4); + checkRe = new RegExp(pattern, 'i'); + } catch { + const err = new Error(`Invalid --check regex: ${checkRegex}`); + err.exitCode = 1; + throw err; + } + } + + // Severity set + const sevSet = severity && severity.length > 0 + ? new Set(severity.map((s) => normalizeSeverity(s))) + : null; + + const result = []; + for (const f of findings) { + // Annotate dismiss status + const match = findDismissMatch(f, dismissedAlerts); + f.dismissed = match !== null; + if (match) { + f.dismiss_info = { + reason: match.reason_for_dismiss || null, + comment: match.comment_for_dismiss || null, + }; + } + + // Filter dismissed + if (f.dismissed && !includeDismissed) continue; + + // Filter by severity + if (sevSet && !sevSet.has(f.severity)) continue; + + // Filter by path glob + if (pathGlob && !minimatch(f.file_path, pathGlob, { matchBase: true })) continue; + + // Filter by check regex + if (checkRe && !checkRe.test(f.check_id) && !checkRe.test(f.check_name)) continue; + + result.push(f); + } + return result; +} diff --git a/src/commands/scans/lib/log.js b/src/commands/scans/lib/log.js new file mode 100644 index 0000000..e5c92c8 --- /dev/null +++ b/src/commands/scans/lib/log.js @@ -0,0 +1,25 @@ +// stderr progress logger β€” respects 
--quiet and --no-color + +let quietMode = false; +let noColorMode = false; + +export function setQuiet(q) { quietMode = !!q; } +export function setNoColor(nc) { noColorMode = !!nc; } + +function isTTY() { return process.stderr.isTTY === true; } + +function dim(text) { + if (noColorMode || !isTTY()) return text; + return `\x1b[2m${text}\x1b[0m`; +} + +/** Write a progress line to stderr (suppressed by --quiet). */ +export function progress(msg) { + if (quietMode) return; + process.stderr.write(dim(`[progress] ${msg}`) + '\n'); +} + +/** Write a JSON error object to stderr (never suppressed). */ +export function logError(obj) { + process.stderr.write(JSON.stringify(obj) + '\n'); +} diff --git a/src/commands/scans/lib/normalize.js b/src/commands/scans/lib/normalize.js new file mode 100644 index 0000000..5a3b77d --- /dev/null +++ b/src/commands/scans/lib/normalize.js @@ -0,0 +1,155 @@ +import { createRequire } from 'module'; + +const require = createRequire(import.meta.url); +const pkg = require('../../../../package.json'); + +const SEV_MAP = { + critical: 'critical', blocker: 'critical', p0: 'critical', + high: 'high', error: 'high', major: 'high', + medium: 'medium', warning: 'medium', moderate: 'medium', p2: 'medium', + low: 'low', note: 'low', minor: 'low', p3: 'low', + info: 'info', informational: 'info', information: 'info', p4: 'info', +}; + +export function normalizeSeverity(raw) { + if (!raw) return 'unknown'; + return SEV_MAP[String(raw).toLowerCase()] ?? 'unknown'; +} + +/** Stable djb2-variant hash β†’ 4-char hex suffix for finding IDs. 
/** Stable djb2-variant hash -> up to 4-char hex suffix for finding IDs. */
function shortHash(str) {
  let h = 5381;
  for (let i = 0; i < str.length; i++) {
    // XOR variant of djb2; >>> 0 keeps the accumulator an unsigned 32-bit int.
    h = (((h << 5) + h) ^ str.charCodeAt(i)) >>> 0;
  }
  return h.toString(16).slice(0, 4);
}

/**
 * Build a stable finding ID: category:file_path:line:check_id:hash.
 * The trailing hash disambiguates distinct findings at the same location.
 */
function buildId(category, filePath, lineNumber, checkId, message) {
  const hash = shortHash(`${filePath}:${lineNumber}:${checkId}:${message}`);
  return `${category}:${filePath}:${lineNumber}:${checkId || 'nocheck'}:${hash}`;
}

/**
 * Normalize a raw issue (from any fetcher) into a NormalizedFinding.
 *
 * Field fallbacks cover the different shapes returned by the basic,
 * advanced, and dismissed-alert endpoints.
 *
 * @param {object} issue - raw issue from fetcher
 * @param {string} category - category key (e.g. 'sast')
 * @returns {object|null} NormalizedFinding, or null for a falsy issue
 */
export function normalizeIssue(issue, category) {
  if (!issue) return null;

  const filePath = issue.file_path || 'unknown';
  const lineNumber = issue.line_number || issue.start_line || issue.line || 1;
  const lineRange = issue.file_line_range || issue.line_range || [lineNumber];
  const checkId = issue.check_id || issue.test_id || issue.rule_id || issue.vulnerability_id || issue.cve_id || '';
  const checkName = issue.check_name || issue.issue_text || issue.message || issue.description || issue.name || '';
  const message = issue.message || issue.issue_text || issue.description || checkName || '';
  const severity = normalizeSeverity(issue.severity);

  // Package info only makes sense for dependency-centric categories.
  let packageInfo = null;
  if (category === 'sca' || category === 'sbom') {
    const name = issue.package_name || issue.name || null;
    if (name) {
      packageInfo = {
        name,
        version: issue.version || issue.package_version || null,
        ecosystem: issue.ecosystem || issue.package_manager || null,
      };
    }
  }

  // CWE / CVE identifiers, when the fetcher provides them.
  const cwe = issue.cwe || issue.cwe_id || null;
  const cve = issue.cve || issue.cve_id || issue.vulnerability_id || null;

  // Category-specific metadata. Null/undefined checks use `!= null` so that
  // legitimate zero values (scores, complexity) are preserved.
  const metadata = {};
  if (category === 'iac') {
    if (issue.guideline) metadata.guideline = issue.guideline;
    if (issue.resource) metadata.resource = issue.resource;
  } else if (category === 'sast' || category === 'anti_patterns') {
    if (issue.issue_confidence) metadata.confidence = issue.issue_confidence;
    if (issue.issue_type || issue.test_type) metadata.issue_type = issue.issue_type || issue.test_type;
  } else if (category === 'secrets') {
    if (issue.type || issue.secret_type) metadata.secret_type = issue.type || issue.secret_type;
    if (issue.confidence_score != null) metadata.confidence_score = issue.confidence_score;
  } else if (category === 'dead_code') {
    if (issue.type) metadata.type = issue.type;
    if (issue.confidence) metadata.confidence = issue.confidence;
  } else if (category === 'complex_functions') {
    if (issue.complexity != null) metadata.complexity = issue.complexity;
  } else if (category === 'sca') {
    if (issue.cvss_score != null) metadata.cvss_score = issue.cvss_score;
    if (issue.fix_version) metadata.fix_version = issue.fix_version;
  }

  const id = buildId(category, filePath, lineNumber, checkId, message);

  return {
    id,
    category,
    severity,
    file_path: filePath,
    line_number: lineNumber,
    line_range: Array.isArray(lineRange) ? lineRange : [lineNumber],
    check_id: checkId,
    check_name: checkName,
    message,
    rule_id: checkId,
    cwe: cwe ? String(cwe) : null,
    cve: cve ? String(cve) : null,
    package: packageInfo,
    metadata,
    dismissed: false,
    dismiss_info: null,
  };
}

/**
 * Build the outer envelope object.
 *
 * `summary.total` echoes `pagination.total`, while `by_severity` /
 * `by_category` are counted from the findings actually passed in (typically
 * one page). Callers that want pre-pagination summary counts overwrite
 * `summary` afterwards (see results.js).
 */
export function buildEnvelope({
  repo,
  scan,
  categories,
  findings,
  pagination,
  filters,
  errors,
}) {
  const bySeverity = { critical: 0, high: 0, medium: 0, low: 0, info: 0, unknown: 0 };
  const byCategory = {};

  for (const f of findings) {
    bySeverity[f.severity] = (bySeverity[f.severity] ?? 0) + 1;
    byCategory[f.category] = (byCategory[f.category] ?? 0) + 1;
  }

  return {
    schema_version: '1.0',
    tool: 'codeant-cli',
    tool_version: pkg.version,
    generated_at: new Date().toISOString(),
    repo,
    scan,
    categories,
    summary: {
      total: pagination.total,
      by_severity: bySeverity,
      by_category: byCategory,
    },
    pagination,
    filters,
    errors,
    findings,
  };
}

// --- src/commands/scans/lib/paginate.js ---
/**
 * Slice findings and return pagination metadata.
 *
 * A non-positive limit means "no limit" (the full array is returned);
 * a negative offset is clamped to 0.
 *
 * @param {Array} findings - full (sorted, filtered) findings array
 * @param {{ limit?: number, offset?: number }} opts
 * @returns {{ items: Array, pagination: object }}
 */
export function paginate(findings, { limit = 100, offset = 0 } = {}) {
  const total = findings.length;
  const safeOffset = Math.max(0, offset);
  const safeLimit = limit > 0 ? limit : total;
  const items = findings.slice(safeOffset, safeOffset + safeLimit);
  return {
    items,
    pagination: {
      limit: safeLimit,
      offset: safeOffset,
      returned: items.length,
      total,
      has_more: safeOffset + items.length < total,
    },
  };
}
// --- src/commands/scans/lib/resolveScan.js ---
import { getScanHistory } from '../../../scans/getScanHistory.js';

/** Build an Error carrying a CLI exit code. */
function withExitCode(code, message) {
  const err = new Error(message);
  err.exitCode = code;
  return err;
}

/**
 * Resolve the scan to use.
 * Precedence: --scan (explicit SHA) > --branch (latest on branch) > global latest.
 *
 * @param {{ repo: string, scan?: string, branch?: string }} opts
 * @returns {Promise<{ commit_id, branch, timestamp, status, resolved_by }>}
 * @throws {Error} exitCode=1 when history cannot be fetched (non-explicit
 *   path), exitCode=2 when no scan matches
 */
export async function resolveScan({ repo, scan, branch }) {
  if (scan) {
    // Explicit SHA: history is fetched only to enrich metadata. A failed
    // fetch is deliberately tolerated — the user's SHA is honored either way.
    const { success, scanHistory } = await getScanHistory(repo);
    const history = success ? (scanHistory ?? []) : [];
    const hit = history.find((s) => s.latest_commit_sha === scan);
    return {
      commit_id: scan,
      branch: hit?.branch ?? branch ?? null,
      timestamp: hit?.timestamp ?? null,
      status: hit?.status ?? 'done',
      resolved_by: 'explicit',
    };
  }

  const { success, scanHistory, error } = await getScanHistory(repo);
  if (!success) throw withExitCode(1, `scan history: ${error}`);

  // Newest first; entries without a SHA or timestamp cannot be resolved.
  const sorted = [...(scanHistory ?? [])]
    .filter((s) => s.latest_commit_sha && s.timestamp)
    .sort((a, b) => new Date(b.timestamp) - new Date(a.timestamp));
  const filtered = branch ? sorted.filter((s) => s.branch === branch) : sorted;
  const pick = filtered[0];

  if (!pick) {
    throw withExitCode(2, branch ? `no scans for branch "${branch}"` : 'no scans found');
  }

  return {
    commit_id: pick.latest_commit_sha,
    branch: pick.branch ?? branch ?? null,
    timestamp: pick.timestamp,
    status: pick.status ?? 'done',
    resolved_by: branch ? 'branch' : 'latest',
  };
}

// --- src/commands/scans/lib/sort.js ---
const SEV_RANK = { critical: 5, high: 4, medium: 3, low: 2, info: 1, unknown: 0 };

/**
 * Deterministic sort: severity desc, file_path asc, line_number asc,
 * check_id asc, id asc. Same input always produces byte-identical output.
 * Returns a new array; the input is not mutated.
 */
export function deterministicSort(findings) {
  return [...findings].sort((a, b) => {
    const sd = (SEV_RANK[b.severity] ?? 0) - (SEV_RANK[a.severity] ?? 0);
    if (sd !== 0) return sd;
    const fp = (a.file_path ?? '').localeCompare(b.file_path ?? '', 'en', { sensitivity: 'base' });
    if (fp !== 0) return fp;
    const ln = (a.line_number ?? 0) - (b.line_number ?? 0);
    if (ln !== 0) return ln;
    const ci = (a.check_id ?? '').localeCompare(b.check_id ?? '', 'en');
    if (ci !== 0) return ci;
    return (a.id ?? '').localeCompare(b.id ?? '', 'en');
  });
}

// --- src/commands/scans/orgs.js ---
import { validateConnection } from '../../scans/connectionHandler.js';

/**
 * codeant scans orgs
 * List authenticated organizations.
 *
 * @returns {Promise<{ connections: Array, email: string }>}
 * @throws {Error} exitCode=4 for network failures, otherwise exitCode=1
 */
export async function runOrgs() {
  const result = await validateConnection();
  if (!result.success) {
    const err = new Error(result.error || 'Failed to validate connection');
    err.exitCode = result.error?.toLowerCase().includes('network') ? 4 : 1;
    throw err;
  }
  return { connections: result.connections, email: result.email };
}
// --- src/commands/scans/repos.js ---
import { validateConnection } from '../../scans/connectionHandler.js';
import { listRepos as listReposApi } from '../../scans/listRepos.js';

/**
 * codeant scans repos [--org <name>]
 * List repositories. Auto-picks the org when exactly one is authenticated.
 *
 * @param {{ org?: string }} opts
 * @returns {Promise<{ org: string, repos: Array }>}
 * @throws {Error} exitCode=1 on connection failure, no orgs, ambiguous orgs,
 *   or repo-listing failure
 */
export async function runRepos({ org } = {}) {
  let orgName = org;

  if (!orgName) {
    const conn = await validateConnection();
    if (!conn.success) {
      const err = new Error(conn.error || 'Failed to validate connection');
      err.exitCode = 1;
      throw err;
    }
    if (conn.connections.length === 0) {
      const err = new Error('No authenticated organizations found');
      err.exitCode = 1;
      throw err;
    }
    if (conn.connections.length > 1) {
      const err = new Error(
        `Multiple orgs found. Specify one with --org. Available: ${conn.connections.map((c) => c.organizationName).join(', ')}`
      );
      err.exitCode = 1;
      throw err;
    }
    orgName = conn.connections[0].organizationName;
  }

  const result = await listReposApi(orgName);
  if (!result.success) {
    const err = new Error(result.error || 'Failed to list repositories');
    err.exitCode = 1;
    throw err;
  }

  return { org: orgName, repos: result.repos };
}

// --- src/commands/scans/results.js ---
import { resolveScan } from './lib/resolveScan.js';
import { parseTypes } from './lib/categories.js';
import { normalizeIssue, buildEnvelope } from './lib/normalize.js';
import { applyFilters } from './lib/filters.js';
import { deterministicSort } from './lib/sort.js';
import { paginate } from './lib/paginate.js';
import { emit } from './lib/emit.js';
import { progress, logError } from './lib/log.js';
import { FORMATTERS } from './formatters/index.js';
import { fetchDismissedAlerts } from '../../scans/fetchDismissedAlerts.js';

/**
 * codeant scans results — full orchestration:
 * resolve scan -> fetch categories -> normalize -> filter -> sort ->
 * paginate -> envelope -> project fields -> render -> emit.
 *
 * @param {object} opts - CLI options (see scans.md)
 * @throws {Error} with exitCode 1 (usage), 2 (no scan), or 3 (--fail-fast)
 */
export async function runResults(opts = {}) {
  const {
    repo,
    scan,
    branch,
    types,
    severity,
    path: pathGlob,
    check: checkRegex,
    includeDismissed = false,
    format = 'json',
    output: outputPath = null,
    fields = null,
    limit = 100,
    offset = 0,
    failFast = false,
  } = opts;

  if (!repo) {
    const err = new Error('--repo is required');
    err.exitCode = 1;
    throw err;
  }

  const formatter = FORMATTERS[format];
  if (!formatter) {
    const err = new Error(`Unknown --format "${format}". Valid: ${Object.keys(FORMATTERS).join(', ')}`);
    err.exitCode = 1;
    throw err;
  }

  // 1. Resolve scan
  progress(`resolving scan for ${repo}…`);
  const scanMeta = await resolveScan({ repo, scan, branch });
  progress(`using commit ${scanMeta.commit_id} (${scanMeta.resolved_by})`);

  // 2. Parse types
  const categories = parseTypes(types);

  const allFindings = [];
  const errors = [];

  // 3. Fetch category findings and dismissed alerts in parallel.
  // Dismissed alerts are ALWAYS fetched: they are needed both to filter
  // dismissed findings out (the default) and to annotate them with
  // dismissed/dismiss_info when --include-dismissed is set.
  progress(`fetching ${categories.map((c) => c.key).join(', ')}…`);
  const [settled, dismissedResult] = await Promise.all([
    Promise.allSettled(categories.map((c) => c.fetcher(repo, scanMeta.commit_id))),
    fetchDismissedAlerts(repo, 'security'),
  ]);

  const dismissedAlerts = dismissedResult.success ? (dismissedResult.dismissedAlerts ?? []) : [];
  if (!dismissedResult.success) {
    // Without the dismissed list we can neither filter nor annotate
    // reliably — surface the failure instead of silently ignoring it.
    const msg = dismissedResult.error || 'failed to fetch dismissed alerts';
    errors.push({ category: 'dismissed_alerts', error: msg });
    logError({ category: 'dismissed_alerts', error: msg });
  }

  // 4. Collect findings + per-category errors
  for (let i = 0; i < settled.length; i++) {
    const cat = categories[i];
    const s = settled[i];

    if (s.status === 'rejected' || !s.value?.success) {
      const msg = s.status === 'rejected' ? s.reason?.message : s.value?.error;
      errors.push({ category: cat.key, error: msg || 'unknown error' });
      logError({ category: cat.key, error: msg || 'unknown error' });
      if (failFast) {
        const err = new Error(`Category "${cat.key}" failed: ${msg}`);
        err.exitCode = 3;
        throw err;
      }
      continue;
    }

    progress(`normalizing ${cat.key} (${s.value.issues?.length ?? 0} issues)…`);
    for (const issue of s.value.issues ?? []) {
      const f = normalizeIssue(issue, cat.key);
      if (f) allFindings.push(f);
    }
  }

  // 5. Filter
  const severityList = severity
    ? severity.split(',').map((s) => s.trim()).filter(Boolean)
    : null;

  const filtered = applyFilters(allFindings, {
    severity: severityList,
    pathGlob: pathGlob || null,
    checkRegex: checkRegex || null,
    dismissedAlerts,
    includeDismissed,
  });

  // 6. Sort
  const sorted = deterministicSort(filtered);

  // 7. Paginate — coerce limit/offset since the CLI layer may deliver
  // strings ('0' + '100' would otherwise concatenate inside slice()).
  const limitNum = Number(limit);
  const offsetNum = Number(offset);
  const { items: pageItems, pagination } = paginate(sorted, {
    limit: Number.isNaN(limitNum) ? 100 : limitNum,
    offset: Number.isNaN(offsetNum) ? 0 : offsetNum,
  });

  // 8. Build envelope (summary uses pre-pagination totals)
  const filtersObj = {
    severity: severityList,
    path: pathGlob || null,
    check: checkRegex || null,
    include_dismissed: includeDismissed,
  };

  const summaryBySev = { critical: 0, high: 0, medium: 0, low: 0, info: 0, unknown: 0 };
  const summaryByCat = {};
  for (const f of sorted) {
    summaryBySev[f.severity] = (summaryBySev[f.severity] ?? 0) + 1;
    summaryByCat[f.category] = (summaryByCat[f.category] ?? 0) + 1;
  }

  const envelope = buildEnvelope({
    repo,
    scan: scanMeta,
    categories: categories.map((c) => c.key),
    findings: pageItems,
    pagination,
    filters: filtersObj,
    errors,
  });
  // buildEnvelope counts only the current page; override with the
  // pre-pagination summary computed above.
  envelope.summary = { total: sorted.length, by_severity: summaryBySev, by_category: summaryByCat };

  // 9. Project fields (--fields a,b,c keeps only the named keys)
  if (fields) {
    const fieldList = fields.split(',').map((f) => f.trim()).filter(Boolean);
    envelope.findings = envelope.findings.map((f) => {
      const projected = {};
      for (const key of fieldList) {
        if (key in f) projected[key] = f[key];
      }
      return projected;
    });
  }

  // 10. Render + emit
  const rendered = formatter.render(envelope);
  emit(rendered, outputPath, envelope.findings.length);
}
Render + emit + const rendered = formatter.render(envelope); + emit(rendered, outputPath, envelope.findings.length); +} diff --git a/src/components/ScanCenter.js b/src/components/ScanCenter.js new file mode 100644 index 0000000..dc8cab7 --- /dev/null +++ b/src/components/ScanCenter.js @@ -0,0 +1,337 @@ +import React, { useState, useEffect } from 'react'; +import { Box, Text, useApp, useInput } from 'ink'; +import SelectList from './SelectList.js'; +import { validateConnectionOnMount } from '../scanCenter/validateConnectionOnMount.js'; +import { handleSelectConnection as _handleSelectConnection } from '../scanCenter/handleSelectConnection.js'; +import { handleSelectRepo as _handleSelectRepo } from '../scanCenter/handleSelectRepo.js'; +import { handleSelectScan as _handleSelectScan } from '../scanCenter/handleSelectScan.js'; +import { handleSelectResultType as _handleSelectResultType } from '../scanCenter/handleSelectResultType.js'; + +const ce = React.createElement; + +// ─── Constants ─────────────────────────────────────────────────────────────── + +const STEPS = { + LOADING: 'loading', + SELECT_CONNECTION: 'select-connection', + SELECT_REPO: 'select-repo', + SELECT_SCAN: 'select-scan', + SELECT_RESULT_TYPE: 'select-result-type', + SHOWING_RESULTS: 'showing-results', + ERROR: 'error', +}; + +const RESULT_TYPES = [ + { label: 'Security Issues (SAST)', value: 'security_issues', kind: 'basic' }, + { label: 'Anti-Patterns', value: 'anti_patterns', kind: 'basic' }, + { label: 'Docstring Issues', value: 'docstring', kind: 'basic' }, + { label: 'Complex Functions', value: 'complex_functions', kind: 'basic' }, + { label: 'SCA β€” Dependencies', value: 'sca', kind: 'advanced' }, + { label: 'SBOM', value: 'sbom', kind: 'advanced' }, + { label: 'Secrets', value: 'secrets', kind: 'advanced' }, + { label: 'IaC (Infrastructure)', value: 'iac', kind: 'advanced' }, + { label: 'Dead Code', value: 'dead_code', kind: 'advanced' }, + { label: 'Dismissed Alerts', value: 
'dismissed_alerts', kind: 'dismissed' }, + { label: 'Dismissed Secrets', value: 'dismissed_secrets', kind: 'dismissed' }, +]; + +const SPINNER_FRAMES = ['β ‹', 'β ™', 'β Ή', 'β Έ', 'β Ό', 'β ΄', 'β ¦', 'β §', 'β ‡', '⠏']; +const RESULTS_PAGE = 15; + +// ─── Helpers ───────────────────────────────────────────────────────────────── + +function severityColor(sev) { + if (!sev) return 'gray'; + const s = sev.toLowerCase(); + if (s === 'critical' || s === 'high') return 'red'; + if (s === 'medium' || s === 'warning') return 'yellow'; + if (s === 'low') return 'cyan'; + return 'gray'; +} + +function severityLabel(sev) { + if (!sev) return 'INFO'; + return sev.toUpperCase().slice(0, 4); +} + +// ─── Spinner ───────────────────────────────────────────────────────────────── + +function Spinner({ label }) { + const [frame, setFrame] = useState(0); + useEffect(() => { + const id = setInterval(() => setFrame((f) => (f + 1) % SPINNER_FRAMES.length), 100); + return () => clearInterval(id); + }, []); + return ce( + Box, + null, + ce(Text, { color: 'cyan' }, SPINNER_FRAMES[frame] + ' '), + ce(Text, null, label) + ); +} + +// ─── Breadcrumb ────────────────────────────────────────────────────────────── + +function Breadcrumb({ parts }) { + return ce( + Box, + { marginBottom: 1 }, + ...parts.map((p, i) => + ce(Text, { key: i, color: i === parts.length - 1 ? 'cyan' : 'gray' }, + (i > 0 ? ' β€Ί ' : '') + p) + ) + ); +} + +// ─── Results view ──────────────────────────────────────────────────────────── + +function ResultsView({ issues, resultTypeLabel, breadcrumbParts, onBack }) { + const [offset, setOffset] = useState(0); + + useInput((input, key) => { + if (input === 'b' || key.escape) { onBack(); return; } + if (key.downArrow) setOffset((o) => Math.min(o + 1, Math.max(0, issues.length - RESULTS_PAGE))); + if (key.upArrow) setOffset((o) => Math.max(0, o - 1)); + }); + + const visible = issues.slice(offset, offset + RESULTS_PAGE); + + const rows = issues.length === 0 + ? 
[ce(Box, { key: 'empty', marginBottom: 1 }, ce(Text, { color: 'green' }, 'βœ“ No issues found'))] + : [ + ce(Box, { key: 'counter', marginBottom: 1 }, + ce(Text, { color: 'gray' }, + `Showing ${offset + 1}–${Math.min(offset + RESULTS_PAGE, issues.length)} of ${issues.length} issues`) + ), + ...visible.map((issue, i) => + ce( + Box, + { key: i, flexDirection: 'row', gap: 1 }, + ce(Text, { color: severityColor(issue.severity), bold: true }, + `[${severityLabel(issue.severity)}]`), + ce(Text, { color: 'gray' }, + (issue.file_path || 'unknown') + (issue.line_number ? `:${issue.line_number}` : '')), + ce(Text, null, + issue.check_name || issue.issue_text || issue.message || '') + ) + ), + ]; + + return ce( + Box, + { flexDirection: 'column', paddingX: 1 }, + ce(Breadcrumb, { parts: [...breadcrumbParts, resultTypeLabel] }), + ...rows, + ce( + Box, + { marginTop: 1, borderStyle: 'single', borderColor: 'gray', paddingX: 1 }, + ce(Text, { color: 'gray' }, '[↑↓] scroll [b] back') + ) + ); +} + +// ─── Error view ────────────────────────────────────────────────────────────── + +function ErrorView({ message, onBack, canGoBack }) { + useInput((input, key) => { + if (input === 'b' || key.escape || key.return) onBack(); + }); + return ce( + Box, + { flexDirection: 'column', paddingX: 1, paddingY: 1 }, + ce(Text, { color: 'red', bold: true }, 'βœ– Error'), + ce(Box, { marginTop: 1 }, + ce(Text, null, message) + ), + ce( + Box, + { marginTop: 1, borderStyle: 'single', borderColor: 'gray', paddingX: 1 }, + ce(Text, { color: 'gray' }, canGoBack ? 
'[b / Enter] go back' : '[b / Enter] exit') + ) + ); +} + +// ─── Main ScanCenter ───────────────────────────────────────────────────────── + +export default function ScanCenter() { + const { exit } = useApp(); + + const [step, setStep] = useState(STEPS.LOADING); + const [loadingMsg, setLoadingMsg] = useState('Validating connection…'); + const [errorMsg, setErrorMsg] = useState(''); + const [errorBackStep, setErrorBackStep] = useState(null); + + const [connections, setConnections] = useState([]); + const [selectedConnection, setSelectedConnection] = useState(null); + + const [repos, setRepos] = useState([]); + const [selectedRepo, setSelectedRepo] = useState(null); + + const [scanHistory, setScanHistory] = useState([]); + const [selectedScan, setSelectedScan] = useState(null); + + const [selectedResultType, setSelectedResultType] = useState(null); + const [results, setResults] = useState(null); + + const setError = (msg, backStep) => { + setErrorMsg(msg); + setErrorBackStep(backStep); + setStep(STEPS.ERROR); + }; + + // ── Step 1: validate connection on mount ── + useEffect(() => { + validateConnectionOnMount({ STEPS, setError, setConnections, setStep }); + }, []); + + // ── Step 2: connection selected β†’ fetch repos ── + const handleSelectConnection = (item) => + _handleSelectConnection({ STEPS, item, setSelectedConnection, setStep, setLoadingMsg, setError, setRepos }); + + // ── Step 3: repo selected β†’ fetch scan history ── + const handleSelectRepo = (item) => + _handleSelectRepo({ STEPS, item, setSelectedRepo, setStep, setLoadingMsg, setError, setScanHistory }); + + // ── Step 4: scan selected β†’ show result type menu ── + const handleSelectScan = (item) => + _handleSelectScan({ STEPS, item, setSelectedScan, setStep }); + + // ── Step 5: result type selected β†’ fetch exactly one endpoint ── + const handleSelectResultType = (item) => + _handleSelectResultType({ STEPS, item, selectedRepo, selectedScan, setSelectedResultType, setStep, setLoadingMsg, setError, 
setResults }); + + // ── Back navigation ── + const goBack = { + [STEPS.SELECT_CONNECTION]: () => exit(), + [STEPS.SELECT_REPO]: () => { setSelectedConnection(null); setStep(STEPS.SELECT_CONNECTION); }, + [STEPS.SELECT_SCAN]: () => { setSelectedRepo(null); setStep(STEPS.SELECT_REPO); }, + [STEPS.SELECT_RESULT_TYPE]:() => { setSelectedScan(null); setStep(STEPS.SELECT_SCAN); }, + [STEPS.SHOWING_RESULTS]: () => { setResults(null); setSelectedResultType(null); setStep(STEPS.SELECT_RESULT_TYPE); }, + }; + + const breadcrumbParts = [ + selectedConnection?.organizationName, + selectedRepo?.name, + selectedScan ? (selectedScan.commitId || '').slice(0, 8) : null, + ].filter(Boolean); + + // ─── Render ─────────────────────────────────────────────────────────────── + + if (step === STEPS.LOADING) { + return ce(Box, { paddingX: 1, paddingY: 1 }, ce(Spinner, { label: loadingMsg })); + } + + if (step === STEPS.ERROR) { + const canGoBack = errorBackStep !== null; + const onErrorBack = canGoBack + ? () => { setErrorMsg(''); setStep(errorBackStep); } + : () => exit(); + return ce(ErrorView, { message: errorMsg, onBack: onErrorBack, canGoBack }); + } + + if (step === STEPS.SELECT_CONNECTION) { + const items = connections + .filter((c) => c && c.organizationName) + .map((c) => ({ + label: c.organizationName, + sublabel: `${c.service} ${c.baseUrl}`, + value: c, + })); + return ce(SelectList, { + title: 'Select a connection', + items, + onSelect: handleSelectConnection, + onBack: goBack[STEPS.SELECT_CONNECTION], + }); + } + + if (step === STEPS.SELECT_REPO) { + const items = repos + .filter((r) => r && (r.name || r.full_name)) + .map((r) => ({ + label: r.name || r.full_name, + sublabel: r.pushed_at ? 
`Last push: ${new Date(r.pushed_at).toLocaleDateString()}` : undefined, + value: r, + })); + return ce( + Box, + { flexDirection: 'column' }, + ce(Box, { paddingX: 1 }, ce(Breadcrumb, { parts: [selectedConnection.organizationName] })), + ce(SelectList, { + title: 'Select a repository', + items, + onSelect: handleSelectRepo, + onBack: goBack[STEPS.SELECT_REPO], + emptyMessage: 'No repositories found for this organisation.', + }) + ); + } + + if (step === STEPS.SELECT_SCAN) { + const items = scanHistory + .filter((s) => s && typeof s === 'object' && s.latest_commit_sha) + .slice() + .sort((a, b) => { + const da = new Date(a.timestamp || a.date || a.created_at || 0); + const db = new Date(b.timestamp || b.date || b.created_at || 0); + return db - da; + }) + .map((s) => { + const date = s.timestamp || s.date || s.created_at; + const branch = s.branch || s.ref || ''; + const commitFull = s.latest_commit_sha || ''; + return { + label: branch || '(no branch)', + sublabel: [commitFull || null, date ? new Date(date).toLocaleString() : null].filter(Boolean).join(' '), + value: { ...s, commitId: commitFull }, + }; + }); + return ce( + Box, + { flexDirection: 'column' }, + ce(Box, { paddingX: 1 }, + ce(Breadcrumb, { parts: [selectedConnection.organizationName, selectedRepo.name] })), + ce(SelectList, { + title: 'Select a scan', + items, + onSelect: handleSelectScan, + onBack: goBack[STEPS.SELECT_SCAN], + emptyMessage: 'No scan history found for this repository.', + }) + ); + } + + if (step === STEPS.SELECT_RESULT_TYPE) { + const items = RESULT_TYPES.map((rt) => ({ + label: rt.label, + sublabel: rt.kind === 'basic' ? 'standard analysis' + : rt.kind === 'advanced' ? 
'advanced analysis' + : 'dismissed', + value: rt, + })); + return ce( + Box, + { flexDirection: 'column' }, + ce(Box, { paddingX: 1 }, ce(Breadcrumb, { parts: breadcrumbParts })), + ce(SelectList, { + title: 'Select result type', + items, + onSelect: handleSelectResultType, + onBack: goBack[STEPS.SELECT_RESULT_TYPE], + }) + ); + } + + if (step === STEPS.SHOWING_RESULTS) { + const issues = results?.issues || []; + const rtObj = RESULT_TYPES.find((r) => r.value === selectedResultType?.value); + return ce(ResultsView, { + issues, + resultTypeLabel: rtObj?.label || selectedResultType?.value || '', + breadcrumbParts, + onBack: goBack[STEPS.SHOWING_RESULTS], + }); + } + + return null; +} diff --git a/src/components/SelectList.js b/src/components/SelectList.js new file mode 100644 index 0000000..3fec0c0 --- /dev/null +++ b/src/components/SelectList.js @@ -0,0 +1,80 @@ +import React, { useState } from 'react'; +import { Box, Text, useInput } from 'ink'; + +const PAGE_SIZE = 15; + +const ce = React.createElement; + +export default function SelectList({ items = [], title, onSelect, onBack, emptyMessage = 'No items.' }) { + const [cursor, setCursor] = useState(0); + const [windowStart, setWindowStart] = useState(0); + + useInput((input, key) => { + if (items.length === 0) { + if (input === 'b' || key.escape) onBack?.(); + return; + } + if (key.upArrow) { + const next = Math.max(0, cursor - 1); + setCursor(next); + if (next < windowStart) setWindowStart(next); + } else if (key.downArrow) { + const next = Math.min(items.length - 1, cursor + 1); + setCursor(next); + if (next >= windowStart + PAGE_SIZE) setWindowStart(next - PAGE_SIZE + 1); + } else if (key.return) { + onSelect(items[cursor]); + } else if (input === 'b' || key.escape) { + onBack?.(); + } + }); + + const visible = items.slice(windowStart, windowStart + PAGE_SIZE); + + const rows = items.length === 0 + ? 
[ce(Text, { key: 'empty', color: 'gray' }, emptyMessage)] + : visible.map((item, i) => { + const idx = windowStart + i; + const selected = idx === cursor; + return ce( + Box, + { key: idx, flexDirection: 'column' }, + ce( + Box, + { key: 'row' }, + ce(Text, { color: selected ? 'cyan' : undefined }, selected ? 'β–Ά ' : ' '), + ce(Text, { color: selected ? 'cyan' : undefined, bold: selected }, item.label) + ), + item.sublabel + ? ce(Text, { key: 'sub', color: 'gray', dimColor: true }, ' ' + item.sublabel) + : null + ); + }); + + const counter = items.length > PAGE_SIZE + ? ce( + Box, + { key: 'counter', marginTop: 1 }, + ce(Text, { color: 'gray', dimColor: true }, + `${windowStart + 1}–${Math.min(windowStart + PAGE_SIZE, items.length)} of ${items.length}`) + ) + : null; + + const footer = ce( + Box, + { key: 'footer', marginTop: 1, borderStyle: 'single', borderColor: 'gray', paddingX: 1 }, + ce(Text, { color: 'gray' }, + (items.length > 0 ? '[↑↓] navigate [Enter] select' : '') + + (onBack ? 
' [b] back' : '') + ) + ); + + return ce( + Box, + { flexDirection: 'column', paddingX: 1 }, + ce(Box, { key: 'title', marginBottom: 1 }, ce(Text, { bold: true, color: 'cyan' }, title)), + ...rows, + counter, + footer + ); +} diff --git a/src/index.js b/src/index.js index b6c8dbf..2063f43 100755 --- a/src/index.js +++ b/src/index.js @@ -13,10 +13,12 @@ import Login from './commands/login.js'; import Logout from './commands/logout.js'; import Review from './commands/review.js'; import { runReviewHeadless } from './reviewHeadless.js'; +import ScanCenter from './components/ScanCenter.js'; import Welcome from './components/Welcome.js'; import * as scm from './scm/index.js'; import { setConfigValue } from './utils/config.js'; import { track, shutdown as analyticsShutdown, isTelemetryDisabled } from './utils/analytics.js'; +import registerScansCommands from './commands/scans/index.js'; // Read version from package.json const require = createRequire(import.meta.url); @@ -206,6 +208,13 @@ program render(React.createElement(Login)); }); + program + .command('scan-center') + .description('Browse scan results interactively') + .action(() => { + render(React.createElement(ScanCenter)); + }); + program .command('logout') .description('Logout from CodeAnt') @@ -376,6 +385,9 @@ program })); }); + // ─── Scans commands ─── + registerScansCommands(program, { runCmd }); + // ─── Telemetry control ─── program .command('set-telemetry ') diff --git a/src/scanCenter/handleSelectConnection.js b/src/scanCenter/handleSelectConnection.js new file mode 100644 index 0000000..e639ab1 --- /dev/null +++ b/src/scanCenter/handleSelectConnection.js @@ -0,0 +1,14 @@ +import { listRepos } from '../scans/listRepos.js'; + +export async function handleSelectConnection({ STEPS, item, setSelectedConnection, setStep, setLoadingMsg, setError, setRepos }) { + setSelectedConnection(item.value); + setStep(STEPS.LOADING); + setLoadingMsg(`Fetching repos for ${item.value.organizationName}…`); + const res = await 
listRepos(item.value.organizationName); + if (!res.success) { + setError(res.error || 'Failed to fetch repos', STEPS.SELECT_CONNECTION); + return; + } + setRepos(res.repos || []); + setStep(STEPS.SELECT_REPO); +} diff --git a/src/scanCenter/handleSelectRepo.js b/src/scanCenter/handleSelectRepo.js new file mode 100644 index 0000000..289b0cc --- /dev/null +++ b/src/scanCenter/handleSelectRepo.js @@ -0,0 +1,16 @@ +import { getScanHistory } from '../scans/getScanHistory.js'; + +export async function handleSelectRepo({ STEPS, item, setSelectedRepo, setStep, setLoadingMsg, setError, setScanHistory }) { + setSelectedRepo(item.value); + setStep(STEPS.LOADING); + setLoadingMsg(`Loading scan history for ${item.value.full_name}…`); + const res = await getScanHistory(item.value.full_name); + if (!res.success) { + setError(res.error || 'Failed to fetch scan history', STEPS.SELECT_REPO); + return; + } + const history = res.scanHistory || []; + process.stderr.write('SCAN_HISTORY_SAMPLE: ' + JSON.stringify(history.slice(0, 15), null, 2) + '\n'); + setScanHistory(history); + setStep(STEPS.SELECT_SCAN); +} diff --git a/src/scanCenter/handleSelectResultType.js b/src/scanCenter/handleSelectResultType.js new file mode 100644 index 0000000..2521043 --- /dev/null +++ b/src/scanCenter/handleSelectResultType.js @@ -0,0 +1,34 @@ +import { fetchScanResults } from '../scans/fetchScanResults.js'; +import { fetchAdvancedScanResults } from '../scans/fetchAdvancedScanResults.js'; +import { fetchDismissedAlerts } from '../scans/fetchDismissedAlerts.js'; + +export async function handleSelectResultType({ STEPS, item, selectedRepo, selectedScan, setSelectedResultType, setStep, setLoadingMsg, setError, setResults }) { + const rt = item.value; + setSelectedResultType(rt); + setStep(STEPS.LOADING); + setLoadingMsg(`Fetching ${item.label}…`); + + const repo = selectedRepo.full_name; + const commitId = selectedScan.commitId; + let res; + + if (rt.kind === 'basic') { + res = await fetchScanResults(repo, 
commitId, rt.value); + } else if (rt.kind === 'advanced') { + res = await fetchAdvancedScanResults(repo, commitId, rt.value); + } else if (rt.value === 'dismissed_alerts') { + const r = await fetchDismissedAlerts(repo, 'security'); + res = r.success ? { success: true, issues: r.dismissedAlerts } : r; + } else if (rt.value === 'dismissed_secrets') { + const r = await fetchDismissedAlerts(repo, 'secrets'); + res = r.success ? { success: true, issues: r.dismissedAlerts } : r; + } + + if (!res || !res.success) { + setError((res && res.error) || 'Failed to fetch results', STEPS.SELECT_RESULT_TYPE); + return; + } + + setResults(res); + setStep(STEPS.SHOWING_RESULTS); +} diff --git a/src/scanCenter/handleSelectScan.js b/src/scanCenter/handleSelectScan.js new file mode 100644 index 0000000..206bbbb --- /dev/null +++ b/src/scanCenter/handleSelectScan.js @@ -0,0 +1,4 @@ +export function handleSelectScan({ STEPS, item, setSelectedScan, setStep }) { + setSelectedScan(item.value); + setStep(STEPS.SELECT_RESULT_TYPE); +} diff --git a/src/scanCenter/validateConnectionOnMount.js b/src/scanCenter/validateConnectionOnMount.js new file mode 100644 index 0000000..8273aa6 --- /dev/null +++ b/src/scanCenter/validateConnectionOnMount.js @@ -0,0 +1,15 @@ +import { validateConnection } from '../scans/connectionHandler.js'; + +export async function validateConnectionOnMount({ STEPS, setError, setConnections, setStep }) { + const res = await validateConnection(); + if (!res.success) { + setError(res.error || 'Failed to validate connection', null); + return; + } + if (!res.connections || res.connections.length === 0) { + setError('No connected organisations found. 
Please log in to CodeAnt first.', null); + return; + } + setConnections(res.connections); + setStep(STEPS.SELECT_CONNECTION); +} diff --git a/src/scans/connectionHandler.js b/src/scans/connectionHandler.js new file mode 100644 index 0000000..10980a0 --- /dev/null +++ b/src/scans/connectionHandler.js @@ -0,0 +1,52 @@ +import { fetchApi } from '../utils/fetchApi.js'; + +/** + * Validate the stored API key with the CodeAnt backend. + * + * @returns {Promise} + * { + * success: true, + * connections: [ + * { + * organizationName: "acme-corp", + * baseUrl: "https://github.com", + * service: "github" | "gitlab" | "azuredevops" | "bitbucket" | "unknown" + * } + * ], + * email: "dev@acme.com", + * } + */ +export async function validateConnection() { + try { + const response = await fetchApi('/extension/scans2/validate', 'POST', { + extension: 'cli', + }); + + if (!response) { + return { success: false, error: 'Failed to connect to CodeAnt server' }; + } + + if (response.status === 'success' && response.data) { + const orgs = response.data.orgs || []; + return { + success: true, + connections: orgs.map((org) => ({ + organizationName: org.organization_name, + baseUrl: org.base_url, + service: org.service || 'unknown', + })), + email: response.data.email, + }; + } + + return { + success: false, + error: response.message || 'Invalid or expired connection string', + }; + } catch (error) { + return { + success: false, + error: error.message || 'Failed to validate connection string', + }; + } +} diff --git a/src/scans/fetchAdvancedScanResults.js b/src/scans/fetchAdvancedScanResults.js new file mode 100644 index 0000000..f395c5f --- /dev/null +++ b/src/scans/fetchAdvancedScanResults.js @@ -0,0 +1,322 @@ +import { fetchApi } from '../utils/fetchApi.js'; + +export const ADVANCED_RESULT_TYPES = { + SCA: 'sca', + SBOM: 'sbom', + SECRETS: 'secrets', + IAC: 'iac', + DEAD_CODE: 'dead_code', +}; + +const EXTRA_DEAD_CODE_MESSAGES = { + S1481: 'Unused local variable', + S1854: 'Unused 
assignment', + S1172: 'Unused function parameter', + S1144: 'Unused private method', + S1763: 'Unreachable code', + S5603: 'Unused scope-limited definition', + S3985: 'Unused private nested class', + S1128: 'Unnecessary import', +}; + +/** + * Strip /mnt/lambda/code/.../commitId/ prefix from a full Lambda path. + */ +function extractRelativeFilePath(fullPath) { + if (!fullPath) return fullPath; + const match = fullPath.match(/\/([a-f0-9]{40})\//i); + if (match) { + const idx = fullPath.indexOf(match[1]); + return fullPath.substring(idx + match[1].length + 1); + } + return fullPath; +} + +function cleanLeadingSlash(p) { + return typeof p === 'string' && p.startsWith('/') ? p.slice(1) : (p || 'unknown'); +} + +/** + * Flatten the dead_code nested structure into a flat array of issues. + */ +function flattenDeadCode(deadCodeData) { + if (!deadCodeData || typeof deadCodeData !== 'object' || Array.isArray(deadCodeData)) { + return deadCodeData; + } + + const hasNestedKeys = + deadCodeData.python_dead_code !== undefined || + deadCodeData.js_dead_code !== undefined || + deadCodeData.extra_dead_code !== undefined; + + if (!hasNestedKeys) return deadCodeData; + + const flat = []; + + // Python dead code + (deadCodeData.python_dead_code || []).forEach((item) => { + if (!item) return; + const filePath = extractRelativeFilePath(item.file_path || item.path || 'unknown'); + (item.issues || []).forEach((issue) => { + if (!issue) return; + const typeMatch = issue.issue?.match(/unused (\w+)/i); + const issueType = typeMatch ? typeMatch[1] : 'code'; + const nameMatch = issue.issue?.match(/'([^']*)'/); + const name = nameMatch ? 
nameMatch[1] : ''; + const confMatch = issue.issue?.match(/\((\d+)% confidence\)/); + flat.push({ + file_path: filePath, + line_number: issue.line_number || 0, + issue_text: issue.issue || `Unused ${issueType}: ${name}`, + message: issue.issue || `Unused ${issueType}: ${name}`, + severity: 'warning', + type: `unused_${issueType}`, + name, + confidence: confMatch ? confMatch[1] : '90', + check_name: `Unused ${issueType}: ${name}`, + }); + }); + }); + + // JS dead code β€” unused files + const jsDeadCode = deadCodeData.js_dead_code || {}; + (jsDeadCode.unused_files || []).forEach((filePath) => { + if (!filePath) return; + flat.push({ + file_path: extractRelativeFilePath(cleanLeadingSlash(filePath)), + line_number: 1, + issue_text: 'Unused file - this file is not imported anywhere', + message: 'Unused file - this file is not imported anywhere', + severity: 'warning', + type: 'unused_file', + check_name: 'Unused file', + }); + }); + + // JS dead code β€” unused exports + (jsDeadCode.unused_exports || []).forEach((exportData) => { + if (!Array.isArray(exportData) || exportData.length !== 2) return; + let [filePath, exportIssues] = exportData; + if (!filePath || !Array.isArray(exportIssues)) return; + filePath = extractRelativeFilePath(cleanLeadingSlash(filePath)); + exportIssues.forEach((exp) => { + if (!exp) return; + flat.push({ + file_path: filePath, + line_number: exp.line || 0, + issue_text: `Unused export: ${exp.name || 'unknown'}`, + message: `Unused export: ${exp.name || 'unknown'}`, + severity: 'warning', + type: 'unused_export', + name: exp.name, + check_name: `Unused export: ${exp.name || 'unknown'}`, + }); + }); + }); + + // Extra dead code (Sonar-style rules) + const extra = deadCodeData.extra_dead_code?.results || deadCodeData.extra_dead_code || {}; + if (typeof extra === 'object' && !Array.isArray(extra)) { + Object.entries(extra).forEach(([filePath, issues]) => { + if (!filePath || !Array.isArray(issues)) return; + issues.forEach((issue) => { + if 
(!issue) return; + const messageId = issue['message-id'] || ''; + const msgKey = messageId.includes(':') ? messageId.split(':')[1] : messageId; + flat.push({ + file_path: extractRelativeFilePath(filePath), + line_number: issue.line_number || 0, + issue_text: issue.issue_text || EXTRA_DEAD_CODE_MESSAGES[msgKey] || 'Dead code detected', + message: issue.issue_text || EXTRA_DEAD_CODE_MESSAGES[msgKey] || 'Dead code detected', + severity: 'warning', + type: msgKey || 'dead_code', + rule_id: msgKey, + confidence: issue.confidence || '90', + check_name: EXTRA_DEAD_CODE_MESSAGES[msgKey] || 'Dead code detected', + }); + }); + }); + } + + return flat; +} + +/** + * Normalize a single advanced issue to a consistent shape. + */ +function normalizeAdvancedIssue(item, resultType) { + if (!item) { + return { file_path: 'unknown', line_number: 1, file_line_range: [1], check_name: 'Unknown issue', severity: 'medium' }; + } + + const normalized = { ...item }; + normalized.file_path = extractRelativeFilePath(item.file_path || item.path || item.filename || 'unknown'); + normalized.line_number = item.line_number || item.start_line || item.line || 1; + normalized.file_line_range = [normalized.line_number]; + + switch (resultType) { + case ADVANCED_RESULT_TYPES.SCA: + normalized.check_name = + item.vulnerability_id || item.cve_id || item.advisory_id || + (item.package_name ? `Vulnerability in ${item.package_name}` : null) || + item.title || item.description || 'Package vulnerability detected'; + normalized.severity = item.severity || 'medium'; + break; + case ADVANCED_RESULT_TYPES.SBOM: + normalized.check_name = item.package_name + ? `${item.package_name}${item.version ? 
'@' + item.version : ''}` + : (item.name || item.description || 'Software component'); + normalized.severity = item.severity || 'info'; + break; + case ADVANCED_RESULT_TYPES.SECRETS: + normalized.check_name = + item.type || item.secret_type || item.rule_id || item.description || 'Secret detected'; + normalized.severity = item.severity || 'high'; + break; + case ADVANCED_RESULT_TYPES.IAC: + normalized.check_name = + item.check_id || item.rule_id || item.policy_id || + item.description || item.title || 'Infrastructure misconfiguration'; + normalized.severity = item.severity || 'medium'; + break; + case ADVANCED_RESULT_TYPES.DEAD_CODE: + normalized.check_name = + item.name || item.function_name || item.symbol_name || item.description || 'Unused code detected'; + normalized.severity = item.severity || 'low'; + break; + default: + normalized.check_name = item.description || item.message || item.name || 'Issue detected'; + } + + return normalized; +} + +/** + * Fetch advanced scan results (SCA, SBOM, secrets, IaC, dead code). + * + * @param {string} repo - "org/repo-name" + * @param {string} commitId - 40-char commit SHA + * @param {string} resultType - one of ADVANCED_RESULT_TYPES values + * @returns {Promise<{ success: boolean, issues?: Array, healthyPackages?: Array, status?: string, error?: string }>} + */ +export async function fetchAdvancedScanResults(repo, commitId, resultType) { + if (!Object.values(ADVANCED_RESULT_TYPES).includes(resultType)) { + return { + success: false, + error: `Invalid result_type. 
Must be one of: ${Object.values(ADVANCED_RESULT_TYPES).join(', ')}`, + }; + } + + try { + const response = await fetchApi('/extension/scans2/fetch-advanced-results', 'POST', { + repo, + commit_id: commitId, + result_type: resultType, + }); + + if (!response) { + return { success: false, error: 'Failed to connect to CodeAnt server' }; + } + + if (response.status === 'error') { + return { success: false, error: response.message || `Failed to fetch ${resultType} results` }; + } + + let resultsData; + let healthyPackages = []; + + if (resultType === ADVANCED_RESULT_TYPES.SECRETS) { + resultsData = response.secrets; + } else if (resultType === ADVANCED_RESULT_TYPES.DEAD_CODE) { + resultsData = flattenDeadCode(response.dead_code); + } else if (resultType === ADVANCED_RESULT_TYPES.SCA) { + const scaResults = response.results; + if (scaResults && typeof scaResults === 'object' && !Array.isArray(scaResults) && scaResults.all_vulnerabilities !== undefined) { + resultsData = scaResults.all_vulnerabilities || []; + healthyPackages = scaResults.healthy_packages || []; + } else { + resultsData = scaResults; + } + } else if (resultType === ADVANCED_RESULT_TYPES.IAC) { + const iacData = response.results || response.result; + if (Array.isArray(iacData)) { + const valid = iacData.filter(Boolean); + const hasNested = valid.some((item) => item?.results && Array.isArray(item.results.failed_checks)); + if (hasNested) { + const flat = []; + valid.forEach((resultItem) => { + (resultItem?.results?.failed_checks || []).forEach((check) => { + if (!check) return; + const fp = cleanLeadingSlash(extractRelativeFilePath(check.file_path || 'unknown')); + flat.push({ + file_path: fp, + line_number: check.file_line_range?.[0] || 1, + file_line_range: check.file_line_range || [1], + check_id: check.check_id || '', + check_name: check.check_name || 'Infrastructure misconfiguration', + issue_text: check.check_name || 'Infrastructure misconfiguration', + message: check.check_name || 'Infrastructure 
misconfiguration', + severity: check.severity || 'medium', + guideline: check.guideline || '', + code_block: check.code_block || [], + resource: check.resource || '', + }); + }); + }); + resultsData = flat; + } else { + resultsData = valid; + } + } else { + resultsData = iacData; + } + } else { + resultsData = response.results; + } + + // Normalize to array + let issues = []; + if (Array.isArray(resultsData)) { + issues = resultsData.filter(Boolean).map((item) => normalizeAdvancedIssue(item, resultType)); + } else if (resultsData && typeof resultsData === 'object') { + for (const [filePath, fileItems] of Object.entries(resultsData)) { + if (!filePath) continue; + if (Array.isArray(fileItems)) { + fileItems.forEach((item) => { + if (item) issues.push(normalizeAdvancedIssue({ ...item, file_path: filePath }, resultType)); + }); + } else if (fileItems && typeof fileItems === 'object') { + issues.push(normalizeAdvancedIssue({ ...fileItems, file_path: filePath }, resultType)); + } + } + } + + // Filter secrets false positives + if (resultType === ADVANCED_RESULT_TYPES.SECRETS) { + issues = issues.filter((issue) => issue.confidence_score?.toLowerCase() !== 'false_positive'); + } + + if (resultType === ADVANCED_RESULT_TYPES.SCA) { + return { success: true, issues, healthyPackages, status: response.status || 'done' }; + } + + return { success: true, issues, status: response.status || 'done' }; + } catch (error) { + return { success: false, error: error.message || `Failed to fetch ${resultType} results` }; + } +} + +export const fetchScaResults = (repo, commitId) => + fetchAdvancedScanResults(repo, commitId, ADVANCED_RESULT_TYPES.SCA); + +export const fetchSbomResults = (repo, commitId) => + fetchAdvancedScanResults(repo, commitId, ADVANCED_RESULT_TYPES.SBOM); + +export const fetchSecretsResults = (repo, commitId) => + fetchAdvancedScanResults(repo, commitId, ADVANCED_RESULT_TYPES.SECRETS); + +export const fetchIacResults = (repo, commitId) => + 
fetchAdvancedScanResults(repo, commitId, ADVANCED_RESULT_TYPES.IAC); + +export const fetchDeadCodeResults = (repo, commitId) => + fetchAdvancedScanResults(repo, commitId, ADVANCED_RESULT_TYPES.DEAD_CODE); diff --git a/src/scans/fetchDismissedAlerts.js b/src/scans/fetchDismissedAlerts.js new file mode 100644 index 0000000..1c934a7 --- /dev/null +++ b/src/scans/fetchDismissedAlerts.js @@ -0,0 +1,59 @@ +import { fetchApi } from '../utils/fetchApi.js'; + +/** + * Fetch dismissed alerts for a repository. + * + * Issue keys use the format: "file_path||::||context_code_block||::||test_id" + * For secrets the format is: "file_path||::||line_number||::||type" + * + * @param {string} repo - "org/repo-name" + * @param {string} analysisType - e.g. "security" (default) + * @returns {Promise<{ success: boolean, dismissedAlerts?: Array, error?: string }>} + */ +export async function fetchDismissedAlerts(repo, analysisType = 'security') { + try { + const response = await fetchApi('/extension/scans2/dismiss-alerts/get', 'POST', { + repo, + analysis_type: analysisType, + }); + + if (!response) { + return { success: false, error: 'Failed to connect to CodeAnt server' }; + } + + if (response.status === 'error') { + return { success: false, error: response.message || 'Failed to fetch dismissed alerts' }; + } + + const dismissData = response.data || {}; + const dismissedAlerts = []; + + for (const [issueKey, dismissInfo] of Object.entries(dismissData)) { + if (!issueKey.includes('||::||')) continue; + + const parts = issueKey.split('||::||'); + let file_path = parts[0] || ''; + const part1 = parts[1] || ''; + const part2 = parts[2] || ''; + + if (file_path.endsWith('/security_issues.json')) { + file_path = file_path.replace('/security_issues.json', ''); + } + + dismissedAlerts.push({ + file_path, + context_code_block: part1, + test_id: part2, + line_number: parseInt(part1, 10) || 0, + type: part2, + issue_key: issueKey, + reason_for_dismiss: dismissInfo.reason_for_dismiss || '', + 
comment_for_dismiss: dismissInfo.comment_for_dismiss || '', + }); + } + + return { success: true, dismissedAlerts }; + } catch (error) { + return { success: false, error: error.message || 'Failed to fetch dismissed alerts' }; + } +} diff --git a/src/scans/fetchScanResults.js b/src/scans/fetchScanResults.js new file mode 100644 index 0000000..cc6dd1b --- /dev/null +++ b/src/scans/fetchScanResults.js @@ -0,0 +1,115 @@ +import { fetchApi } from '../utils/fetchApi.js'; + +export const VALID_RESULT_TYPES = { + SECURITY_ISSUES: 'security_issues', + ANTI_PATTERNS: 'anti_patterns', + DOCSTRING: 'docstring', + COMPLEX_FUNCTIONS: 'complex_functions', +}; + +const RESULT_FILE_SUFFIXES = { + security_issues: '/security_issues.json', + anti_patterns: '/anti_patterns.json', + docstring: '/docstring.json', + complex_functions: '/complex_functions.json', +}; + +/** + * Strip org/repo/commitId prefix from a full server-side path. + * + * @param {string} fullPath + * @param {string} commitId + * @param {string|null} suffix - e.g. '/security_issues.json' + */ +function extractCleanPath(fullPath, commitId, suffix = null) { + let p = fullPath; + + if (suffix && p.endsWith(suffix)) { + p = p.slice(0, -suffix.length); + } + + if (commitId && p.includes(commitId)) { + const idx = p.indexOf(commitId); + return p.substring(idx + commitId.length + 1); + } + + const match = p.match(/\/([a-f0-9]{40})\//i); + if (match) { + const idx = p.indexOf(match[1]); + return p.substring(idx + match[1].length + 1); + } + + const parts = p.split('/'); + return parts.length > 3 ? parts.slice(3).join('/') : p; +} + +/** + * Fetch scan results (SAST, anti-patterns, docstring, complex-functions). 
+ * + * @param {string} repo - "org/repo-name" + * @param {string} commitId - 40-char commit SHA + * @param {string} resultType - one of VALID_RESULT_TYPES values + * @returns {Promise<{ success: boolean, issues?: Array, status?: string, error?: string }>} + */ +export async function fetchScanResults(repo, commitId, resultType) { + if (!Object.values(VALID_RESULT_TYPES).includes(resultType)) { + return { + success: false, + error: `Invalid result_type. Must be one of: ${Object.values(VALID_RESULT_TYPES).join(', ')}`, + }; + } + + try { + const response = await fetchApi('/extension/scans2/fetch-results', 'POST', { + repo, + commit_id: commitId, + result_type: resultType, + }); + + if (!response) { + return { success: false, error: 'Failed to connect to CodeAnt server' }; + } + + if (response.status === 'error') { + return { success: false, error: response.message || `Failed to fetch ${resultType} results` }; + } + + const fileSuffix = RESULT_FILE_SUFFIXES[resultType] || ''; + const resultsData = response.results || response; + const issues = []; + + if (Array.isArray(resultsData)) { + issues.push(...resultsData); + } else if (resultsData && typeof resultsData === 'object') { + for (const [fullPath, fileIssues] of Object.entries(resultsData)) { + if (!Array.isArray(fileIssues) || fileIssues.length === 0) continue; + + const cleanPath = extractCleanPath(fullPath, commitId, fileSuffix); + for (const issue of fileIssues) { + issues.push({ + ...issue, + file_path: cleanPath, + file_line_range: [issue.line_number || issue.start_line || issue.line || 1], + check_name: issue.issue_text || issue.message || issue.description || issue.name, + }); + } + } + } + + return { success: true, issues, status: response.status || 'done' }; + } catch (error) { + return { success: false, error: error.message || `Failed to fetch ${resultType} results` }; + } +} + +export const fetchSastResults = (repo, commitId) => + fetchScanResults(repo, commitId, VALID_RESULT_TYPES.SECURITY_ISSUES); + 
+export const fetchAntiPatternsResults = (repo, commitId) => + fetchScanResults(repo, commitId, VALID_RESULT_TYPES.ANTI_PATTERNS); + +export const fetchDocstringResults = (repo, commitId) => + fetchScanResults(repo, commitId, VALID_RESULT_TYPES.DOCSTRING); + +export const fetchComplexFunctionsResults = (repo, commitId) => + fetchScanResults(repo, commitId, VALID_RESULT_TYPES.COMPLEX_FUNCTIONS); diff --git a/src/scans/getScanHistory.js b/src/scans/getScanHistory.js new file mode 100644 index 0000000..396df79 --- /dev/null +++ b/src/scans/getScanHistory.js @@ -0,0 +1,38 @@ +import { fetchApi } from '../utils/fetchApi.js'; + +/** + * Fetch scan history for a repository. + * + * @param {string} repo - Repository full name (e.g. "org/repo-name") + * @returns {Promise} + * { + * success: true, + * repo: "org/repo-name", + * scanHistory: [...] + * } + */ +export async function getScanHistory(repo) { + try { + const response = await fetchApi('/extension/scans2/get-scan-history', 'POST', { repo }); + + if (!response) { + return { success: false, error: 'Failed to connect to CodeAnt server' }; + } + + if (response.last_analysis_results !== undefined) { + return { + success: true, + repo: response.repo, + scanHistory: response.last_analysis_results || [], + }; + } + + if (response.status === 'error') { + return { success: false, error: response.message || 'Failed to fetch scan history' }; + } + + return { success: false, error: 'Unexpected response from server' }; + } catch (error) { + return { success: false, error: error.message || 'Failed to fetch scan history' }; + } +} diff --git a/src/scans/listRepos.js b/src/scans/listRepos.js new file mode 100644 index 0000000..11d4533 --- /dev/null +++ b/src/scans/listRepos.js @@ -0,0 +1,38 @@ +import { fetchApi } from '../utils/fetchApi.js'; + +/** + * Fetch repositories for the authenticated user from the CodeAnt backend. + * + * @param {string} organizationName - The organization name to filter repositories by. 
+ * @returns {Promise} + * { + * success: true, + * repos: [{ name, full_name, pushed_at, ... }] + * } + */ +export async function listRepos(organizationName) { + try { + const response = await fetchApi('/extension/scans2/listrepos', 'POST', { + org: organizationName, + }); + + if (!response) { + return { success: false, error: 'Failed to connect to CodeAnt server' }; + } + + if (response.repos) { + const sortedRepos = (response.repos || []).sort( + (a, b) => new Date(b.pushed_at) - new Date(a.pushed_at) + ); + return { success: true, repos: sortedRepos }; + } + + if (response.status === 'error') { + return { success: false, error: response.message || 'Failed to fetch repositories' }; + } + + return { success: false, error: 'Unexpected response from server' }; + } catch (error) { + return { success: false, error: error.message || 'Failed to fetch repositories' }; + } +}