core.Mmio: Respect field access type #1346
Workflow file for this run

name: Code Linting

on:
  pull_request_target:
    branches: [main]
  workflow_dispatch:
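
# SECURITY: pull_request_target runs this workflow definition from the base branch and
# gives it access to secrets and a write-capable GITHUB_TOKEN. Untrusted PR code is
# therefore only checked out and analyzed below; it is never built or executed. Only the
# linter built from the trusted base branch runs.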

jobs:
  lint:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      pull-requests: write
    steps:
      # SECURITY: Checkout base branch for linter binary
      - name: Checkout base branch
        uses: actions/checkout@v3
        with:
          ref: ${{ github.base_ref }}
          path: base

      - name: Setup Zig
        uses: mlugg/setup-zig@v2
        with:
          version: 0.15.1

      # Build linter from trusted base branch code
      - name: Build linter
        working-directory: base/tools/linter
        run: zig build --release=safe

      # Now checkout PR code to analyze (but not execute)
      - name: Checkout PR code
        uses: actions/checkout@v3
        with:
          ref: ${{ github.event.pull_request.head.sha }}
          path: pr
          fetch-depth: 0
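          # fetch-depth: 0 fetches the full history so the base SHA is available
          # for the git diff commands in the later steps.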

      - name: Run linter
        working-directory: pr
        run: |
          echo "Base SHA: ${{ github.event.pull_request.base.sha }}"
          echo "Head SHA: ${{ github.event.pull_request.head.sha }}"

          # Get changed .zig files
          FILES=$(git diff --name-only --diff-filter=d ${{ github.event.pull_request.base.sha }} ${{ github.event.pull_request.head.sha }} | grep '\.zig$' || true)
          echo "Changed files: $FILES"

          if [ -n "$FILES" ]; then
            echo "$FILES" | xargs ../base/tools/linter/zig-out/bin/linter > lint_results_raw.json
          else
            echo "[]" > lint_results_raw.json
          fi

          # Debug output
          echo "Raw lint results:"
          cat lint_results_raw.json
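
      # The steps below assume the linter prints a JSON array of issues, each with
      # "file", "line" and "message" fields (the only fields consumed here).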

      - name: Filter lint results to changed lines
        uses: actions/github-script@v7
        with:
          script: |
            const fs = require('fs');
            const { execSync } = require('child_process');

            // Change to pr directory
            process.chdir('pr');

            if (!fs.existsSync('lint_results_raw.json')) {
              fs.writeFileSync('lint_results.json', '[]');
              console.log('No raw lint results found');
              return;
            }

            const rawIssues = JSON.parse(fs.readFileSync('lint_results_raw.json', 'utf8'));
            if (rawIssues.length === 0) {
              fs.writeFileSync('lint_results.json', '[]');
              console.log('No lint issues found');
              return;
            }
            console.log(`Found ${rawIssues.length} total lint issue(s)`);

            const baseSha = '${{ github.event.pull_request.base.sha }}';
            const headSha = '${{ github.event.pull_request.head.sha }}';

            // Get the list of files actually changed in this PR
            let changedFiles;
            try {
              const changedFilesOutput = execSync(
                `git diff --name-only --diff-filter=d ${baseSha} ${headSha}`,
                { encoding: 'utf8' }
              );
              changedFiles = new Set(changedFilesOutput.trim().split('\n').filter(f => f));
              console.log(`PR contains ${changedFiles.size} changed file(s)`);
            } catch (error) {
              console.log(`Could not get changed files: ${error.message}`);
              changedFiles = new Set();
            }

            // Parse the diff to get added/modified line numbers per file.
            // Only the hunk headers are needed: with --unified=0 each header
            // directly describes the range of new lines that were added or modified.
            const changedLines = {};
            for (const file of changedFiles) {
              if (!file.endsWith('.zig')) continue;
              try {
                // Diff with zero context lines so each hunk header maps exactly
                // onto the changed line numbers in the new file
                const diff = execSync(
                  `git diff --unified=0 ${baseSha} ${headSha} -- "${file}"`,
                  { encoding: 'utf8' }
                );
                const lines = new Set();
                const diffLines = diff.split('\n');
                let currentNewLine = 0;
                for (const line of diffLines) {
                  // Match the @@ -oldStart,oldCount +newStart,newCount @@ format
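                  // Example: with --unified=0 a header like "@@ -10,0 +42,3 @@"
                  // means new lines 42, 43 and 44 were added.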
                  const hunkMatch = line.match(/^@@ -\d+(?:,\d+)? \+(\d+)(?:,(\d+))? @@/);
                  if (hunkMatch) {
                    currentNewLine = parseInt(hunkMatch[1]);
                    const count = hunkMatch[2] !== undefined ? parseInt(hunkMatch[2]) : 1;
                    // With --unified=0, the count represents actual added lines
                    // Add all lines in this range as they are additions
                    for (let i = 0; i < count; i++) {
                      lines.add(currentNewLine + i);
                    }
                    continue;
                  }
                }
                changedLines[file] = lines;
                if (lines.size > 0) {
                  console.log(`File ${file}: ${lines.size} added/modified line(s): ${[...lines].slice(0, 10).join(', ')}${lines.size > 10 ? '...' : ''}`);
                }
              } catch (error) {
                console.log(`Could not get diff for ${file}: ${error.message}`);
                changedLines[file] = new Set();
              }
            }

            // Filter issues to only those:
            // 1. In files that are part of this PR
            // 2. On lines that were actually added/modified
            const filteredIssues = rawIssues.filter(issue => {
              // First check: is this file even in the PR?
              if (!changedFiles.has(issue.file)) {
                console.log(`Filtered out: ${issue.file}:${issue.line} (file not in PR)`);
                return false;
              }
              // Second check: is this line actually changed?
              const fileLines = changedLines[issue.file] || new Set();
              if (!fileLines.has(issue.line)) {
                console.log(`Filtered out: ${issue.file}:${issue.line} (line not changed)`);
                return false;
              }
              return true;
            });

            // Save filtered issues
            fs.writeFileSync('lint_results.json', JSON.stringify(filteredIssues, null, 2));
            const filtered = rawIssues.length - filteredIssues.length;
            if (filtered > 0) {
              console.log(`Filtered out ${filtered} issue(s) not on changed lines`);
            }
            console.log(`Kept ${filteredIssues.length} issue(s) on changed lines for inline comments`);

      - name: Post review with grouped comments
        uses: actions/github-script@v7
        with:
          github-token: ${{ secrets.GITHUB_TOKEN }}
          script: |
            const fs = require('fs');
            const crypto = require('crypto');

            // Change to pr directory
            process.chdir('pr');
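
            // Flow: drop the bot's previous inline review comments, update (or create)
            // a single summary comment, then post one review whose inline comments
            // carry the individual lint messages.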

            // Helper function to delete all existing bot review comments
            async function deleteExistingBotComments() {
              try {
                // List all review comments on this PR
                const comments = await github.paginate(
                  github.rest.pulls.listReviewComments,
                  {
                    owner: context.repo.owner,
                    repo: context.repo.repo,
                    pull_number: context.issue.number,
                    per_page: 100
                  }
                );
                // Keep only comments posted by the Actions bot account
                const botComments = comments.filter(c =>
                  c.user.login === 'github-actions[bot]'
                );
                console.log(`Found ${botComments.length} existing bot review comment(s) to delete`);
                // Delete each bot comment
                for (const comment of botComments) {
                  try {
                    await github.rest.pulls.deleteReviewComment({
                      owner: context.repo.owner,
                      repo: context.repo.repo,
                      comment_id: comment.id
                    });
                    console.log(`Deleted comment ${comment.id}`);
                  } catch (error) {
                    console.log(`Could not delete comment ${comment.id}: ${error.message}`);
                  }
                }
              } catch (error) {
                console.log(`Error deleting bot comments: ${error.message}`);
              }
            }

            // Helper function to find the bot's existing summary (issue) comment,
            // identified by the hidden <!-- lint-review --> marker
            async function findBotIssueComment() {
              try {
                const comments = await github.paginate(
                  github.rest.issues.listComments,
                  {
                    owner: context.repo.owner,
                    repo: context.repo.repo,
                    issue_number: context.issue.number,
                    per_page: 100
                  }
                );
                return comments.find(c =>
                  c.user.login === 'github-actions[bot]' &&
                  c.body && c.body.includes('<!-- lint-review -->')
                );
              } catch (error) {
                console.log(`Error finding bot comment: ${error.message}`);
                return null;
              }
            }

            if (!fs.existsSync('lint_results.json')) {
              console.log('No lint results file found');
              return;
            }
            const content = fs.readFileSync('lint_results.json', 'utf8').trim();

            // Check if there are filtered issues (nothing in this workflow writes
            // filtered_issues.txt explicitly, so it is usually absent)
            const hasFilteredIssues = fs.existsSync('filtered_issues.txt');
            const filteredContent = hasFilteredIssues ? fs.readFileSync('filtered_issues.txt', 'utf8') : '';

            const issues = content && content !== '[]' ? JSON.parse(content) : [];

            // Create hash of current issues to detect changes
            const issuesHash = crypto.createHash('md5')
              .update(JSON.stringify(issues.map(i => `${i.file}:${i.line}:${i.message}`)) + filteredContent)
              .digest('hex')
              .substring(0, 8);

            // Find existing bot comment
            const existingComment = await findBotIssueComment();

            // Check if the existing comment already has the same hash
            if (existingComment && existingComment.body.includes(`<!-- lint-hash:${issuesHash} -->`)) {
              console.log('Comment already exists with same issues, skipping');
              return;
            }

            // Delete all existing bot review comments (inline comments)
            await deleteExistingBotComments();

            // If no issues, delete the summary comment and return
            if (issues.length === 0 && !hasFilteredIssues) {
              console.log('No lint issues found');
              if (existingComment) {
                try {
                  await github.rest.issues.deleteComment({
                    owner: context.repo.owner,
                    repo: context.repo.repo,
                    comment_id: existingComment.id
                  });
                  console.log('Deleted existing lint comment (no issues remaining)');
                } catch (error) {
                  console.log(`Could not delete comment: ${error.message}`);
                }
              }
              return;
            }

            // Prepare inline review comments
            const reviewComments = [];
            const issuesByFile = {};
            for (const issue of issues) {
              if (!issuesByFile[issue.file]) {
                issuesByFile[issue.file] = [];
              }
              issuesByFile[issue.file].push(issue);
              reviewComments.push({
                path: issue.file,
                line: issue.line,
                side: 'RIGHT',
                body: issue.message
              });
            }
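
            // Note: createReview only accepts inline comments on lines that appear in
            // the PR diff, which is why the previous step filtered issues down to
            // changed lines.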

            // Create summary comment body
            const totalIssues = issues.length;
            const fileCount = Object.keys(issuesByFile).length;
            let commentBody = `## Lint Results\n\n`;
            if (totalIssues > 0) {
              commentBody += `Found **${totalIssues}** issue${totalIssues !== 1 ? 's' : ''} on changed lines in **${fileCount}** file${fileCount !== 1 ? 's' : ''}:\n\n`;
              for (const [file, fileIssues] of Object.entries(issuesByFile)) {
                commentBody += `- **${file}**: ${fileIssues.length} issue${fileIssues.length !== 1 ? 's' : ''}\n`;
              }
            } else {
              commentBody += `No issues on changed lines.\n`;
            }
            commentBody += `\n<!-- lint-review -->\n<!-- lint-hash:${issuesHash} -->`;

            // Update or create the summary comment
            try {
              if (existingComment) {
                await github.rest.issues.updateComment({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  comment_id: existingComment.id,
                  body: commentBody
                });
                console.log('Updated existing lint summary comment');
              } else {
                await github.rest.issues.createComment({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  issue_number: context.issue.number,
                  body: commentBody
                });
                console.log('Created new lint summary comment');
              }
            } catch (error) {
              console.error(`Failed to update/create summary comment: ${error.message}`);
            }

            // Create inline review comments if there are issues
            if (reviewComments.length > 0) {
              try {
                await github.rest.pulls.createReview({
                  owner: context.repo.owner,
                  repo: context.repo.repo,
                  pull_number: context.issue.number,
                  commit_id: context.payload.pull_request.head.sha,
                  event: 'COMMENT',
                  comments: reviewComments
                });
                console.log(`Created review with ${reviewComments.length} inline comment(s)`);
              } catch (error) {
                console.error(`Failed to create inline comments: ${error.message}`);
              }
            }