Compare commits

..

26 Commits

Author SHA1 Message Date
opencode-agent[bot]
57d5c095d8 chore: generate 2026-05-03 15:22:38 +00:00
Kit Langton
13ac849db5 refactor(config+core): drop ConfigPaths.readFile, add AppFileSystem.readFileStringSafe, flatten TuiConfig.loadState (#25602) 2026-05-03 15:21:34 +00:00
Shoubhit Dash
8694c5b68f fix(auth): respect server username in clients (#25596) 2026-05-03 19:28:31 +05:30
Shoubhit Dash
0a7d02c87c feat: group changelog bugfixes (#25597) 2026-05-03 13:48:26 +00:00
Brendan Allan
e77867ef05 ci: only build electron desktop (#19067) 2026-05-03 19:10:15 +05:30
opencode-agent[bot]
fb224d8974 chore: generate 2026-05-03 13:21:15 +00:00
OpeOginni
101566131d fix(httpapi): add basic auth challenge for browser login
Adds a WWW-Authenticate challenge for unauthorized experimental HttpApi UI fallback responses so browsers open the Basic Auth prompt when a server password is configured.
2026-05-03 13:20:05 +00:00
opencode-agent[bot]
8433e8b433 chore: generate 2026-05-03 13:18:13 +00:00
Kit Langton
379600b5ab fix(sdk+cli): surface real errors instead of bare {} when server returns empty body (#25592) 2026-05-03 13:17:06 +00:00
Shoubhit Dash
7a503de606 fix(acp): pass server auth to internal client (#25591) 2026-05-03 18:42:24 +05:30
Kit Langton
2ad1eb56d3 feat(server): native HttpApi listener with Bun.serve + WS upgrade (#25547) 2026-05-03 09:09:45 -04:00
opencode-agent[bot]
a43f767abb chore: generate 2026-05-03 13:07:30 +00:00
Kit Langton
0ee3b87289 feat(server): Server.openapi() backed by HttpApi spec, parity-checked against Hono output (#25545) 2026-05-03 09:06:23 -04:00
opencode-agent[bot]
3c9f3c5786 chore: generate 2026-05-03 12:59:40 +00:00
Kit Langton
ca75ac6681 refactor(server): extract Hono-coupled utilities to backend-neutral modules (#25542) 2026-05-03 12:58:34 +00:00
Shoubhit Dash
d1f597b5b5 fix(vcs): avoid unbounded diff memory usage (#25581) 2026-05-03 17:49:46 +05:30
Dax Raad
8299fb3e2b ignore: remove triage-unassigned.ts script
This script was used to batch-triage open GitHub issues without assignees.
Removing as the triage workflow has evolved and this batch approach is no longer needed.
2026-05-03 01:59:03 -04:00
Dax Raad
4f7f90133d ci: stop sending daily community recap notifications 2026-05-03 01:54:32 -04:00
Dax Raad
b205e104f6 ci: remove vouch-based contributor filtering workflows
Removes the automated vouch system that filtered issues and PRs from non-vouched users. This simplifies the contribution process by removing the requirement for maintainers to manually vouch contributors before they can participate.
2026-05-03 01:54:32 -04:00
Dax Raad
252e2f98e6 ci: remove automatic labels from GitHub issue templates to allow manual triage 2026-05-03 01:54:32 -04:00
opencode-agent[bot]
e2afdc1202 chore: generate 2026-05-03 05:22:22 +00:00
Dax Raad
a08e4c9651 core: simplify triage workflow to focus on issue ownership
Switch triage agent to gpt-5.4-nano for faster issue assignment. Remove label
management from the triage tool so it only assigns owners based on team
ownership rules. This reduces noise in the issue tracker and ensures issues
get to the right team member immediately without unnecessary labels.

Update team structures to reflect current ownership and add script for
processing unassigned issues.
2026-05-03 01:21:17 -04:00
Dax Raad
7ccab8d272 core: update triage agent to use qwen3.6-plus model for improved response quality 2026-05-03 01:10:14 -04:00
Dax Raad
fc57eb3b8e ci 2026-05-03 01:05:36 -04:00
Dax
9179bafd54 Add debug info command (#25550) 2026-05-03 05:04:52 +00:00
Kit Langton
2df8eda8a3 fix(cli): bridge Instance.current ALS in effectCmd handlers (regression from #25522) (#25546) 2026-05-03 04:24:33 +00:00
66 changed files with 13266 additions and 11652 deletions

View File

@@ -1,6 +1,5 @@
name: Bug report
description: Report an issue that should be fixed
labels: ["bug"]
body:
- type: textarea
id: description

View File

@@ -1,6 +1,5 @@
name: 🚀 Feature Request
description: Suggest an idea, feature, or enhancement
labels: [discussion]
title: "[FEATURE]:"
body:

View File

@@ -1,6 +1,5 @@
name: Question
description: Ask a question
labels: ["question"]
body:
- type: textarea
id: question

View File

@@ -11,6 +11,5 @@ MrMushrooooom
nexxeln
R44VC0RP
rekram1-node
RhysSullivan
thdxr
simonklee

41
.github/VOUCHED.td vendored
View File

@@ -1,41 +0,0 @@
# Vouched contributors for this project.
#
# See https://github.com/mitchellh/vouch for details.
#
# Syntax:
# - One handle per line (without @), sorted alphabetically.
# - Optional platform prefix: platform:username (e.g., github:user).
# - Denounce with minus prefix: -username or -platform:username.
# - Optional details after a space following the handle.
adamdotdevin
-agusbasari29 AI PR slop
ariane-emory
-atharvau AI review spamming literally every PR
-borealbytes
-carycooper777
-danieljoshuanazareth
-danieljoshuanazareth
-davidbernat looks to be a clawdbot that spams team and sends super weird emails, doesnt appear to be a real person
dmtrkovalenko
edemaine
fahreddinozcan
-florianleibert
fwang
iamdavidhill
jayair
kitlangton
kommander
-opencode2026
-opencodeengineer bot that spams issues
r44vc0rp
rekram1-node
-ricardo-m-l
-robinmordasiewicz
rubdos
-saisharan0103 spamming ai prs
shantur
simonklee
-spider-yamet clawdbot/llm psychosis, spam pinging the team
-terisuke
thdxr
-toastythebot

View File

@@ -1,170 +0,0 @@
name: daily-issues-recap
on:
schedule:
# Run at 6 PM EST (23:00 UTC, or 22:00 UTC during daylight saving)
- cron: "0 23 * * *"
workflow_dispatch: # Allow manual trigger for testing
jobs:
daily-recap:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
issues: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Generate daily issues recap
id: recap
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh issue*": "allow",
"gh search*": "allow"
},
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
run: |
# Get today's date range
TODAY=$(date -u +%Y-%m-%d)
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily issues recap for the OpenCode repository.
TODAY'S DATE: ${TODAY}
STEP 1: Gather today's issues
Search for all OPEN issues created today (${TODAY}) using:
gh issue list --repo ${{ github.repository }} --state open --search \"created:${TODAY}\" --json number,title,body,labels,state,comments,createdAt,author --limit 500
IMPORTANT: EXCLUDE all issues authored by Anomaly team members. Filter out issues where the author login matches ANY of these:
adamdotdevin, Brendonovich, fwang, Hona, iamdavidhill, jayair, kitlangton, kommander, MrMushrooooom, R44VC0RP, rekram1-node, thdxr
This recap is specifically for COMMUNITY (external) issues only.
STEP 2: Analyze and categorize
For each issue created today, categorize it:
**Severity Assessment:**
- CRITICAL: Crashes, data loss, security issues, blocks major functionality
- HIGH: Significant bugs affecting many users, important features broken
- MEDIUM: Bugs with workarounds, minor features broken
- LOW: Minor issues, cosmetic, nice-to-haves
**Activity Assessment:**
- Note issues with high comment counts or engagement
- Note issues from repeat reporters (check if author has filed before)
STEP 3: Cross-reference with existing issues
For issues that seem like feature requests or recurring bugs:
- Search for similar older issues to identify patterns
- Note if this is a frequently requested feature
- Identify any issues that are duplicates of long-standing requests
STEP 4: Generate the recap
Create a structured recap with these sections:
===DISCORD_START===
**Daily Issues Recap - ${TODAY}**
**Summary Stats**
- Total issues opened today: [count]
- By category: [bugs/features/questions]
**Critical/High Priority Issues**
[List any CRITICAL or HIGH severity issues with brief descriptions and issue numbers]
**Most Active/Discussed**
[Issues with significant engagement or from active community members]
**Trending Topics**
[Patterns noticed - e.g., 'Multiple reports about X', 'Continued interest in Y feature']
**Duplicates & Related**
[Issues that relate to existing open issues]
===DISCORD_END===
STEP 5: Format for Discord
Format the recap as a Discord-compatible message:
- Use Discord markdown (**, __, etc.)
- BE EXTREMELY CONCISE - this is an EOD summary, not a detailed report
- Use hyperlinked issue numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/issues/1234>)
- Group related issues on single lines where possible
- Add emoji sparingly for critical items only
- HARD LIMIT: Keep under 1800 characters total
- Skip sections that have nothing notable (e.g., if no critical issues, omit that section)
- Prioritize signal over completeness - only surface what matters
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/recap_raw.txt
# Extract only the Discord message between markers
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/recap_raw.txt | grep -v '===DISCORD' > /tmp/recap.txt
echo "recap_file=/tmp/recap.txt" >> $GITHUB_OUTPUT
- name: Post to Discord
env:
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
run: |
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
cat /tmp/recap.txt
exit 0
fi
# Read the recap
RECAP_RAW=$(cat /tmp/recap.txt)
RECAP_LENGTH=${#RECAP_RAW}
echo "Recap length: ${RECAP_LENGTH} chars"
# Function to post a message to Discord
post_to_discord() {
local msg="$1"
local content=$(echo "$msg" | jq -Rs '.')
curl -s -H "Content-Type: application/json" \
-X POST \
-d "{\"content\": ${content}}" \
"$DISCORD_WEBHOOK_URL"
sleep 1
}
# If under limit, send as single message
if [ "$RECAP_LENGTH" -le 1950 ]; then
post_to_discord "$RECAP_RAW"
else
echo "Splitting into multiple messages..."
remaining="$RECAP_RAW"
while [ ${#remaining} -gt 0 ]; do
if [ ${#remaining} -le 1950 ]; then
post_to_discord "$remaining"
break
else
chunk="${remaining:0:1900}"
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
chunk="${remaining:0:$last_newline}"
remaining="${remaining:$((last_newline+1))}"
else
chunk="${remaining:0:1900}"
remaining="${remaining:1900}"
fi
post_to_discord "$chunk"
fi
done
fi
echo "Posted daily recap to Discord"

View File

@@ -1,173 +0,0 @@
name: daily-pr-recap
on:
schedule:
# Run at 5pm EST (22:00 UTC, or 21:00 UTC during daylight saving)
- cron: "0 22 * * *"
workflow_dispatch: # Allow manual trigger for testing
jobs:
pr-recap:
runs-on: blacksmith-4vcpu-ubuntu-2404
permissions:
contents: read
pull-requests: read
steps:
- name: Checkout repository
uses: actions/checkout@v4
with:
fetch-depth: 1
- uses: ./.github/actions/setup-bun
- name: Install opencode
run: curl -fsSL https://opencode.ai/install | bash
- name: Generate daily PR recap
id: recap
env:
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
OPENCODE_PERMISSION: |
{
"bash": {
"*": "deny",
"gh pr*": "allow",
"gh search*": "allow"
},
"webfetch": "deny",
"edit": "deny",
"write": "deny"
}
run: |
TODAY=$(date -u +%Y-%m-%d)
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily PR activity recap for the OpenCode repository.
TODAY'S DATE: ${TODAY}
STEP 1: Gather PR data
Run these commands to gather PR information. ONLY include OPEN PRs created or updated TODAY (${TODAY}):
# Open PRs created today
gh pr list --repo ${{ github.repository }} --state open --search \"created:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
# Open PRs with activity today (updated today)
gh pr list --repo ${{ github.repository }} --state open --search \"updated:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
IMPORTANT: EXCLUDE all PRs authored by Anomaly team members. Filter out PRs where the author login matches ANY of these:
adamdotdevin, Brendonovich, fwang, Hona, iamdavidhill, jayair, kitlangton, kommander, MrMushrooooom, R44VC0RP, rekram1-node, thdxr
This recap is specifically for COMMUNITY (external) contributions only.
STEP 2: For high-activity PRs, check comment counts
For promising PRs, run:
gh pr view [NUMBER] --repo ${{ github.repository }} --json comments --jq '[.comments[] | select(.author.login != \"copilot-pull-request-reviewer\" and .author.login != \"github-actions\")] | length'
IMPORTANT: When counting comments/activity, EXCLUDE these bot accounts:
- copilot-pull-request-reviewer
- github-actions
STEP 3: Identify what matters (ONLY from today's PRs)
**Bug Fixes From Today:**
- PRs with 'fix' or 'bug' in title created/updated today
- Small bug fixes (< 100 lines changed) that are easy to review
- Bug fixes from community contributors
**High Activity Today:**
- PRs with significant human comments today (excluding bots listed above)
- PRs with back-and-forth discussion today
**Quick Wins:**
- Small PRs (< 50 lines) that are approved or nearly approved
- PRs that just need a final review
STEP 4: Generate the recap
Create a structured recap:
===DISCORD_START===
**Daily PR Recap - ${TODAY}**
**New PRs Today**
[PRs opened today - group by type: bug fixes, features, etc.]
**Active PRs Today**
[PRs with activity/updates today - significant discussion]
**Quick Wins**
[Small PRs ready to merge]
===DISCORD_END===
STEP 5: Format for Discord
- Use Discord markdown (**, __, etc.)
- BE EXTREMELY CONCISE - surface what we might miss
- Use hyperlinked PR numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/pull/1234>)
- Include PR author: [#1234](<url>) (@author)
- For bug fixes, add brief description of what it fixes
- Show line count for quick wins: \"(+15/-3 lines)\"
- HARD LIMIT: Keep under 1800 characters total
- Skip empty sections
- Focus on PRs that need human eyes
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/pr_recap_raw.txt
# Extract only the Discord message between markers
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/pr_recap_raw.txt | grep -v '===DISCORD' > /tmp/pr_recap.txt
echo "recap_file=/tmp/pr_recap.txt" >> $GITHUB_OUTPUT
- name: Post to Discord
env:
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
run: |
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
cat /tmp/pr_recap.txt
exit 0
fi
# Read the recap
RECAP_RAW=$(cat /tmp/pr_recap.txt)
RECAP_LENGTH=${#RECAP_RAW}
echo "Recap length: ${RECAP_LENGTH} chars"
# Function to post a message to Discord
post_to_discord() {
local msg="$1"
local content=$(echo "$msg" | jq -Rs '.')
curl -s -H "Content-Type: application/json" \
-X POST \
-d "{\"content\": ${content}}" \
"$DISCORD_WEBHOOK_URL"
sleep 1
}
# If under limit, send as single message
if [ "$RECAP_LENGTH" -le 1950 ]; then
post_to_discord "$RECAP_RAW"
else
echo "Splitting into multiple messages..."
remaining="$RECAP_RAW"
while [ ${#remaining} -gt 0 ]; do
if [ ${#remaining} -le 1950 ]; then
post_to_discord "$remaining"
break
else
chunk="${remaining:0:1900}"
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
chunk="${remaining:0:$last_newline}"
remaining="${remaining:$((last_newline+1))}"
else
chunk="${remaining:0:1900}"
remaining="${remaining:1900}"
fi
post_to_discord "$chunk"
fi
done
fi
echo "Posted daily PR recap to Discord"

View File

@@ -209,182 +209,6 @@ jobs:
packages/opencode/dist/opencode-windows-x64
packages/opencode/dist/opencode-windows-x64-baseline
build-tauri:
needs:
- build-cli
- version
continue-on-error: false
env:
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
AZURE_TRUSTED_SIGNING_ACCOUNT_NAME: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE }}
AZURE_TRUSTED_SIGNING_ENDPOINT: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
strategy:
fail-fast: false
matrix:
settings:
- host: macos-latest
target: x86_64-apple-darwin
- host: macos-latest
target: aarch64-apple-darwin
# github-hosted: blacksmith lacks ARM64 MSVC cross-compilation toolchain
- host: windows-2025
target: aarch64-pc-windows-msvc
- host: blacksmith-4vcpu-windows-2025
target: x86_64-pc-windows-msvc
- host: blacksmith-4vcpu-ubuntu-2404
target: x86_64-unknown-linux-gnu
- host: blacksmith-8vcpu-ubuntu-2404-arm
target: aarch64-unknown-linux-gnu
runs-on: ${{ matrix.settings.host }}
steps:
- uses: actions/checkout@v3
with:
fetch-tags: true
- uses: apple-actions/import-codesign-certs@v2
if: ${{ runner.os == 'macOS' }}
with:
keychain: build
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
- name: Verify Certificate
if: ${{ runner.os == 'macOS' }}
run: |
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep "Developer ID Application")
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
echo "Certificate imported."
- name: Setup Apple API Key
if: ${{ runner.os == 'macOS' }}
run: |
echo "${{ secrets.APPLE_API_KEY_PATH }}" > $RUNNER_TEMP/apple-api-key.p8
- uses: ./.github/actions/setup-bun
- name: Azure login
if: runner.os == 'Windows'
uses: azure/login@v2
with:
client-id: ${{ env.AZURE_CLIENT_ID }}
tenant-id: ${{ env.AZURE_TENANT_ID }}
subscription-id: ${{ env.AZURE_SUBSCRIPTION_ID }}
- uses: actions/setup-node@v4
with:
node-version: "24"
- name: Cache apt packages
if: contains(matrix.settings.host, 'ubuntu')
uses: actions/cache@v4
with:
path: ~/apt-cache
key: ${{ runner.os }}-${{ matrix.settings.target }}-apt-${{ hashFiles('.github/workflows/publish.yml') }}
restore-keys: |
${{ runner.os }}-${{ matrix.settings.target }}-apt-
- name: install dependencies (ubuntu only)
if: contains(matrix.settings.host, 'ubuntu')
run: |
mkdir -p ~/apt-cache && chmod -R a+rw ~/apt-cache
sudo apt-get update
sudo apt-get install -y --no-install-recommends -o dir::cache::archives="$HOME/apt-cache" libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
sudo chmod -R a+rw ~/apt-cache
- name: install Rust stable
uses: dtolnay/rust-toolchain@stable
with:
targets: ${{ matrix.settings.target }}
- uses: Swatinem/rust-cache@v2
with:
workspaces: packages/desktop/src-tauri
shared-key: ${{ matrix.settings.target }}
- name: Prepare
run: |
cd packages/desktop
bun ./scripts/prepare.ts
env:
OPENCODE_VERSION: ${{ needs.version.outputs.version }}
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
OPENCODE_CLI_ARTIFACT: ${{ (runner.os == 'Windows' && 'opencode-cli-windows') || 'opencode-cli' }}
RUST_TARGET: ${{ matrix.settings.target }}
GH_TOKEN: ${{ github.token }}
GITHUB_RUN_ID: ${{ github.run_id }}
- name: Resolve tauri portable SHA
if: contains(matrix.settings.host, 'ubuntu')
run: echo "TAURI_PORTABLE_SHA=$(git ls-remote https://github.com/tauri-apps/tauri.git refs/heads/feat/truly-portable-appimage | cut -f1)" >> "$GITHUB_ENV"
# Fixes AppImage build issues, can be removed when https://github.com/tauri-apps/tauri/pull/12491 is released
- name: Install tauri-cli from portable appimage branch
uses: taiki-e/cache-cargo-install-action@v3
if: contains(matrix.settings.host, 'ubuntu')
with:
tool: tauri-cli
git: https://github.com/tauri-apps/tauri
# branch: feat/truly-portable-appimage
rev: ${{ env.TAURI_PORTABLE_SHA }}
- name: Show tauri-cli version
if: contains(matrix.settings.host, 'ubuntu')
run: cargo tauri --version
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- name: Build and upload artifacts
uses: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a
timeout-minutes: 60
with:
projectPath: packages/desktop
uploadWorkflowArtifacts: true
tauriScript: ${{ (contains(matrix.settings.host, 'ubuntu') && 'cargo tauri') || '' }}
args: --target ${{ matrix.settings.target }} --config ${{ (github.ref_name == 'beta' && './src-tauri/tauri.beta.conf.json') || './src-tauri/tauri.prod.conf.json' }} --verbose
updaterJsonPreferNsis: true
releaseId: ${{ needs.version.outputs.release }}
tagName: ${{ needs.version.outputs.tag }}
releaseDraft: true
releaseAssetNamePattern: opencode-desktop-[platform]-[arch][ext]
repo: ${{ (github.ref_name == 'beta' && 'opencode-beta') || '' }}
releaseCommitish: ${{ github.sha }}
env:
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
TAURI_BUNDLER_NEW_APPIMAGE_FORMAT: true
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
APPLE_API_KEY_PATH: ${{ runner.temp }}/apple-api-key.p8
- name: Verify signed Windows desktop artifacts
if: runner.os == 'Windows'
shell: pwsh
run: |
$files = @(
"${{ github.workspace }}\packages\desktop\src-tauri\sidecars\opencode-cli-${{ matrix.settings.target }}.exe"
)
$files += Get-ChildItem "${{ github.workspace }}\packages\desktop\src-tauri\target\${{ matrix.settings.target }}\release\bundle\nsis\*.exe" | Select-Object -ExpandProperty FullName
foreach ($file in $files) {
$sig = Get-AuthenticodeSignature $file
if ($sig.Status -ne "Valid") {
throw "Invalid signature for ${file}: $($sig.Status)"
}
}
build-electron:
needs:
- build-cli
@@ -524,6 +348,30 @@ jobs:
env:
OPENCODE_CHANNEL: ${{ (github.ref_name == 'beta' && 'beta') || 'prod' }}
- name: Create and upload macOS .app.tar.gz
if: runner.os == 'macOS' && needs.version.outputs.release
working-directory: packages/desktop-electron/dist
env:
GH_TOKEN: ${{ steps.committer.outputs.token }}
run: |
if [[ "${{ matrix.settings.target }}" == "x86_64-apple-darwin" ]]; then
APP_DIR="mac"
OUT_NAME="opencode-desktop-mac-x64.app.tar.gz"
elif [[ "${{ matrix.settings.target }}" == "aarch64-apple-darwin" ]]; then
APP_DIR="mac-arm64"
OUT_NAME="opencode-desktop-mac-arm64.app.tar.gz"
else
echo "Unknown macOS target: ${{ matrix.settings.target }}"
exit 1
fi
APP_PATH=$(find "$APP_DIR" -maxdepth 1 -name "*.app" -type d | head -1)
if [ -z "$APP_PATH" ]; then
echo "No .app bundle found in $APP_DIR"
exit 1
fi
tar -czf "$OUT_NAME" -C "$(dirname "$APP_PATH")" "$(basename "$APP_PATH")"
gh release upload "v${{ needs.version.outputs.version }}" "$OUT_NAME" --clobber --repo "${{ needs.version.outputs.repo }}"
- name: Verify signed Windows Electron artifacts
if: runner.os == 'Windows'
shell: pwsh
@@ -542,7 +390,7 @@ jobs:
- uses: actions/upload-artifact@v4
with:
name: opencode-electron-${{ matrix.settings.target }}
name: opencode-desktop-${{ matrix.settings.target }}
path: packages/desktop-electron/dist/*
- uses: actions/upload-artifact@v4
@@ -556,7 +404,6 @@ jobs:
- version
- build-cli
- sign-cli-windows
- build-tauri
- build-electron
if: always() && !failure() && !cancelled()
runs-on: blacksmith-4vcpu-ubuntu-2404
@@ -583,13 +430,6 @@ jobs:
node-version: "24"
registry-url: "https://registry.npmjs.org"
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- uses: actions/download-artifact@v4
with:
name: opencode-cli
@@ -611,6 +451,13 @@ jobs:
pattern: latest-yml-*
path: /tmp/latest-yml
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- name: Cache apt packages (AUR)
uses: actions/cache@v4
with:
@@ -639,3 +486,5 @@ jobs:
GH_REPO: ${{ needs.version.outputs.repo }}
NPM_CONFIG_PROVENANCE: false
LATEST_YML_DIR: /tmp/latest-yml
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}

View File

@@ -1,116 +0,0 @@
name: vouch-check-issue
on:
issues:
types: [opened]
permissions:
contents: read
issues: write
jobs:
check:
runs-on: ubuntu-latest
steps:
- name: Check if issue author is denounced
uses: actions/github-script@v7
with:
script: |
const author = context.payload.issue.user.login;
const issueNumber = context.payload.issue.number;
// Skip bots
if (author.endsWith('[bot]')) {
core.info(`Skipping bot: ${author}`);
return;
}
// Read the VOUCHED.td file via API (no checkout needed)
let content;
try {
const response = await github.rest.repos.getContent({
owner: context.repo.owner,
repo: context.repo.repo,
path: '.github/VOUCHED.td',
});
content = Buffer.from(response.data.content, 'base64').toString('utf-8');
} catch (error) {
if (error.status === 404) {
core.info('No .github/VOUCHED.td file found, skipping check.');
return;
}
throw error;
}
// Parse the .td file for vouched and denounced users
const vouched = new Set();
const denounced = new Map();
for (const line of content.split('\n')) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#')) continue;
const isDenounced = trimmed.startsWith('-');
const rest = isDenounced ? trimmed.slice(1).trim() : trimmed;
if (!rest) continue;
const spaceIdx = rest.indexOf(' ');
const handle = spaceIdx === -1 ? rest : rest.slice(0, spaceIdx);
const reason = spaceIdx === -1 ? null : rest.slice(spaceIdx + 1).trim();
// Handle platform:username or bare username
// Only match bare usernames or github: prefix (skip other platforms)
const colonIdx = handle.indexOf(':');
if (colonIdx !== -1) {
const platform = handle.slice(0, colonIdx).toLowerCase();
if (platform !== 'github') continue;
}
const username = colonIdx === -1 ? handle : handle.slice(colonIdx + 1);
if (!username) continue;
if (isDenounced) {
denounced.set(username.toLowerCase(), reason);
continue;
}
vouched.add(username.toLowerCase());
}
// Check if the author is denounced
const reason = denounced.get(author.toLowerCase());
if (reason !== undefined) {
// Author is denounced — close the issue
const body = 'This issue has been automatically closed.';
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
body,
});
await github.rest.issues.update({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
state: 'closed',
state_reason: 'not_planned',
});
core.info(`Closed issue #${issueNumber} from denounced user ${author}`);
return;
}
// Author is positively vouched — add label
if (!vouched.has(author.toLowerCase())) {
core.info(`User ${author} is not denounced or vouched. Allowing issue.`);
return;
}
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: issueNumber,
labels: ['Vouched'],
});
core.info(`Added vouched label to issue #${issueNumber} from ${author}`);

View File

@@ -1,114 +0,0 @@
name: vouch-check-pr
on:
pull_request_target:
types: [opened]
permissions:
contents: read
issues: write
pull-requests: write
jobs:
check:
runs-on: ubuntu-latest
steps:
- name: Check if PR author is denounced
uses: actions/github-script@v7
with:
script: |
const author = context.payload.pull_request.user.login;
const prNumber = context.payload.pull_request.number;
// Skip bots
if (author.endsWith('[bot]')) {
core.info(`Skipping bot: ${author}`);
return;
}
// Read the VOUCHED.td file via API (no checkout needed)
let content;
try {
const response = await github.rest.repos.getContent({
owner: context.repo.owner,
repo: context.repo.repo,
path: '.github/VOUCHED.td',
});
content = Buffer.from(response.data.content, 'base64').toString('utf-8');
} catch (error) {
if (error.status === 404) {
core.info('No .github/VOUCHED.td file found, skipping check.');
return;
}
throw error;
}
// Parse the .td file for vouched and denounced users
const vouched = new Set();
const denounced = new Map();
for (const line of content.split('\n')) {
const trimmed = line.trim();
if (!trimmed || trimmed.startsWith('#')) continue;
const isDenounced = trimmed.startsWith('-');
const rest = isDenounced ? trimmed.slice(1).trim() : trimmed;
if (!rest) continue;
const spaceIdx = rest.indexOf(' ');
const handle = spaceIdx === -1 ? rest : rest.slice(0, spaceIdx);
const reason = spaceIdx === -1 ? null : rest.slice(spaceIdx + 1).trim();
// Handle platform:username or bare username
// Only match bare usernames or github: prefix (skip other platforms)
const colonIdx = handle.indexOf(':');
if (colonIdx !== -1) {
const platform = handle.slice(0, colonIdx).toLowerCase();
if (platform !== 'github') continue;
}
const username = colonIdx === -1 ? handle : handle.slice(colonIdx + 1);
if (!username) continue;
if (isDenounced) {
denounced.set(username.toLowerCase(), reason);
continue;
}
vouched.add(username.toLowerCase());
}
// Check if the author is denounced
const reason = denounced.get(author.toLowerCase());
if (reason !== undefined) {
// Author is denounced — close the PR
await github.rest.issues.createComment({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
body: 'This pull request has been automatically closed.',
});
await github.rest.pulls.update({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: prNumber,
state: 'closed',
});
core.info(`Closed PR #${prNumber} from denounced user ${author}`);
return;
}
// Author is positively vouched — add label
if (!vouched.has(author.toLowerCase())) {
core.info(`User ${author} is not denounced or vouched. Allowing PR.`);
return;
}
await github.rest.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: prNumber,
labels: ['Vouched'],
});
core.info(`Added vouched label to PR #${prNumber} from ${author}`);

View File

@@ -1,38 +0,0 @@
name: vouch-manage-by-issue
on:
issue_comment:
types: [created]
concurrency:
group: vouch-manage
cancel-in-progress: false
permissions:
contents: write
issues: write
pull-requests: read
jobs:
manage:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
persist-credentials: false
fetch-depth: 0
- name: Setup git committer
id: committer
uses: ./.github/actions/setup-git-committer
with:
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
- uses: mitchellh/vouch/action/manage-by-issue@main
with:
issue-id: ${{ github.event.issue.number }}
comment-id: ${{ github.event.comment.id }}
roles: admin,maintain,write
env:
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}

View File

@@ -1,7 +1,7 @@
---
mode: primary
hidden: true
model: opencode/minimax-m2.5
model: opencode/gpt-5.4-nano
color: "#44BA81"
tools:
"*": false
@@ -14,127 +14,30 @@ Use your github-triage tool to triage issues.
This file is the source of truth for ownership/routing rules.
## Labels
Assign issues by choosing the team with the strongest overlap. The github-triage tool will assign a random member from that team.
### windows
Do not add labels to issues. Only assign an owner.
Use for any issue that mentions Windows (the OS). Be sure they are saying that they are on Windows.
When calling github-triage, pass one of these team values: tui, desktop_web, core, inference, windows.
- Use if they mention WSL too
## Teams
#### perf
### TUI
Performance-related issues:
Terminal UI issues, including rendering, keybindings, scrolling, terminal compatibility, SSH behavior, crashes in the TUI, and low-level TUI performance.
- Slow performance
- High RAM usage
- High CPU usage
### Desktop / Web
**Only** add if it's likely a RAM or CPU issue. **Do not** add for LLM slowness.
Desktop application and browser-based app issues, including `opencode web`, desktop-specific UI behavior, packaging, and web view problems.
#### desktop
### Core
Desktop app issues:
Core opencode server and harness issues, including sqlite, snapshots, memory, API behavior, agent context construction, tool execution, provider integrations, model behavior, documentation, and larger architectural features.
- `opencode web` command
- The desktop app itself
### Inference
**Only** add if it's specifically about the Desktop application or `opencode web` view. **Do not** add for terminal, TUI, or general opencode issues.
OpenCode Zen, OpenCode Go, and billing issues.
#### nix
### Windows
**Only** add if the issue explicitly mentions nix.
If the issue does not mention nix, do not add nix.
If the issue mentions nix, assign to `rekram1-node`.
#### zen
**Only** add if the issue mentions "zen" or "opencode zen" or "opencode black".
If the issue doesn't have "zen" or "opencode black" in it then don't add zen label
#### core
Use for core server issues in `packages/opencode/`, excluding `packages/opencode/src/cli/cmd/tui/`.
Examples:
- LSP server behavior
- Harness behavior (agent + tools)
- Feature requests for server behavior
- Agent context construction
- API endpoints
- Provider integration issues
- New, broken, or poor-quality models
#### acp
If the issue mentions acp support, assign acp label.
#### docs
Add if the issue requests better documentation or docs updates.
#### opentui
TUI issues potentially caused by our underlying TUI library:
- Keybindings not working
- Scroll speed issues (too fast/slow/laggy)
- Screen flickering
- Crashes with opentui in the log
**Do not** add for general TUI bugs.
When assigning to people here are the following rules:
Desktop / Web:
Use for desktop-labeled issues only.
- adamdotdevin
- iamdavidhill
- Brendonovich
- nexxeln
Zen:
ONLY assign if the issue will have the "zen" label.
- fwang
- MrMushrooooom
TUI (`packages/opencode/src/cli/cmd/tui/...`):
- thdxr for TUI UX/UI product decisions and interaction flow
- kommander for OpenTUI engine issues: rendering artifacts, keybind handling, terminal compatibility, SSH behavior, and low-level perf bottlenecks
- rekram1-node for TUI bugs that are not clearly OpenTUI engine issues
Core (`packages/opencode/...`, excluding TUI subtree):
- thdxr for sqlite/snapshot/memory bugs and larger architectural core features
- jlongster for opencode server + API feature work (tool currently remaps jlongster -> thdxr until assignable)
- rekram1-node for harness issues, provider issues, and other bug-squashing
For core bugs that do not clearly map, either thdxr or rekram1-node is acceptable.
Docs:
- R44VC0RP
Windows:
- Hona (assign any issue that mentions Windows or is likely Windows-specific)
Determinism rules:
- If title + body does not contain "zen", do not add the "zen" label
- If "nix" label is added but title + body does not mention nix/nixos, the tool will drop "nix"
- If title + body mentions nix/nixos, assign to `rekram1-node`
- If "desktop" label is added, the tool will override assignee and randomly pick one Desktop / Web owner
In all other cases, choose the team/section with the most overlap with the issue and assign a member from that team at random.
ACP:
- rekram1-node (assign any acp issues to rekram1-node)
Windows-specific issues, including native Windows behavior, WSL interactions, path handling, shell compatibility, and installation or runtime problems that only happen on Windows.

View File

@@ -18,9 +18,12 @@ Do not use `git log` or author metadata when deciding attribution.
Rules:
- Write the final file with sections in this order:
- Write the final file with release sections in this order:
`## Core`, `## TUI`, `## Desktop`, `## SDK`, `## Extensions`
- Only include sections that have at least one notable entry
- Within each release section, keep bug fixes grouped under `### Bugfixes`
- Keep other notable entries under `### Improvements` when a section has bug fixes too
- Omit empty subsections
- Keep one bullet per commit you keep
- Skip commits that are entirely internal, CI, tests, refactors, or otherwise not user-facing
- Start each bullet with a capital letter

View File

@@ -1,16 +1,14 @@
/// <reference path="../env.d.ts" />
import { tool } from "@opencode-ai/plugin"
const TEAM = {
desktop: ["adamdotdevin", "iamdavidhill", "Brendonovich", "nexxeln"],
zen: ["fwang", "MrMushrooooom"],
tui: ["kommander", "rekram1-node", "simonklee"],
core: ["kitlangton", "rekram1-node", "jlongster"],
docs: ["R44VC0RP"],
tui: ["kommander", "simonklee"],
desktop_web: ["Hona", "Brendonovich"],
core: ["jlongster", "rekram1-node", "nexxeln", "kitlangton"],
inference: ["fwang", "MrMushrooooom"],
windows: ["Hona"],
} as const
const ASSIGNEES = [...new Set(Object.values(TEAM).flat())]
function pick<T>(items: readonly T[]) {
return items[Math.floor(Math.random() * items.length)]!
}
@@ -38,79 +36,25 @@ async function githubFetch(endpoint: string, options: RequestInit = {}) {
}
export default tool({
description: `Use this tool to assign and/or label a GitHub issue.
description: `Use this tool to assign a GitHub issue.
Choose labels and assignee using the current triage policy and ownership rules.
Pick the most fitting labels for the issue and assign one owner.
If unsure, choose the team/section with the most overlap with the issue and assign a member from that team at random.`,
Provide the team that should own the issue. This tool picks a random assignee from that team and does not apply labels.`,
args: {
assignee: tool.schema
.enum(ASSIGNEES as [string, ...string[]])
.describe("The username of the assignee")
.default("rekram1-node"),
labels: tool.schema
.array(tool.schema.enum(["nix", "opentui", "perf", "web", "desktop", "zen", "docs", "windows", "core"]))
.describe("The labels(s) to add to the issue")
.default([]),
team: tool.schema
.enum(Object.keys(TEAM) as [keyof typeof TEAM, ...(keyof typeof TEAM)[]])
.describe("The owning team"),
},
async execute(args) {
const issue = getIssueNumber()
const owner = "anomalyco"
const repo = "opencode"
const results: string[] = []
let labels = [...new Set(args.labels.map((x) => (x === "desktop" ? "web" : x)))]
const web = labels.includes("web")
const text = `${process.env.ISSUE_TITLE ?? ""}\n${process.env.ISSUE_BODY ?? ""}`.toLowerCase()
const zen = /\bzen\b/.test(text) || text.includes("opencode black")
const nix = /\bnix(os)?\b/.test(text)
if (labels.includes("nix") && !nix) {
labels = labels.filter((x) => x !== "nix")
results.push("Dropped label: nix (issue does not mention nix)")
}
const assignee = nix ? "rekram1-node" : web ? pick(TEAM.desktop) : args.assignee
if (labels.includes("zen") && !zen) {
throw new Error("Only add the zen label when issue title/body contains 'zen'")
}
if (web && !nix && !(TEAM.desktop as readonly string[]).includes(assignee)) {
throw new Error("Web issues must be assigned to adamdotdevin, iamdavidhill, Brendonovich, or nexxeln")
}
if ((TEAM.zen as readonly string[]).includes(assignee) && !labels.includes("zen")) {
throw new Error("Only zen issues should be assigned to fwang or MrMushrooooom")
}
if (assignee === "Hona" && !labels.includes("windows")) {
throw new Error("Only windows issues should be assigned to Hona")
}
if (assignee === "R44VC0RP" && !labels.includes("docs")) {
throw new Error("Only docs issues should be assigned to R44VC0RP")
}
if (assignee === "kommander" && !labels.includes("opentui")) {
throw new Error("Only opentui issues should be assigned to kommander")
}
const assignee = pick(TEAM[args.team])
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/assignees`, {
method: "POST",
body: JSON.stringify({ assignees: [assignee] }),
})
results.push(`Assigned @${assignee} to issue #${issue}`)
if (labels.length > 0) {
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/labels`, {
method: "POST",
body: JSON.stringify({ labels }),
})
results.push(`Added labels: ${labels.join(", ")}`)
}
return results.join("\n")
return `Assigned @${assignee} from ${args.team} to issue #${issue}`
},
})

View File

@@ -24,6 +24,7 @@ export namespace AppFileSystem {
readonly isDir: (path: string) => Effect.Effect<boolean>
readonly isFile: (path: string) => Effect.Effect<boolean>
readonly existsSafe: (path: string) => Effect.Effect<boolean>
readonly readFileStringSafe: (path: string) => Effect.Effect<string | undefined, Error>
readonly readJson: (path: string) => Effect.Effect<unknown, Error>
readonly writeJson: (path: string, data: unknown, mode?: number) => Effect.Effect<void, Error>
readonly ensureDir: (path: string) => Effect.Effect<void, Error>
@@ -47,6 +48,12 @@ export namespace AppFileSystem {
return yield* fs.exists(path).pipe(Effect.orElseSucceed(() => false))
})
const readFileStringSafe = Effect.fn("FileSystem.readFileStringSafe")(function* (path: string) {
return yield* fs
.readFileString(path)
.pipe(Effect.catchReason("PlatformError", "NotFound", () => Effect.succeed(undefined)))
})
const isDir = Effect.fn("FileSystem.isDir")(function* (path: string) {
const info = yield* fs.stat(path).pipe(Effect.catch(() => Effect.void))
return info?.type === "Directory"
@@ -163,6 +170,7 @@ export namespace AppFileSystem {
return Service.of({
...fs,
existsSafe,
readFileStringSafe,
isDir,
isFile,
readDirectoryEntries,

View File

@@ -65,6 +65,34 @@ describe("AppFileSystem", () => {
)
})
describe("readFileStringSafe", () => {
it(
"returns file contents when file exists",
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const filesys = yield* FileSystem.FileSystem
const tmp = yield* filesys.makeTempDirectoryScoped()
const file = path.join(tmp, "exists.txt")
yield* filesys.writeFileString(file, "hello")
const result = yield* fs.readFileStringSafe(file)
expect(result).toBe("hello")
}),
)
it(
"returns undefined for missing file (NotFound)",
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const filesys = yield* FileSystem.FileSystem
const tmp = yield* filesys.makeTempDirectoryScoped()
const result = yield* fs.readFileStringSafe(path.join(tmp, "does-not-exist.txt"))
expect(result).toBeUndefined()
}),
)
})
describe("readJson / writeJson", () => {
it(
"round-trips JSON data",

View File

@@ -27,7 +27,7 @@ const channel = (() => {
})()
const getBase = (): Configuration => ({
artifactName: "opencode-electron-${os}-${arch}.${ext}",
artifactName: "opencode-desktop-${os}-${arch}.${ext}",
directories: {
output: "dist",
buildResources: "resources",

View File

@@ -1,7 +1,8 @@
#!/usr/bin/env bun
import { Buffer } from "node:buffer"
import { $ } from "bun"
import path from "node:path"
import { parseArgs } from "node:util"
const { values } = parseArgs({
args: Bun.argv.slice(2),
@@ -12,8 +13,6 @@ const { values } = parseArgs({
const dryRun = values["dry-run"]
import { parseArgs } from "node:util"
const repo = process.env.GH_REPO
if (!repo) throw new Error("GH_REPO is required")
@@ -23,20 +22,22 @@ if (!releaseId) throw new Error("OPENCODE_RELEASE is required")
const version = process.env.OPENCODE_VERSION
if (!version) throw new Error("OPENCODE_VERSION is required")
const dir = process.env.LATEST_YML_DIR
if (!dir) throw new Error("LATEST_YML_DIR is required")
const root = dir
const token = process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN
if (!token) throw new Error("GH_TOKEN or GITHUB_TOKEN is required")
const apiHeaders = {
Authorization: `token ${token}`,
Accept: "application/vnd.github+json",
}
const releaseRes = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
headers: apiHeaders,
const rel = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
headers: {
Authorization: `token ${token}`,
Accept: "application/vnd.github+json",
},
})
if (!releaseRes.ok) {
throw new Error(`Failed to fetch release: ${releaseRes.status} ${releaseRes.statusText}`)
if (!rel.ok) {
throw new Error(`Failed to fetch release: ${rel.status} ${rel.statusText}`)
}
type Asset = {
@@ -45,115 +46,169 @@ type Asset = {
}
type Release = {
tag_name?: string
assets?: Asset[]
}
const release = (await releaseRes.json()) as Release
const assets = release.assets ?? []
const assetByName = new Map(assets.map((asset) => [asset.name, asset]))
const assets = ((await rel.json()) as Release).assets ?? []
const amap = new Map(assets.map((item) => [item.name, item]))
const latestAsset = assetByName.get("latest.json")
if (!latestAsset) {
console.log("latest.json not found, skipping tauri finalization")
process.exit(0)
type Item = {
url: string
}
const latestRes = await fetch(latestAsset.url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/octet-stream",
},
})
if (!latestRes.ok) {
throw new Error(`Failed to fetch latest.json: ${latestRes.status} ${latestRes.statusText}`)
type Yml = {
version: string
files: Item[]
}
const latestText = new TextDecoder().decode(await latestRes.arrayBuffer())
const latest = JSON.parse(latestText)
const base = { ...latest }
delete base.platforms
function parse(text: string): Yml {
const lines = text.split("\n")
let version = ""
const files: Item[] = []
let url = ""
const fetchSignature = async (asset: Asset) => {
const res = await fetch(asset.url, {
const flush = () => {
if (!url) return
files.push({ url })
url = ""
}
for (const line of lines) {
const trim = line.trim()
if (line.startsWith("version:")) {
version = line.slice("version:".length).trim()
continue
}
if (trim.startsWith("- url:")) {
flush()
url = trim.slice("- url:".length).trim()
continue
}
const indented = line.startsWith(" ") || line.startsWith("\t")
if (!indented) flush()
}
flush()
return { version, files }
}
async function read(sub: string, file: string) {
const item = Bun.file(path.join(root, sub, file))
if (!(await item.exists())) return undefined
return parse(await item.text())
}
function pick(list: Item[], exts: string[]) {
for (const ext of exts) {
const found = list.find((item) => item.url.split("?")[0]?.toLowerCase().endsWith(ext))
if (found) return found.url
}
}
function link(raw: string) {
if (raw.startsWith("https://") || raw.startsWith("http://")) return raw
return `https://github.com/${repo}/releases/download/v${version}/${raw}`
}
async function sign(url: string, key: string) {
const name = decodeURIComponent(new URL(url).pathname.split("/").pop() ?? key)
const asset = amap.get(name)
const res = await fetch(asset?.url ?? url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/octet-stream",
...(asset ? { Accept: "application/octet-stream" } : {}),
},
})
if (!res.ok) {
throw new Error(`Failed to fetch signature: ${res.status} ${res.statusText}`)
throw new Error(`Failed to fetch file ${name}: ${res.status} ${res.statusText} (${asset?.url ?? url})`)
}
return Buffer.from(await res.arrayBuffer()).toString()
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
const file = path.join(tmp, name)
await Bun.write(file, await res.arrayBuffer())
await $`bunx @tauri-apps/cli signer sign ${file}`
const sigFile = Bun.file(`${file}.sig`)
if (!(await sigFile.exists())) throw new Error(`Signature file not found for ${name}`)
return (await sigFile.text()).trim()
}
const entries: Record<string, { url: string; signature: string }> = {}
const add = (key: string, asset: Asset, signature: string) => {
if (entries[key]) return
entries[key] = {
url: `https://github.com/${repo}/releases/download/v${version}/${asset.name}`,
signature,
}
const add = async (data: Record<string, { url: string; signature: string }>, key: string, raw: string | undefined) => {
if (!raw) return
if (data[key]) return
const url = link(raw)
data[key] = { url, signature: await sign(url, key) }
}
const targets = [
{ key: "linux-x86_64-deb", asset: "opencode-desktop-linux-amd64.deb" },
{ key: "linux-x86_64-rpm", asset: "opencode-desktop-linux-x86_64.rpm" },
{ key: "linux-aarch64-deb", asset: "opencode-desktop-linux-arm64.deb" },
{ key: "linux-aarch64-rpm", asset: "opencode-desktop-linux-aarch64.rpm" },
{ key: "windows-aarch64-nsis", asset: "opencode-desktop-windows-arm64.exe" },
{ key: "windows-x86_64-nsis", asset: "opencode-desktop-windows-x64.exe" },
{ key: "darwin-x86_64-app", asset: "opencode-desktop-darwin-x64.app.tar.gz" },
{
key: "darwin-aarch64-app",
asset: "opencode-desktop-darwin-aarch64.app.tar.gz",
},
]
for (const target of targets) {
const asset = assetByName.get(target.asset)
if (!asset) continue
const sig = assetByName.get(`${target.asset}.sig`)
if (!sig) continue
const signature = await fetchSignature(sig)
add(target.key, asset, signature)
const alias = (data: Record<string, { url: string; signature: string }>, key: string, src: string) => {
if (data[key]) return
if (!data[src]) return
data[key] = data[src]
}
const alias = (key: string, source: string) => {
if (entries[key]) return
const entry = entries[source]
if (!entry) return
entries[key] = entry
}
const winx = await read("latest-yml-x86_64-pc-windows-msvc", "latest.yml")
const wina = await read("latest-yml-aarch64-pc-windows-msvc", "latest.yml")
const macx = await read("latest-yml-x86_64-apple-darwin", "latest-mac.yml")
const maca = await read("latest-yml-aarch64-apple-darwin", "latest-mac.yml")
const linx = await read("latest-yml-x86_64-unknown-linux-gnu", "latest-linux.yml")
const lina = await read("latest-yml-aarch64-unknown-linux-gnu", "latest-linux-arm64.yml")
alias("linux-x86_64", "linux-x86_64-deb")
alias("linux-aarch64", "linux-aarch64-deb")
alias("windows-aarch64", "windows-aarch64-nsis")
alias("windows-x86_64", "windows-x86_64-nsis")
alias("darwin-x86_64", "darwin-x86_64-app")
alias("darwin-aarch64", "darwin-aarch64-app")
const yver = winx?.version ?? wina?.version ?? macx?.version ?? maca?.version ?? linx?.version ?? lina?.version
if (yver && yver !== version) throw new Error(`latest.yml version mismatch: expected ${version}, got ${yver}`)
const out: Record<string, { url: string; signature: string }> = {}
const winxexe = pick(winx?.files ?? [], [".exe"])
const winaexe = pick(wina?.files ?? [], [".exe"])
const macxTarGz = "opencode-desktop-mac-x64.app.tar.gz"
const macaTarGz = "opencode-desktop-mac-arm64.app.tar.gz"
const linxDeb = pick(linx?.files ?? [], [".deb"])
const linxRpm = pick(linx?.files ?? [], [".rpm"])
const linxAppImage = pick(linx?.files ?? [], [".appimage"])
const linaDeb = pick(lina?.files ?? [], [".deb"])
const linaRpm = pick(lina?.files ?? [], [".rpm"])
const linaAppImage = pick(lina?.files ?? [], [".appimage"])
await add(out, "windows-x86_64-nsis", winxexe)
await add(out, "windows-aarch64-nsis", winaexe)
await add(out, "darwin-x86_64-app", macxTarGz)
await add(out, "darwin-aarch64-app", macaTarGz)
await add(out, "linux-x86_64-deb", linxDeb)
await add(out, "linux-x86_64-rpm", linxRpm)
await add(out, "linux-x86_64-appimage", linxAppImage)
await add(out, "linux-aarch64-deb", linaDeb)
await add(out, "linux-aarch64-rpm", linaRpm)
await add(out, "linux-aarch64-appimage", linaAppImage)
alias(out, "windows-x86_64", "windows-x86_64-nsis")
alias(out, "windows-aarch64", "windows-aarch64-nsis")
alias(out, "darwin-x86_64", "darwin-x86_64-app")
alias(out, "darwin-aarch64", "darwin-aarch64-app")
alias(out, "linux-x86_64", "linux-x86_64-deb")
alias(out, "linux-aarch64", "linux-aarch64-deb")
const platforms = Object.fromEntries(
Object.keys(entries)
Object.keys(out)
.sort()
.map((key) => [key, entries[key]]),
.map((key) => [key, out[key]]),
)
const output = {
...base,
if (!Object.keys(platforms).length) throw new Error("No updater files found in latest.yml artifacts")
const data = {
version,
notes: "",
pub_date: new Date().toISOString(),
platforms,
}
const dir = process.env.RUNNER_TEMP ?? "/tmp"
const file = `${dir}/latest.json`
await Bun.write(file, JSON.stringify(output, null, 2))
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
const file = path.join(tmp, "latest.json")
await Bun.write(file, JSON.stringify(data, null, 2))
const tag = release.tag_name
if (!tag) throw new Error("Release tag not found")
const tag = `v${version}`
if (dryRun) {
console.log(`dry-run: wrote latest.json for ${tag} to ${file}`)

View File

@@ -182,7 +182,7 @@ type Runtime = {
Todo: (typeof import("../src/session/todo"))["Todo"]
Worktree: (typeof import("../src/worktree"))["Worktree"]
Project: (typeof import("../src/project/project"))["Project"]
Tui: typeof import("../src/server/routes/instance/tui")
Tui: typeof import("../src/server/shared/tui-control")
disposeAllInstances: (typeof import("../test/fixture/fixture"))["disposeAllInstances"]
tmpdir: (typeof import("../test/fixture/fixture"))["tmpdir"]
resetDatabase: (typeof import("../test/fixture/db"))["resetDatabase"]
@@ -203,7 +203,7 @@ function runtime() {
const todo = await import("../src/session/todo")
const worktree = await import("../src/worktree")
const project = await import("../src/project/project")
const tui = await import("../src/server/routes/instance/tui")
const tui = await import("../src/server/shared/tui-control")
const fixture = await import("../test/fixture/fixture")
const db = await import("../test/fixture/db")
return {
@@ -1506,7 +1506,7 @@ const main = Effect.gen(function* () {
const options = parseOptions(Bun.argv.slice(2))
const modules = yield* Effect.promise(() => runtime())
const effectRoutes = routeKeys(OpenApi.fromApi(modules.PublicApi))
const honoRoutes = routeKeys(yield* Effect.promise(() => modules.Server.openapi()))
const honoRoutes = routeKeys(yield* Effect.promise(() => modules.Server.openapiHono()))
const selected = scenarios.filter((scenario) => matches(options, scenario))
const missing = effectRoutes.filter((route) => !scenarios.some((scenario) => route === routeKey(scenario)))
const extra = scenarios.filter((scenario) => !effectRoutes.includes(routeKey(scenario)))

View File

@@ -4,6 +4,7 @@ import { effectCmd } from "../effect-cmd"
import { AgentSideConnection, ndJsonStream } from "@agentclientprotocol/sdk"
import { ACP } from "@/acp/agent"
import { Server } from "@/server/server"
import { ServerAuth } from "@/server/auth"
import { createOpencodeClient } from "@opencode-ai/sdk/v2"
import { withNetworkOptions, resolveNetworkOptions } from "../network"
@@ -26,6 +27,7 @@ export const AcpCommand = effectCmd({
const sdk = createOpencodeClient({
baseUrl: `http://${server.hostname}:${server.port}`,
headers: ServerAuth.headers(),
})
const input = new WritableStream<Uint8Array>({

View File

@@ -1,5 +1,10 @@
import { Global } from "@opencode-ai/core/global"
import { InstallationVersion } from "@opencode-ai/core/installation/version"
import { Flag } from "@opencode-ai/core/flag/flag"
import os from "os"
import { Duration, Effect } from "effect"
import { Config } from "@/config/config"
import { ConfigPlugin } from "@/config/plugin"
import { effectCmd } from "../../effect-cmd"
import { cmd } from "../cmd"
import { ConfigCommand } from "./config"
@@ -26,6 +31,7 @@ export const DebugCommand = cmd({
.command(SnapshotCommand)
.command(StartupCommand)
.command(AgentCommand)
.command(InfoCommand)
.command(PathsCommand)
.command(WaitCommand)
.demandCommand(),
@@ -40,6 +46,34 @@ const WaitCommand = effectCmd({
}),
})
const InfoCommand = effectCmd({
command: "info",
describe: "show debug information",
handler: Effect.fn("Cli.debug.info")(function* () {
const config = yield* Config.Service.use((cfg) => cfg.get())
const termProgram = process.env.TERM_PROGRAM
? `${process.env.TERM_PROGRAM}${process.env.TERM_PROGRAM_VERSION ? ` ${process.env.TERM_PROGRAM_VERSION}` : ""}`
: undefined
const terminal = [termProgram, process.env.TERM].filter((item): item is string => Boolean(item)).join(" / ")
console.log(`opencode version: ${InstallationVersion}`)
console.log(`os: ${os.type()} ${os.release()} ${os.arch()}`)
console.log(`terminal: ${terminal || "unknown"}`)
console.log("plugins:")
if (Flag.OPENCODE_PURE) {
console.log("external plugins disabled (--pure)")
return
}
if (!config.plugin_origins?.length) {
console.log("none")
return
}
for (const plugin of config.plugin_origins) {
console.log(`- ${ConfigPlugin.pluginSpecifier(plugin.spec)}`)
}
}),
})
const PathsCommand = cmd({
command: "paths",
describe: "show global paths (data, config, cache, state)",

View File

@@ -1,22 +1,28 @@
import { Server } from "../../server/server"
import { PublicApi } from "../../server/routes/instance/httpapi/public"
import type { CommandModule } from "yargs"
import { OpenApi } from "effect/unstable/httpapi"
type Args = {
httpapi: boolean
hono: boolean
}
export const GenerateCommand = {
command: "generate",
builder: (yargs) =>
yargs.option("httpapi", {
type: "boolean",
default: false,
description: "Generate OpenAPI from the experimental Effect HttpApi contract",
}),
yargs
.option("httpapi", {
type: "boolean",
default: false,
description:
"Generate OpenAPI from the Effect HttpApi contract (default; flag retained for backwards compatibility)",
})
.option("hono", {
type: "boolean",
default: false,
description: "Generate OpenAPI from the legacy Hono backend (parity-diff only; will be removed)",
}),
handler: async (args) => {
const specs = args.httpapi ? OpenApi.fromApi(PublicApi) : await Server.openapi()
const specs = args.hono ? await Server.openapiHono() : await Server.openapi()
for (const item of Object.values(specs.paths)) {
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
const operation = item[method]

View File

@@ -6,6 +6,8 @@ import * as prompts from "@clack/prompts"
import { UI } from "../ui"
import { ModelsDev } from "@/provider/models"
const getModels = () => AppRuntime.runPromise(ModelsDev.Service.use((s) => s.get()))
const refreshModels = () => AppRuntime.runPromise(ModelsDev.Service.use((s) => s.refresh(true)))
import { map, pipe, sortBy, values } from "remeda"
import path from "path"
import os from "os"
@@ -239,45 +241,46 @@ export const ProvidersListCommand = effectCmd({
handler: Effect.fn("Cli.providers.list")(function* (_args) {
const authSvc = yield* Auth.Service
const modelsDev = yield* ModelsDev.Service
yield* Effect.promise(async () => {
UI.empty()
const authPath = path.join(Global.Path.data, "auth.json")
const homedir = os.homedir()
const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
const results = Object.entries(await Effect.runPromise(authSvc.all()))
const database = await Effect.runPromise(modelsDev.get())
UI.empty()
const authPath = path.join(Global.Path.data, "auth.json")
const homedir = os.homedir()
const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
const results = Object.entries(yield* Effect.orDie(authSvc.all()))
const database = yield* modelsDev.get()
for (const [providerID, result] of results) {
const name = database[providerID]?.name || providerID
prompts.log.info(`${name} ${UI.Style.TEXT_DIM}${result.type}`)
}
for (const [providerID, result] of results) {
const name = database[providerID]?.name || providerID
prompts.log.info(`${name} ${UI.Style.TEXT_DIM}${result.type}`)
}
prompts.outro(`${results.length} credentials`)
prompts.outro(`${results.length} credentials`)
const activeEnvVars: Array<{ provider: string; envVar: string }> = []
const activeEnvVars: Array<{ provider: string; envVar: string }> = []
for (const [providerID, provider] of Object.entries(database)) {
for (const envVar of provider.env) {
if (process.env[envVar]) {
activeEnvVars.push({
provider: provider.name || providerID,
envVar,
})
for (const [providerID, provider] of Object.entries(database)) {
for (const envVar of provider.env) {
if (process.env[envVar]) {
activeEnvVars.push({
provider: provider.name || providerID,
envVar,
})
}
}
}
}
if (activeEnvVars.length > 0) {
UI.empty()
prompts.intro("Environment")
if (activeEnvVars.length > 0) {
UI.empty()
prompts.intro("Environment")
for (const { provider, envVar } of activeEnvVars) {
prompts.log.info(`${provider} ${UI.Style.TEXT_DIM}${envVar}`)
for (const { provider, envVar } of activeEnvVars) {
prompts.log.info(`${provider} ${UI.Style.TEXT_DIM}${envVar}`)
}
prompts.outro(`${activeEnvVars.length} environment variable` + (activeEnvVars.length === 1 ? "" : "s"))
}
prompts.outro(`${activeEnvVars.length} environment variable` + (activeEnvVars.length === 1 ? "" : "s"))
}
})
}),
})
@@ -303,174 +306,185 @@ export const ProvidersLoginCommand = effectCmd({
handler: Effect.fn("Cli.providers.login")(function* (args) {
const cfgSvc = yield* Config.Service
const pluginSvc = yield* Plugin.Service
const modelsDev = yield* ModelsDev.Service
const authSvc = yield* Auth.Service
UI.empty()
prompts.intro("Add credential")
if (args.url) {
const url = args.url.replace(/\/+$/, "")
const wellknown = (yield* Effect.promise(() =>
fetch(`${url}/.well-known/opencode`).then((x) => x.json()),
)) as { auth: { command: string[]; env: string } }
prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``)
const proc = Process.spawn(wellknown.auth.command, { stdout: "pipe", stderr: "inherit" })
if (!proc.stdout) {
prompts.log.error("Failed")
yield* Effect.promise(async () => {
UI.empty()
prompts.intro("Add credential")
if (args.url) {
const url = args.url.replace(/\/+$/, "")
const wellknown = (await fetch(`${url}/.well-known/opencode`).then((x) => x.json())) as {
auth: { command: string[]; env: string }
}
prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``)
const proc = Process.spawn(wellknown.auth.command, {
stdout: "pipe",
stderr: "inherit",
})
if (!proc.stdout) {
prompts.log.error("Failed")
prompts.outro("Done")
return
}
const [exit, token] = await Promise.all([proc.exited, text(proc.stdout)])
if (exit !== 0) {
prompts.log.error("Failed")
prompts.outro("Done")
return
}
await put(url, {
type: "wellknown",
key: wellknown.auth.env,
token: token.trim(),
})
prompts.log.success("Logged into " + url)
prompts.outro("Done")
return
}
const [exit, token] = yield* Effect.promise(() => Promise.all([proc.exited, text(proc.stdout!)]))
if (exit !== 0) {
prompts.log.error("Failed")
prompts.outro("Done")
return
await refreshModels().catch(() => {})
const config = await Effect.runPromise(cfgSvc.get())
const disabled = new Set(config.disabled_providers ?? [])
const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined
const providers = await getModels().then((x) => {
const filtered: Record<string, (typeof x)[string]> = {}
for (const [key, value] of Object.entries(x)) {
if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) {
filtered[key] = value
}
}
return filtered
})
const hooks = await Effect.runPromise(pluginSvc.list())
const priority: Record<string, number> = {
opencode: 0,
openai: 1,
"github-copilot": 2,
google: 3,
anthropic: 4,
openrouter: 5,
vercel: 6,
}
yield* Effect.orDie(authSvc.set(url, { type: "wellknown", key: wellknown.auth.env, token: token.trim() }))
prompts.log.success("Logged into " + url)
prompts.outro("Done")
return
}
yield* Effect.ignore(modelsDev.refresh(true))
const config = yield* cfgSvc.get()
const disabled = new Set(config.disabled_providers ?? [])
const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined
const allProviders = yield* modelsDev.get()
const providers: Record<string, (typeof allProviders)[string]> = {}
for (const [key, value] of Object.entries(allProviders)) {
if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) providers[key] = value
}
const hooks = yield* pluginSvc.list()
const priority: Record<string, number> = {
opencode: 0,
openai: 1,
"github-copilot": 2,
google: 3,
anthropic: 4,
openrouter: 5,
vercel: 6,
}
const pluginProviders = resolvePluginProviders({
hooks,
existingProviders: providers,
disabled,
enabled,
providerNames: Object.fromEntries(Object.entries(config.provider ?? {}).map(([id, p]) => [id, p.name])),
})
const options = [
...pipe(
providers,
values(),
sortBy(
(x) => priority[x.id] ?? 99,
(x) => x.name ?? x.id,
const pluginProviders = resolvePluginProviders({
hooks,
existingProviders: providers,
disabled,
enabled,
providerNames: Object.fromEntries(Object.entries(config.provider ?? {}).map(([id, p]) => [id, p.name])),
})
const options = [
...pipe(
providers,
values(),
sortBy(
(x) => priority[x.id] ?? 99,
(x) => x.name ?? x.id,
),
map((x) => ({
label: x.name,
value: x.id,
hint: {
opencode: "recommended",
openai: "ChatGPT Plus/Pro or API key",
}[x.id],
})),
),
map((x) => ({
...pluginProviders.map((x) => ({
label: x.name,
value: x.id,
hint: {
opencode: "recommended",
openai: "ChatGPT Plus/Pro or API key",
}[x.id],
hint: "plugin",
})),
),
...pluginProviders.map((x) => ({
label: x.name,
value: x.id,
hint: "plugin",
})),
]
]
let provider: string
if (args.provider) {
const input = args.provider
const byID = options.find((x) => x.value === input)
const byName = options.find((x) => x.label.toLowerCase() === input.toLowerCase())
const match = byID ?? byName
if (!match) {
prompts.log.error(`Unknown provider "${input}"`)
process.exit(1)
}
provider = match.value
} else {
const selected = yield* Effect.promise(() =>
prompts.autocomplete({
let provider: string
if (args.provider) {
const input = args.provider
const byID = options.find((x) => x.value === input)
const byName = options.find((x) => x.label.toLowerCase() === input.toLowerCase())
const match = byID ?? byName
if (!match) {
prompts.log.error(`Unknown provider "${input}"`)
process.exit(1)
}
provider = match.value
} else {
const selected = await prompts.autocomplete({
message: "Select provider",
maxItems: 8,
options: [...options, { value: "other", label: "Other" }],
}),
)
if (prompts.isCancel(selected)) yield* Effect.die(new UI.CancelledError())
provider = selected as string
}
options: [
...options,
{
value: "other",
label: "Other",
},
],
})
if (prompts.isCancel(selected)) throw new UI.CancelledError()
provider = selected as string
}
const plugin = hooks.findLast((x) => x.auth?.provider === provider)
if (plugin && plugin.auth) {
const handled = yield* Effect.promise(() => handlePluginAuth({ auth: plugin.auth! }, provider, args.method))
if (handled) return
}
if (provider === "other") {
const custom = yield* Effect.promise(() =>
prompts.text({
message: "Enter provider id",
validate: (x) => (x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"),
}),
)
if (prompts.isCancel(custom)) yield* Effect.die(new UI.CancelledError())
provider = (custom as string).replace(/^@ai-sdk\//, "")
const customPlugin = hooks.findLast((x) => x.auth?.provider === provider)
if (customPlugin && customPlugin.auth) {
const handled = yield* Effect.promise(() =>
handlePluginAuth({ auth: customPlugin.auth! }, provider, args.method),
)
const plugin = hooks.findLast((x) => x.auth?.provider === provider)
if (plugin && plugin.auth) {
const handled = await handlePluginAuth({ auth: plugin.auth }, provider, args.method)
if (handled) return
}
prompts.log.warn(
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
)
}
if (provider === "other") {
const custom = await prompts.text({
message: "Enter provider id",
validate: (x) => (x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"),
})
if (prompts.isCancel(custom)) throw new UI.CancelledError()
provider = custom.replace(/^@ai-sdk\//, "")
if (provider === "amazon-bedrock") {
prompts.log.info(
"Amazon Bedrock authentication priority:\n" +
" 1. Bearer token (AWS_BEARER_TOKEN_BEDROCK or /connect)\n" +
" 2. AWS credential chain (profile, access keys, IAM roles, EKS IRSA)\n\n" +
"Configure via opencode.json options (profile, region, endpoint) or\n" +
"AWS environment variables (AWS_PROFILE, AWS_REGION, AWS_ACCESS_KEY_ID, AWS_WEB_IDENTITY_TOKEN_FILE).",
)
}
const customPlugin = hooks.findLast((x) => x.auth?.provider === provider)
if (customPlugin && customPlugin.auth) {
const handled = await handlePluginAuth({ auth: customPlugin.auth }, provider, args.method)
if (handled) return
}
if (provider === "opencode") {
prompts.log.info("Create an api key at https://opencode.ai/auth")
}
prompts.log.warn(
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
)
}
if (provider === "vercel") {
prompts.log.info("You can create an api key at https://vercel.link/ai-gateway-token")
}
if (provider === "amazon-bedrock") {
prompts.log.info(
"Amazon Bedrock authentication priority:\n" +
" 1. Bearer token (AWS_BEARER_TOKEN_BEDROCK or /connect)\n" +
" 2. AWS credential chain (profile, access keys, IAM roles, EKS IRSA)\n\n" +
"Configure via opencode.json options (profile, region, endpoint) or\n" +
"AWS environment variables (AWS_PROFILE, AWS_REGION, AWS_ACCESS_KEY_ID, AWS_WEB_IDENTITY_TOKEN_FILE).",
)
}
if (["cloudflare", "cloudflare-ai-gateway"].includes(provider)) {
prompts.log.info(
"Cloudflare AI Gateway can be configured with CLOUDFLARE_GATEWAY_ID, CLOUDFLARE_ACCOUNT_ID, and CLOUDFLARE_API_TOKEN environment variables. Read more: https://opencode.ai/docs/providers/#cloudflare-ai-gateway",
)
}
if (provider === "opencode") {
prompts.log.info("Create an api key at https://opencode.ai/auth")
}
const key = yield* Effect.promise(() =>
prompts.password({
if (provider === "vercel") {
prompts.log.info("You can create an api key at https://vercel.link/ai-gateway-token")
}
if (["cloudflare", "cloudflare-ai-gateway"].includes(provider)) {
prompts.log.info(
"Cloudflare AI Gateway can be configured with CLOUDFLARE_GATEWAY_ID, CLOUDFLARE_ACCOUNT_ID, and CLOUDFLARE_API_TOKEN environment variables. Read more: https://opencode.ai/docs/providers/#cloudflare-ai-gateway",
)
}
const key = await prompts.password({
message: "Enter your API key",
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
}),
)
if (prompts.isCancel(key)) yield* Effect.die(new UI.CancelledError())
yield* Effect.orDie(authSvc.set(provider, { type: "api", key: key as string }))
})
if (prompts.isCancel(key)) throw new UI.CancelledError()
await put(provider, {
type: "api",
key,
})
prompts.outro("Done")
prompts.outro("Done")
})
}),
})
@@ -482,27 +496,26 @@ export const ProvidersLogoutCommand = effectCmd({
handler: Effect.fn("Cli.providers.logout")(function* (_args) {
const authSvc = yield* Auth.Service
const modelsDev = yield* ModelsDev.Service
UI.empty()
const credentials: Array<[string, Auth.Info]> = Object.entries(yield* Effect.orDie(authSvc.all()))
prompts.intro("Remove credential")
if (credentials.length === 0) {
prompts.log.error("No credentials found")
return
}
const database = yield* modelsDev.get()
const selected = yield* Effect.promise(() =>
prompts.select({
yield* Effect.promise(async () => {
UI.empty()
const credentials: Array<[string, Auth.Info]> = Object.entries(await Effect.runPromise(authSvc.all()))
prompts.intro("Remove credential")
if (credentials.length === 0) {
prompts.log.error("No credentials found")
return
}
const database = await Effect.runPromise(modelsDev.get())
const selected = await prompts.select({
message: "Select provider",
options: credentials.map(([key, value]) => ({
label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")",
value: key,
})),
}),
)
if (prompts.isCancel(selected)) yield* Effect.die(new UI.CancelledError())
const providerID = selected as string
yield* Effect.orDie(authSvc.remove(providerID))
prompts.outro("Logout successful")
})
if (prompts.isCancel(selected)) throw new UI.CancelledError()
const providerID = selected as string
await Effect.runPromise(authSvc.remove(providerID))
prompts.outro("Logout successful")
})
}),
})

View File

@@ -5,6 +5,7 @@ import { Effect } from "effect"
import { UI } from "../ui"
import { effectCmd } from "../effect-cmd"
import { Flag } from "@opencode-ai/core/flag/flag"
import { ServerAuth } from "@/server/auth"
import { EOL } from "os"
import { Filesystem } from "@/util/filesystem"
import { createOpencodeClient, type OpencodeClient, type ToolPart } from "@opencode-ai/sdk/v2"
@@ -656,13 +657,7 @@ export const RunCommand = effectCmd({
}
if (args.attach) {
const headers = (() => {
const password = args.password ?? process.env.OPENCODE_SERVER_PASSWORD
if (!password) return undefined
const username = process.env.OPENCODE_SERVER_USERNAME ?? "opencode"
const auth = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`
return { Authorization: auth }
})()
const headers = ServerAuth.headers({ password: args.password })
const sdk = createOpencodeClient({ baseUrl: args.attach, directory, headers })
return await execute(sdk)
}

View File

@@ -5,6 +5,7 @@ import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
import { TuiConfig } from "@/cli/cmd/tui/config/tui"
import { errorMessage } from "@/util/error"
import { validateSession } from "./validate-session"
import { ServerAuth } from "@/server/auth"
export const AttachCommand = cmd({
command: "attach <url>",
@@ -38,6 +39,11 @@ export const AttachCommand = cmd({
alias: ["p"],
type: "string",
describe: "basic auth password (defaults to OPENCODE_SERVER_PASSWORD)",
})
.option("username", {
alias: ["u"],
type: "string",
describe: "basic auth username (defaults to OPENCODE_SERVER_USERNAME or 'opencode')",
}),
handler: async (args) => {
const unguard = win32InstallCtrlCGuard()
@@ -60,12 +66,7 @@ export const AttachCommand = cmd({
return args.dir
}
})()
const headers = (() => {
const password = args.password ?? process.env.OPENCODE_SERVER_PASSWORD
if (!password) return undefined
const auth = `Basic ${Buffer.from(`opencode:${password}`).toString("base64")}`
return { Authorization: auth }
})()
const headers = ServerAuth.headers({ password: args.password, username: args.username })
const config = await TuiConfig.get()
try {

View File

@@ -68,29 +68,73 @@ function normalize(raw: Record<string, unknown>) {
}
}
async function resolvePlugins(config: Info, configFilepath: string) {
if (!config.plugin) return config
for (let i = 0; i < config.plugin.length; i++) {
config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath)
}
return config
}
async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) {
const data = await loadFile(file)
acc.result = mergeDeep(acc.result, data)
if (!data.plugin?.length) return
const scope = pluginScope(file, ctx)
const plugins = ConfigPlugin.deduplicatePluginOrigins([
...(acc.result.plugin_origins ?? []),
...data.plugin.map((spec) => ({ spec, scope, source: file })),
])
acc.result.plugin = plugins.map((item) => item.spec)
acc.result.plugin_origins = plugins
}
const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory: string }) {
const afs = yield* AppFileSystem.Service
const resolvePlugins = (config: Info, configFilepath: string): Effect.Effect<Info> =>
Effect.gen(function* () {
const plugins = config.plugin
if (!plugins) return config
for (let i = 0; i < plugins.length; i++) {
plugins[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(plugins[i], configFilepath))
}
return config
})
const load = (text: string, configFilepath: string): Effect.Effect<Info> =>
Effect.gen(function* () {
const expanded = yield* Effect.promise(() =>
ConfigVariable.substitute({ text, type: "path", path: configFilepath, missing: "empty" }),
)
const data = ConfigParse.jsonc(expanded, configFilepath)
if (!isRecord(data)) return {} as Info
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
// (mirroring the old opencode.json shape) still get their settings applied.
const validated = ConfigParse.schema(Info, normalize(data), configFilepath)
return yield* resolvePlugins(validated, configFilepath)
}).pipe(
// catchCause (not tapErrorCause + orElseSucceed) because ConfigParse.jsonc/.schema
// can sync-throw — those become defects, which orElseSucceed wouldn't catch.
Effect.catchCause((cause) =>
Effect.sync(() => {
log.warn("invalid tui config", { path: configFilepath, cause })
return {} as Info
}),
),
)
const loadFile = (filepath: string): Effect.Effect<Info> =>
Effect.gen(function* () {
// Silent-swallow non-NotFound read errors (perms, EISDIR, IO) → log + skip.
// Matches how parse/schema/plugin failures in load() are handled — every
// broken-config path degrades gracefully rather than crashing TUI startup.
const text = yield* afs.readFileStringSafe(filepath).pipe(
Effect.catchCause((cause) =>
Effect.sync(() => {
log.warn("failed to read tui config", { path: filepath, cause })
return undefined
}),
),
)
if (!text) return {} as Info
return yield* load(text, filepath)
})
const mergeFile = (acc: Acc, file: string) =>
Effect.gen(function* () {
const data = yield* loadFile(file)
acc.result = mergeDeep(acc.result, data)
if (!data.plugin?.length) return
const scope = pluginScope(file, ctx)
const plugins = ConfigPlugin.deduplicatePluginOrigins([
...(acc.result.plugin_origins ?? []),
...data.plugin.map((spec) => ({ spec, scope, source: file })),
])
acc.result.plugin = plugins.map((item) => item.spec)
acc.result.plugin_origins = plugins
})
// Every config dir we may read from: global config dir, any `.opencode`
// folders between cwd and home, and OPENCODE_CONFIG_DIR.
const directories = yield* ConfigPaths.directories(ctx.directory)
@@ -104,19 +148,19 @@ const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory:
// 1. Global tui config (lowest precedence).
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie)
yield* mergeFile(acc, file)
}
// 2. Explicit OPENCODE_TUI_CONFIG override, if set.
if (Flag.OPENCODE_TUI_CONFIG) {
const configFile = Flag.OPENCODE_TUI_CONFIG
yield* Effect.promise(() => mergeFile(acc, configFile, ctx)).pipe(Effect.orDie)
yield* mergeFile(acc, configFile)
log.debug("loaded custom tui config", { path: configFile })
}
// 3. Project tui files, applied root-first so the closest file wins.
for (const file of projectFiles) {
yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie)
yield* mergeFile(acc, file)
}
// 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while
@@ -127,7 +171,7 @@ const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory:
for (const dir of dirs) {
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie)
yield* mergeFile(acc, file)
}
}
@@ -192,29 +236,3 @@ export async function waitForDependencies() {
export async function get() {
return runPromise((svc) => svc.get())
}
async function loadFile(filepath: string): Promise<Info> {
const text = await ConfigPaths.readFile(filepath)
if (!text) return {}
return load(text, filepath).catch((error) => {
log.warn("failed to load tui config", { path: filepath, error })
return {}
})
}
async function load(text: string, configFilepath: string): Promise<Info> {
return ConfigVariable.substitute({ text, type: "path", path: configFilepath, missing: "empty" })
.then((expanded) => ConfigParse.jsonc(expanded, configFilepath))
.then((data) => {
if (!isRecord(data)) return {}
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
// (mirroring the old opencode.json shape) still get their settings applied.
return ConfigParse.schema(Info, normalize(data), configFilepath)
})
.then((data) => resolvePlugins(data, configFilepath))
.catch((error) => {
log.warn("invalid tui config", { path: configFilepath, error })
return {}
})
}

View File

@@ -7,7 +7,7 @@ import { Rpc } from "@/util/rpc"
import { upgrade } from "@/cli/upgrade"
import { Config } from "@/config/config"
import { GlobalBus } from "@/bus/global"
import { Flag } from "@opencode-ai/core/flag/flag"
import { ServerAuth } from "@/server/auth"
import { writeHeapSnapshot } from "node:v8"
import { Heap } from "@/cli/heap"
import { AppRuntime } from "@/effect/app-runtime"
@@ -50,7 +50,7 @@ let server: Awaited<ReturnType<typeof Server.listen>> | undefined
export const rpc = {
async fetch(input: { url: string; method: string; headers: Record<string, string>; body?: string }) {
const headers = { ...input.headers }
const auth = getAuthorizationHeader()
const auth = ServerAuth.header()
if (auth && !headers["authorization"] && !headers["Authorization"]) {
headers["Authorization"] = auth
}
@@ -102,10 +102,3 @@ export const rpc = {
}
Rpc.listen(rpc)
function getAuthorizationHeader(): string | undefined {
const password = Flag.OPENCODE_SERVER_PASSWORD
if (!password) return undefined
const username = Flag.OPENCODE_SERVER_USERNAME ?? "opencode"
return `Basic ${btoa(`${username}:${password}`)}`
}

View File

@@ -3,6 +3,7 @@ import { Effect, Schema } from "effect"
import { AppRuntime, type AppServices } from "@/effect/app-runtime"
import { InstanceStore } from "@/project/instance-store"
import { InstanceRef } from "@/effect/instance-ref"
import { Instance } from "@/project/instance"
import { cmd, type WithDoubleDash } from "./cmd/cmd"
/**
@@ -82,17 +83,21 @@ export const effectCmd = <Args, A>(opts: EffectCmdOpts<Args, A>) =>
return
}
const directory = opts.directory?.(args) ?? process.cwd()
await AppRuntime.runPromise(
InstanceStore.Service.use((store) =>
store.provide(
{ directory },
Effect.gen(function* () {
const ctx = yield* InstanceRef
const body = opts.handler(args)
return ctx ? yield* body.pipe(Effect.ensuring(store.dispose(ctx))) : yield* body
}),
),
),
// Two-phase: load ctx, then run body inside Instance.current ALS.
// Effect's InstanceRef is provided via fiber context, but that context is
// lost across `await` inside `Effect.promise(async () => ...)` callbacks
// — when handlers re-enter Effect via `AppRuntime.runPromise(svc.method())`
// there, attach() falls back to Instance.current ALS, which Node preserves
// across awaits. Matches the pre-effectCmd `bootstrap()` behavior.
const { store, ctx } = await AppRuntime.runPromise(
InstanceStore.Service.use((store) => store.load({ directory }).pipe(Effect.map((ctx) => ({ store, ctx })))),
)
try {
await Instance.restore(ctx, () =>
AppRuntime.runPromise(opts.handler(args).pipe(Effect.provideService(InstanceRef, ctx))),
)
} finally {
await AppRuntime.runPromise(store.dispose(ctx))
}
},
})

View File

@@ -355,15 +355,7 @@ export const layer = Layer.effect(
const env = yield* Env.Service
const npmSvc = yield* Npm.Service
const readConfigFile = Effect.fnUntraced(function* (filepath: string) {
return yield* fs.readFileString(filepath).pipe(
Effect.catchIf(
(e) => e.reason._tag === "NotFound",
() => Effect.succeed(undefined),
),
Effect.orDie,
)
})
const readConfigFile = (filepath: string) => fs.readFileStringSafe(filepath).pipe(Effect.orDie)
const loadConfig = Effect.fnUntraced(function* (
text: string,

View File

@@ -1,11 +1,9 @@
export * as ConfigPaths from "./paths"
import path from "path"
import { Filesystem } from "@/util/filesystem"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Global } from "@opencode-ai/core/global"
import { unique } from "remeda"
import { JsonError } from "./error"
import * as Effect from "effect/Effect"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
@@ -45,11 +43,3 @@ export const directories = Effect.fn("ConfigPaths.directories")(function* (direc
export function fileInDirectory(dir: string, name: string) {
return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)]
}
/** Read a config file, returning undefined for missing files and throwing JsonError for other failures. */
export async function readFile(filepath: string) {
return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => {
if (err.code === "ENOENT") return
throw new JsonError({ path: filepath }, { cause: err })
})
}

View File

@@ -24,6 +24,7 @@ const fail = (err: unknown) =>
text: () => "",
stdout: Buffer.alloc(0),
stderr: Buffer.from(err instanceof Error ? err.message : String(err)),
truncated: false,
}) satisfies Result
export type Kind = "added" | "deleted" | "modified"
@@ -45,16 +46,28 @@ export type Stat = {
readonly deletions: number
}
export type Patch = {
readonly text: string
readonly truncated: boolean
}
export interface PatchOptions {
readonly context?: number
readonly maxOutputBytes?: number
}
export interface Result {
readonly exitCode: number
readonly text: () => string
readonly stdout: Buffer
readonly stderr: Buffer
readonly truncated: boolean
}
export interface Options {
readonly cwd: string
readonly env?: Record<string, string>
readonly maxOutputBytes?: number
}
export interface Interface {
@@ -68,6 +81,10 @@ export interface Interface {
readonly status: (cwd: string) => Effect.Effect<Item[]>
readonly diff: (cwd: string, ref: string) => Effect.Effect<Item[]>
readonly stats: (cwd: string, ref: string) => Effect.Effect<Stat[]>
readonly patch: (cwd: string, ref: string, file: string, options?: PatchOptions) => Effect.Effect<Patch>
readonly patchAll: (cwd: string, ref: string, options?: PatchOptions) => Effect.Effect<Patch>
readonly patchUntracked: (cwd: string, file: string, options?: PatchOptions) => Effect.Effect<Patch>
readonly statUntracked: (cwd: string, file: string) => Effect.Effect<Stat | undefined>
}
const kind = (code: string): Kind => {
@@ -96,15 +113,31 @@ export const layer = Layer.effect(
stderr: "pipe",
})
const handle = yield* spawner.spawn(proc)
const [stdout, stderr] = yield* Effect.all(
[Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))],
{ concurrency: 2 },
)
const collect = (stream: typeof handle.stdout) =>
Stream.runFold(
stream,
() => ({ chunks: [] as Uint8Array[], bytes: 0, truncated: false }),
(acc, chunk) => {
if (opts.maxOutputBytes === undefined) {
acc.chunks.push(chunk)
acc.bytes += chunk.length
return acc
}
const remaining = opts.maxOutputBytes - acc.bytes
if (remaining > 0) acc.chunks.push(remaining >= chunk.length ? chunk : chunk.slice(0, remaining))
acc.bytes += chunk.length
acc.truncated = acc.truncated || acc.bytes > opts.maxOutputBytes
return acc
},
).pipe(Effect.map((x) => ({ buffer: Buffer.concat(x.chunks), truncated: x.truncated })))
const [stdout, stderr] = yield* Effect.all([collect(handle.stdout), collect(handle.stderr)], { concurrency: 2 })
return {
exitCode: yield* handle.exitCode,
text: () => stdout,
stdout: Buffer.from(stdout),
stderr: Buffer.from(stderr),
text: () => stdout.buffer.toString("utf8"),
stdout: stdout.buffer,
stderr: stderr.buffer,
truncated: stdout.truncated || stderr.truncated,
} satisfies Result
},
Effect.scoped,
@@ -240,6 +273,61 @@ export const layer = Layer.effect(
})
})
const patch = Effect.fn("Git.patch")(function* (cwd: string, ref: string, file: string, options?: PatchOptions) {
const result = yield* run(
["diff", "--patch", "--no-ext-diff", "--no-renames", `--unified=${options?.context ?? 3}`, ref, "--", file],
{ cwd, maxOutputBytes: options?.maxOutputBytes },
)
return { text: result.truncated ? "" : result.text(), truncated: result.truncated } satisfies Patch
})
const patchAll = Effect.fn("Git.patchAll")(function* (cwd: string, ref: string, options?: PatchOptions) {
const result = yield* run(
["diff", "--patch", "--no-ext-diff", "--no-renames", `--unified=${options?.context ?? 3}`, ref, "--", "."],
{ cwd, maxOutputBytes: options?.maxOutputBytes },
)
return { text: result.text(), truncated: result.truncated } satisfies Patch
})
const patchUntracked = Effect.fn("Git.patchUntracked")(function* (
cwd: string,
file: string,
options?: PatchOptions,
) {
const result = yield* run(
[
"diff",
"--no-index",
"--patch",
"--no-ext-diff",
"--no-renames",
`--unified=${options?.context ?? 3}`,
"--",
"/dev/null",
file,
],
{ cwd, maxOutputBytes: options?.maxOutputBytes },
)
return { text: result.truncated ? "" : result.text(), truncated: result.truncated } satisfies Patch
})
const statUntracked = Effect.fn("Git.statUntracked")(function* (cwd: string, file: string) {
const result = yield* run(["diff", "--no-index", "--numstat", "--", "/dev/null", file], {
cwd,
maxOutputBytes: 4096,
})
if (result.truncated) return
const parts = result.text().split("\t")
if (parts.length < 2) return
const additions = parts[0] === "-" ? 0 : Number.parseInt(parts[0] || "0", 10)
const deletions = parts[1] === "-" ? 0 : Number.parseInt(parts[1] || "0", 10)
return {
file,
additions: Number.isFinite(additions) ? additions : 0,
deletions: Number.isFinite(deletions) ? deletions : 0,
} satisfies Stat
})
return Service.of({
run,
branch,
@@ -251,6 +339,10 @@ export const layer = Layer.effect(
status,
diff,
stats,
patch,
patchAll,
patchUntracked,
statUntracked,
})
}),
)

View File

@@ -10,6 +10,7 @@ import { Bus } from "../bus"
import * as Log from "@opencode-ai/core/util/log"
import { createOpencodeClient } from "@opencode-ai/sdk"
import { Flag } from "@opencode-ai/core/flag/flag"
import { ServerAuth } from "@/server/auth"
import { CodexAuthPlugin } from "./codex"
import { Session } from "@/session/session"
import { NamedError } from "@opencode-ai/core/util/error"
@@ -124,11 +125,7 @@ export const layer = Layer.effect(
const client = createOpencodeClient({
baseUrl: "http://localhost:4096",
directory: ctx.directory,
headers: Flag.OPENCODE_SERVER_PASSWORD
? {
Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`,
}
: undefined,
headers: ServerAuth.headers(),
fetch: async (...args) => Server.Default().app.fetch(...args),
})
const cfg = yield* config.get()

View File

@@ -1,10 +1,8 @@
import { Effect, Layer, Context, Schema, Stream, Scope } from "effect"
import { formatPatch, structuredPatch } from "diff"
import path from "path"
import { Bus } from "@/bus"
import { BusEvent } from "@/bus/bus-event"
import { InstanceState } from "@/effect/instance-state"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { FileWatcher } from "@/file/watcher"
import { Git } from "@/git"
import * as Log from "@opencode-ai/core/util/log"
@@ -12,20 +10,11 @@ import { zod } from "@/util/effect-zod"
import { NonNegativeInt, withStatics } from "@/util/schema"
const log = Log.create({ service: "vcs" })
const PATCH_CONTEXT_LINES = 2_147_483_647
const MAX_PATCH_BYTES = 10_000_000
const MAX_TOTAL_PATCH_BYTES = 10_000_000
const count = (text: string) => {
if (!text) return 0
if (!text.endsWith("\n")) return text.split("\n").length
return text.slice(0, -1).split("\n").length
}
const work = Effect.fnUntraced(function* (fs: AppFileSystem.Interface, cwd: string, file: string) {
const full = path.join(cwd, file)
if (!(yield* fs.exists(full).pipe(Effect.orDie))) return ""
const buf = yield* fs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
if (Buffer.from(buf).includes(0)) return ""
return Buffer.from(buf).toString("utf8")
})
const emptyPatch = (file: string) => formatPatch(structuredPatch(file, file, "", "", "", "", { context: 0 }))
const nums = (list: Git.Stat[]) =>
new Map(list.map((item) => [item.file, { additions: item.additions, deletions: item.deletions }] as const))
@@ -38,59 +27,168 @@ const merge = (...lists: Git.Item[][]) => {
return [...out.values()]
}
const emptyBatch = () => ({ patches: new Map<string, string>(), capped: false })
const parseQuotedPath = (value: string) => {
let out = ""
for (let idx = 1; idx < value.length; idx++) {
const char = value[idx]
if (char === '"') return { value: out, end: idx + 1 }
if (char !== "\\") {
out += char
continue
}
const next = value[++idx]
if (next === "t") out += "\t"
else if (next === "n") out += "\n"
else if (next === "r") out += "\r"
else if (next === '"' || next === "\\") out += next
else out += next ?? ""
}
}
const parsePathToken = (value: string) => {
if (!value.startsWith('"')) return value.split("\t")[0]
return parseQuotedPath(value)?.value ?? value
}
const fileFromDiffPath = (value: string | undefined) => {
if (!value || value === "/dev/null") return
const file = parsePathToken(value)
if (file.startsWith("a/") || file.startsWith("b/")) return file.slice(2)
return file
}
const fileFromGitHeader = (header: string) => {
if (header.startsWith('"')) {
const first = parseQuotedPath(header)
const second = first ? header.slice(first.end).trimStart() : undefined
if (!second) return
if (!second.startsWith('"')) return fileFromDiffPath(second)
return fileFromDiffPath(parseQuotedPath(second)?.value)
}
const separator = header.indexOf(" b/")
if (separator === -1) return
return fileFromDiffPath(header.slice(separator + 1))
}
const fileFromPatchChunk = (chunk: string) => {
const next = /^\+\+\+ (.+)$/m.exec(chunk)?.[1]
const before = /^--- (.+)$/m.exec(chunk)?.[1]
const file = fileFromDiffPath(next) ?? fileFromDiffPath(before)
if (file) return file
const header = /^diff --git (.+)$/m.exec(chunk)?.[1]
return fileFromGitHeader(header ?? "")
}
const splitGitPatch = (patch: Git.Patch) => {
const starts = [...patch.text.matchAll(/^diff --git /gm)].map((match) => match.index)
const chunks = starts.map((start, index) => patch.text.slice(start, starts[index + 1] ?? patch.text.length))
if (!patch.truncated) return chunks
return chunks.slice(0, -1)
}
const batchPatches = Effect.fnUntraced(function* (git: Git.Interface, cwd: string, ref: string, list: Git.Item[]) {
if (list.length === 0) return { patches: new Map<string, string>(), capped: false }
const result = yield* git.patchAll(cwd, ref, {
context: PATCH_CONTEXT_LINES,
maxOutputBytes: MAX_TOTAL_PATCH_BYTES,
})
if (result.truncated) log.warn("batched patch exceeded byte limit", { max: MAX_TOTAL_PATCH_BYTES })
return {
patches: splitGitPatch(result).reduce((acc, patch, index) => {
const file = fileFromPatchChunk(patch) ?? list[index]?.file
if (!file) return acc
acc.set(file, (acc.get(file) ?? "") + patch)
return acc
}, new Map<string, string>()),
capped: result.truncated,
}
})
const nativePatch = Effect.fnUntraced(function* (
git: Git.Interface,
cwd: string,
ref: string | undefined,
item: Git.Item,
) {
const result =
item.code === "??" || !ref
? yield* git.patchUntracked(cwd, item.file, { context: PATCH_CONTEXT_LINES, maxOutputBytes: MAX_PATCH_BYTES })
: yield* git.patch(cwd, ref, item.file, { context: PATCH_CONTEXT_LINES, maxOutputBytes: MAX_PATCH_BYTES })
if (!result.truncated && result.text) return result.text
if (result.truncated) log.warn("patch exceeded byte limit", { file: item.file, max: MAX_PATCH_BYTES })
return emptyPatch(item.file)
})
const totalPatch = (file: string, patch: string, total: number) => {
if (total + Buffer.byteLength(patch) <= MAX_TOTAL_PATCH_BYTES) return { patch, capped: false }
log.warn("total patch budget exceeded", { file, max: MAX_TOTAL_PATCH_BYTES })
return { patch: emptyPatch(file), capped: true }
}
const patchForItem = Effect.fnUntraced(function* (
git: Git.Interface,
cwd: string,
ref: string | undefined,
item: Git.Item,
batch: { patches: Map<string, string>; capped: boolean },
capped: boolean,
) {
if (capped) return emptyPatch(item.file)
const batched = batch.patches.get(item.file)
if (batched !== undefined) return batched
if (item.code !== "??" && batch.capped) return emptyPatch(item.file)
return yield* nativePatch(git, cwd, ref, item)
})
// Build FileDiff entries for every status item by diffing the committed
// content (`git show`) against the working tree, sorted by file path.
//
// NOTE(review): the original block contained a second, unreachable
// implementation after the `return` below which also redeclared `next`
// (a compile error, apparently leftover from a refactor toward
// `patchForItem`/`totalPatch`). That dead copy has been removed; runtime
// behavior of the reachable path is unchanged. The `batch` parameter is kept
// for caller compatibility even though this path does not consume it.
const files = Effect.fnUntraced(function* (
  fs: AppFileSystem.Interface,
  git: Git.Interface,
  cwd: string,
  ref: string | undefined,
  list: Git.Item[],
  map: Map<string, { additions: number; deletions: number }>,
  batch: { patches: Map<string, string>; capped: boolean },
) {
  // `git show` paths are repo-relative, so resolve the prefix when a ref is used.
  const base = ref ? yield* git.prefix(cwd) : ""
  const patch = (file: string, before: string, after: string) =>
    formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER }))
  const next = yield* Effect.forEach(
    list,
    (item) =>
      Effect.gen(function* () {
        // Added files (or refless diffs) have no committed "before" side;
        // deleted files have no working-tree "after" side.
        const before = item.status === "added" || !ref ? "" : yield* git.show(cwd, ref, item.file, base)
        const after = item.status === "deleted" ? "" : yield* work(fs, cwd, item.file)
        const stat = map.get(item.file)
        return {
          file: item.file,
          patch: patch(item.file, before, after),
          additions: stat?.additions ?? (item.status === "added" ? count(after) : 0),
          deletions: stat?.deletions ?? (item.status === "deleted" ? count(before) : 0),
          status: item.status,
        } satisfies FileDiff
      }),
    { concurrency: 8 },
  )
  return next.toSorted((a, b) => a.file.localeCompare(b.file))
})
// Diff the working tree: against the given ref when one exists, otherwise
// treat everything reported by `git status` as new content.
const track = Effect.fnUntraced(function* (
  fs: AppFileSystem.Interface,
  git: Git.Interface,
  cwd: string,
  ref: string | undefined,
) {
  if (!ref) {
    const list = yield* git.status(cwd)
    return yield* files(fs, git, cwd, ref, list, new Map())
  }
  // Status and numstat are independent, so fetch them concurrently.
  const [list, stats] = yield* Effect.all([git.status(cwd), git.stats(cwd, ref)], { concurrency: 2 })
  return yield* files(fs, git, cwd, ref, list, nums(stats))
})
const compare = Effect.fnUntraced(function* (
fs: AppFileSystem.Interface,
git: Git.Interface,
cwd: string,
ref: string,
) {
const diffAgainstRef = Effect.fnUntraced(function* (git: Git.Interface, cwd: string, ref: string) {
const [list, stats, extra] = yield* Effect.all([git.diff(cwd, ref), git.stats(cwd, ref), git.status(cwd)], {
concurrency: 3,
})
return yield* files(
fs,
git,
cwd,
ref,
@@ -99,9 +197,15 @@ const compare = Effect.fnUntraced(function* (
extra.filter((item) => item.code === "??"),
),
nums(stats),
yield* batchPatches(git, cwd, ref, list),
)
})
const track = Effect.fnUntraced(function* (git: Git.Interface, cwd: string, ref: string | undefined) {
if (!ref) return yield* files(git, cwd, ref, yield* git.status(cwd), new Map(), emptyBatch())
return yield* diffAgainstRef(git, cwd, ref)
})
export const Mode = Schema.Literals(["git", "branch"]).pipe(withStatics((s) => ({ zod: zod(s) })))
export type Mode = Schema.Schema.Type<typeof Mode>
@@ -147,10 +251,9 @@ interface State {
export class Service extends Context.Service<Service, Interface>()("@opencode/Vcs") {}
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Git.Service | Bus.Service> = Layer.effect(
export const layer: Layer.Layer<Service, never, Git.Service | Bus.Service> = Layer.effect(
Service,
Effect.gen(function* () {
const fs = yield* AppFileSystem.Service
const git = yield* Git.Service
const bus = yield* Bus.Service
const scope = yield* Scope.Scope
@@ -204,23 +307,19 @@ export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Git.Serv
const ctx = yield* InstanceState.context
if (ctx.project.vcs !== "git") return []
if (mode === "git") {
return yield* track(fs, git, ctx.directory, (yield* git.hasHead(ctx.directory)) ? "HEAD" : undefined)
return yield* track(git, ctx.directory, (yield* git.hasHead(ctx.directory)) ? "HEAD" : undefined)
}
if (!value.root) return []
if (value.current && value.current === value.root.name) return []
const ref = yield* git.mergeBase(ctx.directory, value.root.ref)
if (!ref) return []
return yield* compare(fs, git, ctx.directory, ref)
return yield* diffAgainstRef(git, ctx.directory, ref)
}),
})
}),
)
export const defaultLayer = layer.pipe(
Layer.provide(Git.defaultLayer),
Layer.provide(AppFileSystem.defaultLayer),
Layer.provide(Bus.layer),
)
export const defaultLayer = layer.pipe(Layer.provide(Git.defaultLayer), Layer.provide(Bus.layer))
export * as Vcs from "./vcs"

View File

@@ -0,0 +1,48 @@
export * as ServerAuth from "./auth"
import { ConfigService } from "@/effect/config-service"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Config as EffectConfig, Context, Option, Redacted } from "effect"
/** Optional caller-supplied basic-auth credentials; both fields may be omitted. */
export type Credentials = {
  password?: string
  username?: string
}
/** Credentials decoded from an incoming Authorization header; password stays redacted. */
export type DecodedCredentials = {
  readonly username: string
  readonly password: Redacted.Redacted
}
// Server auth config sourced from the environment: password is optional,
// username defaults to "opencode".
export class Config extends ConfigService.Service<Config>()("@opencode/ServerAuthConfig", {
  password: EffectConfig.string("OPENCODE_SERVER_PASSWORD").pipe(EffectConfig.option),
  username: EffectConfig.string("OPENCODE_SERVER_USERNAME").pipe(EffectConfig.withDefault("opencode")),
}) {}
/** Resolved shape of the Config service as provided by its layer. */
export type Info = Context.Service.Shape<typeof Config>
export function required(config: Info) {
return Option.isSome(config.password) && config.password.value !== ""
}
export function authorized(credentials: DecodedCredentials, config: Info) {
return (
Option.isSome(config.password) &&
credentials.username === config.username &&
Redacted.value(credentials.password) === config.password.value
)
}
/**
 * Build a `Basic` Authorization header value. Explicit credentials win over
 * the OPENCODE_SERVER_* flags; returns undefined when no password is known.
 */
export function header(credentials?: Credentials) {
  const password = credentials?.password ?? Flag.OPENCODE_SERVER_PASSWORD
  if (!password) return undefined
  const username = credentials?.username ?? Flag.OPENCODE_SERVER_USERNAME ?? "opencode"
  const encoded = Buffer.from(`${username}:${password}`).toString("base64")
  return `Basic ${encoded}`
}
/** Like header(), but wrapped as a headers record, or undefined when auth is unset. */
export function headers(credentials?: Credentials) {
  const value = header(credentials)
  return value ? { Authorization: value } : undefined
}

View File

@@ -1,78 +1,8 @@
import type { MiddlewareHandler } from "hono"
import { Database } from "@/storage/db"
import { inArray } from "drizzle-orm"
import { EventSequenceTable } from "@/sync/event.sql"
import { Workspace } from "@/control-plane/workspace"
import type { WorkspaceID } from "@/control-plane/schema"
import * as Log from "@opencode-ai/core/util/log"
import { AppRuntime } from "@/effect/app-runtime"
import { Effect } from "effect"
import { HEADER, diff, load } from "./shared/fence"
const HEADER = "x-opencode-sync"
type State = Record<string, number>
const log = Log.create({ service: "fence" })
// Load the persisted event-sequence fence, optionally restricted to a set of
// aggregate ids, as an { aggregate_id -> seq } record.
export function load(ids?: string[]) {
  const rows = Database.use((db) => {
    const base = db.select().from(EventSequenceTable)
    return ids?.length ? base.where(inArray(EventSequenceTable.aggregate_id, ids)).all() : base.all()
  })
  const entries = rows.map((row) => [row.aggregate_id, row.seq] as const)
  return Object.fromEntries(entries) as State
}
// Compute the fence delta: every id whose sequence changed between prev and
// next, with ids missing from next reported as -1.
export function diff(prev: State, next: State) {
  const changed: Record<string, number> = {}
  for (const id of new Set([...Object.keys(prev), ...Object.keys(next)])) {
    const seq = next[id] ?? -1
    if ((prev[id] ?? -1) !== seq) changed[id] = seq
  }
  return changed as State
}
// Parse the sync-fence header into a State record. Absent, malformed, or
// non-object payloads all yield undefined; only entries whose sequence value
// is an integer are kept.
export function parse(headers: Headers) {
  const raw = headers.get(HEADER)
  if (!raw) return undefined
  let data
  try {
    data = JSON.parse(raw)
  } catch {
    return undefined
  }
  if (data === null || typeof data !== "object") return undefined
  // Object.entries keys are always strings, so only the seq needs validating.
  const valid = Object.entries(data).filter(([, seq]) => Number.isInteger(seq))
  return Object.fromEntries(valid) as State
}
// Effect that blocks until the workspace has synced at least to the sequence
// numbers in `state`; logs before and after for traceability.
export function waitEffect(workspaceID: WorkspaceID, state: State, signal?: AbortSignal) {
  return Effect.gen(function* () {
    log.info("waiting for state", {
      workspaceID,
      state,
    })
    // NOTE(review): presumably `signal` aborts the wait — confirm against
    // Workspace.waitForSync's contract.
    yield* Workspace.Service.use((workspace) => workspace.waitForSync(workspaceID, state, signal))
    log.info("state fully synced", {
      workspaceID,
      state,
    })
  })
}
// Promise-returning convenience wrapper around waitEffect for non-Effect callers.
export async function wait(workspaceID: WorkspaceID, state: State, signal?: AbortSignal) {
  await AppRuntime.runPromise(waitEffect(workspaceID, state, signal))
}
const log = Log.create({ service: "fence-middleware" })
export const FenceMiddleware: MiddlewareHandler = async (c, next) => {
if (c.req.method === "GET" || c.req.method === "HEAD" || c.req.method === "OPTIONS") return next()

View File

@@ -0,0 +1,244 @@
// TODO: Node adapter forthcoming — same pattern but using `node:http` + `ws` library,
// and `node:http`'s `upgrade` event.
//
// This module is a Bun-only proof-of-concept for a native `Bun.serve` listener that
// drives the experimental HttpApi handler directly (no Hono in the middle) and handles
// WebSocket upgrades inline based on path-matching. It exists to validate the pattern
// before deleting the Hono backend; `Server.listen()` is intentionally NOT wired to it.
import type { ServerWebSocket } from "bun"
import { Effect, Schema } from "effect"
import { AppRuntime } from "@/effect/app-runtime"
import { WithInstance } from "@/project/with-instance"
import { Pty } from "@/pty"
import { handlePtyInput } from "@/pty/input"
import { PtyID } from "@/pty/schema"
import { PtyPaths } from "@/server/routes/instance/httpapi/groups/pty"
import { ExperimentalHttpApiServer } from "@/server/routes/instance/httpapi/server"
import * as Log from "@opencode-ai/core/util/log"
import type { CorsOptions } from "./cors"
const log = Log.create({ service: "httpapi-listener" })
const decodePtyID = Schema.decodeUnknownSync(PtyID)
/** Handle returned by listen(): the bound address plus an idempotent stop(). */
export type Listener = {
  hostname: string
  port: number
  url: URL
  stop: (close?: boolean) => Promise<void>
}
/** CORS settings plus the address to bind; port 0 means "prefer 4096, else ephemeral". */
export type ListenOptions = CorsOptions & {
  port: number
  hostname: string
}
// Discriminated websocket payload; only the "pty" kind exists today.
type WsKind = { kind: "pty"; ptyID: string; cursor: number | undefined; directory: string }
// Callbacks handed back by Pty.connect for an attached socket.
type PtyHandler = {
  onMessage: (message: string | ArrayBuffer) => void
  onClose: () => void
}
// Per-socket mutable state: frames received before the pty attach resolves are
// buffered in `pending` and flushed once `ready` flips true.
type WsState = WsKind & {
  handler?: PtyHandler
  pending: Array<string | Uint8Array>
  ready: boolean
  closed: boolean
}
// Derive from the OpenAPI path so this stays in sync if the route literal moves.
const ptyConnectPattern = new RegExp(`^${PtyPaths.connect.replace(/:[^/]+/g, "([^/]+)")}$`)
// Parse the `cursor` query parameter: accepts safe integers >= -1, rejecting
// everything else (missing, empty, non-numeric, fractional, or out-of-range).
function parseCursor(value: string | null): number | undefined {
  if (value === null || value === "") return undefined
  const parsed = Number(value)
  return Number.isSafeInteger(parsed) && parsed >= -1 ? parsed : undefined
}
// Adapt a Bun ServerWebSocket to the minimal socket shape Pty.connect expects;
// send/close swallow errors from sockets that have already closed.
function asAdapter(ws: ServerWebSocket<WsState>) {
  const quietly = (fn: () => void) => {
    try {
      fn()
    } catch {
      // socket likely already closed; ignore
    }
  }
  return {
    get readyState() {
      return ws.readyState
    },
    send: (data: string | Uint8Array | ArrayBuffer) => {
      // Bun's send() takes string | Uint8Array, so copy raw ArrayBuffers into a view.
      quietly(() => ws.send(data instanceof ArrayBuffer ? new Uint8Array(data) : data))
    },
    close: (code?: number, reason?: string) => {
      quietly(() => ws.close(code, reason))
    },
  }
}
/**
* Spin up a native Bun.serve that:
* 1. Routes all HTTP traffic through the HttpApi web handler.
* 2. Intercepts known WebSocket upgrade paths and handles them inline.
*
* This bypasses Hono entirely. The Hono code path remains untouched.
*/
export async function listen(opts: ListenOptions): Promise<Listener> {
  const built = ExperimentalHttpApiServer.webHandler(opts)
  const handler = built.handler
  const context = ExperimentalHttpApiServer.context
  // Attempt to bind Bun.serve on one port; returns undefined on failure so the
  // caller can retry on a different port.
  const start = (port: number) => {
    try {
      return Bun.serve<WsState>({
        hostname: opts.hostname,
        port,
        idleTimeout: 0,
        fetch(request, server) {
          const url = new URL(request.url)
          const ptyMatch = url.pathname.match(ptyConnectPattern)
          if (ptyMatch && request.headers.get("upgrade")?.toLowerCase() === "websocket") {
            const ptyID = ptyMatch[1]!
            const cursor = parseCursor(url.searchParams.get("cursor"))
            // Resolve the instance directory the same way the HttpApi
            // `instance-context` middleware does (search params, then header,
            // then process.cwd()).
            const directory =
              url.searchParams.get("directory") ?? request.headers.get("x-opencode-directory") ?? process.cwd()
            const upgraded = server.upgrade(request, {
              data: {
                kind: "pty",
                ptyID,
                cursor,
                directory,
                pending: [],
                ready: false,
                closed: false,
              } satisfies WsState,
            })
            // On a successful upgrade Bun sends the 101 itself, hence undefined.
            if (upgraded) return undefined
            return new Response("upgrade failed", { status: 400 })
          }
          // TODO: workspace-proxy WS upgrade detection. The Hono path forwards via a
          // remote `new WebSocket(url, ...)` (see ServerProxy.websocket). To support
          // that here we'd need to (a) resolve the workspace target the same way
          // `WorkspaceRouterMiddleware` does today, then (b) `server.upgrade(request,
          // { data: { kind: "proxy", target, headers, protocols } })` and bridge the
          // ServerWebSocket to a remote WebSocket inside the `websocket` handlers.
          // Deferred to a follow-up — the proxy story needs more design (auth header
          // forwarding, fence sync, reconnection semantics) than fits this PR.
          return handler(request as Request, context as never)
        },
        websocket: {
          open(ws) {
            const data = ws.data
            if (data.kind !== "pty") {
              ws.close(1011, "unknown ws kind")
              return
            }
            // Validate the path-derived pty id before touching any services.
            const id = (() => {
              try {
                return decodePtyID(data.ptyID)
              } catch {
                ws.close(1008, "invalid pty id")
                return undefined
              }
            })()
            if (!id) return
            // Attach asynchronously: the socket may receive frames (buffered in
            // data.pending) or even close before Pty.connect resolves.
            ;(async () => {
              const result = await WithInstance.provide({
                directory: data.directory,
                fn: () =>
                  AppRuntime.runPromise(
                    Effect.gen(function* () {
                      const pty = yield* Pty.Service
                      return yield* pty.connect(id, asAdapter(ws), data.cursor)
                    }).pipe(Effect.withSpan("HttpApiListener.pty.connect.open")),
                  ),
              })
              return await result
            })()
              .then((handler) => {
                // Socket closed while we were attaching: release the pty side.
                if (data.closed) {
                  handler?.onClose()
                  return
                }
                if (!handler) {
                  ws.close(4404, "session not found")
                  return
                }
                data.handler = handler
                data.ready = true
                // Flush any frames that arrived before the attach completed.
                for (const msg of data.pending) {
                  AppRuntime.runPromise(handlePtyInput(handler, msg)).catch(() => undefined)
                }
                data.pending.length = 0
              })
              .catch((err) => {
                log.error("pty connect failed", { error: err })
                ws.close(1011, "pty connect failed")
              })
          },
          message(ws, message) {
            const data = ws.data
            if (data.kind !== "pty") return
            // Normalize Bun's string | Buffer frames to string | Uint8Array.
            const payload =
              typeof message === "string"
                ? message
                : message instanceof Buffer
                  ? new Uint8Array(message.buffer, message.byteOffset, message.byteLength)
                  : (message as Uint8Array)
            if (!data.ready || !data.handler) {
              data.pending.push(payload)
              return
            }
            AppRuntime.runPromise(handlePtyInput(data.handler, payload)).catch(() => undefined)
          },
          close(ws) {
            const data = ws.data
            data.closed = true
            data.handler?.onClose()
          },
        },
      })
    } catch (err) {
      log.error("Bun.serve failed", { error: err })
      return undefined
    }
  }
  // Port 0 requests "prefer 4096, fall back to an ephemeral port".
  const server = opts.port === 0 ? (start(4096) ?? start(0)) : start(opts.port)
  if (!server) throw new Error(`Failed to start server on port ${opts.port}`)
  const port = server.port
  if (port === undefined) throw new Error("Bun.serve started without a numeric port")
  const url = new URL("http://localhost")
  url.hostname = opts.hostname
  url.port = String(port)
  // Memoized so repeated stop() calls share one shutdown promise.
  let closing: Promise<void> | undefined
  return {
    hostname: opts.hostname,
    port,
    url,
    stop(close?: boolean) {
      closing ??= (async () => {
        await server.stop(close)
        // NOTE: we deliberately do NOT call `built.dispose()` here. The
        // underlying `webHandler` is memoized at module level (same as the
        // Hono path), so disposing it would tear down shared services for
        // every other consumer in the process. Lifecycle teardown is owned
        // by the AppRuntime itself.
      })()
      return closing
    },
  }
}
export * as HttpApiListener from "./httpapi-listener"

View File

@@ -1,7 +1,7 @@
import { Hono } from "hono"
import type { UpgradeWebSocket } from "hono/ws"
import * as Log from "@opencode-ai/core/util/log"
import * as Fence from "./fence"
import * as Fence from "./shared/fence"
import type { WorkspaceID } from "@/control-plane/schema"
import { Workspace } from "@/control-plane/workspace"
import { AppRuntime } from "@/effect/app-runtime"

View File

@@ -5,7 +5,7 @@ import * as Database from "@/storage/db"
import { eq } from "drizzle-orm"
import { Effect } from "effect"
import { HttpApiBuilder, HttpApiError } from "effect/unstable/httpapi"
import { nextTuiRequest, submitTuiResponse } from "../../tui"
import { nextTuiRequest, submitTuiResponse } from "@/server/shared/tui-control"
import { InstanceHttpApi } from "../api"
import { CommandPayload, TuiPublishPayload } from "../groups/tui"

View File

@@ -1,10 +1,11 @@
import { ConfigService } from "@/effect/config-service"
import { Config, Context, Effect, Encoding, Layer, Option, Redacted } from "effect"
import { ServerAuth } from "@/server/auth"
import { Effect, Encoding, Layer, Redacted } from "effect"
import { HttpRouter, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"
import { HttpApiError, HttpApiMiddleware, HttpApiSecurity } from "effect/unstable/httpapi"
const AUTH_TOKEN_QUERY = "auth_token"
const UNAUTHORIZED = 401
const WWW_AUTHENTICATE = 'Basic realm="Secure Area"'
export class Authorization extends HttpApiMiddleware.Service<Authorization>()(
"@opencode/ExperimentalHttpApiAuthorization",
@@ -17,41 +18,18 @@ export class Authorization extends HttpApiMiddleware.Service<Authorization>()(
},
) {}
export class ServerAuthConfig extends ConfigService.Service<ServerAuthConfig>()(
"@opencode/ExperimentalHttpApiServerAuthConfig",
{
password: Config.string("OPENCODE_SERVER_PASSWORD").pipe(Config.option),
username: Config.string("OPENCODE_SERVER_USERNAME").pipe(Config.withDefault("opencode")),
},
) {}
function validateCredential<A, E, R>(
effect: Effect.Effect<A, E, R>,
credential: { readonly username: string; readonly password: Redacted.Redacted },
config: Context.Service.Shape<typeof ServerAuthConfig>,
credential: ServerAuth.DecodedCredentials,
config: ServerAuth.Info,
) {
return Effect.gen(function* () {
if (!isAuthRequired(config)) return yield* effect
if (!isCredentialAuthorized(credential, config)) return yield* new HttpApiError.Unauthorized({})
if (!ServerAuth.required(config)) return yield* effect
if (!ServerAuth.authorized(credential, config)) return yield* new HttpApiError.Unauthorized({})
return yield* effect
})
}
function isAuthRequired(config: Context.Service.Shape<typeof ServerAuthConfig>) {
return Option.isSome(config.password) && config.password.value !== ""
}
function isCredentialAuthorized(
credential: { readonly username: string; readonly password: Redacted.Redacted },
config: Context.Service.Shape<typeof ServerAuthConfig>,
) {
return (
Option.isSome(config.password) &&
credential.username === config.username &&
Redacted.value(credential.password) === config.password.value
)
}
function decodeCredential(input: string) {
const emptyCredential = {
username: "",
@@ -77,19 +55,24 @@ function decodeCredential(input: string) {
function validateRawCredential<A, E, R>(
effect: Effect.Effect<A, E, R>,
credential: { readonly username: string; readonly password: Redacted.Redacted },
config: Context.Service.Shape<typeof ServerAuthConfig>,
credential: ServerAuth.DecodedCredentials,
config: ServerAuth.Info,
) {
if (!isAuthRequired(config)) return effect
if (!isCredentialAuthorized(credential, config))
return Effect.succeed(HttpServerResponse.empty({ status: UNAUTHORIZED }))
if (!ServerAuth.required(config)) return effect
if (!ServerAuth.authorized(credential, config))
return Effect.succeed(
HttpServerResponse.empty({
status: UNAUTHORIZED,
headers: { "www-authenticate": WWW_AUTHENTICATE },
}),
)
return effect
}
export const authorizationRouterMiddleware = HttpRouter.middleware()(
Effect.gen(function* () {
const config = yield* ServerAuthConfig
if (!isAuthRequired(config)) return (effect) => effect
const config = yield* ServerAuth.Config
if (!ServerAuth.required(config)) return (effect) => effect
return (effect) =>
Effect.gen(function* () {
@@ -116,7 +99,7 @@ export const authorizationRouterMiddleware = HttpRouter.middleware()(
export const authorizationLayer = Layer.effect(
Authorization,
Effect.gen(function* () {
const config = yield* ServerAuthConfig
const config = yield* ServerAuth.Config
return Authorization.of({
basic: (effect, { credential }) => validateCredential(effect, credential, config),
authToken: (effect, { credential }) =>

View File

@@ -5,8 +5,8 @@ import { Workspace } from "@/control-plane/workspace"
import { EffectBridge } from "@/effect/bridge"
import { Session } from "@/session/session"
import { HttpApiProxy } from "./proxy"
import * as Fence from "@/server/fence"
import { getWorkspaceRouteSessionID, isLocalWorkspaceRoute, workspaceProxyURL } from "@/server/workspace"
import * as Fence from "@/server/shared/fence"
import { getWorkspaceRouteSessionID, isLocalWorkspaceRoute, workspaceProxyURL } from "@/server/shared/workspace-routing"
import { Flag } from "@opencode-ai/core/flag/flag"
import { Context, Data, Effect, Layer } from "effect"
import { HttpClient, HttpRouter, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"

View File

@@ -45,9 +45,10 @@ import { Vcs } from "@/project/vcs"
import { Worktree } from "@/worktree"
import { Workspace } from "@/control-plane/workspace"
import { isAllowedCorsOrigin, type CorsOptions } from "@/server/cors"
import { serveUIEffect } from "@/server/routes/ui"
import { serveUIEffect } from "@/server/shared/ui"
import { ServerAuth } from "@/server/auth"
import { InstanceHttpApi, RootHttpApi } from "./api"
import { ServerAuthConfig, authorizationLayer, authorizationRouterMiddleware } from "./middleware/authorization"
import { authorizationLayer, authorizationRouterMiddleware } from "./middleware/authorization"
import { EventApi, eventHandlers } from "./event"
import { configHandlers } from "./handlers/config"
import { controlHandlers } from "./handlers/control"
@@ -97,7 +98,7 @@ const rootApiRoutes = HttpApiBuilder.layer(RootHttpApi).pipe(Layer.provide([cont
const instanceRouterLayer = authorizationRouterMiddleware
.combine(instanceRouterMiddleware)
.combine(workspaceRouterMiddleware)
.layer.pipe(Layer.provide(Socket.layerWebSocketConstructorGlobal), Layer.provide(ServerAuthConfig.defaultLayer))
.layer.pipe(Layer.provide(Socket.layerWebSocketConstructorGlobal), Layer.provide(ServerAuth.Config.defaultLayer))
const eventApiRoutes = HttpApiBuilder.layer(EventApi).pipe(
Layer.provide(eventHandlers),
Layer.provide(instanceRouterLayer),
@@ -125,7 +126,7 @@ const instanceApiRoutes = HttpApiBuilder.layer(InstanceHttpApi).pipe(
const rawInstanceRoutes = Layer.mergeAll(ptyConnectRoute).pipe(Layer.provide(instanceRouterLayer))
const instanceRoutes = Layer.mergeAll(rawInstanceRoutes, instanceApiRoutes).pipe(
Layer.provide([
authorizationLayer.pipe(Layer.provide(ServerAuthConfig.defaultLayer)),
authorizationLayer.pipe(Layer.provide(ServerAuth.Config.defaultLayer)),
workspaceRoutingLayer.pipe(Layer.provide(Socket.layerWebSocketConstructorGlobal)),
instanceContextLayer,
]),
@@ -137,7 +138,7 @@ const uiRoute = HttpRouter.use((router) =>
const client = yield* HttpClient.HttpClient
yield* router.add("*", "/*", (request) => serveUIEffect(request, { fs, client }))
}),
).pipe(Layer.provide(authorizationRouterMiddleware.layer.pipe(Layer.provide(ServerAuthConfig.defaultLayer))))
).pipe(Layer.provide(authorizationRouterMiddleware.layer.pipe(Layer.provide(ServerAuth.Config.defaultLayer))))
export function createRoutes(corsOptions?: CorsOptions) {
return Layer.mergeAll(rootApiRoutes, eventApiRoutes, instanceRoutes, uiRoute).pipe(

View File

@@ -7,32 +7,16 @@ import { Session } from "@/session/session"
import type { SessionID } from "@/session/schema"
import { TuiEvent } from "@/cli/cmd/tui/event"
import { zodObject } from "@/util/effect-zod"
import { AsyncQueue } from "@/util/queue"
import { errors } from "../../error"
import { lazy } from "@/util/lazy"
import { runRequest } from "./trace"
export const TuiRequest = z.object({
path: z.string(),
body: z.any(),
})
export type TuiRequest = z.infer<typeof TuiRequest>
const request = new AsyncQueue<TuiRequest>()
const response = new AsyncQueue<unknown>()
export function nextTuiRequest() {
return request.next()
}
export function submitTuiRequest(body: TuiRequest) {
request.push(body)
}
export function submitTuiResponse(body: unknown) {
response.push(body)
}
import {
TuiRequest,
nextTuiRequest,
nextTuiResponse,
submitTuiRequest,
submitTuiResponse,
} from "@/server/shared/tui-control"
export async function callTui(ctx: Context) {
const body = await ctx.req.json()
@@ -40,7 +24,7 @@ export async function callTui(ctx: Context) {
path: ctx.req.path,
body,
})
return response.next()
return nextTuiResponse()
}
const TuiControlRoutes = new Hono()

View File

@@ -1,53 +1,10 @@
import { Flag } from "@opencode-ai/core/flag/flag"
import fs from "node:fs/promises"
import { createHash } from "node:crypto"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Effect, Stream } from "effect"
import { HttpBody, HttpClient, HttpClientRequest, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"
import { Hono } from "hono"
import { proxy } from "hono/proxy"
import { getMimeType } from "hono/utils/mime"
import { createHash } from "node:crypto"
import fs from "node:fs/promises"
import { ProxyUtil } from "../proxy-util"
const embeddedUIPromise = Flag.OPENCODE_DISABLE_EMBEDDED_WEB_UI
? Promise.resolve(null)
: // @ts-expect-error - generated file at build time
import("opencode-web-ui.gen.ts").then((module) => module.default as Record<string, string>).catch(() => null)
const DEFAULT_CSP =
"default-src 'self'; script-src 'self' 'wasm-unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:"
const UI_UPSTREAM = new URL("https://app.opencode.ai")
const csp = (hash = "") =>
`default-src 'self'; script-src 'self' 'wasm-unsafe-eval'${hash ? ` 'sha256-${hash}'` : ""}; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:`
function themePreloadHash(body: string) {
return body.match(/<script\b(?![^>]*\bsrc\s*=)[^>]*\bid=(['"])oc-theme-preload-script\1[^>]*>([\s\S]*?)<\/script>/i)
}
function requestBody(request: HttpServerRequest.HttpServerRequest) {
if (request.method === "GET" || request.method === "HEAD") return HttpBody.empty
const len = request.headers["content-length"]
return HttpBody.stream(request.stream, request.headers["content-type"], len === undefined ? undefined : Number(len))
}
function proxyResponseHeaders(headers: Record<string, string>) {
const result = new Headers(headers)
// FetchHttpClient exposes decoded response bodies, so forwarding upstream
// transfer metadata makes browsers decode already-decoded assets again.
result.delete("content-encoding")
result.delete("content-length")
return result
}
function upstreamURL(path: string) {
return new URL(path, UI_UPSTREAM).toString()
}
function embeddedUI() {
if (Flag.OPENCODE_DISABLE_EMBEDDED_WEB_UI) return Promise.resolve(null)
return embeddedUIPromise
}
import { DEFAULT_CSP, UI_UPSTREAM, csp, embeddedUI, themePreloadHash, upstreamURL } from "../shared/ui"
export async function serveUI(request: Request) {
const embeddedWebUI = await embeddedUI()
@@ -58,7 +15,7 @@ export async function serveUI(request: Request) {
if (!match) return Response.json({ error: "Not Found" }, { status: 404 })
if (await fs.exists(match)) {
const mime = getMimeType(match) ?? "text/plain"
const mime = AppFileSystem.mimeType(match)
const headers = new Headers({ "content-type": mime })
if (mime.startsWith("text/html")) headers.set("content-security-policy", DEFAULT_CSP)
return new Response(new Uint8Array(await fs.readFile(match)), { headers })
@@ -79,49 +36,4 @@ export async function serveUI(request: Request) {
return response
}
export function serveUIEffect(
request: HttpServerRequest.HttpServerRequest,
services: { fs: AppFileSystem.Interface; client: HttpClient.HttpClient },
) {
return Effect.gen(function* () {
const embeddedWebUI = yield* Effect.promise(() => embeddedUI())
const path = new URL(request.url, "http://localhost").pathname
if (embeddedWebUI) {
const match = embeddedWebUI[path.replace(/^\//, "")] ?? embeddedWebUI["index.html"] ?? null
if (!match) return HttpServerResponse.jsonUnsafe({ error: "Not Found" }, { status: 404 })
if (yield* services.fs.existsSafe(match)) {
const mime = getMimeType(match) ?? "text/plain"
const headers = new Headers({ "content-type": mime })
if (mime.startsWith("text/html")) headers.set("content-security-policy", DEFAULT_CSP)
return HttpServerResponse.raw(yield* services.fs.readFile(match), { headers })
}
return HttpServerResponse.jsonUnsafe({ error: "Not Found" }, { status: 404 })
}
const response = yield* services.client.execute(
HttpClientRequest.make(request.method)(upstreamURL(path), {
headers: ProxyUtil.headers(request.headers, { host: UI_UPSTREAM.host }),
body: requestBody(request),
}),
)
const headers = proxyResponseHeaders(response.headers)
if (response.headers["content-type"]?.includes("text/html")) {
const body = yield* response.text
const match = themePreloadHash(body)
headers.set("Content-Security-Policy", csp(match ? createHash("sha256").update(match[2]).digest("base64") : ""))
return HttpServerResponse.text(body, { status: response.status, headers })
}
headers.set("Content-Security-Policy", csp())
return HttpServerResponse.stream(response.stream.pipe(Stream.catchCause(() => Stream.empty)), {
status: response.status,
headers,
})
})
}
export const UIRoutes = (): Hono => new Hono().all("/*", (c) => serveUI(c.req.raw))

View File

@@ -5,6 +5,7 @@ import { lazy } from "@/util/lazy"
import * as Log from "@opencode-ai/core/util/log"
import { Flag } from "@opencode-ai/core/flag/flag"
import { WorkspaceID } from "@/control-plane/schema"
import { OpenApi } from "effect/unstable/httpapi"
import { MDNS } from "./mdns"
import { AuthMiddleware, CompressionMiddleware, CorsMiddleware, ErrorMiddleware, LoggerMiddleware } from "./middleware"
import { FenceMiddleware } from "./fence"
@@ -17,6 +18,7 @@ import { WorkspaceRouterMiddleware } from "./workspace"
import { InstanceMiddleware } from "./routes/instance/middleware"
import { WorkspaceRoutes } from "./routes/control/workspace"
import { ExperimentalHttpApiServer } from "./routes/instance/httpapi/server"
import { PublicApi } from "./routes/instance/httpapi/public"
import * as ServerBackend from "./backend"
import type { CorsOptions } from "./cors"
@@ -135,7 +137,30 @@ function createHono(opts: CorsOptions, selection: ServerBackend.Selection = Serv
}
}
/**
* Generate the OpenAPI document used by the SDK build.
*
* Since the Effect HttpApi backend now covers every Hono route (plus the new
* `/api/session/*` v2 routes — see `httpapi-bridge.test.ts` for the parity
* audit), `Server.openapi()` derives the spec from `OpenApi.fromApi(PublicApi)`.
* `PublicApi` is `OpenCodeHttpApi` annotated with the `matchLegacyOpenApi`
* transform that injects instance query parameters, strips Effect's optional
* null arms, normalizes component names, and patches SSE response schemas so
* the generated SDK keeps the legacy Hono shape.
*
* The Hono-derived spec is still reachable via `openapiHono()` so reviewers
* can diff the two outputs while the Hono backend lingers; once the Hono
* backend is deleted that helper goes with it.
*/
export async function openapi() {
return OpenApi.fromApi(PublicApi)
}
/**
* Hono-derived OpenAPI spec, retained for parity diffing only. Delete once
* the Hono backend is removed.
*/
export async function openapiHono() {
// Build a fresh app with all routes registered directly so
// hono-openapi can see describeRoute metadata (`.route()` wraps
// handlers when the sub-app has a custom errorHandler, which

View File

@@ -0,0 +1,74 @@
import { Database } from "@/storage/db"
import { inArray } from "drizzle-orm"
import { EventSequenceTable } from "@/sync/event.sql"
import { Workspace } from "@/control-plane/workspace"
import type { WorkspaceID } from "@/control-plane/schema"
import * as Log from "@opencode-ai/core/util/log"
import { AppRuntime } from "@/effect/app-runtime"
import { Effect } from "effect"
export const HEADER = "x-opencode-sync"
export type State = Record<string, number>
const log = Log.create({ service: "fence" })
// Read persisted event sequence numbers from the EventSequence table; when
// `ids` is given, restrict the query to those aggregates. The result maps
// aggregate_id -> latest seq.
export function load(ids?: string[]) {
  const rows = Database.use((db) => {
    if (!ids?.length) {
      return db.select().from(EventSequenceTable).all()
    }
    return db.select().from(EventSequenceTable).where(inArray(EventSequenceTable.aggregate_id, ids)).all()
  })
  return Object.fromEntries(rows.map((row) => [row.aggregate_id, row.seq])) as State
}
/**
 * Entries of `next` whose sequence changed relative to `prev`. Ids missing
 * from a side count as seq -1, so ids that disappeared show up as -1 entries
 * and new ids are always reported.
 */
export function diff(prev: State, next: State) {
  const changed: State = {}
  const ids = new Set([...Object.keys(prev), ...Object.keys(next)])
  for (const id of ids) {
    const seq = next[id] ?? -1
    if ((prev[id] ?? -1) !== seq) changed[id] = seq
  }
  return changed
}
/**
 * Parse the sync fence header into a State map.
 * Returns undefined when the header is absent, is not valid JSON, or does not
 * decode to an object. Entries whose value is not an integer are dropped.
 */
export function parse(headers: Headers) {
  const raw = headers.get(HEADER)
  if (!raw) return
  let decoded: unknown
  try {
    decoded = JSON.parse(raw)
  } catch {
    return
  }
  if (!decoded || typeof decoded !== "object") return
  const valid = Object.entries(decoded).filter(([id, seq]) => typeof id === "string" && Number.isInteger(seq))
  return Object.fromEntries(valid) as State
}
/**
 * Effect that blocks until the workspace's event sequences catch up to
 * `state`. Logs before and after the wait so stalled syncs are visible.
 */
export function waitEffect(workspaceID: WorkspaceID, state: State, signal?: AbortSignal) {
  const detail = { workspaceID, state }
  return Effect.gen(function* () {
    log.info("waiting for state", detail)
    yield* Workspace.Service.use((svc) => svc.waitForSync(workspaceID, state, signal))
    log.info("state fully synced", detail)
  })
}
/** Promise-based convenience wrapper around waitEffect, run on the app runtime. */
export async function wait(workspaceID: WorkspaceID, state: State, signal?: AbortSignal) {
  const effect = waitEffect(workspaceID, state, signal)
  await AppRuntime.runPromise(effect)
}

View File

@@ -0,0 +1,28 @@
import z from "zod"
import { AsyncQueue } from "@/util/queue"
// Schema for a request the server forwards to the attached TUI: an API-style
// path plus an arbitrary JSON body.
export const TuiRequest = z.object({
  path: z.string(),
  body: z.any(),
})
export type TuiRequest = z.infer<typeof TuiRequest>
// Single in-memory queue pair bridging HTTP handlers and the TUI event loop:
// requests flow toward the TUI, responses flow back.
const request = new AsyncQueue<TuiRequest>()
const response = new AsyncQueue<unknown>()
/** Dequeue the next pending TUI-bound request (resolves when one arrives). */
export function nextTuiRequest() {
  return request.next()
}
/** Enqueue a request for the TUI to pick up. */
export function submitTuiRequest(body: TuiRequest) {
  request.push(body)
}
/** Enqueue the TUI's reply for the server side to consume. */
export function submitTuiResponse(body: unknown) {
  response.push(body)
}
/** Dequeue the next TUI response (resolves when one arrives). */
export function nextTuiResponse() {
  return response.next()
}

View File

@@ -0,0 +1,91 @@
import { Flag } from "@opencode-ai/core/flag/flag"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import { Effect, Stream } from "effect"
import { HttpBody, HttpClient, HttpClientRequest, HttpServerRequest, HttpServerResponse } from "effect/unstable/http"
import { createHash } from "node:crypto"
import { ProxyUtil } from "../proxy-util"
// Embedded web UI asset map generated at build time; resolves to null when the
// embed is disabled via flag or the generated module is absent (e.g. dev builds).
const embeddedUIPromise = Flag.OPENCODE_DISABLE_EMBEDDED_WEB_UI
  ? Promise.resolve(null)
  : // @ts-expect-error - generated file at build time
    import("opencode-web-ui.gen.ts").then((module) => module.default as Record<string, string>).catch(() => null)
// Content-Security-Policy for embedded HTML; identical to csp("") (no inline script hash).
export const DEFAULT_CSP =
  "default-src 'self'; script-src 'self' 'wasm-unsafe-eval'; style-src 'self' 'unsafe-inline'; img-src 'self' data: https:; font-src 'self' data:; media-src 'self' data:; connect-src 'self' data:"
// Hosted UI origin proxied to when no embedded UI is available.
export const UI_UPSTREAM = new URL("https://app.opencode.ai")
/**
 * Build the web UI Content-Security-Policy. When `hash` is given, one inline
 * script is allowed via its sha256 hash (used for the theme-preload script).
 */
export const csp = (hash = "") => {
  const script = hash
    ? `script-src 'self' 'wasm-unsafe-eval' 'sha256-${hash}'`
    : "script-src 'self' 'wasm-unsafe-eval'"
  return [
    "default-src 'self'",
    script,
    "style-src 'self' 'unsafe-inline'",
    "img-src 'self' data: https:",
    "font-src 'self' data:",
    "media-src 'self' data:",
    "connect-src 'self' data:",
  ].join("; ")
}
/**
 * Find the inline theme-preload script (id="oc-theme-preload-script", no src
 * attribute) in an HTML body. Match group 2 holds the script's source text,
 * which callers hash into the CSP. Returns null when no such script exists.
 */
export function themePreloadHash(body: string) {
  const inlineThemeScript =
    /<script\b(?![^>]*\bsrc\s*=)[^>]*\bid=(['"])oc-theme-preload-script\1[^>]*>([\s\S]*?)<\/script>/i
  return body.match(inlineThemeScript)
}
/**
 * Convert an incoming server request into an HttpBody suitable for proxying
 * upstream. GET/HEAD carry no body; everything else streams through with the
 * original content-type and (when present) content-length.
 */
function requestBody(request: HttpServerRequest.HttpServerRequest) {
  const method = request.method
  if (method === "GET" || method === "HEAD") return HttpBody.empty
  const length = request.headers["content-length"]
  const size = length === undefined ? undefined : Number(length)
  return HttpBody.stream(request.stream, request.headers["content-type"], size)
}
/**
 * Copy upstream response headers, dropping transfer metadata. FetchHttpClient
 * exposes decoded response bodies, so forwarding upstream content-encoding /
 * content-length would make browsers decode already-decoded assets again.
 */
function proxyResponseHeaders(headers: Record<string, string>) {
  const forwarded = new Headers(headers)
  for (const name of ["content-encoding", "content-length"]) forwarded.delete(name)
  return forwarded
}
/** Resolve a request path against the hosted web UI origin. */
export function upstreamURL(path: string) {
  const resolved = new URL(path, UI_UPSTREAM)
  return resolved.toString()
}
/**
 * Promise for the embedded UI asset map, or null when embedding is disabled.
 * Checks the flag again at call time (the module-load value is already baked
 * into embeddedUIPromise).
 */
export function embeddedUI() {
  return Flag.OPENCODE_DISABLE_EMBEDDED_WEB_UI ? Promise.resolve(null) : embeddedUIPromise
}
/**
 * Serve the web UI for a request: prefer build-time embedded assets, otherwise
 * reverse-proxy to UI_UPSTREAM. HTML responses always get a Content-Security-
 * Policy; proxied HTML additionally allows the inline theme-preload script by
 * hashing its body into the policy.
 */
export function serveUIEffect(
  request: HttpServerRequest.HttpServerRequest,
  services: { fs: AppFileSystem.Interface; client: HttpClient.HttpClient },
) {
  return Effect.gen(function* () {
    const embeddedWebUI = yield* Effect.promise(() => embeddedUI())
    // Base is irrelevant; only the pathname is used.
    const path = new URL(request.url, "http://localhost").pathname
    if (embeddedWebUI) {
      // SPA fallback: unknown paths resolve to index.html when present.
      const match = embeddedWebUI[path.replace(/^\//, "")] ?? embeddedWebUI["index.html"] ?? null
      if (!match) return HttpServerResponse.jsonUnsafe({ error: "Not Found" }, { status: 404 })
      if (yield* services.fs.existsSafe(match)) {
        const mime = AppFileSystem.mimeType(match)
        const headers = new Headers({ "content-type": mime })
        // Embedded HTML carries the static policy (no inline script hash needed).
        if (mime.startsWith("text/html")) headers.set("content-security-policy", DEFAULT_CSP)
        return HttpServerResponse.raw(yield* services.fs.readFile(match), { headers })
      }
      return HttpServerResponse.jsonUnsafe({ error: "Not Found" }, { status: 404 })
    }
    // No embedded assets: proxy the request upstream, rewriting the host header.
    const response = yield* services.client.execute(
      HttpClientRequest.make(request.method)(upstreamURL(path), {
        headers: ProxyUtil.headers(request.headers, { host: UI_UPSTREAM.host }),
        body: requestBody(request),
      }),
    )
    const headers = proxyResponseHeaders(response.headers)
    if (response.headers["content-type"]?.includes("text/html")) {
      // Buffer HTML so the inline theme-preload script can be hashed into the CSP.
      const body = yield* response.text
      const match = themePreloadHash(body)
      headers.set("Content-Security-Policy", csp(match ? createHash("sha256").update(match[2]).digest("base64") : ""))
      return HttpServerResponse.text(body, { status: response.status, headers })
    }
    headers.set("Content-Security-Policy", csp())
    // Non-HTML assets stream straight through; upstream stream failures end the
    // body quietly rather than erroring the response.
    return HttpServerResponse.stream(response.stream.pipe(Stream.catchCause(() => Stream.empty)), {
      status: response.status,
      headers,
    })
  })
}

View File

@@ -0,0 +1,36 @@
import { SessionID } from "@/session/schema"
// Routing rule: first match wins. `exact` restricts a rule to the literal
// path; otherwise the path and any subpath both match.
type Rule = { method?: string; path: string; exact?: boolean; action: "local" | "forward" }
const RULES: Array<Rule> = [
  { path: "/experimental/workspace", action: "local" },
  { path: "/session/status", action: "forward" },
  { method: "GET", path: "/session", action: "local" },
]
/**
 * Whether a request should be handled by the local workspace server rather
 * than forwarded. The first matching rule decides; unmatched requests are
 * forwarded (false).
 */
export function isLocalWorkspaceRoute(method: string, path: string) {
  const matched = RULES.find((rule) => {
    if (rule.method && rule.method !== method) return false
    if (rule.exact) return path === rule.path
    return path === rule.path || path.startsWith(rule.path + "/")
  })
  return matched?.action === "local"
}
/**
 * Extract the session id from `/session/:id/...` paths. `/session/status` is
 * a collection route, not a session, so it yields null.
 */
export function getWorkspaceRouteSessionID(url: URL) {
  if (url.pathname === "/session/status") return null
  const match = /^\/session\/([^/]+)(?:\/|$)/.exec(url.pathname)
  return match ? SessionID.make(match[1]) : null
}
/**
 * Rewrite a request URL onto a workspace proxy target: the target's base path
 * (minus any trailing slash) is prefixed to the request path, query and hash
 * carry over, and the internal `workspace` query parameter is stripped.
 */
export function workspaceProxyURL(target: string | URL, requestURL: URL) {
  const result = new URL(target)
  const base = result.pathname.endsWith("/") ? result.pathname.slice(0, -1) : result.pathname
  result.pathname = base + requestURL.pathname
  result.search = requestURL.search
  result.hash = requestURL.hash
  result.searchParams.delete("workspace")
  return result
}

View File

@@ -8,45 +8,10 @@ import { Flag } from "@opencode-ai/core/flag/flag"
import { AppRuntime } from "@/effect/app-runtime"
import { WithInstance } from "@/project/with-instance"
import { Session } from "@/session/session"
import { SessionID } from "@/session/schema"
import { Effect } from "effect"
import * as Log from "@opencode-ai/core/util/log"
import { ServerProxy } from "./proxy"
type Rule = { method?: string; path: string; exact?: boolean; action: "local" | "forward" }
const RULES: Array<Rule> = [
{ path: "/experimental/workspace", action: "local" },
{ path: "/session/status", action: "forward" },
{ method: "GET", path: "/session", action: "local" },
]
export function isLocalWorkspaceRoute(method: string, path: string) {
for (const rule of RULES) {
if (rule.method && rule.method !== method) continue
const match = rule.exact ? path === rule.path : path === rule.path || path.startsWith(rule.path + "/")
if (match) return rule.action === "local"
}
return false
}
export function getWorkspaceRouteSessionID(url: URL) {
if (url.pathname === "/session/status") return null
const id = url.pathname.match(/^\/session\/([^/]+)(?:\/|$)/)?.[1]
if (!id) return null
return SessionID.make(id)
}
export function workspaceProxyURL(target: string | URL, requestURL: URL) {
const proxyURL = new URL(target)
proxyURL.pathname = `${proxyURL.pathname.replace(/\/$/, "")}${requestURL.pathname}`
proxyURL.search = requestURL.search
proxyURL.hash = requestURL.hash
proxyURL.searchParams.delete("workspace")
return proxyURL
}
import { getWorkspaceRouteSessionID, isLocalWorkspaceRoute, workspaceProxyURL } from "./shared/workspace-routing"
async function getSessionWorkspace(url: URL) {
const id = getWorkspaceRouteSessionID(url)

View File

@@ -7,7 +7,19 @@ export function errorFormat(error: unknown): string {
if (typeof error === "object" && error !== null) {
try {
return JSON.stringify(error, null, 2)
const json = JSON.stringify(error, null, 2)
// Plain objects whose own properties are all non-enumerable (or empty)
// serialize to "{}", which prints as a useless bare `{}` on stderr.
// Fall back to a custom toString first, then to ctor name + own prop names.
if (json === "{}") {
const str = String(error)
if (str && str !== "[object Object]") return str
const ctor = error.constructor?.name
const prefix = ctor && ctor !== "Object" ? ctor : "Error"
const names = Object.getOwnPropertyNames(error)
return names.length === 0 ? `${prefix} (no message)` : `${prefix} { ${names.join(", ")} }`
}
return json
} catch {
return "Unexpected error (unserializable)"
}
@@ -34,7 +46,7 @@ export function errorMessage(error: unknown): string {
if (text && text !== "[object Object]") return text
const formatted = errorFormat(error)
if (formatted && formatted !== "{}") return formatted
if (formatted) return formatted
return "unknown error"
}
@@ -45,7 +57,7 @@ export function errorData(error: unknown) {
message: errorMessage(error),
stack: error.stack,
cause: error.cause === undefined ? undefined : errorFormat(error.cause),
formatted: errorFormatted(error),
formatted: errorFormat(error),
}
}
@@ -53,7 +65,7 @@ export function errorData(error: unknown) {
return {
type: typeof error,
message: errorMessage(error),
formatted: errorFormatted(error),
formatted: errorFormat(error),
}
}
@@ -71,12 +83,6 @@ export function errorData(error: unknown) {
if (typeof data.message !== "string") data.message = errorMessage(error)
if (typeof data.type !== "string") data.type = error.constructor?.name
data.formatted = errorFormatted(error)
data.formatted = errorFormat(error)
return data
}
function errorFormatted(error: unknown) {
const formatted = errorFormat(error)
if (formatted !== "{}") return formatted
return String(error)
}

View File

@@ -0,0 +1,48 @@
import { afterEach, expect, test } from "bun:test"
import { Effect } from "effect"
import fs from "fs/promises"
import { Instance } from "../../src/project/instance"
import { disposeAllInstances, provideTestInstance, tmpdir } from "../fixture/fixture"
afterEach(async () => {
await disposeAllInstances()
})
// Regression for PR #25522: when an effectCmd handler does
// `yield* Effect.promise(async () => { ... await runPromise(svcMethod) ... })`,
// the inner runPromise creates a fresh fiber after `await` whose Effect context
// has lost the outer InstanceRef. Services that read `InstanceState.context`
// then fall back to `Instance.current` ALS, which must be installed at the JS
// callback boundary (Node ALS persists across awaits, Effect's fiber context
// does not). `provideTestInstance` mirrors effectCmd's load + ALS-restore wrap.
// Pins effect-cmd.ts directly: the pattern test below exercises the load +
// Instance.restore + dispose triple via the shared `provideTestInstance` fixture,
// so a regression that removed `Instance.restore` from effect-cmd.ts wouldn't
// fail it. This grep guards the actual production callsite.
test("effect-cmd.ts wraps the handler body in Instance.restore", async () => {
const source = await fs.readFile(new URL("../../src/cli/effect-cmd.ts", import.meta.url), "utf8")
expect(source).toContain("Instance.restore(ctx")
})
test("Instance.current reachable from inner runPromise inside Effect.promise(async)", async () => {
await using dir = await tmpdir({ git: true })
await provideTestInstance({
directory: dir.path,
fn: () =>
Effect.runPromise(
Effect.promise(async () => {
await new Promise((r) => setTimeout(r, 5))
const current = await Effect.runPromise(
Effect.sync(() => {
try {
return Instance.current
} catch {
return undefined
}
}),
)
expect(current?.directory).toBe(dir.path)
}),
),
})
})

View File

@@ -627,3 +627,43 @@ test("merges plugin_enabled flags across config layers", async () => {
"local.plugin": true,
})
})
test("silently skips malformed tui.json — load failures degrade to {}", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
await Bun.write(path.join(dir, "tui.json"), '{ "theme": "broken",')
await Bun.write(path.join(dir, ".opencode", "tui.json"), JSON.stringify({ theme: "fallback" }))
},
})
const config = await getTuiConfig(tmp.path)
// Project tui.json is malformed → silently skipped (logs a warning)
// .opencode/tui.json (lower precedence in this path) still loads
expect(config.theme).toBe("fallback")
})
test("silently skips non-ENOENT read failures (e.g. tui.json is a directory) — fallback layer still loads", async () => {
await using tmp = await tmpdir({
init: async (dir) => {
// tui.json exists as a DIRECTORY rather than a file → readFileString fails
// with EISDIR (PlatformError reason ≠ NotFound). The fix in this PR routes
// that through catchCause → log + skip, so a fallback layer should still load.
await fs.mkdir(path.join(dir, "tui.json"), { recursive: true })
await Bun.write(path.join(dir, ".opencode", "tui.json"), JSON.stringify({ theme: "fallback" }))
},
})
const config = await getTuiConfig(tmp.path)
// Did NOT crash; .opencode/tui.json (lower precedence) still loads.
expect(config.theme).toBe("fallback")
})
test("missing tui.json — silently treated as empty (ENOENT path)", async () => {
await using tmp = await tmpdir({})
// No tui.json anywhere. Should not throw.
const config = await getTuiConfig(tmp.path)
expect(config).toBeDefined()
// No theme set anywhere.
expect(config.theme).toBeUndefined()
})

View File

@@ -114,6 +114,53 @@ describe("Git", () => {
})
})
test("patch() returns capped native patch output", async () => {
await using tmp = await tmpdir({ git: true })
await fs.writeFile(path.join(tmp.path, weird), "before\n", "utf-8")
await fs.writeFile(path.join(tmp.path, "other.txt"), "old\n", "utf-8")
await $`git add .`.cwd(tmp.path).quiet()
await $`git commit --no-gpg-sign -m "add file"`.cwd(tmp.path).quiet()
await fs.writeFile(path.join(tmp.path, weird), "after\n", "utf-8")
await fs.writeFile(path.join(tmp.path, "other.txt"), "new\n", "utf-8")
await withGit(async (rt) => {
const [patch, all, capped] = await Promise.all([
rt.runPromise(Git.Service.use((git) => git.patch(tmp.path, "HEAD", weird, { context: 2_147_483_647 }))),
rt.runPromise(Git.Service.use((git) => git.patchAll(tmp.path, "HEAD", { context: 2_147_483_647 }))),
rt.runPromise(Git.Service.use((git) => git.patch(tmp.path, "HEAD", weird, { maxOutputBytes: 1 }))),
])
expect(patch.truncated).toBe(false)
expect(patch.text).toContain("diff --git")
expect(patch.text).toContain("-before")
expect(patch.text).toContain("+after")
expect(all.truncated).toBe(false)
expect(all.text).toContain("diff --git")
expect(all.text).toContain("other.txt")
expect(all.text).toContain("+new")
expect(capped.truncated).toBe(true)
expect(capped.text).toBe("")
})
})
test("patchUntracked() and statUntracked() handle added files", async () => {
await using tmp = await tmpdir({ git: true })
await fs.writeFile(path.join(tmp.path, weird), "one\ntwo\n", "utf-8")
await withGit(async (rt) => {
const [patch, stat] = await Promise.all([
rt.runPromise(Git.Service.use((git) => git.patchUntracked(tmp.path, weird, { context: 2_147_483_647 }))),
rt.runPromise(Git.Service.use((git) => git.statUntracked(tmp.path, weird))),
])
expect(patch.truncated).toBe(false)
expect(patch.text).toContain("diff --git")
expect(patch.text).toContain("+one")
expect(patch.text).toContain("+two")
expect(stat).toEqual(expect.objectContaining({ file: weird, additions: 2, deletions: 0 }))
})
})
test("show() returns empty text for binary blobs", async () => {
await using tmp = await tmpdir({ git: true })
await fs.writeFile(path.join(tmp.path, "bin.dat"), new Uint8Array([0, 1, 2, 3]))

View File

@@ -234,6 +234,7 @@ describe("Vcs diff", () => {
}),
]),
)
expect(diff.find((item) => item.file === "file.txt")?.patch).toContain("diff --git")
})
})
@@ -259,6 +260,34 @@ describe("Vcs diff", () => {
})
})
test("diff('git') keeps batched patches aligned for type changes", async () => {
if (process.platform === "win32") return
await using tmp = await tmpdir({ git: true })
await fs.writeFile(path.join(tmp.path, "a.txt"), "old\n", "utf-8")
await fs.writeFile(path.join(tmp.path, "b.txt"), "old\n", "utf-8")
await $`git add .`.cwd(tmp.path).quiet()
await $`git commit --no-gpg-sign -m "add files"`.cwd(tmp.path).quiet()
await fs.unlink(path.join(tmp.path, "a.txt"))
await fs.symlink("target", path.join(tmp.path, "a.txt"))
await fs.writeFile(path.join(tmp.path, "b.txt"), "new\n", "utf-8")
await withVcsOnly(tmp.path, async () => {
const diff = await AppRuntime.runPromise(
Effect.gen(function* () {
const vcs = yield* Vcs.Service
return yield* vcs.diff("git")
}),
)
const a = diff.find((item) => item.file === "a.txt")
const b = diff.find((item) => item.file === "b.txt")
expect(a?.patch).toContain("deleted file mode")
expect(a?.patch).toContain("new file mode")
expect(b?.patch).toContain("+new")
})
})
test("diff('branch') returns changes against default branch", async () => {
await using tmp = await tmpdir({ git: true })
await $`git branch -M main`.cwd(tmp.path).quiet()

View File

@@ -0,0 +1,59 @@
import { afterEach, describe, expect, test } from "bun:test"
import { Option, Redacted } from "effect"
import { Flag } from "@opencode-ai/core/flag/flag"
import { ServerAuth } from "../../src/server/auth"
const original = {
OPENCODE_SERVER_PASSWORD: Flag.OPENCODE_SERVER_PASSWORD,
OPENCODE_SERVER_USERNAME: Flag.OPENCODE_SERVER_USERNAME,
}
afterEach(() => {
Flag.OPENCODE_SERVER_PASSWORD = original.OPENCODE_SERVER_PASSWORD
Flag.OPENCODE_SERVER_USERNAME = original.OPENCODE_SERVER_USERNAME
})
describe("ServerAuth", () => {
test("does not emit auth headers without a password", () => {
Flag.OPENCODE_SERVER_PASSWORD = undefined
Flag.OPENCODE_SERVER_USERNAME = "alice"
expect(ServerAuth.header()).toBeUndefined()
expect(ServerAuth.headers()).toBeUndefined()
})
test("defaults to the opencode username", () => {
Flag.OPENCODE_SERVER_PASSWORD = "secret"
Flag.OPENCODE_SERVER_USERNAME = undefined
expect(ServerAuth.headers()).toEqual({
Authorization: `Basic ${Buffer.from("opencode:secret").toString("base64")}`,
})
})
test("uses the configured username", () => {
Flag.OPENCODE_SERVER_PASSWORD = "secret"
Flag.OPENCODE_SERVER_USERNAME = "alice"
expect(ServerAuth.headers()).toEqual({
Authorization: `Basic ${Buffer.from("alice:secret").toString("base64")}`,
})
})
test("prefers explicit credentials", () => {
Flag.OPENCODE_SERVER_PASSWORD = "secret"
Flag.OPENCODE_SERVER_USERNAME = "alice"
expect(ServerAuth.headers({ password: "cli-secret", username: "bob" })).toEqual({
Authorization: `Basic ${Buffer.from("bob:cli-secret").toString("base64")}`,
})
})
test("validates decoded credentials against effect config", () => {
const config = { password: Option.some("secret"), username: "alice" }
expect(ServerAuth.required(config)).toBe(true)
expect(ServerAuth.authorized({ username: "alice", password: Redacted.make("secret") }, config)).toBe(true)
expect(ServerAuth.authorized({ username: "opencode", password: Redacted.make("secret") }, config)).toBe(false)
})
})

View File

@@ -3,11 +3,8 @@ import { describe, expect } from "bun:test"
import { Effect, Layer, Option, Schema } from "effect"
import { HttpClient, HttpClientRequest, HttpRouter } from "effect/unstable/http"
import { HttpApi, HttpApiBuilder, HttpApiEndpoint, HttpApiGroup } from "effect/unstable/httpapi"
import {
Authorization,
ServerAuthConfig,
authorizationLayer,
} from "../../src/server/routes/instance/httpapi/middleware/authorization"
import { ServerAuth } from "../../src/server/auth"
import { Authorization, authorizationLayer } from "../../src/server/routes/instance/httpapi/middleware/authorization"
import { testEffect } from "../lib/effect"
const Api = HttpApi.make("test-authorization").add(
@@ -27,9 +24,9 @@ const apiLayer = HttpRouter.serve(
{ disableListenLog: true, disableLogger: true },
).pipe(Layer.provideMerge(NodeHttpServer.layerTest))
const noAuthLayer = ServerAuthConfig.layer({ password: Option.none(), username: "opencode" })
const secretLayer = ServerAuthConfig.layer({ password: Option.some("secret"), username: "opencode" })
const kitSecretLayer = ServerAuthConfig.layer({ password: Option.some("secret"), username: "kit" })
const noAuthLayer = ServerAuth.Config.layer({ password: Option.none(), username: "opencode" })
const secretLayer = ServerAuth.Config.layer({ password: Option.some("secret"), username: "opencode" })
const kitSecretLayer = ServerAuth.Config.layer({ password: Option.some("secret"), username: "kit" })
const it = testEffect(apiLayer.pipe(Layer.provide(noAuthLayer)))
const itSecret = testEffect(apiLayer.pipe(Layer.provide(secretLayer)))

View File

@@ -222,7 +222,7 @@ describe("HttpApi server", () => {
})
test("covers every generated OpenAPI route with Effect HttpApi contracts", async () => {
const honoRoutes = openApiRouteKeys(await Server.openapi())
const honoRoutes = openApiRouteKeys(await Server.openapiHono())
const effectRoutes = openApiRouteKeys(effectOpenApi())
expect(honoRoutes.filter((route) => !effectRoutes.includes(route))).toEqual([])
@@ -237,7 +237,7 @@ describe("HttpApi server", () => {
})
test("matches generated OpenAPI route parameters", async () => {
const hono = openApiParameters(await Server.openapi())
const hono = openApiParameters(await Server.openapiHono())
const effect = openApiParameters(effectOpenApi())
expect(
@@ -248,7 +248,7 @@ describe("HttpApi server", () => {
})
test("matches generated OpenAPI request body shape", async () => {
const hono = openApiRequestBodies(await Server.openapi())
const hono = openApiRequestBodies(await Server.openapiHono())
const effect = openApiRequestBodies(effectOpenApi())
expect(

View File

@@ -0,0 +1,109 @@
import { afterEach, describe, expect, test } from "bun:test"
import { Flag } from "@opencode-ai/core/flag/flag"
import * as Log from "@opencode-ai/core/util/log"
import { resetDatabase } from "../fixture/db"
import { disposeAllInstances, tmpdir } from "../fixture/fixture"
import { HttpApiListener } from "../../src/server/httpapi-listener"
import { PtyPaths } from "../../src/server/routes/instance/httpapi/groups/pty"
void Log.init({ print: false })
const original = Flag.OPENCODE_EXPERIMENTAL_HTTPAPI
const testPty = process.platform === "win32" ? test.skip : test
afterEach(async () => {
Flag.OPENCODE_EXPERIMENTAL_HTTPAPI = original
await disposeAllInstances()
await resetDatabase()
})
async function startListener() {
Flag.OPENCODE_EXPERIMENTAL_HTTPAPI = true
return HttpApiListener.listen({ hostname: "127.0.0.1", port: 0 })
}
describe("native HttpApi listener", () => {
test("serves HTTP routes via the HttpApi web handler", async () => {
await using tmp = await tmpdir({ git: true, config: { formatter: false, lsp: false } })
const listener = await startListener()
try {
const response = await fetch(`${listener.url.origin}${PtyPaths.shells}`, {
headers: { "x-opencode-directory": tmp.path },
})
expect(response.status).toBe(200)
const body = await response.json()
expect(Array.isArray(body)).toBe(true)
expect(body[0]).toMatchObject({
path: expect.any(String),
name: expect.any(String),
acceptable: expect.any(Boolean),
})
} finally {
await listener.stop(true)
}
})
testPty("PTY websocket connect echoes input back to the client", async () => {
await using tmp = await tmpdir({ git: true, config: { formatter: false, lsp: false } })
const listener = await startListener()
try {
const created = await fetch(`${listener.url.origin}${PtyPaths.create}`, {
method: "POST",
headers: {
"x-opencode-directory": tmp.path,
"content-type": "application/json",
},
body: JSON.stringify({ command: "/bin/cat", title: "listener-smoke" }),
})
expect(created.status).toBe(200)
const info = (await created.json()) as { id: string }
try {
const wsURL = new URL(PtyPaths.connect.replace(":ptyID", info.id), listener.url)
wsURL.protocol = "ws:"
wsURL.searchParams.set("directory", tmp.path)
wsURL.searchParams.set("cursor", "-1")
const messages: string[] = []
const ws = new WebSocket(wsURL)
ws.binaryType = "arraybuffer"
const opened = new Promise<void>((resolve, reject) => {
ws.addEventListener("open", () => resolve(), { once: true })
ws.addEventListener("error", () => reject(new Error("ws error before open")), { once: true })
})
const closed = new Promise<void>((resolve) => {
ws.addEventListener("close", () => resolve(), { once: true })
})
ws.addEventListener("message", (event) => {
const data = event.data
messages.push(typeof data === "string" ? data : new TextDecoder().decode(data as ArrayBuffer))
})
await opened
ws.send("ping-listener\n")
const start = Date.now()
while (!messages.some((m) => m.includes("ping-listener")) && Date.now() - start < 5_000) {
await new Promise((r) => setTimeout(r, 50))
}
ws.close(1000, "done")
expect(messages.some((m) => m.includes("ping-listener"))).toBe(true)
// Verify close event fires (handler.onClose path runs and the
// Bun.serve websocket lifecycle reaches close).
await closed
expect(ws.readyState).toBe(WebSocket.CLOSED)
} finally {
await fetch(`${listener.url.origin}${PtyPaths.remove.replace(":ptyID", info.id)}`, {
method: "DELETE",
headers: { "x-opencode-directory": tmp.path },
}).catch(() => undefined)
}
} finally {
await listener.stop(true)
}
})
})

View File

@@ -46,7 +46,7 @@ afterEach(async () => {
describe("tui HttpApi bridge", () => {
test("documents legacy bad request responses", async () => {
const legacy = await Server.openapi()
const legacy = await Server.openapiHono()
const effect = OpenApi.fromApi(TuiApi)
for (const path of [TuiPaths.appendPrompt, TuiPaths.executeCommand, TuiPaths.publish, TuiPaths.selectSession]) {
expect(legacy.paths[path].post?.responses?.[400]).toBeDefined()

View File

@@ -12,12 +12,10 @@ import {
HttpServerResponse,
} from "effect/unstable/http"
import { AppFileSystem } from "@opencode-ai/core/filesystem"
import {
ServerAuthConfig,
authorizationRouterMiddleware,
} from "../../src/server/routes/instance/httpapi/middleware/authorization"
import { ServerAuth } from "../../src/server/auth"
import { authorizationRouterMiddleware } from "../../src/server/routes/instance/httpapi/middleware/authorization"
import { ExperimentalHttpApiServer } from "../../src/server/routes/instance/httpapi/server"
import { serveUIEffect } from "../../src/server/routes/ui"
import { serveUIEffect } from "../../src/server/shared/ui"
import { Server } from "../../src/server/server"
void Log.init({ print: false })
@@ -81,7 +79,7 @@ function uiApp(input?: { password?: string; username?: string; client?: Layer.La
yield* router.add("*", "/*", (request) => serveUIEffect(request, { fs, client }))
}),
).pipe(
Layer.provide(authorizationRouterMiddleware.layer.pipe(Layer.provide(ServerAuthConfig.defaultLayer))),
Layer.provide(authorizationRouterMiddleware.layer.pipe(Layer.provide(ServerAuth.Config.defaultLayer))),
Layer.provide([
AppFileSystem.defaultLayer,
input?.client ?? httpClient(new Response("ui")),
@@ -201,6 +199,7 @@ describe("HttpApi UI fallback", () => {
const response = await uiApp({ password: "secret", username: "opencode" }).request("/")
expect(response.status).toBe(401)
expect(response.headers.get("www-authenticate")).toBe('Basic realm="Secure Area"')
})
test("accepts auth token for the web UI", async () => {

View File

@@ -1,5 +1,9 @@
import { describe, expect, test } from "bun:test"
import { isLocalWorkspaceRoute, getWorkspaceRouteSessionID, workspaceProxyURL } from "../../src/server/workspace"
import {
isLocalWorkspaceRoute,
getWorkspaceRouteSessionID,
workspaceProxyURL,
} from "../../src/server/shared/workspace-routing"
import { SessionID } from "../../src/session/schema"
describe("isLocalWorkspaceRoute", () => {

View File

@@ -22,6 +22,19 @@ describe("util.error", () => {
expect(data.code).toBe("E_BAD")
})
test("never returns bare {} for opaque object errors", () => {
// Plain empty object — what the SDK threw before we wrapped it.
expect(errorFormat({})).not.toBe("{}")
expect(errorFormat({})).toContain("no message")
// Object with only non-enumerable own properties (JSON.stringify drops them).
class OpaqueError {}
const opaque = new OpaqueError()
Object.defineProperty(opaque, "secret", { value: "hidden", enumerable: false })
expect(errorFormat(opaque)).not.toBe("{}")
expect(errorFormat(opaque)).toContain("OpaqueError")
})
test("handles opaque throwables with custom toString", () => {
const err = {
toString() {

View File

@@ -12,10 +12,12 @@ import { createClient } from "@hey-api/openapi-ts"
const openapiSource = process.env.OPENCODE_SDK_OPENAPI === "hono" ? "hono" : "httpapi"
const opencode = path.resolve(dir, "../../opencode")
// `bun dev generate` now derives the spec from the Effect HttpApi contract by
// default; pass `--hono` to fall back to the legacy Hono spec for parity diffs.
if (openapiSource === "httpapi") {
await $`bun dev generate --httpapi > ${dir}/openapi.json`.cwd(opencode)
} else {
await $`bun dev generate > ${dir}/openapi.json`.cwd(opencode)
} else {
await $`bun dev generate --hono > ${dir}/openapi.json`.cwd(opencode)
}
await createClient({

View File

@@ -84,5 +84,24 @@ export function createOpencodeClient(config?: Config & { directory?: string; exp
return response
})
// The generated client falls back to throwing a literal `{}` when the server
// responds with an empty / unparseable error body, which surfaces as a bare
// `{}` in TUI / CLI error output. Wrap ONLY that case in a real Error so
// downstream formatters get a useful message — but pass through any parsed
// JSON error body unchanged so existing consumers can still inspect fields.
client.interceptors.error.use((error, response, request) => {
  // Only rewrite the "empty error" case; parsed JSON error bodies pass
  // through untouched so existing consumers can still inspect their fields.
  const isEmpty =
    error === undefined ||
    error === null ||
    error === "" ||
    (typeof error === "object" && !(error instanceof Error) && Object.keys(error).length === 0)
  if (!isEmpty) return error
  const method = request?.method ?? "?"
  const url = request?.url ?? "?"
  if (!response) return new Error(`opencode server ${method} ${url}: network error (no response)`)
  const status = response.status
  const statusText = response.statusText ? " " + response.statusText : ""
  // Fix: separate the URL from the status code (previously `${url}${status}`
  // printed e.g. "…/path500 Internal Server Error"), matching the `url: …`
  // shape of the no-response branch above.
  return new Error(`opencode server ${method} ${url}: ${status}${statusText} (empty response body)`)
})
return new OpencodeClient({ client })
}

File diff suppressed because it is too large Load Diff

View File

@@ -82,6 +82,11 @@ function section(areas: Set<string>) {
return "Core"
}
/**
 * Classify a commit message for the changelog. NOTE(review): matches any
 * occurrence of "fix" (so "prefix"/"bugfixes" also classify as Bugfixes) —
 * mirrors the original loose matching.
 */
function type(message: string) {
  return /fix/i.test(message) ? "Bugfixes" : "Improvements"
}
function reverted(commits: Commit[]) {
const seen = new Map<string, Commit>()
@@ -193,13 +198,20 @@ async function thanks(from: string, to: string, reuse: boolean) {
}
function format(from: string, to: string, list: Commit[], thanks: string[]) {
const grouped = new Map<string, string[]>()
for (const title of order) grouped.set(title, [])
const grouped = new Map<string, Map<string, string[]>>()
for (const title of order) {
grouped.set(
title,
new Map([
["Improvements", []],
["Bugfixes", []],
]),
)
}
for (const commit of list) {
const title = section(commit.areas)
const attr = commit.author && !team.includes(commit.author) ? ` (@${commit.author})` : ""
grouped.get(title)!.push(`- \`${commit.hash}\` ${commit.message}${attr}`)
grouped.get(section(commit.areas))!.get(type(commit.message))!.push(`- \`${commit.hash}\` ${commit.message}${attr}`)
}
const lines = [`Last release: ${ref(from)}`, `Target ref: ${to}`, ""]
@@ -209,11 +221,23 @@ function format(from: string, to: string, list: Commit[], thanks: string[]) {
}
for (const title of order) {
const entries = grouped.get(title)
if (!entries || entries.length === 0) continue
const groups = grouped.get(title)
if (!groups || [...groups.values()].every((entries) => entries.length === 0)) continue
lines.push(`## ${title}`)
lines.push(...entries)
lines.push("")
const improvements = groups.get("Improvements")!
const bugfixes = groups.get("Bugfixes")!
if (bugfixes.length === 0) {
lines.push(...improvements)
lines.push("")
continue
}
for (const [subtitle, entries] of groups) {
if (entries.length === 0) continue
lines.push(`### ${subtitle}`)
lines.push(...entries)
lines.push("")
}
}
if (thanks.length > 0) {