mirror of
https://fastgit.cc/https://github.com/anomalyco/opencode
synced 2026-05-05 16:20:21 +08:00
Compare commits
85 Commits
kit/httpap
...
kit/delete
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cf729039b4 | ||
|
|
61ae4b3df3 | ||
|
|
c7452c869b | ||
|
|
1013ee6c35 | ||
|
|
158d089d6c | ||
|
|
1d63f4b22f | ||
|
|
b631cb4634 | ||
|
|
4e83fe886c | ||
|
|
2d0a757eb2 | ||
|
|
75d141b574 | ||
|
|
39c88f9afb | ||
|
|
0df2bb0f3b | ||
|
|
f6a3615f59 | ||
|
|
edd480f56b | ||
|
|
2740d398fa | ||
|
|
f33b17e8ac | ||
|
|
22a4a9df8b | ||
|
|
84afd2bef8 | ||
|
|
ca2411d332 | ||
|
|
6b852774e1 | ||
|
|
f14784d531 | ||
|
|
6a5e329427 | ||
|
|
4b65b1e053 | ||
|
|
d431a0e4b4 | ||
|
|
5720883d5d | ||
|
|
007b57f078 | ||
|
|
fb07c2070c | ||
|
|
25dc6f09bc | ||
|
|
b70e2700ef | ||
|
|
1aed6b1d8b | ||
|
|
c1f607d206 | ||
|
|
2c819f290f | ||
|
|
6e9f10ad3f | ||
|
|
1251a870cb | ||
|
|
67047fa766 | ||
|
|
a366128a93 | ||
|
|
9f708e748a | ||
|
|
7bc26dafae | ||
|
|
ce89bcb8e2 | ||
|
|
c2b1974ddd | ||
|
|
ca6150d6f0 | ||
|
|
825ab2e38d | ||
|
|
755cd561ec | ||
|
|
6312c55d55 | ||
|
|
a9dc0fae3d | ||
|
|
7749d8e85f | ||
|
|
28112fbd12 | ||
|
|
387220f368 | ||
|
|
adb7cb1037 | ||
|
|
c06af70ab0 | ||
|
|
40dc2fa3c1 | ||
|
|
df7dd06a0f | ||
|
|
57d5c095d8 | ||
|
|
13ac849db5 | ||
|
|
8694c5b68f | ||
|
|
0a7d02c87c | ||
|
|
e77867ef05 | ||
|
|
fb224d8974 | ||
|
|
101566131d | ||
|
|
8433e8b433 | ||
|
|
379600b5ab | ||
|
|
7a503de606 | ||
|
|
2ad1eb56d3 | ||
|
|
a43f767abb | ||
|
|
0ee3b87289 | ||
|
|
3c9f3c5786 | ||
|
|
ca75ac6681 | ||
|
|
d1f597b5b5 | ||
|
|
8299fb3e2b | ||
|
|
4f7f90133d | ||
|
|
b205e104f6 | ||
|
|
252e2f98e6 | ||
|
|
e2afdc1202 | ||
|
|
a08e4c9651 | ||
|
|
7ccab8d272 | ||
|
|
fc57eb3b8e | ||
|
|
9179bafd54 | ||
|
|
2df8eda8a3 | ||
|
|
bd32252a7e | ||
|
|
1717d636a2 | ||
|
|
8e016b4703 | ||
|
|
b89d48a2a4 | ||
|
|
33312bfd1b | ||
|
|
3f1ce36418 | ||
|
|
0e13279545 |
1
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
1
.github/ISSUE_TEMPLATE/bug-report.yml
vendored
@@ -1,6 +1,5 @@
|
||||
name: Bug report
|
||||
description: Report an issue that should be fixed
|
||||
labels: ["bug"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: description
|
||||
|
||||
1
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
1
.github/ISSUE_TEMPLATE/feature-request.yml
vendored
@@ -1,6 +1,5 @@
|
||||
name: 🚀 Feature Request
|
||||
description: Suggest an idea, feature, or enhancement
|
||||
labels: [discussion]
|
||||
title: "[FEATURE]:"
|
||||
|
||||
body:
|
||||
|
||||
1
.github/ISSUE_TEMPLATE/question.yml
vendored
1
.github/ISSUE_TEMPLATE/question.yml
vendored
@@ -1,6 +1,5 @@
|
||||
name: Question
|
||||
description: Ask a question
|
||||
labels: ["question"]
|
||||
body:
|
||||
- type: textarea
|
||||
id: question
|
||||
|
||||
1
.github/TEAM_MEMBERS
vendored
1
.github/TEAM_MEMBERS
vendored
@@ -11,6 +11,5 @@ MrMushrooooom
|
||||
nexxeln
|
||||
R44VC0RP
|
||||
rekram1-node
|
||||
RhysSullivan
|
||||
thdxr
|
||||
simonklee
|
||||
|
||||
41
.github/VOUCHED.td
vendored
41
.github/VOUCHED.td
vendored
@@ -1,41 +0,0 @@
|
||||
# Vouched contributors for this project.
|
||||
#
|
||||
# See https://github.com/mitchellh/vouch for details.
|
||||
#
|
||||
# Syntax:
|
||||
# - One handle per line (without @), sorted alphabetically.
|
||||
# - Optional platform prefix: platform:username (e.g., github:user).
|
||||
# - Denounce with minus prefix: -username or -platform:username.
|
||||
# - Optional details after a space following the handle.
|
||||
adamdotdevin
|
||||
-agusbasari29 AI PR slop
|
||||
ariane-emory
|
||||
-atharvau AI review spamming literally every PR
|
||||
-borealbytes
|
||||
-carycooper777
|
||||
-danieljoshuanazareth
|
||||
-danieljoshuanazareth
|
||||
-davidbernat looks to be a clawdbot that spams team and sends super weird emails, doesnt appear to be a real person
|
||||
dmtrkovalenko
|
||||
edemaine
|
||||
fahreddinozcan
|
||||
-florianleibert
|
||||
fwang
|
||||
iamdavidhill
|
||||
jayair
|
||||
kitlangton
|
||||
kommander
|
||||
-opencode2026
|
||||
-opencodeengineer bot that spams issues
|
||||
r44vc0rp
|
||||
rekram1-node
|
||||
-ricardo-m-l
|
||||
-robinmordasiewicz
|
||||
rubdos
|
||||
-saisharan0103 spamming ai prs
|
||||
shantur
|
||||
simonklee
|
||||
-spider-yamet clawdbot/llm psychosis, spam pinging the team
|
||||
-terisuke
|
||||
thdxr
|
||||
-toastythebot
|
||||
170
.github/workflows/daily-issues-recap.yml
vendored
170
.github/workflows/daily-issues-recap.yml
vendored
@@ -1,170 +0,0 @@
|
||||
name: daily-issues-recap
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run at 6 PM EST (23:00 UTC, or 22:00 UTC during daylight saving)
|
||||
- cron: "0 23 * * *"
|
||||
workflow_dispatch: # Allow manual trigger for testing
|
||||
|
||||
jobs:
|
||||
daily-recap:
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
permissions:
|
||||
contents: read
|
||||
issues: read
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- uses: ./.github/actions/setup-bun
|
||||
|
||||
- name: Install opencode
|
||||
run: curl -fsSL https://opencode.ai/install | bash
|
||||
|
||||
- name: Generate daily issues recap
|
||||
id: recap
|
||||
env:
|
||||
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
OPENCODE_PERMISSION: |
|
||||
{
|
||||
"bash": {
|
||||
"*": "deny",
|
||||
"gh issue*": "allow",
|
||||
"gh search*": "allow"
|
||||
},
|
||||
"webfetch": "deny",
|
||||
"edit": "deny",
|
||||
"write": "deny"
|
||||
}
|
||||
run: |
|
||||
# Get today's date range
|
||||
TODAY=$(date -u +%Y-%m-%d)
|
||||
|
||||
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily issues recap for the OpenCode repository.
|
||||
|
||||
TODAY'S DATE: ${TODAY}
|
||||
|
||||
STEP 1: Gather today's issues
|
||||
Search for all OPEN issues created today (${TODAY}) using:
|
||||
gh issue list --repo ${{ github.repository }} --state open --search \"created:${TODAY}\" --json number,title,body,labels,state,comments,createdAt,author --limit 500
|
||||
|
||||
IMPORTANT: EXCLUDE all issues authored by Anomaly team members. Filter out issues where the author login matches ANY of these:
|
||||
adamdotdevin, Brendonovich, fwang, Hona, iamdavidhill, jayair, kitlangton, kommander, MrMushrooooom, R44VC0RP, rekram1-node, thdxr
|
||||
This recap is specifically for COMMUNITY (external) issues only.
|
||||
|
||||
STEP 2: Analyze and categorize
|
||||
For each issue created today, categorize it:
|
||||
|
||||
**Severity Assessment:**
|
||||
- CRITICAL: Crashes, data loss, security issues, blocks major functionality
|
||||
- HIGH: Significant bugs affecting many users, important features broken
|
||||
- MEDIUM: Bugs with workarounds, minor features broken
|
||||
- LOW: Minor issues, cosmetic, nice-to-haves
|
||||
|
||||
**Activity Assessment:**
|
||||
- Note issues with high comment counts or engagement
|
||||
- Note issues from repeat reporters (check if author has filed before)
|
||||
|
||||
STEP 3: Cross-reference with existing issues
|
||||
For issues that seem like feature requests or recurring bugs:
|
||||
- Search for similar older issues to identify patterns
|
||||
- Note if this is a frequently requested feature
|
||||
- Identify any issues that are duplicates of long-standing requests
|
||||
|
||||
STEP 4: Generate the recap
|
||||
Create a structured recap with these sections:
|
||||
|
||||
===DISCORD_START===
|
||||
**Daily Issues Recap - ${TODAY}**
|
||||
|
||||
**Summary Stats**
|
||||
- Total issues opened today: [count]
|
||||
- By category: [bugs/features/questions]
|
||||
|
||||
**Critical/High Priority Issues**
|
||||
[List any CRITICAL or HIGH severity issues with brief descriptions and issue numbers]
|
||||
|
||||
**Most Active/Discussed**
|
||||
[Issues with significant engagement or from active community members]
|
||||
|
||||
**Trending Topics**
|
||||
[Patterns noticed - e.g., 'Multiple reports about X', 'Continued interest in Y feature']
|
||||
|
||||
**Duplicates & Related**
|
||||
[Issues that relate to existing open issues]
|
||||
===DISCORD_END===
|
||||
|
||||
STEP 5: Format for Discord
|
||||
Format the recap as a Discord-compatible message:
|
||||
- Use Discord markdown (**, __, etc.)
|
||||
- BE EXTREMELY CONCISE - this is an EOD summary, not a detailed report
|
||||
- Use hyperlinked issue numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/issues/1234>)
|
||||
- Group related issues on single lines where possible
|
||||
- Add emoji sparingly for critical items only
|
||||
- HARD LIMIT: Keep under 1800 characters total
|
||||
- Skip sections that have nothing notable (e.g., if no critical issues, omit that section)
|
||||
- Prioritize signal over completeness - only surface what matters
|
||||
|
||||
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/recap_raw.txt
|
||||
|
||||
# Extract only the Discord message between markers
|
||||
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/recap_raw.txt | grep -v '===DISCORD' > /tmp/recap.txt
|
||||
|
||||
echo "recap_file=/tmp/recap.txt" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Post to Discord
|
||||
env:
|
||||
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
|
||||
run: |
|
||||
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
|
||||
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
|
||||
cat /tmp/recap.txt
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Read the recap
|
||||
RECAP_RAW=$(cat /tmp/recap.txt)
|
||||
RECAP_LENGTH=${#RECAP_RAW}
|
||||
|
||||
echo "Recap length: ${RECAP_LENGTH} chars"
|
||||
|
||||
# Function to post a message to Discord
|
||||
post_to_discord() {
|
||||
local msg="$1"
|
||||
local content=$(echo "$msg" | jq -Rs '.')
|
||||
curl -s -H "Content-Type: application/json" \
|
||||
-X POST \
|
||||
-d "{\"content\": ${content}}" \
|
||||
"$DISCORD_WEBHOOK_URL"
|
||||
sleep 1
|
||||
}
|
||||
|
||||
# If under limit, send as single message
|
||||
if [ "$RECAP_LENGTH" -le 1950 ]; then
|
||||
post_to_discord "$RECAP_RAW"
|
||||
else
|
||||
echo "Splitting into multiple messages..."
|
||||
remaining="$RECAP_RAW"
|
||||
while [ ${#remaining} -gt 0 ]; do
|
||||
if [ ${#remaining} -le 1950 ]; then
|
||||
post_to_discord "$remaining"
|
||||
break
|
||||
else
|
||||
chunk="${remaining:0:1900}"
|
||||
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
|
||||
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
|
||||
chunk="${remaining:0:$last_newline}"
|
||||
remaining="${remaining:$((last_newline+1))}"
|
||||
else
|
||||
chunk="${remaining:0:1900}"
|
||||
remaining="${remaining:1900}"
|
||||
fi
|
||||
post_to_discord "$chunk"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
echo "Posted daily recap to Discord"
|
||||
173
.github/workflows/daily-pr-recap.yml
vendored
173
.github/workflows/daily-pr-recap.yml
vendored
@@ -1,173 +0,0 @@
|
||||
name: daily-pr-recap
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Run at 5pm EST (22:00 UTC, or 21:00 UTC during daylight saving)
|
||||
- cron: "0 22 * * *"
|
||||
workflow_dispatch: # Allow manual trigger for testing
|
||||
|
||||
jobs:
|
||||
pr-recap:
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
permissions:
|
||||
contents: read
|
||||
pull-requests: read
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4
|
||||
with:
|
||||
fetch-depth: 1
|
||||
|
||||
- uses: ./.github/actions/setup-bun
|
||||
|
||||
- name: Install opencode
|
||||
run: curl -fsSL https://opencode.ai/install | bash
|
||||
|
||||
- name: Generate daily PR recap
|
||||
id: recap
|
||||
env:
|
||||
OPENCODE_API_KEY: ${{ secrets.OPENCODE_API_KEY }}
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
OPENCODE_PERMISSION: |
|
||||
{
|
||||
"bash": {
|
||||
"*": "deny",
|
||||
"gh pr*": "allow",
|
||||
"gh search*": "allow"
|
||||
},
|
||||
"webfetch": "deny",
|
||||
"edit": "deny",
|
||||
"write": "deny"
|
||||
}
|
||||
run: |
|
||||
TODAY=$(date -u +%Y-%m-%d)
|
||||
|
||||
opencode run -m opencode/claude-sonnet-4-5 "Generate a daily PR activity recap for the OpenCode repository.
|
||||
|
||||
TODAY'S DATE: ${TODAY}
|
||||
|
||||
STEP 1: Gather PR data
|
||||
Run these commands to gather PR information. ONLY include OPEN PRs created or updated TODAY (${TODAY}):
|
||||
|
||||
# Open PRs created today
|
||||
gh pr list --repo ${{ github.repository }} --state open --search \"created:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
|
||||
|
||||
# Open PRs with activity today (updated today)
|
||||
gh pr list --repo ${{ github.repository }} --state open --search \"updated:${TODAY}\" --json number,title,author,labels,createdAt,updatedAt,reviewDecision,isDraft,additions,deletions --limit 100
|
||||
|
||||
IMPORTANT: EXCLUDE all PRs authored by Anomaly team members. Filter out PRs where the author login matches ANY of these:
|
||||
adamdotdevin, Brendonovich, fwang, Hona, iamdavidhill, jayair, kitlangton, kommander, MrMushrooooom, R44VC0RP, rekram1-node, thdxr
|
||||
This recap is specifically for COMMUNITY (external) contributions only.
|
||||
|
||||
|
||||
|
||||
STEP 2: For high-activity PRs, check comment counts
|
||||
For promising PRs, run:
|
||||
gh pr view [NUMBER] --repo ${{ github.repository }} --json comments --jq '[.comments[] | select(.author.login != \"copilot-pull-request-reviewer\" and .author.login != \"github-actions\")] | length'
|
||||
|
||||
IMPORTANT: When counting comments/activity, EXCLUDE these bot accounts:
|
||||
- copilot-pull-request-reviewer
|
||||
- github-actions
|
||||
|
||||
STEP 3: Identify what matters (ONLY from today's PRs)
|
||||
|
||||
**Bug Fixes From Today:**
|
||||
- PRs with 'fix' or 'bug' in title created/updated today
|
||||
- Small bug fixes (< 100 lines changed) that are easy to review
|
||||
- Bug fixes from community contributors
|
||||
|
||||
**High Activity Today:**
|
||||
- PRs with significant human comments today (excluding bots listed above)
|
||||
- PRs with back-and-forth discussion today
|
||||
|
||||
**Quick Wins:**
|
||||
- Small PRs (< 50 lines) that are approved or nearly approved
|
||||
- PRs that just need a final review
|
||||
|
||||
STEP 4: Generate the recap
|
||||
Create a structured recap:
|
||||
|
||||
===DISCORD_START===
|
||||
**Daily PR Recap - ${TODAY}**
|
||||
|
||||
**New PRs Today**
|
||||
[PRs opened today - group by type: bug fixes, features, etc.]
|
||||
|
||||
**Active PRs Today**
|
||||
[PRs with activity/updates today - significant discussion]
|
||||
|
||||
**Quick Wins**
|
||||
[Small PRs ready to merge]
|
||||
===DISCORD_END===
|
||||
|
||||
STEP 5: Format for Discord
|
||||
- Use Discord markdown (**, __, etc.)
|
||||
- BE EXTREMELY CONCISE - surface what we might miss
|
||||
- Use hyperlinked PR numbers with suppressed embeds: [#1234](<https://github.com/${{ github.repository }}/pull/1234>)
|
||||
- Include PR author: [#1234](<url>) (@author)
|
||||
- For bug fixes, add brief description of what it fixes
|
||||
- Show line count for quick wins: \"(+15/-3 lines)\"
|
||||
- HARD LIMIT: Keep under 1800 characters total
|
||||
- Skip empty sections
|
||||
- Focus on PRs that need human eyes
|
||||
|
||||
OUTPUT: Output ONLY the content between ===DISCORD_START=== and ===DISCORD_END=== markers. Include the markers so I can extract it." > /tmp/pr_recap_raw.txt
|
||||
|
||||
# Extract only the Discord message between markers
|
||||
sed -n '/===DISCORD_START===/,/===DISCORD_END===/p' /tmp/pr_recap_raw.txt | grep -v '===DISCORD' > /tmp/pr_recap.txt
|
||||
|
||||
echo "recap_file=/tmp/pr_recap.txt" >> $GITHUB_OUTPUT
|
||||
|
||||
- name: Post to Discord
|
||||
env:
|
||||
DISCORD_WEBHOOK_URL: ${{ secrets.DISCORD_ISSUES_WEBHOOK_URL }}
|
||||
run: |
|
||||
if [ -z "$DISCORD_WEBHOOK_URL" ]; then
|
||||
echo "Warning: DISCORD_ISSUES_WEBHOOK_URL secret not set, skipping Discord post"
|
||||
cat /tmp/pr_recap.txt
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Read the recap
|
||||
RECAP_RAW=$(cat /tmp/pr_recap.txt)
|
||||
RECAP_LENGTH=${#RECAP_RAW}
|
||||
|
||||
echo "Recap length: ${RECAP_LENGTH} chars"
|
||||
|
||||
# Function to post a message to Discord
|
||||
post_to_discord() {
|
||||
local msg="$1"
|
||||
local content=$(echo "$msg" | jq -Rs '.')
|
||||
curl -s -H "Content-Type: application/json" \
|
||||
-X POST \
|
||||
-d "{\"content\": ${content}}" \
|
||||
"$DISCORD_WEBHOOK_URL"
|
||||
sleep 1
|
||||
}
|
||||
|
||||
# If under limit, send as single message
|
||||
if [ "$RECAP_LENGTH" -le 1950 ]; then
|
||||
post_to_discord "$RECAP_RAW"
|
||||
else
|
||||
echo "Splitting into multiple messages..."
|
||||
remaining="$RECAP_RAW"
|
||||
while [ ${#remaining} -gt 0 ]; do
|
||||
if [ ${#remaining} -le 1950 ]; then
|
||||
post_to_discord "$remaining"
|
||||
break
|
||||
else
|
||||
chunk="${remaining:0:1900}"
|
||||
last_newline=$(echo "$chunk" | grep -bo $'\n' | tail -1 | cut -d: -f1)
|
||||
if [ -n "$last_newline" ] && [ "$last_newline" -gt 500 ]; then
|
||||
chunk="${remaining:0:$last_newline}"
|
||||
remaining="${remaining:$((last_newline+1))}"
|
||||
else
|
||||
chunk="${remaining:0:1900}"
|
||||
remaining="${remaining:1900}"
|
||||
fi
|
||||
post_to_discord "$chunk"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
echo "Posted daily PR recap to Discord"
|
||||
219
.github/workflows/publish.yml
vendored
219
.github/workflows/publish.yml
vendored
@@ -209,182 +209,6 @@ jobs:
|
||||
packages/opencode/dist/opencode-windows-x64
|
||||
packages/opencode/dist/opencode-windows-x64-baseline
|
||||
|
||||
build-tauri:
|
||||
needs:
|
||||
- build-cli
|
||||
- version
|
||||
continue-on-error: false
|
||||
env:
|
||||
AZURE_CLIENT_ID: ${{ secrets.AZURE_CLIENT_ID }}
|
||||
AZURE_TENANT_ID: ${{ secrets.AZURE_TENANT_ID }}
|
||||
AZURE_SUBSCRIPTION_ID: ${{ secrets.AZURE_SUBSCRIPTION_ID }}
|
||||
AZURE_TRUSTED_SIGNING_ACCOUNT_NAME: ${{ secrets.AZURE_TRUSTED_SIGNING_ACCOUNT_NAME }}
|
||||
AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE: ${{ secrets.AZURE_TRUSTED_SIGNING_CERTIFICATE_PROFILE }}
|
||||
AZURE_TRUSTED_SIGNING_ENDPOINT: ${{ secrets.AZURE_TRUSTED_SIGNING_ENDPOINT }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
settings:
|
||||
- host: macos-latest
|
||||
target: x86_64-apple-darwin
|
||||
- host: macos-latest
|
||||
target: aarch64-apple-darwin
|
||||
# github-hosted: blacksmith lacks ARM64 MSVC cross-compilation toolchain
|
||||
- host: windows-2025
|
||||
target: aarch64-pc-windows-msvc
|
||||
- host: blacksmith-4vcpu-windows-2025
|
||||
target: x86_64-pc-windows-msvc
|
||||
- host: blacksmith-4vcpu-ubuntu-2404
|
||||
target: x86_64-unknown-linux-gnu
|
||||
- host: blacksmith-8vcpu-ubuntu-2404-arm
|
||||
target: aarch64-unknown-linux-gnu
|
||||
runs-on: ${{ matrix.settings.host }}
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
with:
|
||||
fetch-tags: true
|
||||
|
||||
- uses: apple-actions/import-codesign-certs@v2
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
with:
|
||||
keychain: build
|
||||
p12-file-base64: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
p12-password: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
|
||||
- name: Verify Certificate
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
run: |
|
||||
CERT_INFO=$(security find-identity -v -p codesigning build.keychain | grep "Developer ID Application")
|
||||
CERT_ID=$(echo "$CERT_INFO" | awk -F'"' '{print $2}')
|
||||
echo "CERT_ID=$CERT_ID" >> $GITHUB_ENV
|
||||
echo "Certificate imported."
|
||||
|
||||
- name: Setup Apple API Key
|
||||
if: ${{ runner.os == 'macOS' }}
|
||||
run: |
|
||||
echo "${{ secrets.APPLE_API_KEY_PATH }}" > $RUNNER_TEMP/apple-api-key.p8
|
||||
|
||||
- uses: ./.github/actions/setup-bun
|
||||
|
||||
- name: Azure login
|
||||
if: runner.os == 'Windows'
|
||||
uses: azure/login@v2
|
||||
with:
|
||||
client-id: ${{ env.AZURE_CLIENT_ID }}
|
||||
tenant-id: ${{ env.AZURE_TENANT_ID }}
|
||||
subscription-id: ${{ env.AZURE_SUBSCRIPTION_ID }}
|
||||
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "24"
|
||||
|
||||
- name: Cache apt packages
|
||||
if: contains(matrix.settings.host, 'ubuntu')
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/apt-cache
|
||||
key: ${{ runner.os }}-${{ matrix.settings.target }}-apt-${{ hashFiles('.github/workflows/publish.yml') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-${{ matrix.settings.target }}-apt-
|
||||
|
||||
- name: install dependencies (ubuntu only)
|
||||
if: contains(matrix.settings.host, 'ubuntu')
|
||||
run: |
|
||||
mkdir -p ~/apt-cache && chmod -R a+rw ~/apt-cache
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y --no-install-recommends -o dir::cache::archives="$HOME/apt-cache" libwebkit2gtk-4.1-dev libappindicator3-dev librsvg2-dev patchelf
|
||||
sudo chmod -R a+rw ~/apt-cache
|
||||
|
||||
- name: install Rust stable
|
||||
uses: dtolnay/rust-toolchain@stable
|
||||
with:
|
||||
targets: ${{ matrix.settings.target }}
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
with:
|
||||
workspaces: packages/desktop/src-tauri
|
||||
shared-key: ${{ matrix.settings.target }}
|
||||
|
||||
- name: Prepare
|
||||
run: |
|
||||
cd packages/desktop
|
||||
bun ./scripts/prepare.ts
|
||||
env:
|
||||
OPENCODE_VERSION: ${{ needs.version.outputs.version }}
|
||||
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
|
||||
OPENCODE_CLI_ARTIFACT: ${{ (runner.os == 'Windows' && 'opencode-cli-windows') || 'opencode-cli' }}
|
||||
RUST_TARGET: ${{ matrix.settings.target }}
|
||||
GH_TOKEN: ${{ github.token }}
|
||||
GITHUB_RUN_ID: ${{ github.run_id }}
|
||||
|
||||
- name: Resolve tauri portable SHA
|
||||
if: contains(matrix.settings.host, 'ubuntu')
|
||||
run: echo "TAURI_PORTABLE_SHA=$(git ls-remote https://github.com/tauri-apps/tauri.git refs/heads/feat/truly-portable-appimage | cut -f1)" >> "$GITHUB_ENV"
|
||||
|
||||
# Fixes AppImage build issues, can be removed when https://github.com/tauri-apps/tauri/pull/12491 is released
|
||||
- name: Install tauri-cli from portable appimage branch
|
||||
uses: taiki-e/cache-cargo-install-action@v3
|
||||
if: contains(matrix.settings.host, 'ubuntu')
|
||||
with:
|
||||
tool: tauri-cli
|
||||
git: https://github.com/tauri-apps/tauri
|
||||
# branch: feat/truly-portable-appimage
|
||||
rev: ${{ env.TAURI_PORTABLE_SHA }}
|
||||
|
||||
- name: Show tauri-cli version
|
||||
if: contains(matrix.settings.host, 'ubuntu')
|
||||
run: cargo tauri --version
|
||||
|
||||
- name: Setup git committer
|
||||
id: committer
|
||||
uses: ./.github/actions/setup-git-committer
|
||||
with:
|
||||
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
|
||||
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
|
||||
|
||||
- name: Build and upload artifacts
|
||||
uses: tauri-apps/tauri-action@390cbe447412ced1303d35abe75287949e43437a
|
||||
timeout-minutes: 60
|
||||
with:
|
||||
projectPath: packages/desktop
|
||||
uploadWorkflowArtifacts: true
|
||||
tauriScript: ${{ (contains(matrix.settings.host, 'ubuntu') && 'cargo tauri') || '' }}
|
||||
args: --target ${{ matrix.settings.target }} --config ${{ (github.ref_name == 'beta' && './src-tauri/tauri.beta.conf.json') || './src-tauri/tauri.prod.conf.json' }} --verbose
|
||||
updaterJsonPreferNsis: true
|
||||
releaseId: ${{ needs.version.outputs.release }}
|
||||
tagName: ${{ needs.version.outputs.tag }}
|
||||
releaseDraft: true
|
||||
releaseAssetNamePattern: opencode-desktop-[platform]-[arch][ext]
|
||||
repo: ${{ (github.ref_name == 'beta' && 'opencode-beta') || '' }}
|
||||
releaseCommitish: ${{ github.sha }}
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
|
||||
TAURI_BUNDLER_NEW_APPIMAGE_FORMAT: true
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||
APPLE_CERTIFICATE: ${{ secrets.APPLE_CERTIFICATE }}
|
||||
APPLE_CERTIFICATE_PASSWORD: ${{ secrets.APPLE_CERTIFICATE_PASSWORD }}
|
||||
APPLE_SIGNING_IDENTITY: ${{ env.CERT_ID }}
|
||||
APPLE_API_ISSUER: ${{ secrets.APPLE_API_ISSUER }}
|
||||
APPLE_API_KEY: ${{ secrets.APPLE_API_KEY }}
|
||||
APPLE_API_KEY_PATH: ${{ runner.temp }}/apple-api-key.p8
|
||||
|
||||
- name: Verify signed Windows desktop artifacts
|
||||
if: runner.os == 'Windows'
|
||||
shell: pwsh
|
||||
run: |
|
||||
$files = @(
|
||||
"${{ github.workspace }}\packages\desktop\src-tauri\sidecars\opencode-cli-${{ matrix.settings.target }}.exe"
|
||||
)
|
||||
$files += Get-ChildItem "${{ github.workspace }}\packages\desktop\src-tauri\target\${{ matrix.settings.target }}\release\bundle\nsis\*.exe" | Select-Object -ExpandProperty FullName
|
||||
|
||||
foreach ($file in $files) {
|
||||
$sig = Get-AuthenticodeSignature $file
|
||||
if ($sig.Status -ne "Valid") {
|
||||
throw "Invalid signature for ${file}: $($sig.Status)"
|
||||
}
|
||||
}
|
||||
|
||||
build-electron:
|
||||
needs:
|
||||
- build-cli
|
||||
@@ -524,6 +348,30 @@ jobs:
|
||||
env:
|
||||
OPENCODE_CHANNEL: ${{ (github.ref_name == 'beta' && 'beta') || 'prod' }}
|
||||
|
||||
- name: Create and upload macOS .app.tar.gz
|
||||
if: runner.os == 'macOS' && needs.version.outputs.release
|
||||
working-directory: packages/desktop-electron/dist
|
||||
env:
|
||||
GH_TOKEN: ${{ steps.committer.outputs.token }}
|
||||
run: |
|
||||
if [[ "${{ matrix.settings.target }}" == "x86_64-apple-darwin" ]]; then
|
||||
APP_DIR="mac"
|
||||
OUT_NAME="opencode-desktop-mac-x64.app.tar.gz"
|
||||
elif [[ "${{ matrix.settings.target }}" == "aarch64-apple-darwin" ]]; then
|
||||
APP_DIR="mac-arm64"
|
||||
OUT_NAME="opencode-desktop-mac-arm64.app.tar.gz"
|
||||
else
|
||||
echo "Unknown macOS target: ${{ matrix.settings.target }}"
|
||||
exit 1
|
||||
fi
|
||||
APP_PATH=$(find "$APP_DIR" -maxdepth 1 -name "*.app" -type d | head -1)
|
||||
if [ -z "$APP_PATH" ]; then
|
||||
echo "No .app bundle found in $APP_DIR"
|
||||
exit 1
|
||||
fi
|
||||
tar -czf "$OUT_NAME" -C "$(dirname "$APP_PATH")" "$(basename "$APP_PATH")"
|
||||
gh release upload "v${{ needs.version.outputs.version }}" "$OUT_NAME" --clobber --repo "${{ needs.version.outputs.repo }}"
|
||||
|
||||
- name: Verify signed Windows Electron artifacts
|
||||
if: runner.os == 'Windows'
|
||||
shell: pwsh
|
||||
@@ -542,7 +390,7 @@ jobs:
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
with:
|
||||
name: opencode-electron-${{ matrix.settings.target }}
|
||||
name: opencode-desktop-${{ matrix.settings.target }}
|
||||
path: packages/desktop-electron/dist/*
|
||||
|
||||
- uses: actions/upload-artifact@v4
|
||||
@@ -556,7 +404,6 @@ jobs:
|
||||
- version
|
||||
- build-cli
|
||||
- sign-cli-windows
|
||||
- build-tauri
|
||||
- build-electron
|
||||
if: always() && !failure() && !cancelled()
|
||||
runs-on: blacksmith-4vcpu-ubuntu-2404
|
||||
@@ -583,13 +430,6 @@ jobs:
|
||||
node-version: "24"
|
||||
registry-url: "https://registry.npmjs.org"
|
||||
|
||||
- name: Setup git committer
|
||||
id: committer
|
||||
uses: ./.github/actions/setup-git-committer
|
||||
with:
|
||||
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
|
||||
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
|
||||
|
||||
- uses: actions/download-artifact@v4
|
||||
with:
|
||||
name: opencode-cli
|
||||
@@ -611,6 +451,13 @@ jobs:
|
||||
pattern: latest-yml-*
|
||||
path: /tmp/latest-yml
|
||||
|
||||
- name: Setup git committer
|
||||
id: committer
|
||||
uses: ./.github/actions/setup-git-committer
|
||||
with:
|
||||
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
|
||||
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
|
||||
|
||||
- name: Cache apt packages (AUR)
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
@@ -639,3 +486,5 @@ jobs:
|
||||
GH_REPO: ${{ needs.version.outputs.repo }}
|
||||
NPM_CONFIG_PROVENANCE: false
|
||||
LATEST_YML_DIR: /tmp/latest-yml
|
||||
TAURI_SIGNING_PRIVATE_KEY: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY }}
|
||||
TAURI_SIGNING_PRIVATE_KEY_PASSWORD: ${{ secrets.TAURI_SIGNING_PRIVATE_KEY_PASSWORD }}
|
||||
|
||||
116
.github/workflows/vouch-check-issue.yml
vendored
116
.github/workflows/vouch-check-issue.yml
vendored
@@ -1,116 +0,0 @@
|
||||
name: vouch-check-issue
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [opened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check if issue author is denounced
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const author = context.payload.issue.user.login;
|
||||
const issueNumber = context.payload.issue.number;
|
||||
|
||||
// Skip bots
|
||||
if (author.endsWith('[bot]')) {
|
||||
core.info(`Skipping bot: ${author}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Read the VOUCHED.td file via API (no checkout needed)
|
||||
let content;
|
||||
try {
|
||||
const response = await github.rest.repos.getContent({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
path: '.github/VOUCHED.td',
|
||||
});
|
||||
content = Buffer.from(response.data.content, 'base64').toString('utf-8');
|
||||
} catch (error) {
|
||||
if (error.status === 404) {
|
||||
core.info('No .github/VOUCHED.td file found, skipping check.');
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Parse the .td file for vouched and denounced users
|
||||
const vouched = new Set();
|
||||
const denounced = new Map();
|
||||
for (const line of content.split('\n')) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
const isDenounced = trimmed.startsWith('-');
|
||||
const rest = isDenounced ? trimmed.slice(1).trim() : trimmed;
|
||||
if (!rest) continue;
|
||||
|
||||
const spaceIdx = rest.indexOf(' ');
|
||||
const handle = spaceIdx === -1 ? rest : rest.slice(0, spaceIdx);
|
||||
const reason = spaceIdx === -1 ? null : rest.slice(spaceIdx + 1).trim();
|
||||
|
||||
// Handle platform:username or bare username
|
||||
// Only match bare usernames or github: prefix (skip other platforms)
|
||||
const colonIdx = handle.indexOf(':');
|
||||
if (colonIdx !== -1) {
|
||||
const platform = handle.slice(0, colonIdx).toLowerCase();
|
||||
if (platform !== 'github') continue;
|
||||
}
|
||||
const username = colonIdx === -1 ? handle : handle.slice(colonIdx + 1);
|
||||
if (!username) continue;
|
||||
|
||||
if (isDenounced) {
|
||||
denounced.set(username.toLowerCase(), reason);
|
||||
continue;
|
||||
}
|
||||
|
||||
vouched.add(username.toLowerCase());
|
||||
}
|
||||
|
||||
// Check if the author is denounced
|
||||
const reason = denounced.get(author.toLowerCase());
|
||||
if (reason !== undefined) {
|
||||
// Author is denounced — close the issue
|
||||
const body = 'This issue has been automatically closed.';
|
||||
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: issueNumber,
|
||||
body,
|
||||
});
|
||||
|
||||
await github.rest.issues.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: issueNumber,
|
||||
state: 'closed',
|
||||
state_reason: 'not_planned',
|
||||
});
|
||||
|
||||
core.info(`Closed issue #${issueNumber} from denounced user ${author}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Author is positively vouched — add label
|
||||
if (!vouched.has(author.toLowerCase())) {
|
||||
core.info(`User ${author} is not denounced or vouched. Allowing issue.`);
|
||||
return;
|
||||
}
|
||||
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: issueNumber,
|
||||
labels: ['Vouched'],
|
||||
});
|
||||
|
||||
core.info(`Added vouched label to issue #${issueNumber} from ${author}`);
|
||||
114
.github/workflows/vouch-check-pr.yml
vendored
114
.github/workflows/vouch-check-pr.yml
vendored
@@ -1,114 +0,0 @@
|
||||
name: vouch-check-pr
|
||||
|
||||
on:
|
||||
pull_request_target:
|
||||
types: [opened]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
issues: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
check:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check if PR author is denounced
|
||||
uses: actions/github-script@v7
|
||||
with:
|
||||
script: |
|
||||
const author = context.payload.pull_request.user.login;
|
||||
const prNumber = context.payload.pull_request.number;
|
||||
|
||||
// Skip bots
|
||||
if (author.endsWith('[bot]')) {
|
||||
core.info(`Skipping bot: ${author}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Read the VOUCHED.td file via API (no checkout needed)
|
||||
let content;
|
||||
try {
|
||||
const response = await github.rest.repos.getContent({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
path: '.github/VOUCHED.td',
|
||||
});
|
||||
content = Buffer.from(response.data.content, 'base64').toString('utf-8');
|
||||
} catch (error) {
|
||||
if (error.status === 404) {
|
||||
core.info('No .github/VOUCHED.td file found, skipping check.');
|
||||
return;
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
|
||||
// Parse the .td file for vouched and denounced users
|
||||
const vouched = new Set();
|
||||
const denounced = new Map();
|
||||
for (const line of content.split('\n')) {
|
||||
const trimmed = line.trim();
|
||||
if (!trimmed || trimmed.startsWith('#')) continue;
|
||||
|
||||
const isDenounced = trimmed.startsWith('-');
|
||||
const rest = isDenounced ? trimmed.slice(1).trim() : trimmed;
|
||||
if (!rest) continue;
|
||||
|
||||
const spaceIdx = rest.indexOf(' ');
|
||||
const handle = spaceIdx === -1 ? rest : rest.slice(0, spaceIdx);
|
||||
const reason = spaceIdx === -1 ? null : rest.slice(spaceIdx + 1).trim();
|
||||
|
||||
// Handle platform:username or bare username
|
||||
// Only match bare usernames or github: prefix (skip other platforms)
|
||||
const colonIdx = handle.indexOf(':');
|
||||
if (colonIdx !== -1) {
|
||||
const platform = handle.slice(0, colonIdx).toLowerCase();
|
||||
if (platform !== 'github') continue;
|
||||
}
|
||||
const username = colonIdx === -1 ? handle : handle.slice(colonIdx + 1);
|
||||
if (!username) continue;
|
||||
|
||||
if (isDenounced) {
|
||||
denounced.set(username.toLowerCase(), reason);
|
||||
continue;
|
||||
}
|
||||
|
||||
vouched.add(username.toLowerCase());
|
||||
}
|
||||
|
||||
// Check if the author is denounced
|
||||
const reason = denounced.get(author.toLowerCase());
|
||||
if (reason !== undefined) {
|
||||
// Author is denounced — close the PR
|
||||
await github.rest.issues.createComment({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: prNumber,
|
||||
body: 'This pull request has been automatically closed.',
|
||||
});
|
||||
|
||||
await github.rest.pulls.update({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
pull_number: prNumber,
|
||||
state: 'closed',
|
||||
});
|
||||
|
||||
core.info(`Closed PR #${prNumber} from denounced user ${author}`);
|
||||
return;
|
||||
}
|
||||
|
||||
// Author is positively vouched — add label
|
||||
if (!vouched.has(author.toLowerCase())) {
|
||||
core.info(`User ${author} is not denounced or vouched. Allowing PR.`);
|
||||
return;
|
||||
}
|
||||
|
||||
await github.rest.issues.addLabels({
|
||||
owner: context.repo.owner,
|
||||
repo: context.repo.repo,
|
||||
issue_number: prNumber,
|
||||
labels: ['Vouched'],
|
||||
});
|
||||
|
||||
core.info(`Added vouched label to PR #${prNumber} from ${author}`);
|
||||
38
.github/workflows/vouch-manage-by-issue.yml
vendored
38
.github/workflows/vouch-manage-by-issue.yml
vendored
@@ -1,38 +0,0 @@
|
||||
name: vouch-manage-by-issue
|
||||
|
||||
on:
|
||||
issue_comment:
|
||||
types: [created]
|
||||
|
||||
concurrency:
|
||||
group: vouch-manage
|
||||
cancel-in-progress: false
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
issues: write
|
||||
pull-requests: read
|
||||
|
||||
jobs:
|
||||
manage:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
with:
|
||||
persist-credentials: false
|
||||
fetch-depth: 0
|
||||
|
||||
- name: Setup git committer
|
||||
id: committer
|
||||
uses: ./.github/actions/setup-git-committer
|
||||
with:
|
||||
opencode-app-id: ${{ vars.OPENCODE_APP_ID }}
|
||||
opencode-app-secret: ${{ secrets.OPENCODE_APP_SECRET }}
|
||||
|
||||
- uses: mitchellh/vouch/action/manage-by-issue@main
|
||||
with:
|
||||
issue-id: ${{ github.event.issue.number }}
|
||||
comment-id: ${{ github.event.comment.id }}
|
||||
roles: admin,maintain,write
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ steps.committer.outputs.token }}
|
||||
@@ -1,7 +1,7 @@
|
||||
---
|
||||
mode: primary
|
||||
hidden: true
|
||||
model: opencode/minimax-m2.5
|
||||
model: opencode/gpt-5.4-nano
|
||||
color: "#44BA81"
|
||||
tools:
|
||||
"*": false
|
||||
@@ -14,127 +14,30 @@ Use your github-triage tool to triage issues.
|
||||
|
||||
This file is the source of truth for ownership/routing rules.
|
||||
|
||||
## Labels
|
||||
Assign issues by choosing the team with the strongest overlap. The github-triage tool will assign a random member from that team.
|
||||
|
||||
### windows
|
||||
Do not add labels to issues. Only assign an owner.
|
||||
|
||||
Use for any issue that mentions Windows (the OS). Be sure they are saying that they are on Windows.
|
||||
When calling github-triage, pass one of these team values: tui, desktop_web, core, inference, windows.
|
||||
|
||||
- Use if they mention WSL too
|
||||
## Teams
|
||||
|
||||
#### perf
|
||||
### TUI
|
||||
|
||||
Performance-related issues:
|
||||
Terminal UI issues, including rendering, keybindings, scrolling, terminal compatibility, SSH behavior, crashes in the TUI, and low-level TUI performance.
|
||||
|
||||
- Slow performance
|
||||
- High RAM usage
|
||||
- High CPU usage
|
||||
### Desktop / Web
|
||||
|
||||
**Only** add if it's likely a RAM or CPU issue. **Do not** add for LLM slowness.
|
||||
Desktop application and browser-based app issues, including `opencode web`, desktop-specific UI behavior, packaging, and web view problems.
|
||||
|
||||
#### desktop
|
||||
### Core
|
||||
|
||||
Desktop app issues:
|
||||
Core opencode server and harness issues, including sqlite, snapshots, memory, API behavior, agent context construction, tool execution, provider integrations, model behavior, documentation, and larger architectural features.
|
||||
|
||||
- `opencode web` command
|
||||
- The desktop app itself
|
||||
### Inference
|
||||
|
||||
**Only** add if it's specifically about the Desktop application or `opencode web` view. **Do not** add for terminal, TUI, or general opencode issues.
|
||||
OpenCode Zen, OpenCode Go, and billing issues.
|
||||
|
||||
#### nix
|
||||
### Windows
|
||||
|
||||
**Only** add if the issue explicitly mentions nix.
|
||||
|
||||
If the issue does not mention nix, do not add nix.
|
||||
|
||||
If the issue mentions nix, assign to `rekram1-node`.
|
||||
|
||||
#### zen
|
||||
|
||||
**Only** add if the issue mentions "zen" or "opencode zen" or "opencode black".
|
||||
|
||||
If the issue doesn't have "zen" or "opencode black" in it then don't add zen label
|
||||
|
||||
#### core
|
||||
|
||||
Use for core server issues in `packages/opencode/`, excluding `packages/opencode/src/cli/cmd/tui/`.
|
||||
|
||||
Examples:
|
||||
|
||||
- LSP server behavior
|
||||
- Harness behavior (agent + tools)
|
||||
- Feature requests for server behavior
|
||||
- Agent context construction
|
||||
- API endpoints
|
||||
- Provider integration issues
|
||||
- New, broken, or poor-quality models
|
||||
|
||||
#### acp
|
||||
|
||||
If the issue mentions acp support, assign acp label.
|
||||
|
||||
#### docs
|
||||
|
||||
Add if the issue requests better documentation or docs updates.
|
||||
|
||||
#### opentui
|
||||
|
||||
TUI issues potentially caused by our underlying TUI library:
|
||||
|
||||
- Keybindings not working
|
||||
- Scroll speed issues (too fast/slow/laggy)
|
||||
- Screen flickering
|
||||
- Crashes with opentui in the log
|
||||
|
||||
**Do not** add for general TUI bugs.
|
||||
|
||||
When assigning to people here are the following rules:
|
||||
|
||||
Desktop / Web:
|
||||
Use for desktop-labeled issues only.
|
||||
|
||||
- adamdotdevin
|
||||
- iamdavidhill
|
||||
- Brendonovich
|
||||
- nexxeln
|
||||
|
||||
Zen:
|
||||
ONLY assign if the issue will have the "zen" label.
|
||||
|
||||
- fwang
|
||||
- MrMushrooooom
|
||||
|
||||
TUI (`packages/opencode/src/cli/cmd/tui/...`):
|
||||
|
||||
- thdxr for TUI UX/UI product decisions and interaction flow
|
||||
- kommander for OpenTUI engine issues: rendering artifacts, keybind handling, terminal compatibility, SSH behavior, and low-level perf bottlenecks
|
||||
- rekram1-node for TUI bugs that are not clearly OpenTUI engine issues
|
||||
|
||||
Core (`packages/opencode/...`, excluding TUI subtree):
|
||||
|
||||
- thdxr for sqlite/snapshot/memory bugs and larger architectural core features
|
||||
- jlongster for opencode server + API feature work (tool currently remaps jlongster -> thdxr until assignable)
|
||||
- rekram1-node for harness issues, provider issues, and other bug-squashing
|
||||
|
||||
For core bugs that do not clearly map, either thdxr or rekram1-node is acceptable.
|
||||
|
||||
Docs:
|
||||
|
||||
- R44VC0RP
|
||||
|
||||
Windows:
|
||||
|
||||
- Hona (assign any issue that mentions Windows or is likely Windows-specific)
|
||||
|
||||
Determinism rules:
|
||||
|
||||
- If title + body does not contain "zen", do not add the "zen" label
|
||||
- If "nix" label is added but title + body does not mention nix/nixos, the tool will drop "nix"
|
||||
- If title + body mentions nix/nixos, assign to `rekram1-node`
|
||||
- If "desktop" label is added, the tool will override assignee and randomly pick one Desktop / Web owner
|
||||
|
||||
In all other cases, choose the team/section with the most overlap with the issue and assign a member from that team at random.
|
||||
|
||||
ACP:
|
||||
|
||||
- rekram1-node (assign any acp issues to rekram1-node)
|
||||
Windows-specific issues, including native Windows behavior, WSL interactions, path handling, shell compatibility, and installation or runtime problems that only happen on Windows.
|
||||
|
||||
@@ -18,9 +18,12 @@ Do not use `git log` or author metadata when deciding attribution.
|
||||
|
||||
Rules:
|
||||
|
||||
- Write the final file with sections in this order:
|
||||
- Write the final file with release sections in this order:
|
||||
`## Core`, `## TUI`, `## Desktop`, `## SDK`, `## Extensions`
|
||||
- Only include sections that have at least one notable entry
|
||||
- Within each release section, keep bug fixes grouped under `### Bugfixes`
|
||||
- Keep other notable entries under `### Improvements` when a section has bug fixes too
|
||||
- Omit empty subsections
|
||||
- Keep one bullet per commit you keep
|
||||
- Skip commits that are entirely internal, CI, tests, refactors, or otherwise not user-facing
|
||||
- Start each bullet with a capital letter
|
||||
|
||||
@@ -1,16 +1,14 @@
|
||||
/// <reference path="../env.d.ts" />
|
||||
import { tool } from "@opencode-ai/plugin"
|
||||
|
||||
const TEAM = {
|
||||
desktop: ["adamdotdevin", "iamdavidhill", "Brendonovich", "nexxeln"],
|
||||
zen: ["fwang", "MrMushrooooom"],
|
||||
tui: ["kommander", "rekram1-node", "simonklee"],
|
||||
core: ["kitlangton", "rekram1-node", "jlongster"],
|
||||
docs: ["R44VC0RP"],
|
||||
tui: ["kommander", "simonklee"],
|
||||
desktop_web: ["Hona", "Brendonovich"],
|
||||
core: ["jlongster", "rekram1-node", "nexxeln", "kitlangton"],
|
||||
inference: ["fwang", "MrMushrooooom"],
|
||||
windows: ["Hona"],
|
||||
} as const
|
||||
|
||||
const ASSIGNEES = [...new Set(Object.values(TEAM).flat())]
|
||||
|
||||
function pick<T>(items: readonly T[]) {
|
||||
return items[Math.floor(Math.random() * items.length)]!
|
||||
}
|
||||
@@ -38,79 +36,25 @@ async function githubFetch(endpoint: string, options: RequestInit = {}) {
|
||||
}
|
||||
|
||||
export default tool({
|
||||
description: `Use this tool to assign and/or label a GitHub issue.
|
||||
description: `Use this tool to assign a GitHub issue.
|
||||
|
||||
Choose labels and assignee using the current triage policy and ownership rules.
|
||||
Pick the most fitting labels for the issue and assign one owner.
|
||||
|
||||
If unsure, choose the team/section with the most overlap with the issue and assign a member from that team at random.`,
|
||||
Provide the team that should own the issue. This tool picks a random assignee from that team and does not apply labels.`,
|
||||
args: {
|
||||
assignee: tool.schema
|
||||
.enum(ASSIGNEES as [string, ...string[]])
|
||||
.describe("The username of the assignee")
|
||||
.default("rekram1-node"),
|
||||
labels: tool.schema
|
||||
.array(tool.schema.enum(["nix", "opentui", "perf", "web", "desktop", "zen", "docs", "windows", "core"]))
|
||||
.describe("The labels(s) to add to the issue")
|
||||
.default([]),
|
||||
team: tool.schema
|
||||
.enum(Object.keys(TEAM) as [keyof typeof TEAM, ...(keyof typeof TEAM)[]])
|
||||
.describe("The owning team"),
|
||||
},
|
||||
async execute(args) {
|
||||
const issue = getIssueNumber()
|
||||
const owner = "anomalyco"
|
||||
const repo = "opencode"
|
||||
|
||||
const results: string[] = []
|
||||
let labels = [...new Set(args.labels.map((x) => (x === "desktop" ? "web" : x)))]
|
||||
const web = labels.includes("web")
|
||||
const text = `${process.env.ISSUE_TITLE ?? ""}\n${process.env.ISSUE_BODY ?? ""}`.toLowerCase()
|
||||
const zen = /\bzen\b/.test(text) || text.includes("opencode black")
|
||||
const nix = /\bnix(os)?\b/.test(text)
|
||||
|
||||
if (labels.includes("nix") && !nix) {
|
||||
labels = labels.filter((x) => x !== "nix")
|
||||
results.push("Dropped label: nix (issue does not mention nix)")
|
||||
}
|
||||
|
||||
const assignee = nix ? "rekram1-node" : web ? pick(TEAM.desktop) : args.assignee
|
||||
|
||||
if (labels.includes("zen") && !zen) {
|
||||
throw new Error("Only add the zen label when issue title/body contains 'zen'")
|
||||
}
|
||||
|
||||
if (web && !nix && !(TEAM.desktop as readonly string[]).includes(assignee)) {
|
||||
throw new Error("Web issues must be assigned to adamdotdevin, iamdavidhill, Brendonovich, or nexxeln")
|
||||
}
|
||||
|
||||
if ((TEAM.zen as readonly string[]).includes(assignee) && !labels.includes("zen")) {
|
||||
throw new Error("Only zen issues should be assigned to fwang or MrMushrooooom")
|
||||
}
|
||||
|
||||
if (assignee === "Hona" && !labels.includes("windows")) {
|
||||
throw new Error("Only windows issues should be assigned to Hona")
|
||||
}
|
||||
|
||||
if (assignee === "R44VC0RP" && !labels.includes("docs")) {
|
||||
throw new Error("Only docs issues should be assigned to R44VC0RP")
|
||||
}
|
||||
|
||||
if (assignee === "kommander" && !labels.includes("opentui")) {
|
||||
throw new Error("Only opentui issues should be assigned to kommander")
|
||||
}
|
||||
const assignee = pick(TEAM[args.team])
|
||||
|
||||
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/assignees`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ assignees: [assignee] }),
|
||||
})
|
||||
results.push(`Assigned @${assignee} to issue #${issue}`)
|
||||
|
||||
if (labels.length > 0) {
|
||||
await githubFetch(`/repos/${owner}/${repo}/issues/${issue}/labels`, {
|
||||
method: "POST",
|
||||
body: JSON.stringify({ labels }),
|
||||
})
|
||||
results.push(`Added labels: ${labels.join(", ")}`)
|
||||
}
|
||||
|
||||
return results.join("\n")
|
||||
return `Assigned @${assignee} from ${args.team} to issue #${issue}`
|
||||
},
|
||||
})
|
||||
|
||||
42
bun.lock
42
bun.lock
@@ -29,7 +29,7 @@
|
||||
},
|
||||
"packages/app": {
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/core": "workspace:*",
|
||||
@@ -85,7 +85,7 @@
|
||||
},
|
||||
"packages/console/app": {
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@cloudflare/vite-plugin": "1.15.2",
|
||||
"@ibm/plex": "6.4.1",
|
||||
@@ -119,7 +119,7 @@
|
||||
},
|
||||
"packages/console/core": {
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-sts": "3.782.0",
|
||||
"@jsx-email/render": "1.1.1",
|
||||
@@ -146,7 +146,7 @@
|
||||
},
|
||||
"packages/console/function": {
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@ai-sdk/anthropic": "3.0.64",
|
||||
"@ai-sdk/openai": "3.0.48",
|
||||
@@ -170,7 +170,7 @@
|
||||
},
|
||||
"packages/console/mail": {
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
@@ -194,7 +194,7 @@
|
||||
},
|
||||
"packages/core": {
|
||||
"name": "@opencode-ai/core",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -228,7 +228,7 @@
|
||||
},
|
||||
"packages/desktop": {
|
||||
"name": "@opencode-ai/desktop",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@opencode-ai/app": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -263,7 +263,7 @@
|
||||
},
|
||||
"packages/desktop-electron": {
|
||||
"name": "@opencode-ai/desktop-electron",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"drizzle-orm": "catalog:",
|
||||
"effect": "catalog:",
|
||||
@@ -309,7 +309,7 @@
|
||||
},
|
||||
"packages/enterprise": {
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@opencode-ai/core": "workspace:*",
|
||||
"@opencode-ai/ui": "workspace:*",
|
||||
@@ -338,7 +338,7 @@
|
||||
},
|
||||
"packages/function": {
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@octokit/auth-app": "8.0.1",
|
||||
"@octokit/rest": "catalog:",
|
||||
@@ -354,7 +354,7 @@
|
||||
},
|
||||
"packages/opencode": {
|
||||
"name": "opencode",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"bin": {
|
||||
"opencode": "./bin/opencode",
|
||||
},
|
||||
@@ -387,10 +387,6 @@
|
||||
"@effect/opentelemetry": "catalog:",
|
||||
"@effect/platform-node": "catalog:",
|
||||
"@gitlab/opencode-gitlab-auth": "1.3.3",
|
||||
"@hono/node-server": "1.19.11",
|
||||
"@hono/node-ws": "1.3.0",
|
||||
"@hono/standard-validator": "0.1.5",
|
||||
"@hono/zod-validator": "catalog:",
|
||||
"@lydell/node-pty": "catalog:",
|
||||
"@modelcontextprotocol/sdk": "1.27.1",
|
||||
"@octokit/graphql": "9.0.2",
|
||||
@@ -430,8 +426,6 @@
|
||||
"glob": "13.0.5",
|
||||
"google-auth-library": "10.5.0",
|
||||
"gray-matter": "4.0.3",
|
||||
"hono": "catalog:",
|
||||
"hono-openapi": "catalog:",
|
||||
"ignore": "7.0.5",
|
||||
"immer": "11.1.4",
|
||||
"jsonc-parser": "3.3.1",
|
||||
@@ -458,7 +452,6 @@
|
||||
"xdg-basedir": "5.1.0",
|
||||
"yargs": "18.0.0",
|
||||
"zod": "catalog:",
|
||||
"zod-to-json-schema": "3.24.5",
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/core": "7.28.4",
|
||||
@@ -491,12 +484,11 @@
|
||||
"typescript": "catalog:",
|
||||
"vscode-languageserver-types": "3.17.5",
|
||||
"why-is-node-running": "3.2.2",
|
||||
"zod-to-json-schema": "3.24.5",
|
||||
},
|
||||
},
|
||||
"packages/plugin": {
|
||||
"name": "@opencode-ai/plugin",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"effect": "catalog:",
|
||||
@@ -531,7 +523,7 @@
|
||||
},
|
||||
"packages/sdk/js": {
|
||||
"name": "@opencode-ai/sdk",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"cross-spawn": "catalog:",
|
||||
},
|
||||
@@ -546,7 +538,7 @@
|
||||
},
|
||||
"packages/slack": {
|
||||
"name": "@opencode-ai/slack",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@opencode-ai/sdk": "workspace:*",
|
||||
"@slack/bolt": "^3.17.1",
|
||||
@@ -581,7 +573,7 @@
|
||||
},
|
||||
"packages/ui": {
|
||||
"name": "@opencode-ai/ui",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@kobalte/core": "catalog:",
|
||||
"@opencode-ai/core": "workspace:*",
|
||||
@@ -630,7 +622,7 @@
|
||||
},
|
||||
"packages/web": {
|
||||
"name": "@opencode-ai/web",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@astrojs/cloudflare": "12.6.3",
|
||||
"@astrojs/markdown-remark": "6.3.1",
|
||||
@@ -1228,8 +1220,6 @@
|
||||
|
||||
"@hono/node-server": ["@hono/node-server@1.19.11", "", { "peerDependencies": { "hono": "^4" } }, "sha512-dr8/3zEaB+p0D2n/IUrlPF1HZm586qgJNXK1a9fhg/PzdtkK7Ksd5l312tJX2yBuALqDYBlG20QEbayqPyxn+g=="],
|
||||
|
||||
"@hono/node-ws": ["@hono/node-ws@1.3.0", "", { "dependencies": { "ws": "^8.17.0" }, "peerDependencies": { "@hono/node-server": "^1.19.2", "hono": "^4.6.0" } }, "sha512-ju25YbbvLuXdqBCmLZLqnNYu1nbHIQjoyUqA8ApZOeL1k4skuiTcw5SW77/5SUYo2Xi2NVBJoVlfQurnKEp03Q=="],
|
||||
|
||||
"@hono/standard-validator": ["@hono/standard-validator@0.1.5", "", { "peerDependencies": { "@standard-schema/spec": "1.0.0", "hono": ">=3.9.0" } }, "sha512-EIyZPPwkyLn6XKwFj5NBEWHXhXbgmnVh2ceIFo5GO7gKI9WmzTjPDKnppQB0KrqKeAkq3kpoW4SIbu5X1dgx3w=="],
|
||||
|
||||
"@hono/zod-validator": ["@hono/zod-validator@0.4.2", "", { "peerDependencies": { "hono": ">=3.9.0", "zod": "^3.19.1" } }, "sha512-1rrlBg+EpDPhzOV4hT9pxr5+xDVmKuz6YJl+la7VCwK6ass5ldyKm5fD+umJdV2zhHD6jROoCCv8NbTwyfhT0g=="],
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
{
|
||||
"nodeModules": {
|
||||
"x86_64-linux": "sha256-SLWRe4uPSRWgU+NPa1BywmrUtNVIC0Oy2mjmxclxk+s=",
|
||||
"aarch64-linux": "sha256-toHEeIqMzrmThoV0B52juGKm4pa/aJN3gBFFtrSZp2Q=",
|
||||
"aarch64-darwin": "sha256-lYUsUxq5zR2RXjqZTEdjduOncnlwvTlxDJVKWXJuKPY=",
|
||||
"x86_64-darwin": "sha256-77XmuEYqGwb1mkEHfnghq1VtukFTneohA0FW6WDOk1U="
|
||||
"x86_64-linux": "sha256-9wTDLZsuGjkWyVOb6AG2VRYPiaSj/lnXwVkSwNeDcns=",
|
||||
"aarch64-linux": "sha256-gmKlL2fQxY8bo+//8m9e1TNYJK3RXa4i8xsgtd046bc=",
|
||||
"aarch64-darwin": "sha256-ENSJK+7rZi3m342mjtGg9N0P6zWEypXMpI7QdFMydbc=",
|
||||
"x86_64-darwin": "sha256-gkxCxGh5dlwj03vZdz20pbiAwFEDpAlu/5iU8cwZOGI="
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/app",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"description": "",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
|
||||
@@ -15,6 +15,7 @@ import { terminalFontFamily, useSettings } from "@/context/settings"
|
||||
import type { LocalPTY } from "@/context/terminal"
|
||||
import { disposeIfDisposable, getHoveredLinkText, setOptionIfSupported } from "@/utils/runtime-adapters"
|
||||
import { terminalWriter } from "@/utils/terminal-writer"
|
||||
import { terminalWebSocketURL } from "@/utils/terminal-websocket-url"
|
||||
|
||||
const TOGGLE_TERMINAL_ID = "terminal.toggle"
|
||||
const DEFAULT_TOGGLE_TERMINAL_KEYBIND = "ctrl+`"
|
||||
@@ -67,13 +68,6 @@ const debugTerminal = (...values: unknown[]) => {
|
||||
console.debug("[terminal]", ...values)
|
||||
}
|
||||
|
||||
const errorName = (err: unknown) => {
|
||||
if (!err || typeof err !== "object") return
|
||||
if (!("name" in err)) return
|
||||
const errorName = err.name
|
||||
return typeof errorName === "string" ? errorName : undefined
|
||||
}
|
||||
|
||||
const useTerminalUiBindings = (input: {
|
||||
container: HTMLDivElement
|
||||
term: Term
|
||||
@@ -478,14 +472,34 @@ export const Terminal = (props: TerminalProps) => {
|
||||
|
||||
const gone = () =>
|
||||
client.pty
|
||||
.get({ ptyID: id })
|
||||
.then(() => false)
|
||||
.get({ ptyID: id }, { throwOnError: false })
|
||||
.then((result) => result.response.status === 404)
|
||||
.catch((err) => {
|
||||
if (errorName(err) === "NotFoundError") return true
|
||||
debugTerminal("failed to inspect terminal session", err)
|
||||
return false
|
||||
})
|
||||
|
||||
const connectToken = async () => {
|
||||
const result = await client.pty
|
||||
.connectToken(
|
||||
{ ptyID: id, directory },
|
||||
{
|
||||
throwOnError: false,
|
||||
headers: { "x-opencode-ticket": "1" },
|
||||
},
|
||||
)
|
||||
.catch((err: unknown) => {
|
||||
if (err instanceof Error && err.message.includes("Request is not supported")) return
|
||||
throw err
|
||||
})
|
||||
if (!result) return
|
||||
if (result.response.status === 200 && result.data?.ticket) return result.data.ticket
|
||||
if (result.response.status === 404 || result.response.status === 405) return
|
||||
if (result.response.status === 403)
|
||||
throw new Error("PTY connect ticket rejected by origin or CSRF checks. Check the server CORS config.")
|
||||
throw new Error(`PTY connect ticket failed with ${result.response.status}`)
|
||||
}
|
||||
|
||||
const retry = (err: unknown) => {
|
||||
if (disposed) return
|
||||
if (reconn !== undefined) return
|
||||
@@ -505,22 +519,30 @@ export const Terminal = (props: TerminalProps) => {
|
||||
}, ms)
|
||||
}
|
||||
|
||||
const open = () => {
|
||||
const open = async () => {
|
||||
if (disposed) return
|
||||
drop?.()
|
||||
|
||||
const next = new URL(url + `/pty/${id}/connect`)
|
||||
next.searchParams.set("directory", directory)
|
||||
next.searchParams.set("cursor", String(seek))
|
||||
next.protocol = next.protocol === "https:" ? "wss:" : "ws:"
|
||||
if (!sameOrigin && password) {
|
||||
next.searchParams.set("auth_token", btoa(`${username}:${password}`))
|
||||
// For same-origin requests, let the browser reuse the page's existing auth.
|
||||
next.username = username
|
||||
next.password = password
|
||||
}
|
||||
const ticket = await connectToken().catch((err) => {
|
||||
fail(err)
|
||||
return undefined
|
||||
})
|
||||
if (once.value) return
|
||||
if (disposed) return
|
||||
|
||||
const socket = new WebSocket(next)
|
||||
const socket = new WebSocket(
|
||||
terminalWebSocketURL({
|
||||
url,
|
||||
id,
|
||||
directory,
|
||||
cursor: seek,
|
||||
ticket,
|
||||
sameOrigin,
|
||||
username,
|
||||
password,
|
||||
authToken: server.current?.type === "http" ? server.current.authToken : false,
|
||||
}),
|
||||
)
|
||||
socket.binaryType = "arraybuffer"
|
||||
ws = socket
|
||||
|
||||
|
||||
53
packages/app/src/context/server.test.ts
Normal file
53
packages/app/src/context/server.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { resolveServerList, ServerConnection } from "./server"
|
||||
|
||||
describe("resolveServerList", () => {
|
||||
test("lets startup auth_token credentials override a persisted same-url server", () => {
|
||||
const list = resolveServerList({
|
||||
stored: [{ url: "https://server.example.test" }],
|
||||
props: [
|
||||
{
|
||||
type: "http",
|
||||
authToken: true,
|
||||
http: {
|
||||
url: "https://server.example.test",
|
||||
username: "opencode",
|
||||
password: "secret",
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
|
||||
expect(list).toHaveLength(1)
|
||||
expect(list[0]?.type).toBe("http")
|
||||
expect(list[0]?.http).toEqual({
|
||||
url: "https://server.example.test",
|
||||
username: "opencode",
|
||||
password: "secret",
|
||||
})
|
||||
expect(list[0]?.type === "http" ? list[0].authToken : false).toBe(true)
|
||||
expect(ServerConnection.key(list[0]!) as string).toBe("https://server.example.test")
|
||||
})
|
||||
|
||||
test("keeps persisted credentials when startup has no auth_token", () => {
|
||||
const list = resolveServerList({
|
||||
stored: [
|
||||
{
|
||||
url: "https://server.example.test",
|
||||
username: "opencode",
|
||||
password: "saved",
|
||||
},
|
||||
],
|
||||
props: [{ type: "http", http: { url: "https://server.example.test" } }],
|
||||
})
|
||||
|
||||
expect(list).toHaveLength(1)
|
||||
expect(list[0]?.type).toBe("http")
|
||||
expect(list[0]?.http).toEqual({
|
||||
url: "https://server.example.test",
|
||||
username: "opencode",
|
||||
password: "saved",
|
||||
})
|
||||
expect(list[0]?.type === "http" ? list[0].authToken : true).toBeUndefined()
|
||||
})
|
||||
})
|
||||
@@ -33,6 +33,33 @@ function isLocalHost(url: string) {
|
||||
if (host === "localhost" || host === "127.0.0.1") return "local"
|
||||
}
|
||||
|
||||
export function resolveServerList(input: {
|
||||
props?: Array<ServerConnection.Any>
|
||||
stored: StoredServer[]
|
||||
}): Array<ServerConnection.Any> {
|
||||
const servers = [
|
||||
...input.stored.map((value) =>
|
||||
typeof value === "string"
|
||||
? {
|
||||
type: "http" as const,
|
||||
http: { url: value },
|
||||
}
|
||||
: value,
|
||||
),
|
||||
...(input.props ?? []),
|
||||
]
|
||||
|
||||
const deduped = new Map<ServerConnection.Key, ServerConnection.Any>()
|
||||
for (const value of servers) {
|
||||
const conn: ServerConnection.Any = "type" in value ? value : { type: "http", http: value }
|
||||
const key = ServerConnection.key(conn)
|
||||
if (deduped.has(key) && conn.type === "http" && !conn.authToken) continue
|
||||
deduped.set(key, conn)
|
||||
}
|
||||
|
||||
return [...deduped.values()]
|
||||
}
|
||||
|
||||
export namespace ServerConnection {
|
||||
type Base = { displayName?: string }
|
||||
|
||||
@@ -46,6 +73,7 @@ export namespace ServerConnection {
|
||||
export type Http = {
|
||||
type: "http"
|
||||
http: HttpBase
|
||||
authToken?: boolean
|
||||
} & Base
|
||||
|
||||
export type Sidecar = {
|
||||
@@ -113,26 +141,7 @@ export const { use: useServer, provider: ServerProvider } = createSimpleContext(
|
||||
const url = (x: StoredServer) => (typeof x === "string" ? x : "type" in x ? x.http.url : x.url)
|
||||
|
||||
const allServers = createMemo((): Array<ServerConnection.Any> => {
|
||||
const servers = [
|
||||
...(props.servers ?? []),
|
||||
...store.list.map((value) =>
|
||||
typeof value === "string"
|
||||
? {
|
||||
type: "http" as const,
|
||||
http: { url: value },
|
||||
}
|
||||
: value,
|
||||
),
|
||||
]
|
||||
|
||||
const deduped = new Map(
|
||||
servers.map((value) => {
|
||||
const conn: ServerConnection.Any = "type" in value ? value : { type: "http", http: value }
|
||||
return [ServerConnection.key(conn), conn]
|
||||
}),
|
||||
)
|
||||
|
||||
return [...deduped.values()]
|
||||
return resolveServerList({ stored: store.list, props: props.servers })
|
||||
})
|
||||
|
||||
const [state, setState] = createStore({
|
||||
@@ -174,7 +183,7 @@ export const { use: useServer, provider: ServerProvider } = createSimpleContext(
|
||||
function add(input: ServerConnection.Http) {
|
||||
const url_ = normalizeServerUrl(input.http.url)
|
||||
if (!url_) return
|
||||
const conn = { ...input, http: { ...input.http, url: url_ } }
|
||||
const conn: ServerConnection.Http = { ...input, authToken: undefined, http: { ...input.http, url: url_ } }
|
||||
return batch(() => {
|
||||
const existing = store.list.findIndex((x) => url(x) === url_)
|
||||
if (existing !== -1) {
|
||||
|
||||
@@ -1,6 +1,9 @@
|
||||
import { beforeAll, describe, expect, mock, test } from "bun:test"
|
||||
|
||||
let getWorkspaceTerminalCacheKey: (dir: string) => string
|
||||
type ServerKey = Parameters<typeof import("./terminal").getTerminalServerScope>[1]
|
||||
|
||||
let getWorkspaceTerminalCacheKey: (dir: string, scope?: string) => string
|
||||
let getTerminalServerScope: typeof import("./terminal").getTerminalServerScope
|
||||
let getLegacyTerminalStorageKeys: (dir: string, legacySessionID?: string) => string[]
|
||||
let migrateTerminalState: (value: unknown) => unknown
|
||||
|
||||
@@ -17,6 +20,7 @@ beforeAll(async () => {
|
||||
}))
|
||||
const mod = await import("./terminal")
|
||||
getWorkspaceTerminalCacheKey = mod.getWorkspaceTerminalCacheKey
|
||||
getTerminalServerScope = mod.getTerminalServerScope
|
||||
getLegacyTerminalStorageKeys = mod.getLegacyTerminalStorageKeys
|
||||
migrateTerminalState = mod.migrateTerminalState
|
||||
})
|
||||
@@ -25,6 +29,45 @@ describe("getWorkspaceTerminalCacheKey", () => {
|
||||
test("uses workspace-only directory cache key", () => {
|
||||
expect(getWorkspaceTerminalCacheKey("/repo")).toBe("/repo:__workspace__")
|
||||
})
|
||||
|
||||
test("can include a server scope", () => {
|
||||
expect(getWorkspaceTerminalCacheKey("/repo", "wsl:Debian")).toBe("wsl:Debian:/repo:__workspace__")
|
||||
})
|
||||
})
|
||||
|
||||
describe("getTerminalServerScope", () => {
|
||||
test("preserves local server keys", () => {
|
||||
expect(
|
||||
getTerminalServerScope(
|
||||
{ type: "sidecar", variant: "base", http: { url: "http://127.0.0.1:4096" } },
|
||||
"sidecar" as ServerKey,
|
||||
),
|
||||
).toBeUndefined()
|
||||
expect(
|
||||
getTerminalServerScope(
|
||||
{ type: "http", http: { url: "http://localhost:4096" } },
|
||||
"http://localhost:4096" as ServerKey,
|
||||
),
|
||||
).toBeUndefined()
|
||||
expect(
|
||||
getTerminalServerScope({ type: "http", http: { url: "http://[::1]:4096" } }, "http://[::1]:4096" as ServerKey),
|
||||
).toBeUndefined()
|
||||
})
|
||||
|
||||
test("scopes non-local server keys", () => {
|
||||
expect(
|
||||
getTerminalServerScope(
|
||||
{ type: "sidecar", variant: "wsl", distro: "Debian", http: { url: "http://127.0.0.1:4096" } },
|
||||
"wsl:Debian" as ServerKey,
|
||||
),
|
||||
).toBe("wsl:Debian" as ServerKey)
|
||||
expect(
|
||||
getTerminalServerScope(
|
||||
{ type: "http", http: { url: "https://example.com" } },
|
||||
"https://example.com" as ServerKey,
|
||||
),
|
||||
).toBe("https://example.com" as ServerKey)
|
||||
})
|
||||
})
|
||||
|
||||
describe("getLegacyTerminalStorageKeys", () => {
|
||||
|
||||
@@ -4,6 +4,7 @@ import { batch, createEffect, createMemo, createRoot, on, onCleanup } from "soli
|
||||
import { useParams } from "@solidjs/router"
|
||||
import { useSDK } from "./sdk"
|
||||
import type { Platform } from "./platform"
|
||||
import { ServerConnection, useServer } from "./server"
|
||||
import { defaultTitle, titleNumber } from "./terminal-title"
|
||||
import { Persist, persisted, removePersisted } from "@/utils/persist"
|
||||
|
||||
@@ -82,10 +83,31 @@ export function migrateTerminalState(value: unknown) {
|
||||
}
|
||||
}
|
||||
|
||||
export function getWorkspaceTerminalCacheKey(dir: string) {
|
||||
export function getWorkspaceTerminalCacheKey(dir: string, scope?: string) {
|
||||
if (scope) return `${scope}:${dir}:${WORKSPACE_KEY}`
|
||||
return `${dir}:${WORKSPACE_KEY}`
|
||||
}
|
||||
|
||||
export function getTerminalServerScope(conn: ServerConnection.Any | undefined, key: ServerConnection.Key) {
|
||||
if (!conn) return
|
||||
if (conn.type === "sidecar" && conn.variant === "base") return
|
||||
if (conn.type === "http") {
|
||||
try {
|
||||
const url = new URL(conn.http.url)
|
||||
if (
|
||||
url.hostname === "localhost" ||
|
||||
url.hostname === "127.0.0.1" ||
|
||||
url.hostname === "::1" ||
|
||||
url.hostname === "[::1]"
|
||||
)
|
||||
return
|
||||
} catch {
|
||||
return key
|
||||
}
|
||||
}
|
||||
return key
|
||||
}
|
||||
|
||||
export function getLegacyTerminalStorageKeys(dir: string, legacySessionID?: string) {
|
||||
if (!legacySessionID) return [`${dir}/terminal.v1`]
|
||||
return [`${dir}/terminal/${legacySessionID}.v1`, `${dir}/terminal.v1`]
|
||||
@@ -110,15 +132,16 @@ const trimTerminal = (pty: LocalPTY) => {
|
||||
}
|
||||
}
|
||||
|
||||
export function clearWorkspaceTerminals(dir: string, sessionIDs?: string[], platform?: Platform) {
|
||||
const key = getWorkspaceTerminalCacheKey(dir)
|
||||
export function clearWorkspaceTerminals(dir: string, sessionIDs?: string[], platform?: Platform, scope?: string) {
|
||||
const key = getWorkspaceTerminalCacheKey(dir, scope)
|
||||
for (const cache of caches) {
|
||||
const entry = cache.get(key)
|
||||
entry?.value.clear()
|
||||
}
|
||||
|
||||
void removePersisted(Persist.workspace(dir, "terminal"), platform)
|
||||
void removePersisted(Persist.workspace(dir, scope ? `terminal:${scope}` : "terminal"), platform)
|
||||
|
||||
if (scope) return
|
||||
const legacy = new Set(getLegacyTerminalStorageKeys(dir))
|
||||
for (const id of sessionIDs ?? []) {
|
||||
for (const key of getLegacyTerminalStorageKeys(dir, id)) {
|
||||
@@ -130,12 +153,17 @@ export function clearWorkspaceTerminals(dir: string, sessionIDs?: string[], plat
|
||||
}
|
||||
}
|
||||
|
||||
function createWorkspaceTerminalSession(sdk: ReturnType<typeof useSDK>, dir: string, legacySessionID?: string) {
|
||||
const legacy = getLegacyTerminalStorageKeys(dir, legacySessionID)
|
||||
function createWorkspaceTerminalSession(
|
||||
sdk: ReturnType<typeof useSDK>,
|
||||
dir: string,
|
||||
legacySessionID?: string,
|
||||
scope?: string,
|
||||
) {
|
||||
const legacy = scope ? [] : getLegacyTerminalStorageKeys(dir, legacySessionID)
|
||||
|
||||
const [store, setStore, _, ready] = persisted(
|
||||
{
|
||||
...Persist.workspace(dir, "terminal", legacy),
|
||||
...Persist.workspace(dir, scope ? `terminal:${scope}` : "terminal", legacy),
|
||||
migrate: migrateTerminalState,
|
||||
},
|
||||
createStore<{
|
||||
@@ -357,8 +385,12 @@ export const { use: useTerminal, provider: TerminalProvider } = createSimpleCont
|
||||
gate: false,
|
||||
init: () => {
|
||||
const sdk = useSDK()
|
||||
const server = useServer()
|
||||
const params = useParams()
|
||||
const cache = new Map<string, TerminalCacheEntry>()
|
||||
const scope = createMemo(() => {
|
||||
return getTerminalServerScope(server.current, server.key)
|
||||
})
|
||||
|
||||
caches.add(cache)
|
||||
onCleanup(() => caches.delete(cache))
|
||||
@@ -382,9 +414,9 @@ export const { use: useTerminal, provider: TerminalProvider } = createSimpleCont
|
||||
}
|
||||
}
|
||||
|
||||
const loadWorkspace = (dir: string, legacySessionID?: string) => {
|
||||
const loadWorkspace = (dir: string, legacySessionID: string | undefined, serverScope: string | undefined) => {
|
||||
// Terminals are workspace-scoped so tabs persist while switching sessions in the same directory.
|
||||
const key = getWorkspaceTerminalCacheKey(dir)
|
||||
const key = getWorkspaceTerminalCacheKey(dir, serverScope)
|
||||
const existing = cache.get(key)
|
||||
if (existing) {
|
||||
cache.delete(key)
|
||||
@@ -393,7 +425,7 @@ export const { use: useTerminal, provider: TerminalProvider } = createSimpleCont
|
||||
}
|
||||
|
||||
const entry = createRoot((dispose) => ({
|
||||
value: createWorkspaceTerminalSession(sdk, dir, legacySessionID),
|
||||
value: createWorkspaceTerminalSession(sdk, dir, legacySessionID, serverScope),
|
||||
dispose,
|
||||
}))
|
||||
|
||||
@@ -402,16 +434,16 @@ export const { use: useTerminal, provider: TerminalProvider } = createSimpleCont
|
||||
return entry.value
|
||||
}
|
||||
|
||||
const workspace = createMemo(() => loadWorkspace(params.dir!, params.id))
|
||||
const workspace = createMemo(() => loadWorkspace(params.dir!, params.id, scope()))
|
||||
|
||||
createEffect(
|
||||
on(
|
||||
() => ({ dir: params.dir, id: params.id }),
|
||||
() => ({ dir: params.dir, id: params.id, scope: scope() }),
|
||||
(next, prev) => {
|
||||
if (!prev?.dir) return
|
||||
if (next.dir === prev.dir && next.id === prev.id) return
|
||||
if (next.dir === prev.dir && next.id) return
|
||||
loadWorkspace(prev.dir, prev.id).trimAll()
|
||||
if (next.dir === prev.dir && next.id === prev.id && next.scope === prev.scope) return
|
||||
if (next.dir === prev.dir && next.id && next.scope === prev.scope) return
|
||||
loadWorkspace(prev.dir, prev.id, prev.scope).trimAll()
|
||||
},
|
||||
{ defer: true },
|
||||
),
|
||||
|
||||
@@ -7,6 +7,7 @@ import { type Platform, PlatformProvider } from "@/context/platform"
|
||||
import { dict as en } from "@/i18n/en"
|
||||
import { dict as zh } from "@/i18n/zh"
|
||||
import { handleNotificationClick } from "@/utils/notification-click"
|
||||
import { authFromToken } from "@/utils/server"
|
||||
import pkg from "../package.json"
|
||||
import { ServerConnection } from "./context/server"
|
||||
|
||||
@@ -111,6 +112,13 @@ const getDefaultUrl = () => {
|
||||
return getCurrentUrl()
|
||||
}
|
||||
|
||||
const clearAuthToken = () => {
|
||||
const params = new URLSearchParams(location.search)
|
||||
if (!params.has("auth_token")) return
|
||||
params.delete("auth_token")
|
||||
history.replaceState(null, "", location.pathname + (params.size ? `?${params}` : "") + location.hash)
|
||||
}
|
||||
|
||||
const platform: Platform = {
|
||||
platform: "web",
|
||||
version: pkg.version,
|
||||
@@ -146,7 +154,16 @@ if (import.meta.env.VITE_SENTRY_DSN) {
|
||||
}
|
||||
|
||||
if (root instanceof HTMLElement) {
|
||||
const server: ServerConnection.Http = { type: "http", http: { url: getCurrentUrl() } }
|
||||
const auth = authFromToken(new URLSearchParams(location.search).get("auth_token"))
|
||||
clearAuthToken()
|
||||
const server: ServerConnection.Http = {
|
||||
type: "http",
|
||||
authToken: !!auth,
|
||||
http: {
|
||||
url: getCurrentUrl(),
|
||||
...auth,
|
||||
},
|
||||
}
|
||||
render(
|
||||
() => (
|
||||
<PlatformProvider value={platform}>
|
||||
|
||||
@@ -35,7 +35,7 @@ import type { DragEvent } from "@thisbeyond/solid-dnd"
|
||||
import { useProviders } from "@/hooks/use-providers"
|
||||
import { showToast, Toast, toaster } from "@opencode-ai/ui/toast"
|
||||
import { useGlobalSDK } from "@/context/global-sdk"
|
||||
import { clearWorkspaceTerminals } from "@/context/terminal"
|
||||
import { clearWorkspaceTerminals, getTerminalServerScope } from "@/context/terminal"
|
||||
import { dropSessionCaches, pickSessionCacheEvictions } from "@/context/global-sync/session-cache"
|
||||
import {
|
||||
clearSessionPrefetchInflight,
|
||||
@@ -1557,6 +1557,7 @@ export default function Layout(props: ParentProps) {
|
||||
directory,
|
||||
sessions.map((s) => s.id),
|
||||
platform,
|
||||
getTerminalServerScope(server.current, server.key),
|
||||
)
|
||||
await globalSDK.client.instance.dispose({ directory }).catch(() => undefined)
|
||||
|
||||
|
||||
@@ -37,6 +37,7 @@ export function TerminalPanel() {
|
||||
const [store, setStore] = createStore({
|
||||
autoCreated: false,
|
||||
activeDraggable: undefined as string | undefined,
|
||||
recovered: {} as Record<string, boolean>,
|
||||
view: typeof window === "undefined" ? 1000 : (window.visualViewport?.height ?? window.innerHeight),
|
||||
})
|
||||
|
||||
@@ -145,6 +146,21 @@ export function TerminalPanel() {
|
||||
const all = terminal.all
|
||||
const ids = createMemo(() => all().map((pty) => pty.id))
|
||||
|
||||
const recoverTerminal = (key: string, id: string, clone: (id: string) => Promise<void>) => {
|
||||
if (store.recovered[key]) return
|
||||
setStore("recovered", key, true)
|
||||
void clone(id)
|
||||
}
|
||||
|
||||
const terminalRecoveryKey = (pty: { id: string; title: string; titleNumber: number }) => {
|
||||
return String(pty.titleNumber || pty.title || pty.id)
|
||||
}
|
||||
|
||||
const markTerminalConnected = (key: string, id: string, trim: (id: string) => void) => {
|
||||
setStore("recovered", key, false)
|
||||
trim(id)
|
||||
}
|
||||
|
||||
const handleTerminalDragStart = (event: unknown) => {
|
||||
const id = getDraggableId(event)
|
||||
if (!id) return
|
||||
@@ -280,9 +296,9 @@ export function TerminalPanel() {
|
||||
<Terminal
|
||||
pty={pty()}
|
||||
autoFocus={opened()}
|
||||
onConnect={() => ops.trim(id)}
|
||||
onConnect={() => markTerminalConnected(terminalRecoveryKey(pty()), id, ops.trim)}
|
||||
onCleanup={ops.update}
|
||||
onConnectError={() => ops.clone(id)}
|
||||
onConnectError={() => recoverTerminal(terminalRecoveryKey(pty()), id, ops.clone)}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
23
packages/app/src/utils/server.test.ts
Normal file
23
packages/app/src/utils/server.test.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { authFromToken, authTokenFromCredentials } from "./server"
|
||||
|
||||
describe("authFromToken", () => {
|
||||
test("decodes basic auth credentials from auth_token", () => {
|
||||
expect(authFromToken(btoa("kit:secret"))).toEqual({ username: "kit", password: "secret" })
|
||||
})
|
||||
|
||||
test("defaults blank username to opencode", () => {
|
||||
expect(authFromToken(btoa(":secret"))).toEqual({ username: "opencode", password: "secret" })
|
||||
})
|
||||
|
||||
test("ignores malformed tokens", () => {
|
||||
expect(authFromToken("not base64")).toBeUndefined()
|
||||
expect(authFromToken(btoa("missing-separator"))).toBeUndefined()
|
||||
})
|
||||
})
|
||||
|
||||
describe("authTokenFromCredentials", () => {
|
||||
test("encodes credentials with the default username", () => {
|
||||
expect(authTokenFromCredentials({ password: "secret" })).toBe(btoa("opencode:secret"))
|
||||
})
|
||||
})
|
||||
@@ -1,5 +1,21 @@
|
||||
import { createOpencodeClient } from "@opencode-ai/sdk/v2/client"
|
||||
import type { ServerConnection } from "@/context/server"
|
||||
import { decode64 } from "@/utils/base64"
|
||||
|
||||
export function authTokenFromCredentials(input: { username?: string; password: string }) {
|
||||
return btoa(`${input.username ?? "opencode"}:${input.password}`)
|
||||
}
|
||||
|
||||
export function authFromToken(token: string | null) {
|
||||
const decoded = decode64(token ?? undefined)
|
||||
if (!decoded) return
|
||||
const separator = decoded.indexOf(":")
|
||||
if (separator === -1) return
|
||||
return {
|
||||
username: decoded.slice(0, separator) || "opencode",
|
||||
password: decoded.slice(separator + 1),
|
||||
}
|
||||
}
|
||||
|
||||
export function createSdkForServer({
|
||||
server,
|
||||
@@ -10,7 +26,7 @@ export function createSdkForServer({
|
||||
const auth = (() => {
|
||||
if (!server.password) return
|
||||
return {
|
||||
Authorization: `Basic ${btoa(`${server.username ?? "opencode"}:${server.password}`)}`,
|
||||
Authorization: `Basic ${authTokenFromCredentials({ username: server.username, password: server.password })}`,
|
||||
}
|
||||
})()
|
||||
|
||||
|
||||
52
packages/app/src/utils/terminal-websocket-url.test.ts
Normal file
52
packages/app/src/utils/terminal-websocket-url.test.ts
Normal file
@@ -0,0 +1,52 @@
|
||||
import { describe, expect, test } from "bun:test"
|
||||
import { terminalWebSocketURL } from "./terminal-websocket-url"
|
||||
|
||||
describe("terminalWebSocketURL", () => {
|
||||
test("uses query auth without embedding credentials in websocket URL", () => {
|
||||
const url = terminalWebSocketURL({
|
||||
url: "http://127.0.0.1:49365",
|
||||
id: "pty_test",
|
||||
directory: "/tmp/project",
|
||||
cursor: 0,
|
||||
sameOrigin: false,
|
||||
username: "opencode",
|
||||
password: "secret",
|
||||
})
|
||||
|
||||
expect(url.protocol).toBe("ws:")
|
||||
expect(url.username).toBe("")
|
||||
expect(url.password).toBe("")
|
||||
expect(url.searchParams.get("auth_token")).toBe(btoa("opencode:secret"))
|
||||
})
|
||||
|
||||
test("omits query auth for same-origin saved credentials", () => {
|
||||
const url = terminalWebSocketURL({
|
||||
url: "https://app.example.test",
|
||||
id: "pty_test",
|
||||
directory: "/tmp/project",
|
||||
cursor: 10,
|
||||
sameOrigin: true,
|
||||
username: "opencode",
|
||||
password: "secret",
|
||||
})
|
||||
|
||||
expect(url.protocol).toBe("wss:")
|
||||
expect(url.searchParams.has("auth_token")).toBe(false)
|
||||
})
|
||||
|
||||
test("uses query auth for same-origin credentials from auth_token", () => {
|
||||
const url = terminalWebSocketURL({
|
||||
url: "https://app.example.test",
|
||||
id: "pty_test",
|
||||
directory: "/tmp/project",
|
||||
cursor: 10,
|
||||
sameOrigin: true,
|
||||
username: "opencode",
|
||||
password: "secret",
|
||||
authToken: true,
|
||||
})
|
||||
|
||||
expect(url.protocol).toBe("wss:")
|
||||
expect(url.searchParams.get("auth_token")).toBe(btoa("opencode:secret"))
|
||||
})
|
||||
})
|
||||
28
packages/app/src/utils/terminal-websocket-url.ts
Normal file
28
packages/app/src/utils/terminal-websocket-url.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { authTokenFromCredentials } from "@/utils/server"
|
||||
|
||||
export function terminalWebSocketURL(input: {
|
||||
url: string
|
||||
id: string
|
||||
directory: string
|
||||
cursor: number
|
||||
ticket?: string
|
||||
sameOrigin?: boolean
|
||||
username?: string
|
||||
password?: string
|
||||
authToken?: boolean
|
||||
}) {
|
||||
const next = new URL(`${input.url}/pty/${input.id}/connect`)
|
||||
next.searchParams.set("directory", input.directory)
|
||||
next.searchParams.set("cursor", String(input.cursor))
|
||||
next.protocol = next.protocol === "https:" ? "wss:" : "ws:"
|
||||
if (input.ticket) {
|
||||
next.searchParams.set("ticket", input.ticket)
|
||||
return next
|
||||
}
|
||||
if (input.password && (!input.sameOrigin || input.authToken))
|
||||
next.searchParams.set(
|
||||
"auth_token",
|
||||
authTokenFromCredentials({ username: input.username, password: input.password }),
|
||||
)
|
||||
return next
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-app",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -2,11 +2,11 @@ import type { APIEvent } from "@solidjs/start"
|
||||
import type { DownloadPlatform } from "../types"
|
||||
|
||||
const prodAssetNames: Record<string, string> = {
|
||||
"darwin-aarch64-dmg": "opencode-desktop-darwin-aarch64.dmg",
|
||||
"darwin-x64-dmg": "opencode-desktop-darwin-x64.dmg",
|
||||
"windows-x64-nsis": "opencode-desktop-windows-x64.exe",
|
||||
"darwin-aarch64-dmg": "opencode-desktop-mac-arm64.dmg",
|
||||
"darwin-x64-dmg": "opencode-desktop-mac-x64.dmg",
|
||||
"windows-x64-nsis": "opencode-desktop-win-x64.exe",
|
||||
"linux-x64-deb": "opencode-desktop-linux-amd64.deb",
|
||||
"linux-x64-appimage": "opencode-desktop-linux-amd64.AppImage",
|
||||
"linux-x64-appimage": "opencode-desktop-linux-x86_64.AppImage",
|
||||
"linux-x64-rpm": "opencode-desktop-linux-x86_64.rpm",
|
||||
} satisfies Record<DownloadPlatform, string>
|
||||
|
||||
@@ -32,13 +32,6 @@ export async function GET({ params: { platform, channel } }: APIEvent) {
|
||||
|
||||
const resp = await fetch(
|
||||
`https://github.com/anomalyco/${channel === "stable" ? "opencode" : "opencode-beta"}/releases/latest/download/${assetName}`,
|
||||
{
|
||||
cf: {
|
||||
// in case gh releases has rate limits
|
||||
cacheTtl: 60 * 5,
|
||||
cacheEverything: true,
|
||||
},
|
||||
} as any,
|
||||
)
|
||||
|
||||
const downloadName = downloadNames[platform]
|
||||
|
||||
@@ -158,11 +158,13 @@ export async function handler(
|
||||
Object.entries(obj).flatMap(([k, v]) => {
|
||||
if (Array.isArray(v)) return [[k, v]]
|
||||
if (typeof v === "object") return [[k, replacer(v)]]
|
||||
if (v === "$ip") return [[k, ip]]
|
||||
if (v === "$workspace") return authInfo?.workspaceID ? [[k, authInfo?.workspaceID]] : []
|
||||
if (v.startsWith("$header.")) {
|
||||
const headerValue = input.request.headers.get(v.slice(8))
|
||||
return headerValue ? [[k, headerValue]] : []
|
||||
if (typeof v === "string") {
|
||||
if (v === "$ip") return [[k, ip]]
|
||||
if (v === "$workspace") return authInfo?.workspaceID ? [[k, authInfo?.workspaceID]] : []
|
||||
if (v.startsWith("$header.")) {
|
||||
const headerValue = input.request.headers.get(v.slice(8))
|
||||
return headerValue ? [[k, headerValue]] : []
|
||||
}
|
||||
}
|
||||
return [[k, v]]
|
||||
}),
|
||||
@@ -917,6 +919,13 @@ export async function handler(
|
||||
"tokens.cache_read": cacheReadTokens,
|
||||
"tokens.cache_write_5m": cacheWrite5mTokens,
|
||||
"tokens.cache_write_1h": cacheWrite1hTokens,
|
||||
"cost.input.microcents": centsToMicroCents(inputCost),
|
||||
"cost.output.microcents": centsToMicroCents(outputCost),
|
||||
"cost.reasoning.microcents": reasoningCost ? centsToMicroCents(reasoningCost) : undefined,
|
||||
"cost.cache_read.microcents": cacheReadCost ? centsToMicroCents(cacheReadCost) : undefined,
|
||||
"cost.cache_write.microcents": cacheWrite5mCost ? centsToMicroCents(cacheWrite5mCost) : undefined,
|
||||
"cost.total.microcents": centsToMicroCents(totalCostInCent),
|
||||
// deprecated - remove after May 20, 2026
|
||||
"cost.input": Math.round(inputCost),
|
||||
"cost.output": Math.round(outputCost),
|
||||
"cost.reasoning": reasoningCost ? Math.round(reasoningCost) : undefined,
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"name": "@opencode-ai/console-core",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-function",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/console-mail",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"dependencies": {
|
||||
"@jsx-email/all": "2.2.3",
|
||||
"@jsx-email/cli": "1.4.3",
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"name": "@opencode-ai/core",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -24,6 +24,7 @@ export namespace AppFileSystem {
|
||||
readonly isDir: (path: string) => Effect.Effect<boolean>
|
||||
readonly isFile: (path: string) => Effect.Effect<boolean>
|
||||
readonly existsSafe: (path: string) => Effect.Effect<boolean>
|
||||
readonly readFileStringSafe: (path: string) => Effect.Effect<string | undefined, Error>
|
||||
readonly readJson: (path: string) => Effect.Effect<unknown, Error>
|
||||
readonly writeJson: (path: string, data: unknown, mode?: number) => Effect.Effect<void, Error>
|
||||
readonly ensureDir: (path: string) => Effect.Effect<void, Error>
|
||||
@@ -47,6 +48,12 @@ export namespace AppFileSystem {
|
||||
return yield* fs.exists(path).pipe(Effect.orElseSucceed(() => false))
|
||||
})
|
||||
|
||||
const readFileStringSafe = Effect.fn("FileSystem.readFileStringSafe")(function* (path: string) {
|
||||
return yield* fs
|
||||
.readFileString(path)
|
||||
.pipe(Effect.catchReason("PlatformError", "NotFound", () => Effect.succeed(undefined)))
|
||||
})
|
||||
|
||||
const isDir = Effect.fn("FileSystem.isDir")(function* (path: string) {
|
||||
const info = yield* fs.stat(path).pipe(Effect.catch(() => Effect.void))
|
||||
return info?.type === "Directory"
|
||||
@@ -163,6 +170,7 @@ export namespace AppFileSystem {
|
||||
return Service.of({
|
||||
...fs,
|
||||
existsSafe,
|
||||
readFileStringSafe,
|
||||
isDir,
|
||||
isFile,
|
||||
readDirectoryEntries,
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { Config } from "effect"
|
||||
import { InstallationChannel } from "../installation/version"
|
||||
|
||||
function truthy(key: string) {
|
||||
const value = process.env[key]?.toLowerCase()
|
||||
@@ -11,10 +10,6 @@ function falsy(key: string) {
|
||||
return value === "false" || value === "0"
|
||||
}
|
||||
|
||||
// Channels that default to the new effect-httpapi server backend. The legacy
|
||||
// hono backend remains the default for stable (`prod`/`latest`) installs.
|
||||
const HTTPAPI_DEFAULT_ON_CHANNELS = new Set(["dev", "beta", "local"])
|
||||
|
||||
function number(key: string) {
|
||||
const value = process.env[key]
|
||||
if (!value) return undefined
|
||||
@@ -86,14 +81,6 @@ export const Flag = {
|
||||
OPENCODE_STRICT_CONFIG_DEPS: truthy("OPENCODE_STRICT_CONFIG_DEPS"),
|
||||
|
||||
OPENCODE_WORKSPACE_ID: process.env["OPENCODE_WORKSPACE_ID"],
|
||||
// Defaults to true on dev/beta/local channels so internal users exercise the
|
||||
// new effect-httpapi server backend. Stable (`prod`/`latest`) installs stay
|
||||
// on the legacy hono backend until the rollout is complete. An explicit env
|
||||
// var ("true"/"1" or "false"/"0") always wins, providing an opt-in for
|
||||
// stable users and an escape hatch for dev/beta users.
|
||||
OPENCODE_EXPERIMENTAL_HTTPAPI:
|
||||
truthy("OPENCODE_EXPERIMENTAL_HTTPAPI") ||
|
||||
(!falsy("OPENCODE_EXPERIMENTAL_HTTPAPI") && HTTPAPI_DEFAULT_ON_CHANNELS.has(InstallationChannel)),
|
||||
OPENCODE_EXPERIMENTAL_WORKSPACES: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_WORKSPACES"),
|
||||
OPENCODE_EXPERIMENTAL_EVENT_SYSTEM: OPENCODE_EXPERIMENTAL || truthy("OPENCODE_EXPERIMENTAL_EVENT_SYSTEM"),
|
||||
|
||||
|
||||
@@ -71,6 +71,8 @@ export const layer = Layer.effect(
|
||||
Effect.sync(() => Service.of(make())),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer
|
||||
|
||||
export const layerWith = (input: Partial<Interface>) =>
|
||||
Layer.effect(
|
||||
Service,
|
||||
|
||||
@@ -65,6 +65,34 @@ describe("AppFileSystem", () => {
|
||||
)
|
||||
})
|
||||
|
||||
describe("readFileStringSafe", () => {
|
||||
it(
|
||||
"returns file contents when file exists",
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const filesys = yield* FileSystem.FileSystem
|
||||
const tmp = yield* filesys.makeTempDirectoryScoped()
|
||||
const file = path.join(tmp, "exists.txt")
|
||||
yield* filesys.writeFileString(file, "hello")
|
||||
|
||||
const result = yield* fs.readFileStringSafe(file)
|
||||
expect(result).toBe("hello")
|
||||
}),
|
||||
)
|
||||
|
||||
it(
|
||||
"returns undefined for missing file (NotFound)",
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const filesys = yield* FileSystem.FileSystem
|
||||
const tmp = yield* filesys.makeTempDirectoryScoped()
|
||||
|
||||
const result = yield* fs.readFileStringSafe(path.join(tmp, "does-not-exist.txt"))
|
||||
expect(result).toBeUndefined()
|
||||
}),
|
||||
)
|
||||
})
|
||||
|
||||
describe("readJson / writeJson", () => {
|
||||
it(
|
||||
"round-trips JSON data",
|
||||
|
||||
@@ -27,7 +27,7 @@ const channel = (() => {
|
||||
})()
|
||||
|
||||
const getBase = (): Configuration => ({
|
||||
artifactName: "opencode-electron-${os}-${arch}.${ext}",
|
||||
artifactName: "opencode-desktop-${os}-${arch}.${ext}",
|
||||
directories: {
|
||||
output: "dist",
|
||||
buildResources: "resources",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop-electron",
|
||||
"private": true,
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"homepage": "https://opencode.ai",
|
||||
|
||||
@@ -74,6 +74,7 @@ setupApp()
|
||||
function setupApp() {
|
||||
ensureLoopbackNoProxy()
|
||||
app.commandLine.appendSwitch("proxy-bypass-list", "<-loopback>")
|
||||
if (!app.isPackaged) app.commandLine.appendSwitch("remote-debugging-port", "9222")
|
||||
|
||||
if (!app.requestSingleInstanceLock()) {
|
||||
app.quit()
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "@opencode-ai/desktop",
|
||||
"private": true,
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
#!/usr/bin/env bun
|
||||
|
||||
import { Buffer } from "node:buffer"
|
||||
import { $ } from "bun"
|
||||
import path from "node:path"
|
||||
import { parseArgs } from "node:util"
|
||||
|
||||
const { values } = parseArgs({
|
||||
args: Bun.argv.slice(2),
|
||||
@@ -12,8 +13,6 @@ const { values } = parseArgs({
|
||||
|
||||
const dryRun = values["dry-run"]
|
||||
|
||||
import { parseArgs } from "node:util"
|
||||
|
||||
const repo = process.env.GH_REPO
|
||||
if (!repo) throw new Error("GH_REPO is required")
|
||||
|
||||
@@ -23,20 +22,22 @@ if (!releaseId) throw new Error("OPENCODE_RELEASE is required")
|
||||
const version = process.env.OPENCODE_VERSION
|
||||
if (!version) throw new Error("OPENCODE_VERSION is required")
|
||||
|
||||
const dir = process.env.LATEST_YML_DIR
|
||||
if (!dir) throw new Error("LATEST_YML_DIR is required")
|
||||
const root = dir
|
||||
|
||||
const token = process.env.GH_TOKEN ?? process.env.GITHUB_TOKEN
|
||||
if (!token) throw new Error("GH_TOKEN or GITHUB_TOKEN is required")
|
||||
|
||||
const apiHeaders = {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/vnd.github+json",
|
||||
}
|
||||
|
||||
const releaseRes = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
|
||||
headers: apiHeaders,
|
||||
const rel = await fetch(`https://api.github.com/repos/${repo}/releases/${releaseId}`, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/vnd.github+json",
|
||||
},
|
||||
})
|
||||
|
||||
if (!releaseRes.ok) {
|
||||
throw new Error(`Failed to fetch release: ${releaseRes.status} ${releaseRes.statusText}`)
|
||||
if (!rel.ok) {
|
||||
throw new Error(`Failed to fetch release: ${rel.status} ${rel.statusText}`)
|
||||
}
|
||||
|
||||
type Asset = {
|
||||
@@ -45,115 +46,169 @@ type Asset = {
|
||||
}
|
||||
|
||||
type Release = {
|
||||
tag_name?: string
|
||||
assets?: Asset[]
|
||||
}
|
||||
|
||||
const release = (await releaseRes.json()) as Release
|
||||
const assets = release.assets ?? []
|
||||
const assetByName = new Map(assets.map((asset) => [asset.name, asset]))
|
||||
const assets = ((await rel.json()) as Release).assets ?? []
|
||||
const amap = new Map(assets.map((item) => [item.name, item]))
|
||||
|
||||
const latestAsset = assetByName.get("latest.json")
|
||||
if (!latestAsset) {
|
||||
console.log("latest.json not found, skipping tauri finalization")
|
||||
process.exit(0)
|
||||
type Item = {
|
||||
url: string
|
||||
}
|
||||
|
||||
const latestRes = await fetch(latestAsset.url, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/octet-stream",
|
||||
},
|
||||
})
|
||||
|
||||
if (!latestRes.ok) {
|
||||
throw new Error(`Failed to fetch latest.json: ${latestRes.status} ${latestRes.statusText}`)
|
||||
type Yml = {
|
||||
version: string
|
||||
files: Item[]
|
||||
}
|
||||
|
||||
const latestText = new TextDecoder().decode(await latestRes.arrayBuffer())
|
||||
const latest = JSON.parse(latestText)
|
||||
const base = { ...latest }
|
||||
delete base.platforms
|
||||
function parse(text: string): Yml {
|
||||
const lines = text.split("\n")
|
||||
let version = ""
|
||||
const files: Item[] = []
|
||||
let url = ""
|
||||
|
||||
const fetchSignature = async (asset: Asset) => {
|
||||
const res = await fetch(asset.url, {
|
||||
const flush = () => {
|
||||
if (!url) return
|
||||
files.push({ url })
|
||||
url = ""
|
||||
}
|
||||
|
||||
for (const line of lines) {
|
||||
const trim = line.trim()
|
||||
if (line.startsWith("version:")) {
|
||||
version = line.slice("version:".length).trim()
|
||||
continue
|
||||
}
|
||||
if (trim.startsWith("- url:")) {
|
||||
flush()
|
||||
url = trim.slice("- url:".length).trim()
|
||||
continue
|
||||
}
|
||||
const indented = line.startsWith(" ") || line.startsWith("\t")
|
||||
if (!indented) flush()
|
||||
}
|
||||
flush()
|
||||
|
||||
return { version, files }
|
||||
}
|
||||
|
||||
async function read(sub: string, file: string) {
|
||||
const item = Bun.file(path.join(root, sub, file))
|
||||
if (!(await item.exists())) return undefined
|
||||
return parse(await item.text())
|
||||
}
|
||||
|
||||
function pick(list: Item[], exts: string[]) {
|
||||
for (const ext of exts) {
|
||||
const found = list.find((item) => item.url.split("?")[0]?.toLowerCase().endsWith(ext))
|
||||
if (found) return found.url
|
||||
}
|
||||
}
|
||||
|
||||
function link(raw: string) {
|
||||
if (raw.startsWith("https://") || raw.startsWith("http://")) return raw
|
||||
return `https://github.com/${repo}/releases/download/v${version}/${raw}`
|
||||
}
|
||||
|
||||
async function sign(url: string, key: string) {
|
||||
const name = decodeURIComponent(new URL(url).pathname.split("/").pop() ?? key)
|
||||
const asset = amap.get(name)
|
||||
const res = await fetch(asset?.url ?? url, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/octet-stream",
|
||||
...(asset ? { Accept: "application/octet-stream" } : {}),
|
||||
},
|
||||
})
|
||||
|
||||
if (!res.ok) {
|
||||
throw new Error(`Failed to fetch signature: ${res.status} ${res.statusText}`)
|
||||
throw new Error(`Failed to fetch file ${name}: ${res.status} ${res.statusText} (${asset?.url ?? url})`)
|
||||
}
|
||||
|
||||
return Buffer.from(await res.arrayBuffer()).toString()
|
||||
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
|
||||
const file = path.join(tmp, name)
|
||||
await Bun.write(file, await res.arrayBuffer())
|
||||
await $`bunx @tauri-apps/cli signer sign ${file}`
|
||||
const sigFile = Bun.file(`${file}.sig`)
|
||||
if (!(await sigFile.exists())) throw new Error(`Signature file not found for ${name}`)
|
||||
return (await sigFile.text()).trim()
|
||||
}
|
||||
|
||||
const entries: Record<string, { url: string; signature: string }> = {}
|
||||
const add = (key: string, asset: Asset, signature: string) => {
|
||||
if (entries[key]) return
|
||||
entries[key] = {
|
||||
url: `https://github.com/${repo}/releases/download/v${version}/${asset.name}`,
|
||||
signature,
|
||||
}
|
||||
const add = async (data: Record<string, { url: string; signature: string }>, key: string, raw: string | undefined) => {
|
||||
if (!raw) return
|
||||
if (data[key]) return
|
||||
const url = link(raw)
|
||||
data[key] = { url, signature: await sign(url, key) }
|
||||
}
|
||||
|
||||
const targets = [
|
||||
{ key: "linux-x86_64-deb", asset: "opencode-desktop-linux-amd64.deb" },
|
||||
{ key: "linux-x86_64-rpm", asset: "opencode-desktop-linux-x86_64.rpm" },
|
||||
{ key: "linux-aarch64-deb", asset: "opencode-desktop-linux-arm64.deb" },
|
||||
{ key: "linux-aarch64-rpm", asset: "opencode-desktop-linux-aarch64.rpm" },
|
||||
{ key: "windows-aarch64-nsis", asset: "opencode-desktop-windows-arm64.exe" },
|
||||
{ key: "windows-x86_64-nsis", asset: "opencode-desktop-windows-x64.exe" },
|
||||
{ key: "darwin-x86_64-app", asset: "opencode-desktop-darwin-x64.app.tar.gz" },
|
||||
{
|
||||
key: "darwin-aarch64-app",
|
||||
asset: "opencode-desktop-darwin-aarch64.app.tar.gz",
|
||||
},
|
||||
]
|
||||
|
||||
for (const target of targets) {
|
||||
const asset = assetByName.get(target.asset)
|
||||
if (!asset) continue
|
||||
|
||||
const sig = assetByName.get(`${target.asset}.sig`)
|
||||
if (!sig) continue
|
||||
|
||||
const signature = await fetchSignature(sig)
|
||||
add(target.key, asset, signature)
|
||||
const alias = (data: Record<string, { url: string; signature: string }>, key: string, src: string) => {
|
||||
if (data[key]) return
|
||||
if (!data[src]) return
|
||||
data[key] = data[src]
|
||||
}
|
||||
|
||||
const alias = (key: string, source: string) => {
|
||||
if (entries[key]) return
|
||||
const entry = entries[source]
|
||||
if (!entry) return
|
||||
entries[key] = entry
|
||||
}
|
||||
const winx = await read("latest-yml-x86_64-pc-windows-msvc", "latest.yml")
|
||||
const wina = await read("latest-yml-aarch64-pc-windows-msvc", "latest.yml")
|
||||
const macx = await read("latest-yml-x86_64-apple-darwin", "latest-mac.yml")
|
||||
const maca = await read("latest-yml-aarch64-apple-darwin", "latest-mac.yml")
|
||||
const linx = await read("latest-yml-x86_64-unknown-linux-gnu", "latest-linux.yml")
|
||||
const lina = await read("latest-yml-aarch64-unknown-linux-gnu", "latest-linux-arm64.yml")
|
||||
|
||||
alias("linux-x86_64", "linux-x86_64-deb")
|
||||
alias("linux-aarch64", "linux-aarch64-deb")
|
||||
alias("windows-aarch64", "windows-aarch64-nsis")
|
||||
alias("windows-x86_64", "windows-x86_64-nsis")
|
||||
alias("darwin-x86_64", "darwin-x86_64-app")
|
||||
alias("darwin-aarch64", "darwin-aarch64-app")
|
||||
const yver = winx?.version ?? wina?.version ?? macx?.version ?? maca?.version ?? linx?.version ?? lina?.version
|
||||
if (yver && yver !== version) throw new Error(`latest.yml version mismatch: expected ${version}, got ${yver}`)
|
||||
|
||||
const out: Record<string, { url: string; signature: string }> = {}
|
||||
|
||||
const winxexe = pick(winx?.files ?? [], [".exe"])
|
||||
const winaexe = pick(wina?.files ?? [], [".exe"])
|
||||
|
||||
const macxTarGz = "opencode-desktop-mac-x64.app.tar.gz"
|
||||
const macaTarGz = "opencode-desktop-mac-arm64.app.tar.gz"
|
||||
|
||||
const linxDeb = pick(linx?.files ?? [], [".deb"])
|
||||
const linxRpm = pick(linx?.files ?? [], [".rpm"])
|
||||
const linxAppImage = pick(linx?.files ?? [], [".appimage"])
|
||||
const linaDeb = pick(lina?.files ?? [], [".deb"])
|
||||
const linaRpm = pick(lina?.files ?? [], [".rpm"])
|
||||
const linaAppImage = pick(lina?.files ?? [], [".appimage"])
|
||||
|
||||
await add(out, "windows-x86_64-nsis", winxexe)
|
||||
await add(out, "windows-aarch64-nsis", winaexe)
|
||||
await add(out, "darwin-x86_64-app", macxTarGz)
|
||||
await add(out, "darwin-aarch64-app", macaTarGz)
|
||||
|
||||
await add(out, "linux-x86_64-deb", linxDeb)
|
||||
await add(out, "linux-x86_64-rpm", linxRpm)
|
||||
await add(out, "linux-x86_64-appimage", linxAppImage)
|
||||
await add(out, "linux-aarch64-deb", linaDeb)
|
||||
await add(out, "linux-aarch64-rpm", linaRpm)
|
||||
await add(out, "linux-aarch64-appimage", linaAppImage)
|
||||
|
||||
alias(out, "windows-x86_64", "windows-x86_64-nsis")
|
||||
alias(out, "windows-aarch64", "windows-aarch64-nsis")
|
||||
alias(out, "darwin-x86_64", "darwin-x86_64-app")
|
||||
alias(out, "darwin-aarch64", "darwin-aarch64-app")
|
||||
alias(out, "linux-x86_64", "linux-x86_64-deb")
|
||||
alias(out, "linux-aarch64", "linux-aarch64-deb")
|
||||
|
||||
const platforms = Object.fromEntries(
|
||||
Object.keys(entries)
|
||||
Object.keys(out)
|
||||
.sort()
|
||||
.map((key) => [key, entries[key]]),
|
||||
.map((key) => [key, out[key]]),
|
||||
)
|
||||
const output = {
|
||||
...base,
|
||||
|
||||
if (!Object.keys(platforms).length) throw new Error("No updater files found in latest.yml artifacts")
|
||||
|
||||
const data = {
|
||||
version,
|
||||
notes: "",
|
||||
pub_date: new Date().toISOString(),
|
||||
platforms,
|
||||
}
|
||||
|
||||
const dir = process.env.RUNNER_TEMP ?? "/tmp"
|
||||
const file = `${dir}/latest.json`
|
||||
await Bun.write(file, JSON.stringify(output, null, 2))
|
||||
const tmp = process.env.RUNNER_TEMP ?? "/tmp"
|
||||
const file = path.join(tmp, "latest.json")
|
||||
await Bun.write(file, JSON.stringify(data, null, 2))
|
||||
|
||||
const tag = release.tag_name
|
||||
if (!tag) throw new Error("Release tag not found")
|
||||
const tag = `v${version}`
|
||||
|
||||
if (dryRun) {
|
||||
console.log(`dry-run: wrote latest.json for ${tag} to ${file}`)
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/enterprise",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
id = "opencode"
|
||||
name = "OpenCode"
|
||||
description = "The open source coding agent."
|
||||
version = "1.14.33"
|
||||
version = "1.14.35"
|
||||
schema_version = 1
|
||||
authors = ["Anomaly"]
|
||||
repository = "https://github.com/anomalyco/opencode"
|
||||
@@ -11,26 +11,26 @@ name = "OpenCode"
|
||||
icon = "./icons/opencode.svg"
|
||||
|
||||
[agent_servers.opencode.targets.darwin-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.33/opencode-darwin-arm64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.35/opencode-darwin-arm64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.darwin-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.33/opencode-darwin-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.35/opencode-darwin-x64.zip"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-aarch64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.33/opencode-linux-arm64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.35/opencode-linux-arm64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.linux-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.33/opencode-linux-x64.tar.gz"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.35/opencode-linux-x64.tar.gz"
|
||||
cmd = "./opencode"
|
||||
args = ["acp"]
|
||||
|
||||
[agent_servers.opencode.targets.windows-x86_64]
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.33/opencode-windows-x64.zip"
|
||||
archive = "https://github.com/anomalyco/opencode/releases/download/v1.14.35/opencode-windows-x64.zip"
|
||||
cmd = "./opencode.exe"
|
||||
args = ["acp"]
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@opencode-ai/function",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"private": true,
|
||||
"type": "module",
|
||||
|
||||
@@ -0,0 +1 @@
|
||||
ALTER TABLE `event_sequence` ADD `owner_id` text;
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"$schema": "https://json.schemastore.org/package.json",
|
||||
"version": "1.14.33",
|
||||
"version": "1.14.35",
|
||||
"name": "opencode",
|
||||
"type": "module",
|
||||
"license": "MIT",
|
||||
@@ -33,10 +33,10 @@
|
||||
"node": "./src/pty/pty.node.ts",
|
||||
"default": "./src/pty/pty.bun.ts"
|
||||
},
|
||||
"#hono": {
|
||||
"bun": "./src/server/adapter.bun.ts",
|
||||
"node": "./src/server/adapter.node.ts",
|
||||
"default": "./src/server/adapter.bun.ts"
|
||||
"#httpapi-server": {
|
||||
"bun": "./src/server/httpapi-server.node.ts",
|
||||
"node": "./src/server/httpapi-server.node.ts",
|
||||
"default": "./src/server/httpapi-server.node.ts"
|
||||
}
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -69,8 +69,7 @@
|
||||
"prettier": "3.6.2",
|
||||
"typescript": "catalog:",
|
||||
"vscode-languageserver-types": "3.17.5",
|
||||
"why-is-node-running": "3.2.2",
|
||||
"zod-to-json-schema": "3.24.5"
|
||||
"why-is-node-running": "3.2.2"
|
||||
},
|
||||
"dependencies": {
|
||||
"@actions/core": "1.11.1",
|
||||
@@ -101,10 +100,6 @@
|
||||
"@effect/opentelemetry": "catalog:",
|
||||
"@effect/platform-node": "catalog:",
|
||||
"@gitlab/opencode-gitlab-auth": "1.3.3",
|
||||
"@hono/node-server": "1.19.11",
|
||||
"@hono/node-ws": "1.3.0",
|
||||
"@hono/standard-validator": "0.1.5",
|
||||
"@hono/zod-validator": "catalog:",
|
||||
"@lydell/node-pty": "catalog:",
|
||||
"@modelcontextprotocol/sdk": "1.27.1",
|
||||
"@octokit/graphql": "9.0.2",
|
||||
@@ -144,8 +139,6 @@
|
||||
"glob": "13.0.5",
|
||||
"google-auth-library": "10.5.0",
|
||||
"gray-matter": "4.0.3",
|
||||
"hono": "catalog:",
|
||||
"hono-openapi": "catalog:",
|
||||
"ignore": "7.0.5",
|
||||
"immer": "11.1.4",
|
||||
"jsonc-parser": "3.3.1",
|
||||
@@ -171,8 +164,7 @@
|
||||
"which": "6.0.1",
|
||||
"xdg-basedir": "5.1.0",
|
||||
"yargs": "18.0.0",
|
||||
"zod": "catalog:",
|
||||
"zod-to-json-schema": "3.24.5"
|
||||
"zod": "catalog:"
|
||||
},
|
||||
"overrides": {
|
||||
"drizzle-orm": "catalog:"
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,4 @@
|
||||
import z from "zod"
|
||||
import { Schema } from "effect"
|
||||
import { zodObject } from "@/util/effect-zod"
|
||||
|
||||
export type Definition<Type extends string = string, Properties extends Schema.Top = Schema.Top> = {
|
||||
type: Type
|
||||
@@ -18,23 +16,6 @@ export function define<Type extends string, Properties extends Schema.Top>(
|
||||
return result
|
||||
}
|
||||
|
||||
export function payloads() {
|
||||
return registry
|
||||
.entries()
|
||||
.map(([type, def]) => {
|
||||
return z
|
||||
.object({
|
||||
id: z.string(),
|
||||
type: z.literal(type),
|
||||
properties: zodObject(def.properties),
|
||||
})
|
||||
.meta({
|
||||
ref: `Event.${def.type}`,
|
||||
})
|
||||
})
|
||||
.toArray()
|
||||
}
|
||||
|
||||
export function effectPayloads() {
|
||||
return registry
|
||||
.entries()
|
||||
|
||||
@@ -4,6 +4,7 @@ import { effectCmd } from "../effect-cmd"
|
||||
import { AgentSideConnection, ndJsonStream } from "@agentclientprotocol/sdk"
|
||||
import { ACP } from "@/acp/agent"
|
||||
import { Server } from "@/server/server"
|
||||
import { ServerAuth } from "@/server/auth"
|
||||
import { createOpencodeClient } from "@opencode-ai/sdk/v2"
|
||||
import { withNetworkOptions, resolveNetworkOptions } from "../network"
|
||||
|
||||
@@ -26,6 +27,7 @@ export const AcpCommand = effectCmd({
|
||||
|
||||
const sdk = createOpencodeClient({
|
||||
baseUrl: `http://${server.hostname}:${server.port}`,
|
||||
headers: ServerAuth.headers(),
|
||||
})
|
||||
|
||||
const input = new WritableStream<Uint8Array>({
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { cmd } from "./cmd"
|
||||
import * as prompts from "@clack/prompts"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import { UI } from "../ui"
|
||||
import { Global } from "@opencode-ai/core/global"
|
||||
import { Agent } from "../../agent/agent"
|
||||
@@ -9,8 +8,7 @@ import path from "path"
|
||||
import fs from "fs/promises"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import matter from "gray-matter"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { WithInstance } from "../../project/with-instance"
|
||||
import { InstanceRef } from "@/effect/instance-ref"
|
||||
import { EOL } from "os"
|
||||
import type { Argv } from "yargs"
|
||||
import { Effect } from "effect"
|
||||
@@ -35,7 +33,7 @@ const AVAILABLE_PERMISSIONS = [
|
||||
"skill",
|
||||
]
|
||||
|
||||
const AgentCreateCommand = cmd({
|
||||
const AgentCreateCommand = effectCmd({
|
||||
command: "create",
|
||||
describe: "create a new agent",
|
||||
builder: (yargs: Argv) =>
|
||||
@@ -63,176 +61,172 @@ const AgentCreateCommand = cmd({
|
||||
alias: ["m"],
|
||||
describe: "model to use in the format of provider/model",
|
||||
}),
|
||||
async handler(args) {
|
||||
await WithInstance.provide({
|
||||
directory: process.cwd(),
|
||||
async fn() {
|
||||
const cliPath = args.path
|
||||
const cliDescription = args.description
|
||||
const cliMode = args.mode as AgentMode | undefined
|
||||
const perms = args.permissions
|
||||
handler: Effect.fn("Cli.agent.create")(function* (args) {
|
||||
const maybeCtx = yield* InstanceRef
|
||||
if (!maybeCtx) return yield* Effect.die("InstanceRef not provided")
|
||||
const ctx = maybeCtx
|
||||
const agentSvc = yield* Agent.Service
|
||||
yield* Effect.promise(async () => {
|
||||
const cliPath = args.path
|
||||
const cliDescription = args.description
|
||||
const cliMode = args.mode as AgentMode | undefined
|
||||
const perms = args.permissions
|
||||
|
||||
const isFullyNonInteractive = cliPath && cliDescription && cliMode && perms !== undefined
|
||||
const isFullyNonInteractive = cliPath && cliDescription && cliMode && perms !== undefined
|
||||
|
||||
if (!isFullyNonInteractive) {
|
||||
UI.empty()
|
||||
prompts.intro("Create agent")
|
||||
}
|
||||
if (!isFullyNonInteractive) {
|
||||
UI.empty()
|
||||
prompts.intro("Create agent")
|
||||
}
|
||||
|
||||
const project = Instance.project
|
||||
const project = ctx.project
|
||||
|
||||
// Determine scope/path
|
||||
let targetPath: string
|
||||
if (cliPath) {
|
||||
targetPath = path.join(cliPath, "agent")
|
||||
} else {
|
||||
let scope: "global" | "project" = "global"
|
||||
if (project.vcs === "git") {
|
||||
const scopeResult = await prompts.select({
|
||||
message: "Location",
|
||||
options: [
|
||||
{
|
||||
label: "Current project",
|
||||
value: "project" as const,
|
||||
hint: Instance.worktree,
|
||||
},
|
||||
{
|
||||
label: "Global",
|
||||
value: "global" as const,
|
||||
hint: Global.Path.config,
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(scopeResult)) throw new UI.CancelledError()
|
||||
scope = scopeResult
|
||||
}
|
||||
targetPath = path.join(
|
||||
scope === "global" ? Global.Path.config : path.join(Instance.worktree, ".opencode"),
|
||||
"agent",
|
||||
)
|
||||
}
|
||||
|
||||
// Get description
|
||||
let description: string
|
||||
if (cliDescription) {
|
||||
description = cliDescription
|
||||
} else {
|
||||
const query = await prompts.text({
|
||||
message: "Description",
|
||||
placeholder: "What should this agent do?",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(query)) throw new UI.CancelledError()
|
||||
description = query
|
||||
}
|
||||
|
||||
// Generate agent
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Generating agent configuration...")
|
||||
const model = args.model ? Provider.parseModel(args.model) : undefined
|
||||
const generated = await AppRuntime.runPromise(
|
||||
Agent.Service.use((svc) => svc.generate({ description, model })),
|
||||
).catch((error) => {
|
||||
spinner.stop(`LLM failed to generate agent: ${error.message}`, 1)
|
||||
if (isFullyNonInteractive) process.exit(1)
|
||||
throw new UI.CancelledError()
|
||||
})
|
||||
spinner.stop(`Agent ${generated.identifier} generated`)
|
||||
|
||||
// Select permissions to allow
|
||||
let selected: string[]
|
||||
if (perms !== undefined) {
|
||||
selected = perms ? perms.split(",").map((t) => t.trim()) : AVAILABLE_PERMISSIONS
|
||||
} else {
|
||||
const result = await prompts.multiselect({
|
||||
message: "Select permissions to allow (Space to toggle)",
|
||||
options: AVAILABLE_PERMISSIONS.map((permission) => ({
|
||||
label: permission,
|
||||
value: permission,
|
||||
})),
|
||||
initialValues: AVAILABLE_PERMISSIONS,
|
||||
})
|
||||
if (prompts.isCancel(result)) throw new UI.CancelledError()
|
||||
selected = result
|
||||
}
|
||||
|
||||
// Get mode
|
||||
let mode: AgentMode
|
||||
if (cliMode) {
|
||||
mode = cliMode
|
||||
} else {
|
||||
const modeResult = await prompts.select({
|
||||
message: "Agent mode",
|
||||
// Determine scope/path
|
||||
let targetPath: string
|
||||
if (cliPath) {
|
||||
targetPath = path.join(cliPath, "agent")
|
||||
} else {
|
||||
let scope: "global" | "project" = "global"
|
||||
if (project.vcs === "git") {
|
||||
const scopeResult = await prompts.select({
|
||||
message: "Location",
|
||||
options: [
|
||||
{
|
||||
label: "All",
|
||||
value: "all" as const,
|
||||
hint: "Can function in both primary and subagent roles",
|
||||
label: "Current project",
|
||||
value: "project" as const,
|
||||
hint: ctx.worktree,
|
||||
},
|
||||
{
|
||||
label: "Primary",
|
||||
value: "primary" as const,
|
||||
hint: "Acts as a primary/main agent",
|
||||
},
|
||||
{
|
||||
label: "Subagent",
|
||||
value: "subagent" as const,
|
||||
hint: "Can be used as a subagent by other agents",
|
||||
label: "Global",
|
||||
value: "global" as const,
|
||||
hint: Global.Path.config,
|
||||
},
|
||||
],
|
||||
initialValue: "all" as const,
|
||||
})
|
||||
if (prompts.isCancel(modeResult)) throw new UI.CancelledError()
|
||||
mode = modeResult
|
||||
if (prompts.isCancel(scopeResult)) throw new UI.CancelledError()
|
||||
scope = scopeResult
|
||||
}
|
||||
targetPath = path.join(scope === "global" ? Global.Path.config : path.join(ctx.worktree, ".opencode"), "agent")
|
||||
}
|
||||
|
||||
// Build permissions config — deny anything not explicitly selected.
|
||||
const permissions: Record<string, "deny"> = {}
|
||||
for (const permission of AVAILABLE_PERMISSIONS) {
|
||||
if (!selected.includes(permission)) {
|
||||
permissions[permission] = "deny"
|
||||
}
|
||||
// Get description
|
||||
let description: string
|
||||
if (cliDescription) {
|
||||
description = cliDescription
|
||||
} else {
|
||||
const query = await prompts.text({
|
||||
message: "Description",
|
||||
placeholder: "What should this agent do?",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(query)) throw new UI.CancelledError()
|
||||
description = query
|
||||
}
|
||||
|
||||
// Generate agent
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Generating agent configuration...")
|
||||
const model = args.model ? Provider.parseModel(args.model) : undefined
|
||||
const generated = await Effect.runPromise(agentSvc.generate({ description, model })).catch((error) => {
|
||||
spinner.stop(`LLM failed to generate agent: ${error.message}`, 1)
|
||||
if (isFullyNonInteractive) process.exit(1)
|
||||
throw new UI.CancelledError()
|
||||
})
|
||||
spinner.stop(`Agent ${generated.identifier} generated`)
|
||||
|
||||
// Select permissions to allow
|
||||
let selected: string[]
|
||||
if (perms !== undefined) {
|
||||
selected = perms ? perms.split(",").map((t) => t.trim()) : AVAILABLE_PERMISSIONS
|
||||
} else {
|
||||
const result = await prompts.multiselect({
|
||||
message: "Select permissions to allow (Space to toggle)",
|
||||
options: AVAILABLE_PERMISSIONS.map((permission) => ({
|
||||
label: permission,
|
||||
value: permission,
|
||||
})),
|
||||
initialValues: AVAILABLE_PERMISSIONS,
|
||||
})
|
||||
if (prompts.isCancel(result)) throw new UI.CancelledError()
|
||||
selected = result
|
||||
}
|
||||
|
||||
// Get mode
|
||||
let mode: AgentMode
|
||||
if (cliMode) {
|
||||
mode = cliMode
|
||||
} else {
|
||||
const modeResult = await prompts.select({
|
||||
message: "Agent mode",
|
||||
options: [
|
||||
{
|
||||
label: "All",
|
||||
value: "all" as const,
|
||||
hint: "Can function in both primary and subagent roles",
|
||||
},
|
||||
{
|
||||
label: "Primary",
|
||||
value: "primary" as const,
|
||||
hint: "Acts as a primary/main agent",
|
||||
},
|
||||
{
|
||||
label: "Subagent",
|
||||
value: "subagent" as const,
|
||||
hint: "Can be used as a subagent by other agents",
|
||||
},
|
||||
],
|
||||
initialValue: "all" as const,
|
||||
})
|
||||
if (prompts.isCancel(modeResult)) throw new UI.CancelledError()
|
||||
mode = modeResult
|
||||
}
|
||||
|
||||
// Build permissions config — deny anything not explicitly selected.
|
||||
const permissions: Record<string, "deny"> = {}
|
||||
for (const permission of AVAILABLE_PERMISSIONS) {
|
||||
if (!selected.includes(permission)) {
|
||||
permissions[permission] = "deny"
|
||||
}
|
||||
}
|
||||
|
||||
// Build frontmatter
|
||||
const frontmatter: {
|
||||
description: string
|
||||
mode: AgentMode
|
||||
permission?: Record<string, "deny">
|
||||
} = {
|
||||
description: generated.whenToUse,
|
||||
mode,
|
||||
}
|
||||
if (Object.keys(permissions).length > 0) {
|
||||
frontmatter.permission = permissions
|
||||
}
|
||||
// Build frontmatter
|
||||
const frontmatter: {
|
||||
description: string
|
||||
mode: AgentMode
|
||||
permission?: Record<string, "deny">
|
||||
} = {
|
||||
description: generated.whenToUse,
|
||||
mode,
|
||||
}
|
||||
if (Object.keys(permissions).length > 0) {
|
||||
frontmatter.permission = permissions
|
||||
}
|
||||
|
||||
// Write file
|
||||
const content = matter.stringify(generated.systemPrompt, frontmatter)
|
||||
const filePath = path.join(targetPath, `${generated.identifier}.md`)
|
||||
// Write file
|
||||
const content = matter.stringify(generated.systemPrompt, frontmatter)
|
||||
const filePath = path.join(targetPath, `${generated.identifier}.md`)
|
||||
|
||||
await fs.mkdir(targetPath, { recursive: true })
|
||||
|
||||
if (await Filesystem.exists(filePath)) {
|
||||
if (isFullyNonInteractive) {
|
||||
console.error(`Error: Agent file already exists: ${filePath}`)
|
||||
process.exit(1)
|
||||
}
|
||||
prompts.log.error(`Agent file already exists: ${filePath}`)
|
||||
throw new UI.CancelledError()
|
||||
}
|
||||
|
||||
await Filesystem.write(filePath, content)
|
||||
await fs.mkdir(targetPath, { recursive: true })
|
||||
|
||||
if (await Filesystem.exists(filePath)) {
|
||||
if (isFullyNonInteractive) {
|
||||
console.log(filePath)
|
||||
} else {
|
||||
prompts.log.success(`Agent created: ${filePath}`)
|
||||
prompts.outro("Done")
|
||||
console.error(`Error: Agent file already exists: ${filePath}`)
|
||||
process.exit(1)
|
||||
}
|
||||
},
|
||||
prompts.log.error(`Agent file already exists: ${filePath}`)
|
||||
throw new UI.CancelledError()
|
||||
}
|
||||
|
||||
await Filesystem.write(filePath, content)
|
||||
|
||||
if (isFullyNonInteractive) {
|
||||
console.log(filePath)
|
||||
} else {
|
||||
prompts.log.success(`Agent created: ${filePath}`)
|
||||
prompts.outro("Done")
|
||||
}
|
||||
})
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
const AgentListCommand = effectCmd({
|
||||
|
||||
@@ -1,5 +1,10 @@
|
||||
import { Global } from "@opencode-ai/core/global"
|
||||
import { InstallationVersion } from "@opencode-ai/core/installation/version"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import os from "os"
|
||||
import { Duration, Effect } from "effect"
|
||||
import { Config } from "@/config/config"
|
||||
import { ConfigPlugin } from "@/config/plugin"
|
||||
import { effectCmd } from "../../effect-cmd"
|
||||
import { cmd } from "../cmd"
|
||||
import { ConfigCommand } from "./config"
|
||||
@@ -26,6 +31,7 @@ export const DebugCommand = cmd({
|
||||
.command(SnapshotCommand)
|
||||
.command(StartupCommand)
|
||||
.command(AgentCommand)
|
||||
.command(InfoCommand)
|
||||
.command(PathsCommand)
|
||||
.command(WaitCommand)
|
||||
.demandCommand(),
|
||||
@@ -40,6 +46,34 @@ const WaitCommand = effectCmd({
|
||||
}),
|
||||
})
|
||||
|
||||
const InfoCommand = effectCmd({
|
||||
command: "info",
|
||||
describe: "show debug information",
|
||||
handler: Effect.fn("Cli.debug.info")(function* () {
|
||||
const config = yield* Config.Service.use((cfg) => cfg.get())
|
||||
const termProgram = process.env.TERM_PROGRAM
|
||||
? `${process.env.TERM_PROGRAM}${process.env.TERM_PROGRAM_VERSION ? ` ${process.env.TERM_PROGRAM_VERSION}` : ""}`
|
||||
: undefined
|
||||
const terminal = [termProgram, process.env.TERM].filter((item): item is string => Boolean(item)).join(" / ")
|
||||
|
||||
console.log(`opencode version: ${InstallationVersion}`)
|
||||
console.log(`os: ${os.type()} ${os.release()} ${os.arch()}`)
|
||||
console.log(`terminal: ${terminal || "unknown"}`)
|
||||
console.log("plugins:")
|
||||
if (Flag.OPENCODE_PURE) {
|
||||
console.log("external plugins disabled (--pure)")
|
||||
return
|
||||
}
|
||||
if (!config.plugin_origins?.length) {
|
||||
console.log("none")
|
||||
return
|
||||
}
|
||||
for (const plugin of config.plugin_origins) {
|
||||
console.log(`- ${ConfigPlugin.pluginSpecifier(plugin.spec)}`)
|
||||
}
|
||||
}),
|
||||
})
|
||||
|
||||
const PathsCommand = cmd({
|
||||
command: "paths",
|
||||
describe: "show global paths (data, config, cache, state)",
|
||||
|
||||
@@ -1,22 +1,13 @@
|
||||
import { Server } from "../../server/server"
|
||||
import { PublicApi } from "../../server/routes/instance/httpapi/public"
|
||||
import type { CommandModule } from "yargs"
|
||||
import { OpenApi } from "effect/unstable/httpapi"
|
||||
|
||||
type Args = {
|
||||
httpapi: boolean
|
||||
}
|
||||
type Args = {}
|
||||
|
||||
export const GenerateCommand = {
|
||||
command: "generate",
|
||||
builder: (yargs) =>
|
||||
yargs.option("httpapi", {
|
||||
type: "boolean",
|
||||
default: false,
|
||||
description: "Generate OpenAPI from the experimental Effect HttpApi contract",
|
||||
}),
|
||||
handler: async (args) => {
|
||||
const specs = args.httpapi ? OpenApi.fromApi(PublicApi) : await Server.openapi()
|
||||
builder: (yargs) => yargs,
|
||||
handler: async () => {
|
||||
const specs = (await Server.openapi()) as { paths: Record<string, Record<string, any>> }
|
||||
for (const item of Object.values(specs.paths)) {
|
||||
for (const method of ["get", "post", "put", "delete", "patch"] as const) {
|
||||
const operation = item[method]
|
||||
|
||||
@@ -29,7 +29,6 @@ import { Provider } from "@/provider/provider"
|
||||
import { Bus } from "../../bus"
|
||||
import { MessageV2 } from "../../session/message-v2"
|
||||
import { SessionPrompt } from "@/session/prompt"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import { Git } from "@/git"
|
||||
import { setTimeout as sleep } from "node:timers/promises"
|
||||
import { Process } from "@/util/process"
|
||||
@@ -206,6 +205,8 @@ export const GithubInstallCommand = effectCmd({
|
||||
const maybeCtx = yield* InstanceRef
|
||||
if (!maybeCtx) return yield* Effect.die("InstanceRef not provided")
|
||||
const ctx = maybeCtx
|
||||
const modelsDev = yield* ModelsDev.Service
|
||||
const gitSvc = yield* Git.Service
|
||||
yield* Effect.promise(async () => {
|
||||
{
|
||||
UI.empty()
|
||||
@@ -213,7 +214,7 @@ export const GithubInstallCommand = effectCmd({
|
||||
const app = await getAppInfo()
|
||||
await installGitHubApp()
|
||||
|
||||
const providers = await AppRuntime.runPromise(ModelsDev.Service.use((s) => s.get())).then((p) => {
|
||||
const providers = await Effect.runPromise(modelsDev.get()).then((p) => {
|
||||
// TODO: add guide for copilot, for now just hide it
|
||||
delete p["github-copilot"]
|
||||
return p
|
||||
@@ -261,9 +262,9 @@ export const GithubInstallCommand = effectCmd({
|
||||
}
|
||||
|
||||
// Get repo info
|
||||
const info = await AppRuntime.runPromise(
|
||||
Git.Service.use((git) => git.run(["remote", "get-url", "origin"], { cwd: ctx.worktree })),
|
||||
).then((x) => x.text().trim())
|
||||
const info = await Effect.runPromise(gitSvc.run(["remote", "get-url", "origin"], { cwd: ctx.worktree })).then(
|
||||
(x) => x.text().trim(),
|
||||
)
|
||||
const parsed = parseGitHubRemote(info)
|
||||
if (!parsed) {
|
||||
prompts.log.error(`Could not find git repository. Please run this command from a git repository.`)
|
||||
@@ -440,6 +441,10 @@ export const GithubRunCommand = effectCmd({
|
||||
handler: Effect.fn("Cli.github.run")(function* (args) {
|
||||
const ctx = yield* InstanceRef
|
||||
if (!ctx) return yield* Effect.die("InstanceRef not provided")
|
||||
const gitSvc = yield* Git.Service
|
||||
const sessionSvc = yield* Session.Service
|
||||
const sessionShare = yield* SessionShare.Service
|
||||
const sessionPrompt = yield* SessionPrompt.Service
|
||||
yield* Effect.promise(async () => {
|
||||
const isMock = args.token || args.event
|
||||
|
||||
@@ -503,21 +508,20 @@ export const GithubRunCommand = effectCmd({
|
||||
: "issue"
|
||||
: undefined
|
||||
const gitText = async (args: string[]) => {
|
||||
const result = await AppRuntime.runPromise(Git.Service.use((git) => git.run(args, { cwd: ctx.worktree })))
|
||||
const result = await Effect.runPromise(gitSvc.run(args, { cwd: ctx.worktree }))
|
||||
if (result.exitCode !== 0) {
|
||||
throw new Process.RunFailedError(["git", ...args], result.exitCode, result.stdout, result.stderr)
|
||||
}
|
||||
return result.text().trim()
|
||||
}
|
||||
const gitRun = async (args: string[]) => {
|
||||
const result = await AppRuntime.runPromise(Git.Service.use((git) => git.run(args, { cwd: ctx.worktree })))
|
||||
const result = await Effect.runPromise(gitSvc.run(args, { cwd: ctx.worktree }))
|
||||
if (result.exitCode !== 0) {
|
||||
throw new Process.RunFailedError(["git", ...args], result.exitCode, result.stdout, result.stderr)
|
||||
}
|
||||
return result
|
||||
}
|
||||
const gitStatus = (args: string[]) =>
|
||||
AppRuntime.runPromise(Git.Service.use((git) => git.run(args, { cwd: ctx.worktree })))
|
||||
const gitStatus = (args: string[]) => Effect.runPromise(gitSvc.run(args, { cwd: ctx.worktree }))
|
||||
const commitChanges = async (summary: string, actor?: string) => {
|
||||
const args = ["commit", "-m", summary]
|
||||
if (actor) args.push("-m", `Co-authored-by: ${actor} <${actor}@users.noreply.github.com>`)
|
||||
@@ -554,24 +558,22 @@ export const GithubRunCommand = effectCmd({
|
||||
|
||||
// Setup opencode session
|
||||
const repoData = await fetchRepo()
|
||||
session = await AppRuntime.runPromise(
|
||||
Session.Service.use((svc) =>
|
||||
svc.create({
|
||||
permission: [
|
||||
{
|
||||
permission: "question",
|
||||
action: "deny",
|
||||
pattern: "*",
|
||||
},
|
||||
],
|
||||
}),
|
||||
),
|
||||
session = await Effect.runPromise(
|
||||
sessionSvc.create({
|
||||
permission: [
|
||||
{
|
||||
permission: "question",
|
||||
action: "deny",
|
||||
pattern: "*",
|
||||
},
|
||||
],
|
||||
}),
|
||||
)
|
||||
subscribeSessionEvents()
|
||||
shareId = await (async () => {
|
||||
if (share === false) return
|
||||
if (!share && repoData.data.private) return
|
||||
await AppRuntime.runPromise(SessionShare.Service.use((svc) => svc.share(session.id)))
|
||||
await Effect.runPromise(sessionShare.share(session.id))
|
||||
return session.id.slice(-8)
|
||||
})()
|
||||
console.log("opencode session", session.id)
|
||||
@@ -944,9 +946,9 @@ export const GithubRunCommand = effectCmd({
|
||||
async function chat(message: string, files: PromptFiles = []) {
|
||||
console.log("Sending message to opencode...")
|
||||
|
||||
return AppRuntime.runPromise(
|
||||
return Effect.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const prompt = yield* SessionPrompt.Service
|
||||
const prompt = sessionPrompt
|
||||
const result = yield* prompt.prompt({
|
||||
sessionID: session.id,
|
||||
messageID: MessageID.ascending(),
|
||||
|
||||
@@ -11,8 +11,7 @@ import { McpAuth } from "../../mcp/auth"
|
||||
import { McpOAuthProvider } from "../../mcp/oauth-provider"
|
||||
import { Config } from "@/config/config"
|
||||
import { ConfigMCP } from "../../config/mcp"
|
||||
import { Instance } from "../../project/instance"
|
||||
import { WithInstance } from "../../project/with-instance"
|
||||
import { InstanceRef } from "@/effect/instance-ref"
|
||||
import { Installation } from "../../installation"
|
||||
import { InstallationVersion } from "@opencode-ai/core/installation/version"
|
||||
import path from "path"
|
||||
@@ -20,7 +19,6 @@ import { Global } from "@opencode-ai/core/global"
|
||||
import { modify, applyEdits } from "jsonc-parser"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { Bus } from "../../bus"
|
||||
import { AppRuntime } from "../../effect/app-runtime"
|
||||
import { Effect } from "effect"
|
||||
|
||||
function getAuthStatusIcon(status: MCP.AuthStatus): string {
|
||||
@@ -433,171 +431,171 @@ async function addMcpToConfig(name: string, mcpConfig: ConfigMCP.Info, configPat
|
||||
return configPath
|
||||
}
|
||||
|
||||
export const McpAddCommand = cmd({
|
||||
export const McpAddCommand = effectCmd({
|
||||
command: "add",
|
||||
describe: "add an MCP server",
|
||||
async handler() {
|
||||
await WithInstance.provide({
|
||||
directory: process.cwd(),
|
||||
async fn() {
|
||||
UI.empty()
|
||||
prompts.intro("Add MCP server")
|
||||
handler: Effect.fn("Cli.mcp.add")(function* () {
|
||||
const maybeCtx = yield* InstanceRef
|
||||
if (!maybeCtx) return yield* Effect.die("InstanceRef not provided")
|
||||
const ctx = maybeCtx
|
||||
yield* Effect.promise(async () => {
|
||||
UI.empty()
|
||||
prompts.intro("Add MCP server")
|
||||
|
||||
const project = Instance.project
|
||||
const project = ctx.project
|
||||
|
||||
// Resolve config paths eagerly for hints
|
||||
const [projectConfigPath, globalConfigPath] = await Promise.all([
|
||||
resolveConfigPath(Instance.worktree),
|
||||
resolveConfigPath(Global.Path.config, true),
|
||||
])
|
||||
// Resolve config paths eagerly for hints
|
||||
const [projectConfigPath, globalConfigPath] = await Promise.all([
|
||||
resolveConfigPath(ctx.worktree),
|
||||
resolveConfigPath(Global.Path.config, true),
|
||||
])
|
||||
|
||||
// Determine scope
|
||||
let configPath = globalConfigPath
|
||||
if (project.vcs === "git") {
|
||||
const scopeResult = await prompts.select({
|
||||
message: "Location",
|
||||
options: [
|
||||
{
|
||||
label: "Current project",
|
||||
value: projectConfigPath,
|
||||
hint: projectConfigPath,
|
||||
},
|
||||
{
|
||||
label: "Global",
|
||||
value: globalConfigPath,
|
||||
hint: globalConfigPath,
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(scopeResult)) throw new UI.CancelledError()
|
||||
configPath = scopeResult
|
||||
}
|
||||
|
||||
const name = await prompts.text({
|
||||
message: "Enter MCP server name",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(name)) throw new UI.CancelledError()
|
||||
|
||||
const type = await prompts.select({
|
||||
message: "Select MCP server type",
|
||||
// Determine scope
|
||||
let configPath = globalConfigPath
|
||||
if (project.vcs === "git") {
|
||||
const scopeResult = await prompts.select({
|
||||
message: "Location",
|
||||
options: [
|
||||
{
|
||||
label: "Local",
|
||||
value: "local",
|
||||
hint: "Run a local command",
|
||||
label: "Current project",
|
||||
value: projectConfigPath,
|
||||
hint: projectConfigPath,
|
||||
},
|
||||
{
|
||||
label: "Remote",
|
||||
value: "remote",
|
||||
hint: "Connect to a remote URL",
|
||||
label: "Global",
|
||||
value: globalConfigPath,
|
||||
hint: globalConfigPath,
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(type)) throw new UI.CancelledError()
|
||||
if (prompts.isCancel(scopeResult)) throw new UI.CancelledError()
|
||||
configPath = scopeResult
|
||||
}
|
||||
|
||||
if (type === "local") {
|
||||
const command = await prompts.text({
|
||||
message: "Enter command to run",
|
||||
placeholder: "e.g., opencode x @modelcontextprotocol/server-filesystem",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(command)) throw new UI.CancelledError()
|
||||
const name = await prompts.text({
|
||||
message: "Enter MCP server name",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(name)) throw new UI.CancelledError()
|
||||
|
||||
const mcpConfig: ConfigMCP.Info = {
|
||||
type: "local",
|
||||
command: command.split(" "),
|
||||
}
|
||||
const type = await prompts.select({
|
||||
message: "Select MCP server type",
|
||||
options: [
|
||||
{
|
||||
label: "Local",
|
||||
value: "local",
|
||||
hint: "Run a local command",
|
||||
},
|
||||
{
|
||||
label: "Remote",
|
||||
value: "remote",
|
||||
hint: "Connect to a remote URL",
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(type)) throw new UI.CancelledError()
|
||||
|
||||
await addMcpToConfig(name, mcpConfig, configPath)
|
||||
prompts.log.success(`MCP server "${name}" added to ${configPath}`)
|
||||
prompts.outro("MCP server added successfully")
|
||||
return
|
||||
if (type === "local") {
|
||||
const command = await prompts.text({
|
||||
message: "Enter command to run",
|
||||
placeholder: "e.g., opencode x @modelcontextprotocol/server-filesystem",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(command)) throw new UI.CancelledError()
|
||||
|
||||
const mcpConfig: ConfigMCP.Info = {
|
||||
type: "local",
|
||||
command: command.split(" "),
|
||||
}
|
||||
|
||||
if (type === "remote") {
|
||||
const url = await prompts.text({
|
||||
message: "Enter MCP server URL",
|
||||
placeholder: "e.g., https://example.com/mcp",
|
||||
validate: (x) => {
|
||||
if (!x) return "Required"
|
||||
if (x.length === 0) return "Required"
|
||||
const isValid = URL.canParse(x)
|
||||
return isValid ? undefined : "Invalid URL"
|
||||
},
|
||||
})
|
||||
if (prompts.isCancel(url)) throw new UI.CancelledError()
|
||||
await addMcpToConfig(name, mcpConfig, configPath)
|
||||
prompts.log.success(`MCP server "${name}" added to ${configPath}`)
|
||||
prompts.outro("MCP server added successfully")
|
||||
return
|
||||
}
|
||||
|
||||
const useOAuth = await prompts.confirm({
|
||||
message: "Does this server require OAuth authentication?",
|
||||
if (type === "remote") {
|
||||
const url = await prompts.text({
|
||||
message: "Enter MCP server URL",
|
||||
placeholder: "e.g., https://example.com/mcp",
|
||||
validate: (x) => {
|
||||
if (!x) return "Required"
|
||||
if (x.length === 0) return "Required"
|
||||
const isValid = URL.canParse(x)
|
||||
return isValid ? undefined : "Invalid URL"
|
||||
},
|
||||
})
|
||||
if (prompts.isCancel(url)) throw new UI.CancelledError()
|
||||
|
||||
const useOAuth = await prompts.confirm({
|
||||
message: "Does this server require OAuth authentication?",
|
||||
initialValue: false,
|
||||
})
|
||||
if (prompts.isCancel(useOAuth)) throw new UI.CancelledError()
|
||||
|
||||
let mcpConfig: ConfigMCP.Info
|
||||
|
||||
if (useOAuth) {
|
||||
const hasClientId = await prompts.confirm({
|
||||
message: "Do you have a pre-registered client ID?",
|
||||
initialValue: false,
|
||||
})
|
||||
if (prompts.isCancel(useOAuth)) throw new UI.CancelledError()
|
||||
if (prompts.isCancel(hasClientId)) throw new UI.CancelledError()
|
||||
|
||||
let mcpConfig: ConfigMCP.Info
|
||||
if (hasClientId) {
|
||||
const clientId = await prompts.text({
|
||||
message: "Enter client ID",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(clientId)) throw new UI.CancelledError()
|
||||
|
||||
if (useOAuth) {
|
||||
const hasClientId = await prompts.confirm({
|
||||
message: "Do you have a pre-registered client ID?",
|
||||
const hasSecret = await prompts.confirm({
|
||||
message: "Do you have a client secret?",
|
||||
initialValue: false,
|
||||
})
|
||||
if (prompts.isCancel(hasClientId)) throw new UI.CancelledError()
|
||||
if (prompts.isCancel(hasSecret)) throw new UI.CancelledError()
|
||||
|
||||
if (hasClientId) {
|
||||
const clientId = await prompts.text({
|
||||
message: "Enter client ID",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
let clientSecret: string | undefined
|
||||
if (hasSecret) {
|
||||
const secret = await prompts.password({
|
||||
message: "Enter client secret",
|
||||
})
|
||||
if (prompts.isCancel(clientId)) throw new UI.CancelledError()
|
||||
if (prompts.isCancel(secret)) throw new UI.CancelledError()
|
||||
clientSecret = secret
|
||||
}
|
||||
|
||||
const hasSecret = await prompts.confirm({
|
||||
message: "Do you have a client secret?",
|
||||
initialValue: false,
|
||||
})
|
||||
if (prompts.isCancel(hasSecret)) throw new UI.CancelledError()
|
||||
|
||||
let clientSecret: string | undefined
|
||||
if (hasSecret) {
|
||||
const secret = await prompts.password({
|
||||
message: "Enter client secret",
|
||||
})
|
||||
if (prompts.isCancel(secret)) throw new UI.CancelledError()
|
||||
clientSecret = secret
|
||||
}
|
||||
|
||||
mcpConfig = {
|
||||
type: "remote",
|
||||
url,
|
||||
oauth: {
|
||||
clientId,
|
||||
...(clientSecret && { clientSecret }),
|
||||
},
|
||||
}
|
||||
} else {
|
||||
mcpConfig = {
|
||||
type: "remote",
|
||||
url,
|
||||
oauth: {},
|
||||
}
|
||||
mcpConfig = {
|
||||
type: "remote",
|
||||
url,
|
||||
oauth: {
|
||||
clientId,
|
||||
...(clientSecret && { clientSecret }),
|
||||
},
|
||||
}
|
||||
} else {
|
||||
mcpConfig = {
|
||||
type: "remote",
|
||||
url,
|
||||
oauth: {},
|
||||
}
|
||||
}
|
||||
|
||||
await addMcpToConfig(name, mcpConfig, configPath)
|
||||
prompts.log.success(`MCP server "${name}" added to ${configPath}`)
|
||||
} else {
|
||||
mcpConfig = {
|
||||
type: "remote",
|
||||
url,
|
||||
}
|
||||
}
|
||||
|
||||
prompts.outro("MCP server added successfully")
|
||||
},
|
||||
await addMcpToConfig(name, mcpConfig, configPath)
|
||||
prompts.log.success(`MCP server "${name}" added to ${configPath}`)
|
||||
}
|
||||
|
||||
prompts.outro("MCP server added successfully")
|
||||
})
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
export const McpDebugCommand = cmd({
|
||||
export const McpDebugCommand = effectCmd({
|
||||
command: "debug <name>",
|
||||
describe: "debug OAuth connection for an MCP server",
|
||||
builder: (yargs) =>
|
||||
@@ -606,182 +604,172 @@ export const McpDebugCommand = cmd({
|
||||
type: "string",
|
||||
demandOption: true,
|
||||
}),
|
||||
async handler(args) {
|
||||
await WithInstance.provide({
|
||||
directory: process.cwd(),
|
||||
async fn() {
|
||||
UI.empty()
|
||||
prompts.intro("MCP OAuth Debug")
|
||||
handler: Effect.fn("Cli.mcp.debug")(function* (args) {
|
||||
const config = yield* Config.Service.use((cfg) => cfg.get())
|
||||
const mcp = yield* MCP.Service
|
||||
const auth = yield* McpAuth.Service
|
||||
yield* Effect.promise(async () => {
|
||||
UI.empty()
|
||||
prompts.intro("MCP OAuth Debug")
|
||||
|
||||
const config = await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.get()))
|
||||
const mcpServers = config.mcp ?? {}
|
||||
const serverName = args.name
|
||||
const mcpServers = config.mcp ?? {}
|
||||
const serverName = args.name
|
||||
|
||||
const serverConfig = mcpServers[serverName]
|
||||
if (!serverConfig) {
|
||||
prompts.log.error(`MCP server not found: ${serverName}`)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
const serverConfig = mcpServers[serverName]
|
||||
if (!serverConfig) {
|
||||
prompts.log.error(`MCP server not found: ${serverName}`)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (!isMcpRemote(serverConfig)) {
|
||||
prompts.log.error(`MCP server ${serverName} is not a remote server`)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
if (serverConfig.oauth === false) {
|
||||
prompts.log.warn(`MCP server ${serverName} has OAuth explicitly disabled`)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
|
||||
prompts.log.info(`Server: ${serverName}`)
|
||||
prompts.log.info(`URL: ${serverConfig.url}`)
|
||||
|
||||
// Check stored auth status — services already in hand, run inline.
|
||||
const { authStatus, entry } = await Effect.runPromise(
|
||||
Effect.all({
|
||||
authStatus: mcp.getAuthStatus(serverName),
|
||||
entry: auth.get(serverName),
|
||||
}),
|
||||
)
|
||||
prompts.log.info(`Auth status: ${getAuthStatusIcon(authStatus)} ${getAuthStatusText(authStatus)}`)
|
||||
|
||||
if (entry?.tokens) {
|
||||
prompts.log.info(` Access token: ${entry.tokens.accessToken.substring(0, 20)}...`)
|
||||
if (entry.tokens.expiresAt) {
|
||||
const expiresDate = new Date(entry.tokens.expiresAt * 1000)
|
||||
const isExpired = entry.tokens.expiresAt < Date.now() / 1000
|
||||
prompts.log.info(` Expires: ${expiresDate.toISOString()} ${isExpired ? "(EXPIRED)" : ""}`)
|
||||
}
|
||||
|
||||
if (!isMcpRemote(serverConfig)) {
|
||||
prompts.log.error(`MCP server ${serverName} is not a remote server`)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
if (entry.tokens.refreshToken) {
|
||||
prompts.log.info(` Refresh token: present`)
|
||||
}
|
||||
|
||||
if (serverConfig.oauth === false) {
|
||||
prompts.log.warn(`MCP server ${serverName} has OAuth explicitly disabled`)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
if (entry?.clientInfo) {
|
||||
prompts.log.info(` Client ID: ${entry.clientInfo.clientId}`)
|
||||
if (entry.clientInfo.clientSecretExpiresAt) {
|
||||
const expiresDate = new Date(entry.clientInfo.clientSecretExpiresAt * 1000)
|
||||
prompts.log.info(` Client secret expires: ${expiresDate.toISOString()}`)
|
||||
}
|
||||
}
|
||||
|
||||
prompts.log.info(`Server: ${serverName}`)
|
||||
prompts.log.info(`URL: ${serverConfig.url}`)
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Testing connection...")
|
||||
|
||||
// Check stored auth status
|
||||
const { authStatus, entry } = await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const mcp = yield* MCP.Service
|
||||
const auth = yield* McpAuth.Service
|
||||
return {
|
||||
authStatus: yield* mcp.getAuthStatus(serverName),
|
||||
entry: yield* auth.get(serverName),
|
||||
}
|
||||
}),
|
||||
)
|
||||
prompts.log.info(`Auth status: ${getAuthStatusIcon(authStatus)} ${getAuthStatusText(authStatus)}`)
|
||||
|
||||
if (entry?.tokens) {
|
||||
prompts.log.info(` Access token: ${entry.tokens.accessToken.substring(0, 20)}...`)
|
||||
if (entry.tokens.expiresAt) {
|
||||
const expiresDate = new Date(entry.tokens.expiresAt * 1000)
|
||||
const isExpired = entry.tokens.expiresAt < Date.now() / 1000
|
||||
prompts.log.info(` Expires: ${expiresDate.toISOString()} ${isExpired ? "(EXPIRED)" : ""}`)
|
||||
}
|
||||
if (entry.tokens.refreshToken) {
|
||||
prompts.log.info(` Refresh token: present`)
|
||||
}
|
||||
}
|
||||
if (entry?.clientInfo) {
|
||||
prompts.log.info(` Client ID: ${entry.clientInfo.clientId}`)
|
||||
if (entry.clientInfo.clientSecretExpiresAt) {
|
||||
const expiresDate = new Date(entry.clientInfo.clientSecretExpiresAt * 1000)
|
||||
prompts.log.info(` Client secret expires: ${expiresDate.toISOString()}`)
|
||||
}
|
||||
}
|
||||
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Testing connection...")
|
||||
|
||||
// Test basic HTTP connectivity first
|
||||
try {
|
||||
const response = await fetch(serverConfig.url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json, text/event-stream",
|
||||
// Test basic HTTP connectivity first
|
||||
try {
|
||||
const response = await fetch(serverConfig.url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
"Content-Type": "application/json",
|
||||
Accept: "application/json, text/event-stream",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
jsonrpc: "2.0",
|
||||
method: "initialize",
|
||||
params: {
|
||||
protocolVersion: "2024-11-05",
|
||||
capabilities: {},
|
||||
clientInfo: { name: "opencode-debug", version: InstallationVersion },
|
||||
},
|
||||
body: JSON.stringify({
|
||||
jsonrpc: "2.0",
|
||||
method: "initialize",
|
||||
params: {
|
||||
protocolVersion: "2024-11-05",
|
||||
capabilities: {},
|
||||
clientInfo: { name: "opencode-debug", version: InstallationVersion },
|
||||
},
|
||||
id: 1,
|
||||
}),
|
||||
id: 1,
|
||||
}),
|
||||
})
|
||||
|
||||
spinner.stop(`HTTP response: ${response.status} ${response.statusText}`)
|
||||
|
||||
// Check for WWW-Authenticate header
|
||||
const wwwAuth = response.headers.get("www-authenticate")
|
||||
if (wwwAuth) {
|
||||
prompts.log.info(`WWW-Authenticate: ${wwwAuth}`)
|
||||
}
|
||||
|
||||
if (response.status === 401) {
|
||||
prompts.log.warn("Server returned 401 Unauthorized")
|
||||
|
||||
// Try to discover OAuth metadata
|
||||
const oauthConfig = typeof serverConfig.oauth === "object" ? serverConfig.oauth : undefined
|
||||
const authProvider = new McpOAuthProvider(
|
||||
serverName,
|
||||
serverConfig.url,
|
||||
{
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async () => {},
|
||||
},
|
||||
auth,
|
||||
)
|
||||
|
||||
prompts.log.info("Testing OAuth flow (without completing authorization)...")
|
||||
|
||||
// Try creating transport with auth provider to trigger discovery
|
||||
const transport = new StreamableHTTPClientTransport(new URL(serverConfig.url), {
|
||||
authProvider,
|
||||
})
|
||||
|
||||
spinner.stop(`HTTP response: ${response.status} ${response.statusText}`)
|
||||
|
||||
// Check for WWW-Authenticate header
|
||||
const wwwAuth = response.headers.get("www-authenticate")
|
||||
if (wwwAuth) {
|
||||
prompts.log.info(`WWW-Authenticate: ${wwwAuth}`)
|
||||
}
|
||||
|
||||
if (response.status === 401) {
|
||||
prompts.log.warn("Server returned 401 Unauthorized")
|
||||
|
||||
// Try to discover OAuth metadata
|
||||
const oauthConfig = typeof serverConfig.oauth === "object" ? serverConfig.oauth : undefined
|
||||
const auth = await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
return yield* McpAuth.Service
|
||||
}),
|
||||
)
|
||||
const authProvider = new McpOAuthProvider(
|
||||
serverName,
|
||||
serverConfig.url,
|
||||
{
|
||||
clientId: oauthConfig?.clientId,
|
||||
clientSecret: oauthConfig?.clientSecret,
|
||||
scope: oauthConfig?.scope,
|
||||
redirectUri: oauthConfig?.redirectUri,
|
||||
},
|
||||
{
|
||||
onRedirect: async () => {},
|
||||
},
|
||||
auth,
|
||||
)
|
||||
|
||||
prompts.log.info("Testing OAuth flow (without completing authorization)...")
|
||||
|
||||
// Try creating transport with auth provider to trigger discovery
|
||||
const transport = new StreamableHTTPClientTransport(new URL(serverConfig.url), {
|
||||
authProvider,
|
||||
try {
|
||||
const client = new Client({
|
||||
name: "opencode-debug",
|
||||
version: InstallationVersion,
|
||||
})
|
||||
await client.connect(transport)
|
||||
prompts.log.success("Connection successful (already authenticated)")
|
||||
await client.close()
|
||||
} catch (error) {
|
||||
if (error instanceof UnauthorizedError) {
|
||||
prompts.log.info(`OAuth flow triggered: ${error.message}`)
|
||||
|
||||
try {
|
||||
const client = new Client({
|
||||
name: "opencode-debug",
|
||||
version: InstallationVersion,
|
||||
})
|
||||
await client.connect(transport)
|
||||
prompts.log.success("Connection successful (already authenticated)")
|
||||
await client.close()
|
||||
} catch (error) {
|
||||
if (error instanceof UnauthorizedError) {
|
||||
prompts.log.info(`OAuth flow triggered: ${error.message}`)
|
||||
|
||||
// Check if dynamic registration would be attempted
|
||||
const clientInfo = await authProvider.clientInformation()
|
||||
if (clientInfo) {
|
||||
prompts.log.info(`Client ID available: ${clientInfo.client_id}`)
|
||||
} else {
|
||||
prompts.log.info("No client ID - dynamic registration will be attempted")
|
||||
}
|
||||
// Check if dynamic registration would be attempted
|
||||
const clientInfo = await authProvider.clientInformation()
|
||||
if (clientInfo) {
|
||||
prompts.log.info(`Client ID available: ${clientInfo.client_id}`)
|
||||
} else {
|
||||
prompts.log.error(`Connection error: ${error instanceof Error ? error.message : String(error)}`)
|
||||
prompts.log.info("No client ID - dynamic registration will be attempted")
|
||||
}
|
||||
}
|
||||
} else if (response.status >= 200 && response.status < 300) {
|
||||
prompts.log.success("Server responded successfully (no auth required or already authenticated)")
|
||||
const body = await response.text()
|
||||
try {
|
||||
const json = JSON.parse(body)
|
||||
if (json.result?.serverInfo) {
|
||||
prompts.log.info(`Server info: ${JSON.stringify(json.result.serverInfo)}`)
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, ignore
|
||||
}
|
||||
} else {
|
||||
prompts.log.warn(`Unexpected status: ${response.status}`)
|
||||
const body = await response.text().catch(() => "")
|
||||
if (body) {
|
||||
prompts.log.info(`Response body: ${body.substring(0, 500)}`)
|
||||
} else {
|
||||
prompts.log.error(`Connection error: ${error instanceof Error ? error.message : String(error)}`)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
spinner.stop("Connection failed", 1)
|
||||
prompts.log.error(`Error: ${error instanceof Error ? error.message : String(error)}`)
|
||||
} else if (response.status >= 200 && response.status < 300) {
|
||||
prompts.log.success("Server responded successfully (no auth required or already authenticated)")
|
||||
const body = await response.text()
|
||||
try {
|
||||
const json = JSON.parse(body)
|
||||
if (json.result?.serverInfo) {
|
||||
prompts.log.info(`Server info: ${JSON.stringify(json.result.serverInfo)}`)
|
||||
}
|
||||
} catch {
|
||||
// Not JSON, ignore
|
||||
}
|
||||
} else {
|
||||
prompts.log.warn(`Unexpected status: ${response.status}`)
|
||||
const body = await response.text().catch(() => "")
|
||||
if (body) {
|
||||
prompts.log.info(`Response body: ${body.substring(0, 500)}`)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
spinner.stop("Connection failed", 1)
|
||||
prompts.log.error(`Error: ${error instanceof Error ? error.message : String(error)}`)
|
||||
}
|
||||
|
||||
prompts.outro("Debug complete")
|
||||
},
|
||||
prompts.outro("Debug complete")
|
||||
})
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -1,59 +1,69 @@
|
||||
import { Auth } from "../../auth"
|
||||
import { AppRuntime } from "../../effect/app-runtime"
|
||||
import { cmd } from "./cmd"
|
||||
import * as prompts from "@clack/prompts"
|
||||
import { CliError, effectCmd, fail } from "../effect-cmd"
|
||||
import { UI } from "../ui"
|
||||
import * as Prompt from "../effect/prompt"
|
||||
import { ModelsDev } from "@/provider/models"
|
||||
|
||||
const getModels = () => AppRuntime.runPromise(ModelsDev.Service.use((s) => s.get()))
|
||||
const refreshModels = () => AppRuntime.runPromise(ModelsDev.Service.use((s) => s.refresh(true)))
|
||||
import { map, pipe, sortBy, values } from "remeda"
|
||||
import path from "path"
|
||||
import os from "os"
|
||||
import { Config } from "@/config/config"
|
||||
import { Global } from "@opencode-ai/core/global"
|
||||
import { Plugin } from "../../plugin"
|
||||
import { WithInstance } from "../../project/with-instance"
|
||||
import type { Hooks } from "@opencode-ai/plugin"
|
||||
import { Process } from "@/util/process"
|
||||
import { errorMessage } from "@/util/error"
|
||||
import { text } from "node:stream/consumers"
|
||||
import { Effect } from "effect"
|
||||
import { Effect, Option } from "effect"
|
||||
|
||||
type PluginAuth = NonNullable<Hooks["auth"]>
|
||||
|
||||
const put = (key: string, info: Auth.Info) =>
|
||||
AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
yield* auth.set(key, info)
|
||||
}),
|
||||
)
|
||||
const promptValue = <Value>(value: Option.Option<Value>) => {
|
||||
if (Option.isNone(value)) return Effect.die(new UI.CancelledError())
|
||||
return Effect.succeed(value.value)
|
||||
}
|
||||
|
||||
async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string, methodName?: string): Promise<boolean> {
|
||||
let index = 0
|
||||
if (methodName) {
|
||||
const put = Effect.fn("Cli.providers.put")(function* (key: string, info: Auth.Info) {
|
||||
const auth = yield* Auth.Service
|
||||
yield* Effect.orDie(auth.set(key, info))
|
||||
})
|
||||
|
||||
const cliTry = <Value>(message: string, fn: () => PromiseLike<Value>) =>
|
||||
Effect.tryPromise({
|
||||
try: fn,
|
||||
catch: (error) => new CliError({ message: message + errorMessage(error) }),
|
||||
})
|
||||
|
||||
const handlePluginAuth = Effect.fn("Cli.providers.pluginAuth")(function* (
|
||||
plugin: { auth: PluginAuth },
|
||||
provider: string,
|
||||
methodName?: string,
|
||||
) {
|
||||
const index = yield* Effect.gen(function* () {
|
||||
if (!methodName) {
|
||||
if (plugin.auth.methods.length <= 1) return 0
|
||||
return yield* promptValue(
|
||||
yield* Prompt.select({
|
||||
message: "Login method",
|
||||
options: plugin.auth.methods.map((x, index) => ({
|
||||
label: x.label,
|
||||
value: index,
|
||||
})),
|
||||
}),
|
||||
)
|
||||
}
|
||||
const match = plugin.auth.methods.findIndex((x) => x.label.toLowerCase() === methodName.toLowerCase())
|
||||
if (match === -1) {
|
||||
prompts.log.error(
|
||||
return yield* fail(
|
||||
`Unknown method "${methodName}" for ${provider}. Available: ${plugin.auth.methods.map((x) => x.label).join(", ")}`,
|
||||
)
|
||||
process.exit(1)
|
||||
}
|
||||
index = match
|
||||
} else if (plugin.auth.methods.length > 1) {
|
||||
const method = await prompts.select({
|
||||
message: "Login method",
|
||||
options: plugin.auth.methods.map((x, index) => ({
|
||||
label: x.label,
|
||||
value: index.toString(),
|
||||
})),
|
||||
})
|
||||
if (prompts.isCancel(method)) throw new UI.CancelledError()
|
||||
index = parseInt(method)
|
||||
}
|
||||
return match
|
||||
})
|
||||
const method = plugin.auth.methods[index]
|
||||
|
||||
await new Promise((r) => setTimeout(r, 10))
|
||||
yield* Effect.sleep("10 millis")
|
||||
const inputs: Record<string, string> = {}
|
||||
if (method.prompts) {
|
||||
for (const prompt of method.prompts) {
|
||||
@@ -65,46 +75,44 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string,
|
||||
}
|
||||
if (prompt.condition && !prompt.condition(inputs)) continue
|
||||
if (prompt.type === "select") {
|
||||
const value = await prompts.select({
|
||||
const value = yield* Prompt.select({
|
||||
message: prompt.message,
|
||||
options: prompt.options,
|
||||
})
|
||||
if (prompts.isCancel(value)) throw new UI.CancelledError()
|
||||
inputs[prompt.key] = value
|
||||
} else {
|
||||
const value = await prompts.text({
|
||||
message: prompt.message,
|
||||
placeholder: prompt.placeholder,
|
||||
validate: prompt.validate ? (v) => prompt.validate!(v ?? "") : undefined,
|
||||
})
|
||||
if (prompts.isCancel(value)) throw new UI.CancelledError()
|
||||
inputs[prompt.key] = value
|
||||
inputs[prompt.key] = yield* promptValue(value)
|
||||
continue
|
||||
}
|
||||
const value = yield* Prompt.text({
|
||||
message: prompt.message,
|
||||
placeholder: prompt.placeholder,
|
||||
validate: prompt.validate ? (v) => prompt.validate!(v ?? "") : undefined,
|
||||
})
|
||||
inputs[prompt.key] = yield* promptValue(value)
|
||||
}
|
||||
}
|
||||
|
||||
if (method.type === "oauth") {
|
||||
const authorize = await method.authorize(inputs)
|
||||
const authorize = yield* cliTry("Failed to authorize: ", () => method.authorize(inputs))
|
||||
|
||||
if (authorize.url) {
|
||||
prompts.log.info("Go to: " + authorize.url)
|
||||
yield* Prompt.log.info("Go to: " + authorize.url)
|
||||
}
|
||||
|
||||
if (authorize.method === "auto") {
|
||||
if (authorize.instructions) {
|
||||
prompts.log.info(authorize.instructions)
|
||||
yield* Prompt.log.info(authorize.instructions)
|
||||
}
|
||||
const spinner = prompts.spinner()
|
||||
spinner.start("Waiting for authorization...")
|
||||
const result = await authorize.callback()
|
||||
const spinner = Prompt.spinner()
|
||||
yield* spinner.start("Waiting for authorization...")
|
||||
const result = yield* cliTry("Failed to authorize: ", () => authorize.callback())
|
||||
if (result.type === "failed") {
|
||||
spinner.stop("Failed to authorize", 1)
|
||||
yield* spinner.stop("Failed to authorize", 1)
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
if ("refresh" in result) {
|
||||
const { type: _, provider: __, refresh, access, expires, ...extraFields } = result
|
||||
await put(saveProvider, {
|
||||
yield* put(saveProvider, {
|
||||
type: "oauth",
|
||||
refresh,
|
||||
access,
|
||||
@@ -113,30 +121,30 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string,
|
||||
})
|
||||
}
|
||||
if ("key" in result) {
|
||||
await put(saveProvider, {
|
||||
yield* put(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
spinner.stop("Login successful")
|
||||
yield* spinner.stop("Login successful")
|
||||
}
|
||||
}
|
||||
|
||||
if (authorize.method === "code") {
|
||||
const code = await prompts.text({
|
||||
const code = yield* Prompt.text({
|
||||
message: "Paste the authorization code here: ",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(code)) throw new UI.CancelledError()
|
||||
const result = await authorize.callback(code)
|
||||
const authorizationCode = yield* promptValue(code)
|
||||
const result = yield* cliTry("Failed to authorize: ", () => authorize.callback(authorizationCode))
|
||||
if (result.type === "failed") {
|
||||
prompts.log.error("Failed to authorize")
|
||||
yield* Prompt.log.error("Failed to authorize")
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
if ("refresh" in result) {
|
||||
const { type: _, provider: __, refresh, access, expires, ...extraFields } = result
|
||||
await put(saveProvider, {
|
||||
yield* put(saveProvider, {
|
||||
type: "oauth",
|
||||
refresh,
|
||||
access,
|
||||
@@ -145,56 +153,57 @@ async function handlePluginAuth(plugin: { auth: PluginAuth }, provider: string,
|
||||
})
|
||||
}
|
||||
if ("key" in result) {
|
||||
await put(saveProvider, {
|
||||
yield* put(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key,
|
||||
})
|
||||
}
|
||||
prompts.log.success("Login successful")
|
||||
yield* Prompt.log.success("Login successful")
|
||||
}
|
||||
}
|
||||
|
||||
prompts.outro("Done")
|
||||
yield* Prompt.outro("Done")
|
||||
return true
|
||||
}
|
||||
|
||||
if (method.type === "api") {
|
||||
const key = await prompts.password({
|
||||
const key = yield* Prompt.password({
|
||||
message: "Enter your API key",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(key)) throw new UI.CancelledError()
|
||||
const apiKey = yield* promptValue(key)
|
||||
|
||||
const metadata = Object.keys(inputs).length ? { metadata: inputs } : {}
|
||||
if (!method.authorize) {
|
||||
await put(provider, {
|
||||
const authorizeApi = method.authorize
|
||||
if (!authorizeApi) {
|
||||
yield* put(provider, {
|
||||
type: "api",
|
||||
key,
|
||||
key: apiKey,
|
||||
...metadata,
|
||||
})
|
||||
prompts.outro("Done")
|
||||
yield* Prompt.outro("Done")
|
||||
return true
|
||||
}
|
||||
|
||||
const result = await method.authorize(inputs)
|
||||
const result = yield* cliTry("Failed to authorize: ", () => authorizeApi(inputs))
|
||||
if (result.type === "failed") {
|
||||
prompts.log.error("Failed to authorize")
|
||||
yield* Prompt.log.error("Failed to authorize")
|
||||
}
|
||||
if (result.type === "success") {
|
||||
const saveProvider = result.provider ?? provider
|
||||
await put(saveProvider, {
|
||||
yield* put(saveProvider, {
|
||||
type: "api",
|
||||
key: result.key ?? key,
|
||||
key: result.key ?? apiKey,
|
||||
...metadata,
|
||||
})
|
||||
prompts.log.success("Login successful")
|
||||
yield* Prompt.log.success("Login successful")
|
||||
}
|
||||
prompts.outro("Done")
|
||||
yield* Prompt.outro("Done")
|
||||
return true
|
||||
}
|
||||
|
||||
return false
|
||||
}
|
||||
})
|
||||
|
||||
export function resolvePluginProviders(input: {
|
||||
hooks: Hooks[]
|
||||
@@ -232,30 +241,30 @@ export const ProvidersCommand = cmd({
|
||||
async handler() {},
|
||||
})
|
||||
|
||||
export const ProvidersListCommand = cmd({
|
||||
export const ProvidersListCommand = effectCmd({
|
||||
command: "list",
|
||||
aliases: ["ls"],
|
||||
describe: "list providers and credentials",
|
||||
async handler(_args) {
|
||||
// Lists global credentials + provider env vars; no project instance needed.
|
||||
instance: false,
|
||||
handler: Effect.fn("Cli.providers.list")(function* (_args) {
|
||||
const authSvc = yield* Auth.Service
|
||||
const modelsDev = yield* ModelsDev.Service
|
||||
|
||||
UI.empty()
|
||||
const authPath = path.join(Global.Path.data, "auth.json")
|
||||
const homedir = os.homedir()
|
||||
const displayPath = authPath.startsWith(homedir) ? authPath.replace(homedir, "~") : authPath
|
||||
prompts.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
|
||||
const results = await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
return Object.entries(yield* auth.all())
|
||||
}),
|
||||
)
|
||||
const database = await getModels()
|
||||
yield* Prompt.intro(`Credentials ${UI.Style.TEXT_DIM}${displayPath}`)
|
||||
const results = Object.entries(yield* Effect.orDie(authSvc.all()))
|
||||
const database = yield* modelsDev.get()
|
||||
|
||||
for (const [providerID, result] of results) {
|
||||
const name = database[providerID]?.name || providerID
|
||||
prompts.log.info(`${name} ${UI.Style.TEXT_DIM}${result.type}`)
|
||||
yield* Prompt.log.info(`${name} ${UI.Style.TEXT_DIM}${result.type}`)
|
||||
}
|
||||
|
||||
prompts.outro(`${results.length} credentials`)
|
||||
yield* Prompt.outro(`${results.length} credentials`)
|
||||
|
||||
const activeEnvVars: Array<{ provider: string; envVar: string }> = []
|
||||
|
||||
@@ -272,18 +281,18 @@ export const ProvidersListCommand = cmd({
|
||||
|
||||
if (activeEnvVars.length > 0) {
|
||||
UI.empty()
|
||||
prompts.intro("Environment")
|
||||
yield* Prompt.intro("Environment")
|
||||
|
||||
for (const { provider, envVar } of activeEnvVars) {
|
||||
prompts.log.info(`${provider} ${UI.Style.TEXT_DIM}${envVar}`)
|
||||
yield* Prompt.log.info(`${provider} ${UI.Style.TEXT_DIM}${envVar}`)
|
||||
}
|
||||
|
||||
prompts.outro(`${activeEnvVars.length} environment variable` + (activeEnvVars.length === 1 ? "" : "s"))
|
||||
yield* Prompt.outro(`${activeEnvVars.length} environment variable` + (activeEnvVars.length === 1 ? "" : "s"))
|
||||
}
|
||||
},
|
||||
}),
|
||||
})
|
||||
|
||||
export const ProvidersLoginCommand = cmd({
|
||||
export const ProvidersLoginCommand = effectCmd({
|
||||
command: "login [url]",
|
||||
describe: "log in to a provider",
|
||||
builder: (yargs) =>
|
||||
@@ -302,228 +311,202 @@ export const ProvidersLoginCommand = cmd({
|
||||
describe: "login method label (skips method selection)",
|
||||
type: "string",
|
||||
}),
|
||||
async handler(args) {
|
||||
await WithInstance.provide({
|
||||
directory: process.cwd(),
|
||||
async fn() {
|
||||
UI.empty()
|
||||
prompts.intro("Add credential")
|
||||
if (args.url) {
|
||||
const url = args.url.replace(/\/+$/, "")
|
||||
const wellknown = (await fetch(`${url}/.well-known/opencode`).then((x) => x.json())) as {
|
||||
auth: { command: string[]; env: string }
|
||||
}
|
||||
prompts.log.info(`Running \`${wellknown.auth.command.join(" ")}\``)
|
||||
const proc = Process.spawn(wellknown.auth.command, {
|
||||
stdout: "pipe",
|
||||
})
|
||||
if (!proc.stdout) {
|
||||
prompts.log.error("Failed")
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
const [exit, token] = await Promise.all([proc.exited, text(proc.stdout)])
|
||||
if (exit !== 0) {
|
||||
prompts.log.error("Failed")
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
await put(url, {
|
||||
type: "wellknown",
|
||||
key: wellknown.auth.env,
|
||||
token: token.trim(),
|
||||
})
|
||||
prompts.log.success("Logged into " + url)
|
||||
prompts.outro("Done")
|
||||
return
|
||||
}
|
||||
await refreshModels().catch(() => {})
|
||||
handler: Effect.fn("Cli.providers.login")(function* (args) {
|
||||
const authSvc = yield* Auth.Service
|
||||
|
||||
const config = await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.get()))
|
||||
|
||||
const disabled = new Set(config.disabled_providers ?? [])
|
||||
const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined
|
||||
|
||||
const providers = await getModels().then((x) => {
|
||||
const filtered: Record<string, (typeof x)[string]> = {}
|
||||
for (const [key, value] of Object.entries(x)) {
|
||||
if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) {
|
||||
filtered[key] = value
|
||||
}
|
||||
}
|
||||
return filtered
|
||||
})
|
||||
const hooks = await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const plugin = yield* Plugin.Service
|
||||
return yield* plugin.list()
|
||||
}),
|
||||
)
|
||||
|
||||
const priority: Record<string, number> = {
|
||||
opencode: 0,
|
||||
openai: 1,
|
||||
"github-copilot": 2,
|
||||
google: 3,
|
||||
anthropic: 4,
|
||||
openrouter: 5,
|
||||
vercel: 6,
|
||||
}
|
||||
const pluginProviders = resolvePluginProviders({
|
||||
hooks,
|
||||
existingProviders: providers,
|
||||
disabled,
|
||||
enabled,
|
||||
providerNames: Object.fromEntries(Object.entries(config.provider ?? {}).map(([id, p]) => [id, p.name])),
|
||||
})
|
||||
const options = [
|
||||
...pipe(
|
||||
providers,
|
||||
values(),
|
||||
sortBy(
|
||||
(x) => priority[x.id] ?? 99,
|
||||
(x) => x.name ?? x.id,
|
||||
),
|
||||
map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: {
|
||||
opencode: "recommended",
|
||||
openai: "ChatGPT Plus/Pro or API key",
|
||||
}[x.id],
|
||||
})),
|
||||
),
|
||||
...pluginProviders.map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: "plugin",
|
||||
})),
|
||||
]
|
||||
|
||||
let provider: string
|
||||
if (args.provider) {
|
||||
const input = args.provider
|
||||
const byID = options.find((x) => x.value === input)
|
||||
const byName = options.find((x) => x.label.toLowerCase() === input.toLowerCase())
|
||||
const match = byID ?? byName
|
||||
if (!match) {
|
||||
prompts.log.error(`Unknown provider "${input}"`)
|
||||
process.exit(1)
|
||||
}
|
||||
provider = match.value
|
||||
} else {
|
||||
const selected = await prompts.autocomplete({
|
||||
message: "Select provider",
|
||||
maxItems: 8,
|
||||
options: [
|
||||
...options,
|
||||
{
|
||||
value: "other",
|
||||
label: "Other",
|
||||
},
|
||||
],
|
||||
})
|
||||
if (prompts.isCancel(selected)) throw new UI.CancelledError()
|
||||
provider = selected as string
|
||||
}
|
||||
|
||||
const plugin = hooks.findLast((x) => x.auth?.provider === provider)
|
||||
if (plugin && plugin.auth) {
|
||||
const handled = await handlePluginAuth({ auth: plugin.auth }, provider, args.method)
|
||||
if (handled) return
|
||||
}
|
||||
|
||||
if (provider === "other") {
|
||||
const custom = await prompts.text({
|
||||
message: "Enter provider id",
|
||||
validate: (x) => (x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"),
|
||||
})
|
||||
if (prompts.isCancel(custom)) throw new UI.CancelledError()
|
||||
provider = custom.replace(/^@ai-sdk\//, "")
|
||||
|
||||
const customPlugin = hooks.findLast((x) => x.auth?.provider === provider)
|
||||
if (customPlugin && customPlugin.auth) {
|
||||
const handled = await handlePluginAuth({ auth: customPlugin.auth }, provider, args.method)
|
||||
if (handled) return
|
||||
}
|
||||
|
||||
prompts.log.warn(
|
||||
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "amazon-bedrock") {
|
||||
prompts.log.info(
|
||||
"Amazon Bedrock authentication priority:\n" +
|
||||
" 1. Bearer token (AWS_BEARER_TOKEN_BEDROCK or /connect)\n" +
|
||||
" 2. AWS credential chain (profile, access keys, IAM roles, EKS IRSA)\n\n" +
|
||||
"Configure via opencode.json options (profile, region, endpoint) or\n" +
|
||||
"AWS environment variables (AWS_PROFILE, AWS_REGION, AWS_ACCESS_KEY_ID, AWS_WEB_IDENTITY_TOKEN_FILE).",
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "opencode") {
|
||||
prompts.log.info("Create an api key at https://opencode.ai/auth")
|
||||
}
|
||||
|
||||
if (provider === "vercel") {
|
||||
prompts.log.info("You can create an api key at https://vercel.link/ai-gateway-token")
|
||||
}
|
||||
|
||||
if (["cloudflare", "cloudflare-ai-gateway"].includes(provider)) {
|
||||
prompts.log.info(
|
||||
"Cloudflare AI Gateway can be configured with CLOUDFLARE_GATEWAY_ID, CLOUDFLARE_ACCOUNT_ID, and CLOUDFLARE_API_TOKEN environment variables. Read more: https://opencode.ai/docs/providers/#cloudflare-ai-gateway",
|
||||
)
|
||||
}
|
||||
|
||||
const key = await prompts.password({
|
||||
message: "Enter your API key",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
if (prompts.isCancel(key)) throw new UI.CancelledError()
|
||||
await put(provider, {
|
||||
type: "api",
|
||||
key,
|
||||
})
|
||||
|
||||
prompts.outro("Done")
|
||||
},
|
||||
})
|
||||
},
|
||||
})
|
||||
|
||||
export const ProvidersLogoutCommand = cmd({
|
||||
command: "logout",
|
||||
describe: "log out from a configured provider",
|
||||
async handler(_args) {
|
||||
UI.empty()
|
||||
const credentials: Array<[string, Auth.Info]> = await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
return Object.entries(yield* auth.all())
|
||||
}),
|
||||
)
|
||||
prompts.intro("Remove credential")
|
||||
if (credentials.length === 0) {
|
||||
prompts.log.error("No credentials found")
|
||||
yield* Prompt.intro("Add credential")
|
||||
if (args.url) {
|
||||
const url = args.url.replace(/\/+$/, "")
|
||||
const wellknown = (yield* cliTry(`Failed to load auth provider metadata from ${url}: `, () =>
|
||||
fetch(`${url}/.well-known/opencode`).then((x) => x.json()),
|
||||
)) as {
|
||||
auth: { command: string[]; env: string }
|
||||
}
|
||||
yield* Prompt.log.info(`Running \`${wellknown.auth.command.join(" ")}\``)
|
||||
const abort = new AbortController()
|
||||
const proc = Process.spawn(wellknown.auth.command, { stdout: "pipe", stderr: "inherit", abort: abort.signal })
|
||||
if (!proc.stdout) {
|
||||
yield* Prompt.log.error("Failed")
|
||||
yield* Prompt.outro("Done")
|
||||
return
|
||||
}
|
||||
const [exit, token] = yield* cliTry("Failed to run auth provider command: ", () =>
|
||||
Promise.all([proc.exited, text(proc.stdout!)]),
|
||||
).pipe(Effect.ensuring(Effect.sync(() => abort.abort())))
|
||||
if (exit !== 0) {
|
||||
yield* Prompt.log.error("Failed")
|
||||
yield* Prompt.outro("Done")
|
||||
return
|
||||
}
|
||||
yield* Effect.orDie(authSvc.set(url, { type: "wellknown", key: wellknown.auth.env, token: token.trim() }))
|
||||
yield* Prompt.log.success("Logged into " + url)
|
||||
yield* Prompt.outro("Done")
|
||||
return
|
||||
}
|
||||
const database = await getModels()
|
||||
const selected = await prompts.select({
|
||||
|
||||
const cfgSvc = yield* Config.Service
|
||||
const pluginSvc = yield* Plugin.Service
|
||||
const modelsDev = yield* ModelsDev.Service
|
||||
yield* Effect.ignore(modelsDev.refresh(true))
|
||||
|
||||
const config = yield* cfgSvc.get()
|
||||
|
||||
const disabled = new Set(config.disabled_providers ?? [])
|
||||
const enabled = config.enabled_providers ? new Set(config.enabled_providers) : undefined
|
||||
|
||||
const allProviders = yield* modelsDev.get()
|
||||
const providers: Record<string, (typeof allProviders)[string]> = {}
|
||||
for (const [key, value] of Object.entries(allProviders)) {
|
||||
if ((enabled ? enabled.has(key) : true) && !disabled.has(key)) providers[key] = value
|
||||
}
|
||||
const hooks = yield* pluginSvc.list()
|
||||
|
||||
const priority: Record<string, number> = {
|
||||
opencode: 0,
|
||||
openai: 1,
|
||||
"github-copilot": 2,
|
||||
google: 3,
|
||||
anthropic: 4,
|
||||
openrouter: 5,
|
||||
vercel: 6,
|
||||
}
|
||||
const pluginProviders = resolvePluginProviders({
|
||||
hooks,
|
||||
existingProviders: providers,
|
||||
disabled,
|
||||
enabled,
|
||||
providerNames: Object.fromEntries(Object.entries(config.provider ?? {}).map(([id, p]) => [id, p.name])),
|
||||
})
|
||||
const options = [
|
||||
...pipe(
|
||||
providers,
|
||||
values(),
|
||||
sortBy(
|
||||
(x) => priority[x.id] ?? 99,
|
||||
(x) => x.name ?? x.id,
|
||||
),
|
||||
map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: {
|
||||
opencode: "recommended",
|
||||
openai: "ChatGPT Plus/Pro or API key",
|
||||
}[x.id],
|
||||
})),
|
||||
),
|
||||
...pluginProviders.map((x) => ({
|
||||
label: x.name,
|
||||
value: x.id,
|
||||
hint: "plugin",
|
||||
})),
|
||||
]
|
||||
|
||||
let provider: string
|
||||
if (args.provider) {
|
||||
const input = args.provider
|
||||
const byID = options.find((x) => x.value === input)
|
||||
const byName = options.find((x) => x.label.toLowerCase() === input.toLowerCase())
|
||||
const match = byID ?? byName
|
||||
if (!match) {
|
||||
return yield* fail(`Unknown provider "${input}"`)
|
||||
}
|
||||
provider = match.value
|
||||
} else {
|
||||
provider = yield* promptValue(
|
||||
yield* Prompt.autocomplete({
|
||||
message: "Select provider",
|
||||
maxItems: 8,
|
||||
options: [...options, { value: "other", label: "Other" }],
|
||||
}),
|
||||
)
|
||||
}
|
||||
|
||||
const plugin = hooks.findLast((x) => x.auth?.provider === provider)
|
||||
if (plugin && plugin.auth) {
|
||||
const handled = yield* handlePluginAuth({ auth: plugin.auth! }, provider, args.method)
|
||||
if (handled) return
|
||||
}
|
||||
|
||||
if (provider === "other") {
|
||||
provider = (yield* promptValue(
|
||||
yield* Prompt.text({
|
||||
message: "Enter provider id",
|
||||
validate: (x) => (x && x.match(/^[0-9a-z-]+$/) ? undefined : "a-z, 0-9 and hyphens only"),
|
||||
}),
|
||||
)).replace(/^@ai-sdk\//, "")
|
||||
|
||||
const customPlugin = hooks.findLast((x) => x.auth?.provider === provider)
|
||||
if (customPlugin && customPlugin.auth) {
|
||||
const handled = yield* handlePluginAuth({ auth: customPlugin.auth! }, provider, args.method)
|
||||
if (handled) return
|
||||
}
|
||||
|
||||
yield* Prompt.log.warn(
|
||||
`This only stores a credential for ${provider} - you will need configure it in opencode.json, check the docs for examples.`,
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "amazon-bedrock") {
|
||||
yield* Prompt.log.info(
|
||||
"Amazon Bedrock authentication priority:\n" +
|
||||
" 1. Bearer token (AWS_BEARER_TOKEN_BEDROCK or /connect)\n" +
|
||||
" 2. AWS credential chain (profile, access keys, IAM roles, EKS IRSA)\n\n" +
|
||||
"Configure via opencode.json options (profile, region, endpoint) or\n" +
|
||||
"AWS environment variables (AWS_PROFILE, AWS_REGION, AWS_ACCESS_KEY_ID, AWS_WEB_IDENTITY_TOKEN_FILE).",
|
||||
)
|
||||
}
|
||||
|
||||
if (provider === "opencode") {
|
||||
yield* Prompt.log.info("Create an api key at https://opencode.ai/auth")
|
||||
}
|
||||
|
||||
if (provider === "vercel") {
|
||||
yield* Prompt.log.info("You can create an api key at https://vercel.link/ai-gateway-token")
|
||||
}
|
||||
|
||||
if (["cloudflare", "cloudflare-ai-gateway"].includes(provider)) {
|
||||
yield* Prompt.log.info(
|
||||
"Cloudflare AI Gateway can be configured with CLOUDFLARE_GATEWAY_ID, CLOUDFLARE_ACCOUNT_ID, and CLOUDFLARE_API_TOKEN environment variables. Read more: https://opencode.ai/docs/providers/#cloudflare-ai-gateway",
|
||||
)
|
||||
}
|
||||
|
||||
const key = yield* Prompt.password({
|
||||
message: "Enter your API key",
|
||||
validate: (x) => (x && x.length > 0 ? undefined : "Required"),
|
||||
})
|
||||
const apiKey = yield* promptValue(key)
|
||||
yield* Effect.orDie(authSvc.set(provider, { type: "api", key: apiKey }))
|
||||
|
||||
yield* Prompt.outro("Done")
|
||||
}),
|
||||
})
|
||||
|
||||
export const ProvidersLogoutCommand = effectCmd({
|
||||
command: "logout",
|
||||
describe: "log out from a configured provider",
|
||||
// Removes a global auth credential; no project instance needed.
|
||||
instance: false,
|
||||
handler: Effect.fn("Cli.providers.logout")(function* (_args) {
|
||||
const authSvc = yield* Auth.Service
|
||||
const modelsDev = yield* ModelsDev.Service
|
||||
|
||||
UI.empty()
|
||||
const credentials: Array<[string, Auth.Info]> = Object.entries(yield* Effect.orDie(authSvc.all()))
|
||||
yield* Prompt.intro("Remove credential")
|
||||
if (credentials.length === 0) {
|
||||
yield* Prompt.log.error("No credentials found")
|
||||
return
|
||||
}
|
||||
const database = yield* modelsDev.get()
|
||||
const selected = yield* Prompt.select({
|
||||
message: "Select provider",
|
||||
options: credentials.map(([key, value]) => ({
|
||||
label: (database[key]?.name || key) + UI.Style.TEXT_DIM + " (" + value.type + ")",
|
||||
value: key,
|
||||
})),
|
||||
})
|
||||
if (prompts.isCancel(selected)) throw new UI.CancelledError()
|
||||
const providerID = selected as string
|
||||
await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
yield* auth.remove(providerID)
|
||||
}),
|
||||
)
|
||||
prompts.outro("Logout successful")
|
||||
},
|
||||
yield* Effect.orDie(authSvc.remove(yield* promptValue(selected)))
|
||||
yield* Prompt.outro("Logout successful")
|
||||
}),
|
||||
})
|
||||
|
||||
@@ -5,6 +5,7 @@ import { Effect } from "effect"
|
||||
import { UI } from "../ui"
|
||||
import { effectCmd } from "../effect-cmd"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { ServerAuth } from "@/server/auth"
|
||||
import { EOL } from "os"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { createOpencodeClient, type OpencodeClient, type ToolPart } from "@opencode-ai/sdk/v2"
|
||||
@@ -26,7 +27,6 @@ import { ShellTool } from "../../tool/shell"
|
||||
import { ShellID } from "../../tool/shell/id"
|
||||
import { TodoWriteTool } from "../../tool/todo"
|
||||
import { Locale } from "@/util/locale"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
|
||||
type ToolProps<T> = {
|
||||
input: Tool.InferParameters<T>
|
||||
@@ -276,6 +276,11 @@ export const RunCommand = effectCmd({
|
||||
type: "string",
|
||||
describe: "basic auth password (defaults to OPENCODE_SERVER_PASSWORD)",
|
||||
})
|
||||
.option("username", {
|
||||
alias: ["u"],
|
||||
type: "string",
|
||||
describe: "basic auth username (defaults to OPENCODE_SERVER_USERNAME or 'opencode')",
|
||||
})
|
||||
.option("dir", {
|
||||
type: "string",
|
||||
describe: "directory to run in, path on remote server if attaching",
|
||||
@@ -299,6 +304,7 @@ export const RunCommand = effectCmd({
|
||||
default: false,
|
||||
}),
|
||||
handler: Effect.fn("Cli.run")(function* (args) {
|
||||
const agentSvc = yield* Agent.Service
|
||||
yield* Effect.promise(async () => {
|
||||
let message = [...args.message, ...(args["--"] || [])]
|
||||
.map((arg) => (arg.includes(" ") ? `"${arg.replace(/"/g, '\\"')}"` : arg))
|
||||
@@ -602,7 +608,7 @@ export const RunCommand = effectCmd({
|
||||
return name
|
||||
}
|
||||
|
||||
const entry = await AppRuntime.runPromise(Agent.Service.use((svc) => svc.get(name)))
|
||||
const entry = await Effect.runPromise(agentSvc.get(name))
|
||||
if (!entry) {
|
||||
UI.println(
|
||||
UI.Style.TEXT_WARNING_BOLD + "!",
|
||||
@@ -656,13 +662,7 @@ export const RunCommand = effectCmd({
|
||||
}
|
||||
|
||||
if (args.attach) {
|
||||
const headers = (() => {
|
||||
const password = args.password ?? process.env.OPENCODE_SERVER_PASSWORD
|
||||
if (!password) return undefined
|
||||
const username = process.env.OPENCODE_SERVER_USERNAME ?? "opencode"
|
||||
const auth = `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`
|
||||
return { Authorization: auth }
|
||||
})()
|
||||
const headers = ServerAuth.headers({ password: args.password, username: args.username })
|
||||
const sdk = createOpencodeClient({ baseUrl: args.attach, directory, headers })
|
||||
return await execute(sdk)
|
||||
}
|
||||
|
||||
@@ -5,6 +5,7 @@ import { win32DisableProcessedInput, win32InstallCtrlCGuard } from "./win32"
|
||||
import { TuiConfig } from "@/cli/cmd/tui/config/tui"
|
||||
import { errorMessage } from "@/util/error"
|
||||
import { validateSession } from "./validate-session"
|
||||
import { ServerAuth } from "@/server/auth"
|
||||
|
||||
export const AttachCommand = cmd({
|
||||
command: "attach <url>",
|
||||
@@ -38,6 +39,11 @@ export const AttachCommand = cmd({
|
||||
alias: ["p"],
|
||||
type: "string",
|
||||
describe: "basic auth password (defaults to OPENCODE_SERVER_PASSWORD)",
|
||||
})
|
||||
.option("username", {
|
||||
alias: ["u"],
|
||||
type: "string",
|
||||
describe: "basic auth username (defaults to OPENCODE_SERVER_USERNAME or 'opencode')",
|
||||
}),
|
||||
handler: async (args) => {
|
||||
const unguard = win32InstallCtrlCGuard()
|
||||
@@ -60,12 +66,7 @@ export const AttachCommand = cmd({
|
||||
return args.dir
|
||||
}
|
||||
})()
|
||||
const headers = (() => {
|
||||
const password = args.password ?? process.env.OPENCODE_SERVER_PASSWORD
|
||||
if (!password) return undefined
|
||||
const auth = `Basic ${Buffer.from(`opencode:${password}`).toString("base64")}`
|
||||
return { Authorization: auth }
|
||||
})()
|
||||
const headers = ServerAuth.headers({ password: args.password, username: args.username })
|
||||
const config = await TuiConfig.get()
|
||||
|
||||
try {
|
||||
|
||||
@@ -2,7 +2,7 @@ import { useDialog } from "@tui/ui/dialog"
|
||||
import { DialogSelect } from "@tui/ui/dialog-select"
|
||||
import { useRoute } from "@tui/context/route"
|
||||
import { useSync } from "@tui/context/sync"
|
||||
import { createMemo, createResource, createSignal, onMount } from "solid-js"
|
||||
import { createMemo, createResource, createSignal, onMount, type JSX } from "solid-js"
|
||||
import { Locale } from "@/util/locale"
|
||||
import { useProject } from "@tui/context/project"
|
||||
import { useKeybind } from "../context/keybind"
|
||||
@@ -10,15 +10,13 @@ import { useTheme } from "../context/theme"
|
||||
import { useSDK } from "../context/sdk"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { DialogSessionRename } from "./dialog-session-rename"
|
||||
import { Keybind } from "@/util/keybind"
|
||||
import { createDebouncedSignal } from "../util/signal"
|
||||
import { useToast } from "../ui/toast"
|
||||
import { DialogWorkspaceCreate, openWorkspaceSession, restoreWorkspaceSession } from "./dialog-workspace-create"
|
||||
import { openWorkspaceSelect, type WorkspaceSelection, warpWorkspaceSession } from "./dialog-workspace-create"
|
||||
import { Spinner } from "./spinner"
|
||||
import { errorMessage } from "@/util/error"
|
||||
import { DialogSessionDeleteFailed } from "./dialog-session-delete-failed"
|
||||
|
||||
type WorkspaceStatus = "connected" | "connecting" | "disconnected" | "error"
|
||||
import { WorkspaceLabel } from "./workspace-label"
|
||||
|
||||
export function DialogSessionList() {
|
||||
const dialog = useDialog()
|
||||
@@ -44,26 +42,39 @@ export function DialogSessionList() {
|
||||
const currentSessionID = createMemo(() => (route.data.type === "session" ? route.data.sessionID : undefined))
|
||||
const sessions = createMemo(() => searchResults() ?? sync.data.session)
|
||||
|
||||
function createWorkspace() {
|
||||
dialog.replace(() => (
|
||||
<DialogWorkspaceCreate
|
||||
onSelect={(workspaceID) =>
|
||||
openWorkspaceSession({
|
||||
dialog,
|
||||
route,
|
||||
sdk,
|
||||
sync,
|
||||
toast,
|
||||
workspaceID,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))
|
||||
}
|
||||
|
||||
function recover(session: NonNullable<ReturnType<typeof sessions>[number]>) {
|
||||
const workspace = project.workspace.get(session.workspaceID!)
|
||||
const list = () => dialog.replace(() => <DialogSessionList />)
|
||||
const warp = async (selection: WorkspaceSelection) => {
|
||||
const workspaceID = await (async () => {
|
||||
if (selection.type === "none") return null
|
||||
if (selection.type === "existing") return selection.workspaceID
|
||||
const result = await sdk.client.experimental.workspace
|
||||
.create({ type: selection.workspaceType, branch: null })
|
||||
.catch(() => undefined)
|
||||
const workspace = result?.data
|
||||
if (!workspace) {
|
||||
toast.show({
|
||||
message: `Failed to create workspace: ${errorMessage(result?.error ?? "no response")}`,
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
await project.workspace.sync()
|
||||
return workspace.id
|
||||
})()
|
||||
if (workspaceID === undefined) return
|
||||
await warpWorkspaceSession({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
project,
|
||||
toast,
|
||||
workspaceID,
|
||||
sessionID: session.id,
|
||||
done: list,
|
||||
})
|
||||
}
|
||||
dialog.replace(() => (
|
||||
<DialogSessionDeleteFailed
|
||||
session={session.title}
|
||||
@@ -90,22 +101,15 @@ export function DialogSessionList() {
|
||||
return true
|
||||
}}
|
||||
onRestore={() => {
|
||||
dialog.replace(() => (
|
||||
<DialogWorkspaceCreate
|
||||
onSelect={(workspaceID) =>
|
||||
restoreWorkspaceSession({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
project,
|
||||
toast,
|
||||
workspaceID,
|
||||
sessionID: session.id,
|
||||
done: list,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))
|
||||
void openWorkspaceSelect({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
toast,
|
||||
onSelect: (selection) => {
|
||||
void warp(selection)
|
||||
},
|
||||
})
|
||||
return false
|
||||
}}
|
||||
/>
|
||||
@@ -124,30 +128,17 @@ export function DialogSessionList() {
|
||||
.map((x) => {
|
||||
const workspace = x.workspaceID ? project.workspace.get(x.workspaceID) : undefined
|
||||
|
||||
let workspaceStatus: WorkspaceStatus | null = null
|
||||
if (x.workspaceID) {
|
||||
workspaceStatus = project.workspace.status(x.workspaceID) || "error"
|
||||
}
|
||||
|
||||
let footer = ""
|
||||
let footer: JSX.Element | string = ""
|
||||
if (Flag.OPENCODE_EXPERIMENTAL_WORKSPACES) {
|
||||
if (x.workspaceID) {
|
||||
let desc = "unknown"
|
||||
if (workspace) {
|
||||
desc = `${workspace.type}: ${workspace.name}`
|
||||
}
|
||||
|
||||
footer = (
|
||||
<>
|
||||
{desc}{" "}
|
||||
<span
|
||||
style={{
|
||||
fg: workspaceStatus === "connected" ? theme.success : theme.error,
|
||||
}}
|
||||
>
|
||||
●
|
||||
</span>
|
||||
</>
|
||||
footer = workspace ? (
|
||||
<WorkspaceLabel
|
||||
type={workspace.type}
|
||||
name={workspace.name}
|
||||
status={project.workspace.status(x.workspaceID) ?? "error"}
|
||||
/>
|
||||
) : (
|
||||
<WorkspaceLabel type="unknown" name={x.workspaceID} status="error" />
|
||||
)
|
||||
}
|
||||
} else {
|
||||
@@ -250,15 +241,6 @@ export function DialogSessionList() {
|
||||
dialog.replace(() => <DialogSessionRename session={option.value} />)
|
||||
},
|
||||
},
|
||||
{
|
||||
keybind: Keybind.parse("ctrl+w")[0],
|
||||
title: "new workspace",
|
||||
side: "right",
|
||||
disabled: !Flag.OPENCODE_EXPERIMENTAL_WORKSPACES,
|
||||
onTrigger: () => {
|
||||
createWorkspace()
|
||||
},
|
||||
},
|
||||
]}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import { createOpencodeClient } from "@opencode-ai/sdk/v2"
|
||||
import type { Workspace } from "@opencode-ai/sdk/v2"
|
||||
import { useDialog } from "@tui/ui/dialog"
|
||||
import { DialogSelect } from "@tui/ui/dialog-select"
|
||||
import { useRoute } from "@tui/context/route"
|
||||
import { DialogSelect, type DialogSelectOption } from "@tui/ui/dialog-select"
|
||||
import { useSync } from "@tui/context/sync"
|
||||
import { useProject } from "@tui/context/project"
|
||||
import { createMemo, createSignal, onMount } from "solid-js"
|
||||
import { setTimeout as sleep } from "node:timers/promises"
|
||||
import { errorMessage } from "@/util/error"
|
||||
import { useSDK } from "../context/sdk"
|
||||
import { useToast } from "../ui/toast"
|
||||
@@ -16,184 +14,212 @@ type Adapter = {
|
||||
description: string
|
||||
}
|
||||
|
||||
function scoped(sdk: ReturnType<typeof useSDK>, sync: ReturnType<typeof useSync>, workspaceID: string) {
|
||||
return createOpencodeClient({
|
||||
baseUrl: sdk.url,
|
||||
fetch: sdk.fetch,
|
||||
directory: sync.path.directory || sdk.directory,
|
||||
experimental_workspaceID: workspaceID,
|
||||
})
|
||||
}
|
||||
export type WorkspaceSelection =
|
||||
| {
|
||||
type: "none"
|
||||
}
|
||||
| {
|
||||
type: "new"
|
||||
workspaceType: string
|
||||
workspaceName: string
|
||||
}
|
||||
| {
|
||||
type: "existing"
|
||||
workspaceID: string
|
||||
workspaceType: string
|
||||
workspaceName: string
|
||||
}
|
||||
|
||||
export async function openWorkspaceSession(input: {
|
||||
dialog: ReturnType<typeof useDialog>
|
||||
route: ReturnType<typeof useRoute>
|
||||
type WorkspaceSelectValue = WorkspaceSelection | { type: "existing-list" }
|
||||
type ExistingWorkspaceSelectValue = { workspace: Workspace }
|
||||
|
||||
async function loadWorkspaceAdapters(input: {
|
||||
sdk: ReturnType<typeof useSDK>
|
||||
sync: ReturnType<typeof useSync>
|
||||
toast: ReturnType<typeof useToast>
|
||||
workspaceID: string
|
||||
}) {
|
||||
const client = scoped(input.sdk, input.sync, input.workspaceID)
|
||||
|
||||
while (true) {
|
||||
const result = await client.session.create({ workspace: input.workspaceID }).catch(() => undefined)
|
||||
if (!result) {
|
||||
input.toast.show({
|
||||
message: "Failed to create workspace session",
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
if (result.response?.status && result.response.status >= 500 && result.response.status < 600) {
|
||||
await sleep(1000)
|
||||
continue
|
||||
}
|
||||
if (!result.data) {
|
||||
input.toast.show({
|
||||
message: "Failed to create workspace session",
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
input.route.navigate({
|
||||
type: "session",
|
||||
sessionID: result.data.id,
|
||||
})
|
||||
input.dialog.clear()
|
||||
return
|
||||
}
|
||||
const dir = input.sync.path.directory || input.sdk.directory
|
||||
const url = new URL("/experimental/workspace/adapter", input.sdk.url)
|
||||
if (dir) url.searchParams.set("directory", dir)
|
||||
const res = await input.sdk
|
||||
.fetch(url)
|
||||
.then((x) => x.json() as Promise<Adapter[]>)
|
||||
.catch(() => undefined)
|
||||
if (res) return res
|
||||
input.toast.show({
|
||||
message: "Failed to load workspace adapters",
|
||||
variant: "error",
|
||||
})
|
||||
}
|
||||
|
||||
export async function restoreWorkspaceSession(input: {
|
||||
export async function openWorkspaceSelect(input: {
|
||||
dialog: ReturnType<typeof useDialog>
|
||||
sdk: ReturnType<typeof useSDK>
|
||||
sync: ReturnType<typeof useSync>
|
||||
toast: ReturnType<typeof useToast>
|
||||
onSelect: (selection: WorkspaceSelection) => Promise<void> | void
|
||||
}) {
|
||||
input.dialog.clear()
|
||||
const adapters = await loadWorkspaceAdapters(input)
|
||||
if (!adapters) return
|
||||
input.dialog.replace(() => <DialogWorkspaceSelect adapters={adapters} onSelect={input.onSelect} />)
|
||||
}
|
||||
|
||||
export async function warpWorkspaceSession(input: {
|
||||
dialog: ReturnType<typeof useDialog>
|
||||
sdk: ReturnType<typeof useSDK>
|
||||
sync: ReturnType<typeof useSync>
|
||||
project: ReturnType<typeof useProject>
|
||||
toast: ReturnType<typeof useToast>
|
||||
workspaceID: string
|
||||
workspaceID: string | null
|
||||
sessionID: string
|
||||
done?: () => void
|
||||
}) {
|
||||
}): Promise<boolean> {
|
||||
const result = await input.sdk.client.experimental.workspace
|
||||
.sessionRestore({ id: input.workspaceID, sessionID: input.sessionID })
|
||||
.warp({
|
||||
id: input.workspaceID ?? undefined,
|
||||
sessionID: input.sessionID,
|
||||
})
|
||||
.catch(() => undefined)
|
||||
if (!result?.data) {
|
||||
input.toast.show({
|
||||
message: `Failed to restore session: ${errorMessage(result?.error ?? "no response")}`,
|
||||
message: `Failed to warp session: ${errorMessage(result?.error ?? "no response")}`,
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
return false
|
||||
}
|
||||
|
||||
input.project.workspace.set(input.workspaceID)
|
||||
|
||||
await input.sync.bootstrap({ fatal: false }).catch(() => undefined)
|
||||
|
||||
await Promise.all([input.project.workspace.sync(), input.sync.session.sync(input.sessionID)])
|
||||
await Promise.all([input.project.workspace.sync(), input.sync.session.refresh()])
|
||||
|
||||
input.toast.show({
|
||||
message: "Session restored into the new workspace",
|
||||
variant: "success",
|
||||
})
|
||||
input.done?.()
|
||||
if (input.done) return
|
||||
if (input.done) return true
|
||||
input.dialog.clear()
|
||||
return true
|
||||
}
|
||||
|
||||
export function DialogWorkspaceCreate(props: { onSelect: (workspaceID: string) => Promise<void> | void }) {
|
||||
export function DialogWorkspaceSelect(props: {
|
||||
adapters?: Adapter[]
|
||||
onSelect: (selection: WorkspaceSelection) => Promise<void> | void
|
||||
}) {
|
||||
const dialog = useDialog()
|
||||
const sync = useSync()
|
||||
const project = useProject()
|
||||
const sync = useSync()
|
||||
const sdk = useSDK()
|
||||
const toast = useToast()
|
||||
const [creating, setCreating] = createSignal<string>()
|
||||
const [adapters, setAdapters] = createSignal<Adapter[]>()
|
||||
const [adapters, setAdapters] = createSignal<Adapter[] | undefined>(props.adapters)
|
||||
|
||||
onMount(() => {
|
||||
dialog.setSize("medium")
|
||||
void (async () => {
|
||||
const dir = sync.path.directory || sdk.directory
|
||||
const url = new URL("/experimental/workspace/adapter", sdk.url)
|
||||
if (dir) url.searchParams.set("directory", dir)
|
||||
const res = await sdk
|
||||
.fetch(url)
|
||||
.then((x) => x.json() as Promise<Adapter[]>)
|
||||
.catch(() => undefined)
|
||||
if (!res) {
|
||||
toast.show({
|
||||
message: "Failed to load workspace adapters",
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
if (adapters()) return
|
||||
const res = await loadWorkspaceAdapters({ sdk, sync, toast })
|
||||
if (!res) return
|
||||
setAdapters(res)
|
||||
})()
|
||||
})
|
||||
|
||||
const options = createMemo(() => {
|
||||
const type = creating()
|
||||
if (type) {
|
||||
return [
|
||||
{
|
||||
title: `Creating ${type} workspace...`,
|
||||
value: "creating" as const,
|
||||
description: "This can take a while for remote environments",
|
||||
},
|
||||
]
|
||||
}
|
||||
const options = createMemo<DialogSelectOption<WorkspaceSelectValue>[]>(() => {
|
||||
const list = adapters()
|
||||
if (!list) {
|
||||
return [
|
||||
{
|
||||
title: "Loading workspaces...",
|
||||
value: "loading" as const,
|
||||
description: "Fetching available workspace adapters",
|
||||
if (!list) return []
|
||||
const recent = sync.data.session
|
||||
.toSorted((a, b) => b.time.updated - a.time.updated)
|
||||
.flatMap((session) => (session.workspaceID ? [session.workspaceID] : []))
|
||||
.filter((workspaceID, index, list) => list.indexOf(workspaceID) === index)
|
||||
.slice(0, 3)
|
||||
.flatMap((workspaceID) => {
|
||||
const workspace = project.workspace.get(workspaceID)
|
||||
return workspace ? [workspace] : []
|
||||
})
|
||||
return [
|
||||
...list.map((adapter) => ({
|
||||
title: adapter.name,
|
||||
value: { type: "new" as const, workspaceType: adapter.type, workspaceName: adapter.name },
|
||||
description: adapter.description,
|
||||
category: "New workspace",
|
||||
})),
|
||||
{
|
||||
title: "None",
|
||||
value: { type: "none" as const },
|
||||
description: "Use the local project",
|
||||
category: "Choose workspace",
|
||||
},
|
||||
...recent.map((workspace: Workspace) => ({
|
||||
title: workspace.name,
|
||||
description: `(${workspace.type})`,
|
||||
value: {
|
||||
type: "existing" as const,
|
||||
workspaceID: workspace.id,
|
||||
workspaceType: workspace.type,
|
||||
workspaceName: workspace.name,
|
||||
},
|
||||
]
|
||||
}
|
||||
return list.map((item) => ({
|
||||
title: item.name,
|
||||
value: item.type,
|
||||
description: item.description,
|
||||
}))
|
||||
category: "Choose workspace",
|
||||
})),
|
||||
{
|
||||
title: "View all workspaces",
|
||||
value: { type: "existing-list" as const },
|
||||
description: "Choose from all workspaces",
|
||||
category: "Choose workspace",
|
||||
},
|
||||
]
|
||||
})
|
||||
|
||||
const create = async (type: string) => {
|
||||
if (creating()) return
|
||||
setCreating(type)
|
||||
|
||||
const result = await sdk.client.experimental.workspace.create({ type, branch: null }).catch(() => {
|
||||
toast.show({
|
||||
message: "Creating workspace failed",
|
||||
variant: "error",
|
||||
})
|
||||
return undefined
|
||||
})
|
||||
|
||||
const workspace = result?.data
|
||||
if (!workspace) {
|
||||
setCreating(undefined)
|
||||
toast.show({
|
||||
message: `Failed to create workspace: ${errorMessage(result?.error ?? "no response")}`,
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await project.workspace.sync()
|
||||
await props.onSelect(workspace.id)
|
||||
setCreating(undefined)
|
||||
}
|
||||
|
||||
if (!adapters()) return null
|
||||
return (
|
||||
<DialogSelect
|
||||
title={creating() ? "Creating Workspace" : "New Workspace"}
|
||||
<DialogSelect<WorkspaceSelectValue>
|
||||
title="Warp"
|
||||
skipFilter={true}
|
||||
renderFilter={false}
|
||||
options={options()}
|
||||
onSelect={(option) => {
|
||||
if (option.value === "creating" || option.value === "loading") return
|
||||
void create(option.value)
|
||||
if (!option.value) return
|
||||
if (option.value.type === "none") {
|
||||
void props.onSelect(option.value)
|
||||
return
|
||||
}
|
||||
if (option.value.type === "new") {
|
||||
void props.onSelect(option.value)
|
||||
return
|
||||
}
|
||||
if (option.value.type === "existing") {
|
||||
void props.onSelect(option.value)
|
||||
return
|
||||
}
|
||||
|
||||
dialog.replace(() => <DialogExistingWorkspaceSelect onSelect={props.onSelect} />)
|
||||
}}
|
||||
/>
|
||||
)
|
||||
}
|
||||
|
||||
function DialogExistingWorkspaceSelect(props: { onSelect: (selection: WorkspaceSelection) => Promise<void> | void }) {
|
||||
const project = useProject()
|
||||
|
||||
const options = createMemo<DialogSelectOption<ExistingWorkspaceSelectValue>[]>(() =>
|
||||
project.workspace
|
||||
.list()
|
||||
.filter((workspace) => project.workspace.status(workspace.id) === "connected")
|
||||
.map((workspace: Workspace) => ({
|
||||
title: workspace.name,
|
||||
description: `(${workspace.type})`,
|
||||
value: { workspace },
|
||||
})),
|
||||
)
|
||||
|
||||
return (
|
||||
<DialogSelect<ExistingWorkspaceSelectValue>
|
||||
title="Existing Workspace"
|
||||
options={options()}
|
||||
onSelect={(option) => {
|
||||
void props.onSelect({
|
||||
type: "existing",
|
||||
workspaceID: option.value.workspace.id,
|
||||
workspaceType: option.value.workspace.type,
|
||||
workspaceName: option.value.workspace.name,
|
||||
})
|
||||
}}
|
||||
/>
|
||||
)
|
||||
|
||||
@@ -7,6 +7,7 @@ import { Filesystem } from "@/util/filesystem"
|
||||
import { useLocal } from "@tui/context/local"
|
||||
import { tint, useTheme } from "@tui/context/theme"
|
||||
import { EmptyBorder, SplitBorder } from "@tui/component/border"
|
||||
import { Spinner } from "@tui/component/spinner"
|
||||
import { useSDK } from "@tui/context/sdk"
|
||||
import { useRoute } from "@tui/context/route"
|
||||
import { useProject } from "@tui/context/project"
|
||||
@@ -41,9 +42,11 @@ import { useKV } from "../../context/kv"
|
||||
import { createFadeIn } from "../../util/signal"
|
||||
import { useTextareaKeybindings } from "../textarea-keybindings"
|
||||
import { DialogSkill } from "../dialog-skill"
|
||||
import { DialogWorkspaceCreate, restoreWorkspaceSession } from "../dialog-workspace-create"
|
||||
import { openWorkspaceSelect, warpWorkspaceSession, type WorkspaceSelection } from "../dialog-workspace-create"
|
||||
import { DialogWorkspaceUnavailable } from "../dialog-workspace-unavailable"
|
||||
import { useArgs } from "@tui/context/args"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { WorkspaceLabel, type WorkspaceStatus } from "../workspace-label"
|
||||
|
||||
export type PromptProps = {
|
||||
sessionID?: string
|
||||
@@ -173,9 +176,92 @@ export function Prompt(props: PromptProps) {
|
||||
const [editorContextHover, setEditorContextHover] = createSignal(false)
|
||||
let lastSubmittedEditorSelectionKey: string | undefined
|
||||
const [auto, setAuto] = createSignal<AutocompleteRef>()
|
||||
const [workspaceSelection, setWorkspaceSelection] = createSignal<WorkspaceSelection>()
|
||||
const [workspaceCreating, setWorkspaceCreating] = createSignal(false)
|
||||
const [workspaceCreatingDots, setWorkspaceCreatingDots] = createSignal(3)
|
||||
const [warpNotice, setWarpNotice] = createSignal<string>()
|
||||
const currentProviderLabel = createMemo(() => local.model.parsed().provider)
|
||||
const hasRightContent = createMemo(() => Boolean(props.right))
|
||||
|
||||
function selectWorkspace(selection: WorkspaceSelection | undefined) {
|
||||
setWorkspaceSelection(selection)
|
||||
}
|
||||
|
||||
function setCreatingWorkspace(creating: boolean) {
|
||||
setWorkspaceCreating(creating)
|
||||
}
|
||||
|
||||
function showWarpNotice(name: string) {
|
||||
setWarpNotice(`Warped to ${name}`)
|
||||
setTimeout(() => setWarpNotice(undefined), 4000)
|
||||
}
|
||||
|
||||
async function createWorkspace(selection: Extract<WorkspaceSelection, { type: "new" }>) {
|
||||
setCreatingWorkspace(true)
|
||||
const result = await sdk.client.experimental.workspace
|
||||
.create({ type: selection.workspaceType, branch: null })
|
||||
.catch(() => undefined)
|
||||
if (result == undefined || result.error || !result.data) {
|
||||
selectWorkspace(undefined)
|
||||
setCreatingWorkspace(false)
|
||||
toast.show({
|
||||
message: "Creating workspace failed",
|
||||
variant: "error",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
await project.workspace.sync()
|
||||
const workspace = result.data
|
||||
selectWorkspace({
|
||||
type: "existing",
|
||||
workspaceID: workspace.id,
|
||||
workspaceType: workspace.type,
|
||||
workspaceName: workspace.name,
|
||||
})
|
||||
setCreatingWorkspace(false)
|
||||
return workspace
|
||||
}
|
||||
|
||||
async function warpSession(selection: WorkspaceSelection) {
|
||||
if (!props.sessionID) {
|
||||
selectWorkspace(selection)
|
||||
dialog.clear()
|
||||
if (selection.type === "new") void createWorkspace(selection)
|
||||
return
|
||||
}
|
||||
selectWorkspace(selection)
|
||||
dialog.clear()
|
||||
|
||||
const workspace =
|
||||
selection.type === "none"
|
||||
? { id: null, name: "local project" }
|
||||
: selection.type === "existing"
|
||||
? { id: selection.workspaceID, name: selection.workspaceName }
|
||||
: await createWorkspace(selection)
|
||||
if (!workspace) return
|
||||
|
||||
const warped = await warpWorkspaceSession({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
project,
|
||||
toast,
|
||||
workspaceID: workspace.id,
|
||||
sessionID: props.sessionID,
|
||||
})
|
||||
if (warped) showWarpNotice(workspace.name)
|
||||
}
|
||||
|
||||
createEffect(() => {
|
||||
if (!workspaceCreating()) {
|
||||
setWorkspaceCreatingDots(3)
|
||||
return
|
||||
}
|
||||
const timer = setInterval(() => setWorkspaceCreatingDots((dots) => (dots % 3) + 1), 1000)
|
||||
onCleanup(() => clearInterval(timer))
|
||||
})
|
||||
|
||||
function promptModelWarning() {
|
||||
toast.show({
|
||||
variant: "warning",
|
||||
@@ -213,6 +299,7 @@ export function Prompt(props: PromptProps) {
|
||||
})
|
||||
|
||||
createEffect(() => {
|
||||
if (!input || input.isDestroyed) return
|
||||
if (props.disabled) input.cursorColor = theme.backgroundElement
|
||||
if (!props.disabled) input.cursorColor = theme.text
|
||||
})
|
||||
@@ -489,6 +576,27 @@ export function Prompt(props: PromptProps) {
|
||||
))
|
||||
},
|
||||
},
|
||||
{
|
||||
title: "Warp",
|
||||
description: "Change the workspace for the session",
|
||||
value: "workspace.set",
|
||||
category: "Session",
|
||||
enabled: Flag.OPENCODE_EXPERIMENTAL_WORKSPACES,
|
||||
slash: {
|
||||
name: "warp",
|
||||
},
|
||||
onSelect: (dialog) => {
|
||||
void openWorkspaceSelect({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
toast,
|
||||
onSelect: (selection) => {
|
||||
void warpSession(selection)
|
||||
},
|
||||
})
|
||||
},
|
||||
},
|
||||
]
|
||||
})
|
||||
|
||||
@@ -699,6 +807,8 @@ export function Prompt(props: PromptProps) {
|
||||
])
|
||||
|
||||
async function submit() {
|
||||
setWarpNotice(undefined)
|
||||
|
||||
// IME: double-defer may fire before onContentChange flushes the last
|
||||
// composed character (e.g. Korean hangul) to the store, so read
|
||||
// plainText directly and sync before any downstream reads.
|
||||
@@ -707,6 +817,7 @@ export function Prompt(props: PromptProps) {
|
||||
syncExtmarksWithPromptParts()
|
||||
}
|
||||
if (props.disabled) return false
|
||||
if (workspaceCreating()) return false
|
||||
if (autocomplete?.visible) return false
|
||||
if (!store.prompt.input) return false
|
||||
const agent = local.agent.current()
|
||||
@@ -729,21 +840,16 @@ export function Prompt(props: PromptProps) {
|
||||
dialog.replace(() => (
|
||||
<DialogWorkspaceUnavailable
|
||||
onRestore={() => {
|
||||
dialog.replace(() => (
|
||||
<DialogWorkspaceCreate
|
||||
onSelect={(nextWorkspaceID) =>
|
||||
restoreWorkspaceSession({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
project,
|
||||
toast,
|
||||
workspaceID: nextWorkspaceID,
|
||||
sessionID: props.sessionID!,
|
||||
})
|
||||
}
|
||||
/>
|
||||
))
|
||||
void openWorkspaceSelect({
|
||||
dialog,
|
||||
sdk,
|
||||
sync,
|
||||
toast,
|
||||
onSelect: (selection) => {
|
||||
void warpSession(selection)
|
||||
},
|
||||
})
|
||||
return false
|
||||
}}
|
||||
/>
|
||||
))
|
||||
@@ -753,6 +859,14 @@ export function Prompt(props: PromptProps) {
|
||||
const variant = local.model.variant.current()
|
||||
let sessionID = props.sessionID
|
||||
if (sessionID == null) {
|
||||
const workspace = workspaceSelection()
|
||||
const workspaceID = iife(() => {
|
||||
if (!workspace) return undefined
|
||||
if (workspace.type === "none") return undefined
|
||||
if (workspace.type === "existing") return workspace.workspaceID
|
||||
return undefined
|
||||
})
|
||||
|
||||
const res = await sdk.client.session.create({
|
||||
workspace: props.workspaceID,
|
||||
agent: agent.name,
|
||||
@@ -1025,6 +1139,29 @@ export function Prompt(props: PromptProps) {
|
||||
return `Ask anything... "${list()[store.placeholder % list().length]}"`
|
||||
})
|
||||
|
||||
const workspaceLabel = createMemo<
|
||||
| { type: "new"; workspaceType: string }
|
||||
| { type: "existing"; workspaceType: string; workspaceName: string; status?: WorkspaceStatus }
|
||||
| undefined
|
||||
>(() => {
|
||||
const selected = workspaceSelection()
|
||||
if (!selected) return
|
||||
if (selected.type === "none") return
|
||||
if (props.sessionID && !workspaceCreating()) return
|
||||
if (selected.type === "new") {
|
||||
return {
|
||||
type: "new",
|
||||
workspaceType: selected.workspaceType,
|
||||
}
|
||||
}
|
||||
return {
|
||||
type: "existing",
|
||||
workspaceType: selected.workspaceType,
|
||||
workspaceName: selected.workspaceName,
|
||||
status: selected.type === "existing" ? "connected" : undefined,
|
||||
}
|
||||
})
|
||||
|
||||
const spinnerDef = createMemo(() => {
|
||||
const agent = local.agent.current()
|
||||
const color = agent ? local.agent.color(agent.name) : theme.border
|
||||
@@ -1281,7 +1418,7 @@ export function Prompt(props: PromptProps) {
|
||||
}}
|
||||
onMouseDown={(r: MouseEvent) => r.target?.focus()}
|
||||
focusedBackgroundColor={theme.backgroundElement}
|
||||
cursorColor={theme.text}
|
||||
cursorColor={props.disabled ? theme.backgroundElement : theme.text}
|
||||
syntaxStyle={syntax()}
|
||||
/>
|
||||
<box flexDirection="row" flexShrink={0} paddingTop={1} gap={1} justifyContent="space-between">
|
||||
@@ -1351,86 +1488,124 @@ export function Prompt(props: PromptProps) {
|
||||
/>
|
||||
</box>
|
||||
<box width="100%" flexDirection="row" justifyContent="space-between">
|
||||
<Show when={status().type !== "idle"} fallback={props.hint ?? <text />}>
|
||||
<box
|
||||
flexDirection="row"
|
||||
gap={1}
|
||||
flexGrow={1}
|
||||
justifyContent={status().type === "retry" ? "space-between" : "flex-start"}
|
||||
>
|
||||
<box flexShrink={0} flexDirection="row" gap={1}>
|
||||
<box marginLeft={1}>
|
||||
<Show when={kv.get("animations_enabled", true)} fallback={<text fg={theme.textMuted}>[⋯]</text>}>
|
||||
<spinner color={spinnerDef().color} frames={spinnerDef().frames} interval={40} />
|
||||
</Show>
|
||||
</box>
|
||||
<box flexDirection="row" gap={1} flexShrink={0}>
|
||||
{(() => {
|
||||
const retry = createMemo(() => {
|
||||
const s = status()
|
||||
if (s.type !== "retry") return
|
||||
return s
|
||||
})
|
||||
const message = createMemo(() => {
|
||||
const r = retry()
|
||||
if (!r) return
|
||||
if (r.message.includes("exceeded your current quota") && r.message.includes("gemini"))
|
||||
return "gemini is way too hot right now"
|
||||
if (r.message.length > 80) return r.message.slice(0, 80) + "..."
|
||||
return r.message
|
||||
})
|
||||
const isTruncated = createMemo(() => {
|
||||
const r = retry()
|
||||
if (!r) return false
|
||||
return r.message.length > 120
|
||||
})
|
||||
const [seconds, setSeconds] = createSignal(0)
|
||||
onMount(() => {
|
||||
const timer = setInterval(() => {
|
||||
const next = retry()?.next
|
||||
if (next) setSeconds(Math.round((next - Date.now()) / 1000))
|
||||
}, 1000)
|
||||
|
||||
onCleanup(() => {
|
||||
clearInterval(timer)
|
||||
<Switch>
|
||||
<Match when={status().type !== "idle"}>
|
||||
<box
|
||||
flexDirection="row"
|
||||
gap={1}
|
||||
flexGrow={1}
|
||||
justifyContent={status().type === "retry" ? "space-between" : "flex-start"}
|
||||
>
|
||||
<box flexShrink={0} flexDirection="row" gap={1}>
|
||||
<box marginLeft={1}>
|
||||
<Show when={kv.get("animations_enabled", true)} fallback={<text fg={theme.textMuted}>[⋯]</text>}>
|
||||
<spinner color={spinnerDef().color} frames={spinnerDef().frames} interval={40} />
|
||||
</Show>
|
||||
</box>
|
||||
<box flexDirection="row" gap={1} flexShrink={0}>
|
||||
{(() => {
|
||||
const retry = createMemo(() => {
|
||||
const s = status()
|
||||
if (s.type !== "retry") return
|
||||
return s
|
||||
})
|
||||
})
|
||||
const handleMessageClick = () => {
|
||||
const r = retry()
|
||||
if (!r) return
|
||||
if (isTruncated()) {
|
||||
void DialogAlert.show(dialog, "Retry Error", r.message)
|
||||
const message = createMemo(() => {
|
||||
const r = retry()
|
||||
if (!r) return
|
||||
if (r.message.includes("exceeded your current quota") && r.message.includes("gemini"))
|
||||
return "gemini is way too hot right now"
|
||||
if (r.message.length > 80) return r.message.slice(0, 80) + "..."
|
||||
return r.message
|
||||
})
|
||||
const isTruncated = createMemo(() => {
|
||||
const r = retry()
|
||||
if (!r) return false
|
||||
return r.message.length > 120
|
||||
})
|
||||
const [seconds, setSeconds] = createSignal(0)
|
||||
onMount(() => {
|
||||
const timer = setInterval(() => {
|
||||
const next = retry()?.next
|
||||
if (next) setSeconds(Math.round((next - Date.now()) / 1000))
|
||||
}, 1000)
|
||||
|
||||
onCleanup(() => {
|
||||
clearInterval(timer)
|
||||
})
|
||||
})
|
||||
const handleMessageClick = () => {
|
||||
const r = retry()
|
||||
if (!r) return
|
||||
if (isTruncated()) {
|
||||
void DialogAlert.show(dialog, "Retry Error", r.message)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const retryText = () => {
|
||||
const r = retry()
|
||||
if (!r) return ""
|
||||
const baseMessage = message()
|
||||
const truncatedHint = isTruncated() ? " (click to expand)" : ""
|
||||
const duration = formatDuration(seconds())
|
||||
const retryInfo = ` [retrying ${duration ? `in ${duration} ` : ""}attempt #${r.attempt}]`
|
||||
return baseMessage + truncatedHint + retryInfo
|
||||
}
|
||||
const retryText = () => {
|
||||
const r = retry()
|
||||
if (!r) return ""
|
||||
const baseMessage = message()
|
||||
const truncatedHint = isTruncated() ? " (click to expand)" : ""
|
||||
const duration = formatDuration(seconds())
|
||||
const retryInfo = ` [retrying ${duration ? `in ${duration} ` : ""}attempt #${r.attempt}]`
|
||||
return baseMessage + truncatedHint + retryInfo
|
||||
}
|
||||
|
||||
return (
|
||||
<Show when={retry()}>
|
||||
<box onMouseUp={handleMessageClick}>
|
||||
<text fg={theme.error}>{retryText()}</text>
|
||||
</box>
|
||||
</Show>
|
||||
)
|
||||
})()}
|
||||
return (
|
||||
<Show when={retry()}>
|
||||
<box onMouseUp={handleMessageClick}>
|
||||
<text fg={theme.error}>{retryText()}</text>
|
||||
</box>
|
||||
</Show>
|
||||
)
|
||||
})()}
|
||||
</box>
|
||||
</box>
|
||||
<text fg={store.interrupt > 0 ? theme.primary : theme.text}>
|
||||
esc{" "}
|
||||
<span style={{ fg: store.interrupt > 0 ? theme.primary : theme.textMuted }}>
|
||||
{store.interrupt > 0 ? "again to interrupt" : "interrupt"}
|
||||
</span>
|
||||
</text>
|
||||
</box>
|
||||
<text fg={store.interrupt > 0 ? theme.primary : theme.text}>
|
||||
esc{" "}
|
||||
<span style={{ fg: store.interrupt > 0 ? theme.primary : theme.textMuted }}>
|
||||
{store.interrupt > 0 ? "again to interrupt" : "interrupt"}
|
||||
</span>
|
||||
</text>
|
||||
</box>
|
||||
</Show>
|
||||
</Match>
|
||||
<Match when={warpNotice()}>
|
||||
{(notice) => (
|
||||
<box paddingLeft={3}>
|
||||
<text fg={theme.accent}>{notice()}</text>
|
||||
</box>
|
||||
)}
|
||||
</Match>
|
||||
<Match when={workspaceLabel()}>
|
||||
{(workspace) => (
|
||||
<box paddingLeft={3} flexDirection="row" gap={1}>
|
||||
<Show when={workspaceCreating()}>
|
||||
<Spinner color={theme.accent} />
|
||||
</Show>
|
||||
<text fg={workspaceCreating() ? theme.accent : theme.text}>
|
||||
{(() => {
|
||||
const item = workspace()
|
||||
if (item.type === "new") {
|
||||
if (workspaceCreating())
|
||||
return `Creating ${item.workspaceType}${".".repeat(workspaceCreatingDots())}`
|
||||
return (
|
||||
<>
|
||||
Workspace <span style={{ fg: theme.textMuted }}>(new {item.workspaceType})</span>
|
||||
</>
|
||||
)
|
||||
}
|
||||
return (
|
||||
<>
|
||||
Workspace <span style={{ fg: theme.textMuted }}>{item.workspaceName}</span>
|
||||
</>
|
||||
)
|
||||
})()}
|
||||
</text>
|
||||
</box>
|
||||
)}
|
||||
</Match>
|
||||
<Match when={true}>{props.hint ?? <text />}</Match>
|
||||
</Switch>
|
||||
<Show when={status().type !== "retry"}>
|
||||
<box gap={2} flexDirection="row">
|
||||
<Show when={editorFileLabelDisplay()}>
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
import { useTheme } from "@tui/context/theme"
|
||||
|
||||
export type WorkspaceStatus = "connected" | "connecting" | "disconnected" | "error"
|
||||
|
||||
export function WorkspaceLabel(props: { type: string; name: string; status?: WorkspaceStatus; icon?: boolean }) {
|
||||
const { theme } = useTheme()
|
||||
const color = () => {
|
||||
if (props.status === "connected") return theme.success
|
||||
if (props.status === "error") return theme.error
|
||||
return theme.textMuted
|
||||
}
|
||||
|
||||
return (
|
||||
<>
|
||||
{props.icon ? <span style={{ fg: color() }}>● </span> : undefined}
|
||||
<span style={{ fg: theme.text }}>{props.name}</span> <span style={{ fg: theme.textMuted }}>({props.type})</span>
|
||||
</>
|
||||
)
|
||||
}
|
||||
@@ -68,29 +68,73 @@ function normalize(raw: Record<string, unknown>) {
|
||||
}
|
||||
}
|
||||
|
||||
async function resolvePlugins(config: Info, configFilepath: string) {
|
||||
if (!config.plugin) return config
|
||||
for (let i = 0; i < config.plugin.length; i++) {
|
||||
config.plugin[i] = await ConfigPlugin.resolvePluginSpec(config.plugin[i], configFilepath)
|
||||
}
|
||||
return config
|
||||
}
|
||||
|
||||
async function mergeFile(acc: Acc, file: string, ctx: { directory: string }) {
|
||||
const data = await loadFile(file)
|
||||
acc.result = mergeDeep(acc.result, data)
|
||||
if (!data.plugin?.length) return
|
||||
|
||||
const scope = pluginScope(file, ctx)
|
||||
const plugins = ConfigPlugin.deduplicatePluginOrigins([
|
||||
...(acc.result.plugin_origins ?? []),
|
||||
...data.plugin.map((spec) => ({ spec, scope, source: file })),
|
||||
])
|
||||
acc.result.plugin = plugins.map((item) => item.spec)
|
||||
acc.result.plugin_origins = plugins
|
||||
}
|
||||
|
||||
const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory: string }) {
|
||||
const afs = yield* AppFileSystem.Service
|
||||
|
||||
const resolvePlugins = (config: Info, configFilepath: string): Effect.Effect<Info> =>
|
||||
Effect.gen(function* () {
|
||||
const plugins = config.plugin
|
||||
if (!plugins) return config
|
||||
for (let i = 0; i < plugins.length; i++) {
|
||||
plugins[i] = yield* Effect.promise(() => ConfigPlugin.resolvePluginSpec(plugins[i], configFilepath))
|
||||
}
|
||||
return config
|
||||
})
|
||||
|
||||
const load = (text: string, configFilepath: string): Effect.Effect<Info> =>
|
||||
Effect.gen(function* () {
|
||||
const expanded = yield* Effect.promise(() =>
|
||||
ConfigVariable.substitute({ text, type: "path", path: configFilepath, missing: "empty" }),
|
||||
)
|
||||
const data = ConfigParse.jsonc(expanded, configFilepath)
|
||||
if (!isRecord(data)) return {} as Info
|
||||
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
|
||||
// (mirroring the old opencode.json shape) still get their settings applied.
|
||||
const validated = ConfigParse.schema(Info, normalize(data), configFilepath)
|
||||
return yield* resolvePlugins(validated, configFilepath)
|
||||
}).pipe(
|
||||
// catchCause (not tapErrorCause + orElseSucceed) because ConfigParse.jsonc/.schema
|
||||
// can sync-throw — those become defects, which orElseSucceed wouldn't catch.
|
||||
Effect.catchCause((cause) =>
|
||||
Effect.sync(() => {
|
||||
log.warn("invalid tui config", { path: configFilepath, cause })
|
||||
return {} as Info
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
const loadFile = (filepath: string): Effect.Effect<Info> =>
|
||||
Effect.gen(function* () {
|
||||
// Silent-swallow non-NotFound read errors (perms, EISDIR, IO) → log + skip.
|
||||
// Matches how parse/schema/plugin failures in load() are handled — every
|
||||
// broken-config path degrades gracefully rather than crashing TUI startup.
|
||||
const text = yield* afs.readFileStringSafe(filepath).pipe(
|
||||
Effect.catchCause((cause) =>
|
||||
Effect.sync(() => {
|
||||
log.warn("failed to read tui config", { path: filepath, cause })
|
||||
return undefined
|
||||
}),
|
||||
),
|
||||
)
|
||||
if (!text) return {} as Info
|
||||
return yield* load(text, filepath)
|
||||
})
|
||||
|
||||
const mergeFile = (acc: Acc, file: string) =>
|
||||
Effect.gen(function* () {
|
||||
const data = yield* loadFile(file)
|
||||
acc.result = mergeDeep(acc.result, data)
|
||||
if (!data.plugin?.length) return
|
||||
|
||||
const scope = pluginScope(file, ctx)
|
||||
const plugins = ConfigPlugin.deduplicatePluginOrigins([
|
||||
...(acc.result.plugin_origins ?? []),
|
||||
...data.plugin.map((spec) => ({ spec, scope, source: file })),
|
||||
])
|
||||
acc.result.plugin = plugins.map((item) => item.spec)
|
||||
acc.result.plugin_origins = plugins
|
||||
})
|
||||
|
||||
// Every config dir we may read from: global config dir, any `.opencode`
|
||||
// folders between cwd and home, and OPENCODE_CONFIG_DIR.
|
||||
const directories = yield* ConfigPaths.directories(ctx.directory)
|
||||
@@ -104,19 +148,19 @@ const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory:
|
||||
|
||||
// 1. Global tui config (lowest precedence).
|
||||
for (const file of ConfigPaths.fileInDirectory(Global.Path.config, "tui")) {
|
||||
yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie)
|
||||
yield* mergeFile(acc, file)
|
||||
}
|
||||
|
||||
// 2. Explicit OPENCODE_TUI_CONFIG override, if set.
|
||||
if (Flag.OPENCODE_TUI_CONFIG) {
|
||||
const configFile = Flag.OPENCODE_TUI_CONFIG
|
||||
yield* Effect.promise(() => mergeFile(acc, configFile, ctx)).pipe(Effect.orDie)
|
||||
yield* mergeFile(acc, configFile)
|
||||
log.debug("loaded custom tui config", { path: configFile })
|
||||
}
|
||||
|
||||
// 3. Project tui files, applied root-first so the closest file wins.
|
||||
for (const file of projectFiles) {
|
||||
yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie)
|
||||
yield* mergeFile(acc, file)
|
||||
}
|
||||
|
||||
// 4. `.opencode` directories (and OPENCODE_CONFIG_DIR) discovered while
|
||||
@@ -127,7 +171,7 @@ const loadState = Effect.fn("TuiConfig.loadState")(function* (ctx: { directory:
|
||||
for (const dir of dirs) {
|
||||
if (!dir.endsWith(".opencode") && dir !== Flag.OPENCODE_CONFIG_DIR) continue
|
||||
for (const file of ConfigPaths.fileInDirectory(dir, "tui")) {
|
||||
yield* Effect.promise(() => mergeFile(acc, file, ctx)).pipe(Effect.orDie)
|
||||
yield* mergeFile(acc, file)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -192,29 +236,3 @@ export async function waitForDependencies() {
|
||||
export async function get() {
|
||||
return runPromise((svc) => svc.get())
|
||||
}
|
||||
|
||||
async function loadFile(filepath: string): Promise<Info> {
|
||||
const text = await ConfigPaths.readFile(filepath)
|
||||
if (!text) return {}
|
||||
return load(text, filepath).catch((error) => {
|
||||
log.warn("failed to load tui config", { path: filepath, error })
|
||||
return {}
|
||||
})
|
||||
}
|
||||
|
||||
async function load(text: string, configFilepath: string): Promise<Info> {
|
||||
return ConfigVariable.substitute({ text, type: "path", path: configFilepath, missing: "empty" })
|
||||
.then((expanded) => ConfigParse.jsonc(expanded, configFilepath))
|
||||
.then((data) => {
|
||||
if (!isRecord(data)) return {}
|
||||
|
||||
// Flatten a nested "tui" key so users who wrote `{ "tui": { ... } }` inside tui.json
|
||||
// (mirroring the old opencode.json shape) still get their settings applied.
|
||||
return ConfigParse.schema(Info, normalize(data), configFilepath)
|
||||
})
|
||||
.then((data) => resolvePlugins(data, configFilepath))
|
||||
.catch((error) => {
|
||||
log.warn("invalid tui config", { path: configFilepath, error })
|
||||
return {}
|
||||
})
|
||||
}
|
||||
|
||||
@@ -11,21 +11,21 @@ import { createSimpleContext } from "./helper"
|
||||
import { useSDK } from "./sdk"
|
||||
|
||||
function activeAssistant(messages: SessionMessage[]) {
|
||||
const index = messages.findLastIndex((message) => message.type === "assistant" && !message.time.completed)
|
||||
const index = messages.findIndex((message) => message.type === "assistant" && !message.time.completed)
|
||||
if (index < 0) return
|
||||
const assistant = messages[index]
|
||||
return assistant?.type === "assistant" ? assistant : undefined
|
||||
}
|
||||
|
||||
function activeCompaction(messages: SessionMessage[]) {
|
||||
const index = messages.findLastIndex((message) => message.type === "compaction")
|
||||
const index = messages.findIndex((message) => message.type === "compaction")
|
||||
if (index < 0) return
|
||||
const compaction = messages[index]
|
||||
return compaction?.type === "compaction" ? compaction : undefined
|
||||
}
|
||||
|
||||
function activeShell(messages: SessionMessage[], callID: string) {
|
||||
const index = messages.findLastIndex((message) => message.type === "shell" && message.callID === callID)
|
||||
const index = messages.findIndex((message) => message.type === "shell" && message.callID === callID)
|
||||
if (index < 0) return
|
||||
const shell = messages[index]
|
||||
return shell?.type === "shell" ? shell : undefined
|
||||
@@ -74,7 +74,7 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
switch (event.type) {
|
||||
case "session.next.prompted": {
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
draft.push({
|
||||
draft.unshift({
|
||||
id: event.id,
|
||||
type: "user",
|
||||
text: event.properties.prompt.text,
|
||||
@@ -87,7 +87,7 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
}
|
||||
case "session.next.synthetic":
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
draft.push({
|
||||
draft.unshift({
|
||||
id: event.id,
|
||||
type: "synthetic",
|
||||
sessionID: event.properties.sessionID,
|
||||
@@ -98,7 +98,7 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
break
|
||||
case "session.next.shell.started":
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
draft.push({
|
||||
draft.unshift({
|
||||
id: event.id,
|
||||
type: "shell",
|
||||
callID: event.properties.callID,
|
||||
@@ -120,7 +120,7 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
const currentAssistant = activeAssistant(draft)
|
||||
if (currentAssistant) currentAssistant.time.completed = event.properties.timestamp
|
||||
draft.push({
|
||||
draft.unshift({
|
||||
id: event.id,
|
||||
type: "assistant",
|
||||
agent: event.properties.agent,
|
||||
@@ -143,6 +143,15 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
currentAssistant.snapshot = { ...currentAssistant.snapshot, end: event.properties.snapshot }
|
||||
})
|
||||
break
|
||||
case "session.next.step.failed":
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
const currentAssistant = activeAssistant(draft)
|
||||
if (!currentAssistant) return
|
||||
currentAssistant.time.completed = event.properties.timestamp
|
||||
currentAssistant.finish = "error"
|
||||
currentAssistant.error = event.properties.error
|
||||
})
|
||||
break
|
||||
case "session.next.text.started":
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
activeAssistant(draft)?.content.push({ type: "text", text: "" })
|
||||
@@ -210,7 +219,7 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
match.time.completed = event.properties.timestamp
|
||||
})
|
||||
break
|
||||
case "session.next.tool.error":
|
||||
case "session.next.tool.failed":
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
const match = latestTool(activeAssistant(draft), event.properties.callID)
|
||||
if (match?.state.status !== "running") return
|
||||
@@ -250,7 +259,7 @@ export const { use: useSyncV2, provider: SyncProviderV2 } = createSimpleContext(
|
||||
break
|
||||
case "session.next.compaction.started":
|
||||
update(event.properties.sessionID, (draft) => {
|
||||
draft.push({
|
||||
draft.unshift({
|
||||
id: event.id,
|
||||
type: "compaction",
|
||||
reason: event.properties.reason,
|
||||
|
||||
@@ -5,7 +5,7 @@ import { Spinner } from "@tui/component/spinner"
|
||||
import { useTheme } from "@tui/context/theme"
|
||||
import { useLocal } from "@tui/context/local"
|
||||
import { useKeyboard, useRenderer, useTerminalDimensions, type JSX } from "@opentui/solid"
|
||||
import type { SyntaxStyle } from "@opentui/core"
|
||||
import { TextAttributes, type BoxRenderable, type SyntaxStyle } from "@opentui/core"
|
||||
import { Locale } from "@/util/locale"
|
||||
import { LANGUAGE_EXTENSIONS } from "@/lsp/language"
|
||||
import path from "path"
|
||||
@@ -44,6 +44,10 @@ function View(props: { api: TuiPluginApi; sessionID: string }) {
|
||||
const messages = createMemo(() => sync.data.messages[props.sessionID] ?? [])
|
||||
const renderedMessages = createMemo(() => messages().toReversed())
|
||||
const lastAssistant = createMemo(() => renderedMessages().findLast((message) => message.type === "assistant"))
|
||||
const lastUserCreated = (index: number) =>
|
||||
renderedMessages()
|
||||
.slice(0, index)
|
||||
.findLast((message) => message.type === "user")?.time.created
|
||||
|
||||
createEffect(() => {
|
||||
void sync.session.message.sync(props.sessionID)
|
||||
@@ -83,10 +87,11 @@ function View(props: { api: TuiPluginApi; sessionID: string }) {
|
||||
last={lastAssistant()?.id === message.id}
|
||||
syntax={syntax()}
|
||||
subtleSyntax={subtleSyntax()}
|
||||
start={lastUserCreated(index())}
|
||||
/>
|
||||
</Match>
|
||||
<Match when={message.type === "synthetic"}>
|
||||
<SyntheticMessage message={message as SessionMessageSynthetic} index={index()} />
|
||||
<></>
|
||||
</Match>
|
||||
<Match when={message.type === "shell"}>
|
||||
<ShellMessage message={message as SessionMessageShell} />
|
||||
@@ -146,63 +151,36 @@ function UserMessage(props: { message: SessionMessageUser; index: number }) {
|
||||
<box
|
||||
id={props.message.id}
|
||||
border={["left"]}
|
||||
borderColor={theme.primary}
|
||||
borderColor={theme.secondary}
|
||||
customBorderChars={SplitBorder.customBorderChars}
|
||||
marginTop={props.index === 0 ? 0 : 1}
|
||||
flexShrink={0}
|
||||
>
|
||||
<box paddingTop={1} paddingBottom={1} paddingLeft={2} backgroundColor={theme.backgroundPanel}>
|
||||
<Show
|
||||
when={props.message.text.trim()}
|
||||
fallback={
|
||||
<MissingData label="User message text" detail={`Message ${props.message.id} has no text field content.`} />
|
||||
}
|
||||
>
|
||||
<text fg={theme.text}>{props.message.text}</text>
|
||||
</Show>
|
||||
<Show when={attachments().length}>
|
||||
<box flexDirection="row" paddingTop={1} gap={1} flexWrap="wrap">
|
||||
<For each={props.message.files ?? []}>
|
||||
{(file) => (
|
||||
<text fg={theme.text}>
|
||||
<span style={{ bg: theme.secondary, fg: theme.background }}> {file.mime} </span>
|
||||
<span style={{ bg: theme.backgroundElement, fg: theme.textMuted }}> {file.name ?? file.uri} </span>
|
||||
</text>
|
||||
)}
|
||||
</For>
|
||||
<For each={props.message.agents ?? []}>
|
||||
{(agent) => (
|
||||
<text fg={theme.text}>
|
||||
<span style={{ bg: theme.accent, fg: theme.background }}> agent </span>
|
||||
<span style={{ bg: theme.backgroundElement, fg: theme.textMuted }}> {agent.name} </span>
|
||||
</text>
|
||||
)}
|
||||
</For>
|
||||
</box>
|
||||
</Show>
|
||||
<text fg={theme.textMuted}>{Locale.todayTimeOrDateTime(props.message.time.created)}</text>
|
||||
</box>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
|
||||
function SyntheticMessage(props: { message: SessionMessageSynthetic; index: number }) {
|
||||
const { theme } = useTheme()
|
||||
return (
|
||||
<box
|
||||
id={props.message.id}
|
||||
border={["left"]}
|
||||
borderColor={theme.backgroundElement}
|
||||
customBorderChars={SplitBorder.customBorderChars}
|
||||
marginTop={props.index === 0 ? 0 : 1}
|
||||
paddingLeft={2}
|
||||
paddingTop={1}
|
||||
paddingBottom={1}
|
||||
paddingLeft={2}
|
||||
backgroundColor={theme.backgroundPanel}
|
||||
flexShrink={0}
|
||||
>
|
||||
<text fg={theme.textMuted}>Synthetic</text>
|
||||
<text fg={theme.text}>{props.message.text}</text>
|
||||
<Show when={attachments().length}>
|
||||
<box flexDirection="row" paddingTop={1} gap={1} flexWrap="wrap">
|
||||
<For each={props.message.files ?? []}>
|
||||
{(file) => (
|
||||
<text fg={theme.text}>
|
||||
<span style={{ bg: theme.secondary, fg: theme.background }}> {file.mime} </span>
|
||||
<span style={{ bg: theme.backgroundElement, fg: theme.textMuted }}> {file.name ?? file.uri} </span>
|
||||
</text>
|
||||
)}
|
||||
</For>
|
||||
<For each={props.message.agents ?? []}>
|
||||
{(agent) => (
|
||||
<text fg={theme.text}>
|
||||
<span style={{ bg: theme.accent, fg: theme.background }}> agent </span>
|
||||
<span style={{ bg: theme.backgroundElement, fg: theme.textMuted }}> {agent.name} </span>
|
||||
</text>
|
||||
)}
|
||||
</For>
|
||||
</box>
|
||||
</Show>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
@@ -237,7 +215,7 @@ function ShellMessage(props: { message: SessionMessageShell }) {
|
||||
}
|
||||
|
||||
function CompactionMessage(props: { message: SessionMessageCompaction }) {
|
||||
const { theme } = useTheme()
|
||||
const { theme, syntax } = useTheme()
|
||||
return (
|
||||
<box
|
||||
marginTop={1}
|
||||
@@ -248,7 +226,19 @@ function CompactionMessage(props: { message: SessionMessageCompaction }) {
|
||||
flexShrink={0}
|
||||
>
|
||||
<Show when={props.message.summary}>
|
||||
<text fg={theme.textMuted}>{props.message.summary}</text>
|
||||
{(summary) => (
|
||||
<box paddingLeft={3} paddingTop={1}>
|
||||
<code
|
||||
filetype="markdown"
|
||||
drawUnstyledText={false}
|
||||
streaming={false}
|
||||
syntaxStyle={syntax()}
|
||||
content={summary().trim()}
|
||||
conceal={true}
|
||||
fg={theme.text}
|
||||
/>
|
||||
</box>
|
||||
)}
|
||||
</Show>
|
||||
</box>
|
||||
)
|
||||
@@ -294,12 +284,13 @@ function AssistantMessage(props: {
|
||||
last: boolean
|
||||
syntax: SyntaxStyle
|
||||
subtleSyntax: SyntaxStyle
|
||||
start?: number
|
||||
}) {
|
||||
const { theme } = useTheme()
|
||||
const local = useLocal()
|
||||
const duration = createMemo(() => {
|
||||
if (!props.message.time.completed) return 0
|
||||
return props.message.time.completed - props.message.time.created
|
||||
return props.message.time.completed - (props.start ?? props.message.time.created)
|
||||
})
|
||||
const model = createMemo(() => {
|
||||
const variant = props.message.model.variant ? `/${props.message.model.variant}` : ""
|
||||
@@ -361,7 +352,7 @@ function AssistantText(props: { part: SessionMessageAssistantText; syntax: Synta
|
||||
const { theme } = useTheme()
|
||||
return (
|
||||
<Show when={props.part.text.trim()}>
|
||||
<box paddingLeft={3} marginTop={1} flexShrink={0}>
|
||||
<box paddingLeft={3} marginTop={1} flexShrink={0} id="text">
|
||||
<code
|
||||
filetype="markdown"
|
||||
drawUnstyledText={false}
|
||||
@@ -521,33 +512,93 @@ function InlineTool(props: {
|
||||
part: SessionMessageAssistantTool
|
||||
}) {
|
||||
const { theme } = useTheme()
|
||||
const renderer = useRenderer()
|
||||
const [margin, setMargin] = createSignal(0)
|
||||
const [hover, setHover] = createSignal(false)
|
||||
const [showError, setShowError] = createSignal(false)
|
||||
const error = createMemo(() => (props.part.state.status === "error" ? props.part.state.error.message : undefined))
|
||||
const complete = createMemo(() => !!props.complete)
|
||||
const denied = createMemo(() => {
|
||||
const message = error()
|
||||
if (!message) return false
|
||||
return (
|
||||
message.includes("QuestionRejectedError") ||
|
||||
message.includes("rejected permission") ||
|
||||
message.includes("specified a rule") ||
|
||||
message.includes("user dismissed")
|
||||
)
|
||||
})
|
||||
const fg = createMemo(() => {
|
||||
if (error()) return theme.error
|
||||
if (complete()) return theme.textMuted
|
||||
return theme.text
|
||||
})
|
||||
const attributes = createMemo(() => (denied() ? TextAttributes.STRIKETHROUGH : undefined))
|
||||
return (
|
||||
<box marginTop={1} paddingLeft={3} flexShrink={0}>
|
||||
<Switch>
|
||||
<Match when={props.spinner}>
|
||||
<Spinner color={theme.text}>{props.children}</Spinner>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<text paddingLeft={3} fg={props.complete ? theme.textMuted : theme.text}>
|
||||
<Show fallback={<>~ {props.pending}</>} when={props.complete}>
|
||||
{props.icon} {props.children}
|
||||
</Show>
|
||||
</text>
|
||||
</Match>
|
||||
</Switch>
|
||||
<Show when={error() && !denied()}>
|
||||
<text fg={theme.error}>{error()}</text>
|
||||
</Show>
|
||||
<box
|
||||
marginTop={margin()}
|
||||
paddingLeft={3}
|
||||
flexShrink={0}
|
||||
flexDirection="row"
|
||||
gap={1}
|
||||
backgroundColor={hover() && error() ? theme.backgroundMenu : undefined}
|
||||
onMouseOver={() => error() && setHover(true)}
|
||||
onMouseOut={() => setHover(false)}
|
||||
onMouseUp={() => {
|
||||
if (!error()) return
|
||||
if (renderer.getSelection()?.getSelectedText()) return
|
||||
setShowError((prev) => !prev)
|
||||
}}
|
||||
renderBefore={function () {
|
||||
const el = this as BoxRenderable
|
||||
const parent = el.parent
|
||||
if (!parent) return
|
||||
const previous = parent.getChildren()[parent.getChildren().indexOf(el) - 1]
|
||||
if (!previous) {
|
||||
setMargin(0)
|
||||
return
|
||||
}
|
||||
if (previous.id.startsWith("text")) setMargin(1)
|
||||
}}
|
||||
>
|
||||
<box flexShrink={0}>
|
||||
<Switch>
|
||||
<Match when={props.spinner}>
|
||||
<Spinner color={theme.text} />
|
||||
</Match>
|
||||
<Match when={complete()}>
|
||||
<text fg={fg()} attributes={attributes()}>
|
||||
{props.icon}
|
||||
</text>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<text fg={fg()} attributes={attributes()}>
|
||||
~
|
||||
</text>
|
||||
</Match>
|
||||
</Switch>
|
||||
</box>
|
||||
<box flexGrow={1}>
|
||||
<box>
|
||||
<Switch>
|
||||
<Match when={complete()}>
|
||||
<text fg={fg()} attributes={attributes()}>
|
||||
{props.children}
|
||||
</text>
|
||||
</Match>
|
||||
<Match when={true}>
|
||||
<text fg={fg()} attributes={attributes()}>
|
||||
{props.pending}
|
||||
</text>
|
||||
</Match>
|
||||
</Switch>
|
||||
</box>
|
||||
<Show when={showError() && error()}>
|
||||
<box>
|
||||
<text fg={theme.error}>{error()}</text>
|
||||
</box>
|
||||
</Show>
|
||||
</box>
|
||||
</box>
|
||||
)
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ import { InstallationChannel, InstallationVersion } from "@opencode-ai/core/inst
|
||||
import { TuiPluginRuntime } from "@/cli/cmd/tui/plugin/runtime"
|
||||
|
||||
import { getScrollAcceleration } from "../../util/scroll"
|
||||
import { WorkspaceLabel } from "../../component/workspace-label"
|
||||
|
||||
export function Sidebar(props: { sessionID: string; overlay?: boolean }) {
|
||||
const project = useProject()
|
||||
@@ -14,17 +15,10 @@ export function Sidebar(props: { sessionID: string; overlay?: boolean }) {
|
||||
const { theme } = useTheme()
|
||||
const tuiConfig = useTuiConfig()
|
||||
const session = createMemo(() => sync.session.get(props.sessionID))
|
||||
const workspaceStatus = () => {
|
||||
const workspace = () => {
|
||||
const workspaceID = session()?.workspaceID
|
||||
if (!workspaceID) return "error"
|
||||
return project.workspace.status(workspaceID) ?? "error"
|
||||
}
|
||||
const workspaceLabel = () => {
|
||||
const workspaceID = session()?.workspaceID
|
||||
if (!workspaceID) return "unknown"
|
||||
const info = project.workspace.get(workspaceID)
|
||||
if (!info) return "unknown"
|
||||
return `${info.type}: ${info.name}`
|
||||
if (!workspaceID) return
|
||||
return project.workspace.get(workspaceID)
|
||||
}
|
||||
const scrollAcceleration = createMemo(() => getScrollAcceleration(tuiConfig))
|
||||
|
||||
@@ -67,8 +61,19 @@ export function Sidebar(props: { sessionID: string; overlay?: boolean }) {
|
||||
</Show>
|
||||
<Show when={session()!.workspaceID}>
|
||||
<text fg={theme.textMuted}>
|
||||
<span style={{ fg: workspaceStatus() === "connected" ? theme.success : theme.error }}>●</span>{" "}
|
||||
{workspaceLabel()}
|
||||
<Show
|
||||
when={workspace()}
|
||||
fallback={<WorkspaceLabel type="unknown" name={session()!.workspaceID!} status="error" icon />}
|
||||
>
|
||||
{(item) => (
|
||||
<WorkspaceLabel
|
||||
type={item().type}
|
||||
name={item().name}
|
||||
status={project.workspace.status(item().id) ?? "error"}
|
||||
icon
|
||||
/>
|
||||
)}
|
||||
</Show>
|
||||
</text>
|
||||
</Show>
|
||||
<Show when={session()!.share?.url}>
|
||||
|
||||
@@ -23,6 +23,7 @@ export interface DialogSelectProps<T> {
|
||||
onFilter?: (query: string) => void
|
||||
onSelect?: (option: DialogSelectOption<T>) => void
|
||||
skipFilter?: boolean
|
||||
renderFilter?: boolean
|
||||
keybind?: {
|
||||
keybind?: Keybind.Info
|
||||
title: string
|
||||
@@ -81,7 +82,7 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
|
||||
let input: InputRenderable
|
||||
|
||||
const filtered = createMemo(() => {
|
||||
if (props.skipFilter) return props.options.filter((x) => x.disabled !== true)
|
||||
if (props.skipFilter || props.renderFilter === false) return props.options.filter((x) => x.disabled !== true)
|
||||
const needle = store.filter.toLowerCase()
|
||||
const options = pipe(
|
||||
props.options,
|
||||
@@ -250,30 +251,32 @@ export function DialogSelect<T>(props: DialogSelectProps<T>) {
|
||||
esc
|
||||
</text>
|
||||
</box>
|
||||
<box paddingTop={1}>
|
||||
<input
|
||||
onInput={(e) => {
|
||||
batch(() => {
|
||||
setStore("filter", e)
|
||||
props.onFilter?.(e)
|
||||
})
|
||||
}}
|
||||
focusedBackgroundColor={theme.backgroundPanel}
|
||||
cursorColor={theme.primary}
|
||||
focusedTextColor={theme.textMuted}
|
||||
ref={(r) => {
|
||||
input = r
|
||||
input.traits = { status: "FILTER" }
|
||||
setTimeout(() => {
|
||||
if (!input) return
|
||||
if (input.isDestroyed) return
|
||||
input.focus()
|
||||
}, 1)
|
||||
}}
|
||||
placeholder={props.placeholder ?? "Search"}
|
||||
placeholderColor={theme.textMuted}
|
||||
/>
|
||||
</box>
|
||||
<Show when={props.renderFilter !== false}>
|
||||
<box paddingTop={1}>
|
||||
<input
|
||||
onInput={(e) => {
|
||||
batch(() => {
|
||||
setStore("filter", e)
|
||||
props.onFilter?.(e)
|
||||
})
|
||||
}}
|
||||
focusedBackgroundColor={theme.backgroundPanel}
|
||||
cursorColor={theme.primary}
|
||||
focusedTextColor={theme.textMuted}
|
||||
ref={(r) => {
|
||||
input = r
|
||||
input.traits = { status: "FILTER" }
|
||||
setTimeout(() => {
|
||||
if (!input) return
|
||||
if (input.isDestroyed) return
|
||||
input.focus()
|
||||
}, 1)
|
||||
}}
|
||||
placeholder={props.placeholder ?? "Search"}
|
||||
placeholderColor={theme.textMuted}
|
||||
/>
|
||||
</box>
|
||||
</Show>
|
||||
</box>
|
||||
<Show
|
||||
when={grouped().length > 0}
|
||||
|
||||
@@ -7,7 +7,7 @@ import { Rpc } from "@/util/rpc"
|
||||
import { upgrade } from "@/cli/upgrade"
|
||||
import { Config } from "@/config/config"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { ServerAuth } from "@/server/auth"
|
||||
import { writeHeapSnapshot } from "node:v8"
|
||||
import { Heap } from "@/cli/heap"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
@@ -50,7 +50,7 @@ let server: Awaited<ReturnType<typeof Server.listen>> | undefined
|
||||
export const rpc = {
|
||||
async fetch(input: { url: string; method: string; headers: Record<string, string>; body?: string }) {
|
||||
const headers = { ...input.headers }
|
||||
const auth = getAuthorizationHeader()
|
||||
const auth = ServerAuth.header()
|
||||
if (auth && !headers["authorization"] && !headers["Authorization"]) {
|
||||
headers["Authorization"] = auth
|
||||
}
|
||||
@@ -102,10 +102,3 @@ export const rpc = {
|
||||
}
|
||||
|
||||
Rpc.listen(rpc)
|
||||
|
||||
function getAuthorizationHeader(): string | undefined {
|
||||
const password = Flag.OPENCODE_SERVER_PASSWORD
|
||||
if (!password) return undefined
|
||||
const username = Flag.OPENCODE_SERVER_USERNAME ?? "opencode"
|
||||
return `Basic ${btoa(`${username}:${password}`)}`
|
||||
}
|
||||
|
||||
@@ -3,6 +3,7 @@ import { Effect, Schema } from "effect"
|
||||
import { AppRuntime, type AppServices } from "@/effect/app-runtime"
|
||||
import { InstanceStore } from "@/project/instance-store"
|
||||
import { InstanceRef } from "@/effect/instance-ref"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { cmd, type WithDoubleDash } from "./cmd/cmd"
|
||||
|
||||
/**
|
||||
@@ -82,17 +83,21 @@ export const effectCmd = <Args, A>(opts: EffectCmdOpts<Args, A>) =>
|
||||
return
|
||||
}
|
||||
const directory = opts.directory?.(args) ?? process.cwd()
|
||||
await AppRuntime.runPromise(
|
||||
InstanceStore.Service.use((store) =>
|
||||
store.provide(
|
||||
{ directory },
|
||||
Effect.gen(function* () {
|
||||
const ctx = yield* InstanceRef
|
||||
const body = opts.handler(args)
|
||||
return ctx ? yield* body.pipe(Effect.ensuring(store.dispose(ctx))) : yield* body
|
||||
}),
|
||||
),
|
||||
),
|
||||
// Two-phase: load ctx, then run body inside Instance.current ALS.
|
||||
// Effect's InstanceRef is provided via fiber context, but that context is
|
||||
// lost across `await` inside `Effect.promise(async () => ...)` callbacks
|
||||
// — when handlers re-enter Effect via `AppRuntime.runPromise(svc.method())`
|
||||
// there, attach() falls back to Instance.current ALS, which Node preserves
|
||||
// across awaits. Matches the pre-effectCmd `bootstrap()` behavior.
|
||||
const { store, ctx } = await AppRuntime.runPromise(
|
||||
InstanceStore.Service.use((store) => store.load({ directory }).pipe(Effect.map((ctx) => ({ store, ctx })))),
|
||||
)
|
||||
try {
|
||||
await Instance.restore(ctx, () =>
|
||||
AppRuntime.runPromise(opts.handler(args).pipe(Effect.provideService(InstanceRef, ctx))),
|
||||
)
|
||||
} finally {
|
||||
await AppRuntime.runPromise(store.dispose(ctx))
|
||||
}
|
||||
},
|
||||
})
|
||||
|
||||
@@ -6,15 +6,27 @@ export const outro = (msg: string) => Effect.sync(() => prompts.outro(msg))
|
||||
|
||||
export const log = {
|
||||
info: (msg: string) => Effect.sync(() => prompts.log.info(msg)),
|
||||
error: (msg: string) => Effect.sync(() => prompts.log.error(msg)),
|
||||
warn: (msg: string) => Effect.sync(() => prompts.log.warn(msg)),
|
||||
success: (msg: string) => Effect.sync(() => prompts.log.success(msg)),
|
||||
}
|
||||
|
||||
const optional = <Value>(result: Value | symbol) => {
|
||||
if (prompts.isCancel(result)) return Option.none<Value>()
|
||||
return Option.some(result)
|
||||
}
|
||||
|
||||
export const select = <Value>(opts: Parameters<typeof prompts.select<Value>>[0]) =>
|
||||
Effect.tryPromise(() => prompts.select(opts)).pipe(
|
||||
Effect.map((result) => {
|
||||
if (prompts.isCancel(result)) return Option.none<Value>()
|
||||
return Option.some(result)
|
||||
}),
|
||||
)
|
||||
Effect.promise(() => prompts.select(opts)).pipe(Effect.map((result) => optional(result)))
|
||||
|
||||
export const autocomplete = <Value>(opts: Parameters<typeof prompts.autocomplete<Value>>[0]) =>
|
||||
Effect.promise(() => prompts.autocomplete(opts)).pipe(Effect.map((result) => optional(result)))
|
||||
|
||||
export const text = (opts: Parameters<typeof prompts.text>[0]) =>
|
||||
Effect.promise(() => prompts.text(opts)).pipe(Effect.map((result) => optional(result)))
|
||||
|
||||
export const password = (opts: Parameters<typeof prompts.password>[0]) =>
|
||||
Effect.promise(() => prompts.password(opts)).pipe(Effect.map((result) => optional(result)))
|
||||
|
||||
export const spinner = () => {
|
||||
const s = prompts.spinner()
|
||||
|
||||
@@ -355,15 +355,7 @@ export const layer = Layer.effect(
|
||||
const env = yield* Env.Service
|
||||
const npmSvc = yield* Npm.Service
|
||||
|
||||
const readConfigFile = Effect.fnUntraced(function* (filepath: string) {
|
||||
return yield* fs.readFileString(filepath).pipe(
|
||||
Effect.catchIf(
|
||||
(e) => e.reason._tag === "NotFound",
|
||||
() => Effect.succeed(undefined),
|
||||
),
|
||||
Effect.orDie,
|
||||
)
|
||||
})
|
||||
const readConfigFile = (filepath: string) => fs.readFileStringSafe(filepath).pipe(Effect.orDie)
|
||||
|
||||
const loadConfig = Effect.fnUntraced(function* (
|
||||
text: string,
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
export * as ConfigPaths from "./paths"
|
||||
|
||||
import path from "path"
|
||||
import { Filesystem } from "@/util/filesystem"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { Global } from "@opencode-ai/core/global"
|
||||
import { unique } from "remeda"
|
||||
import { JsonError } from "./error"
|
||||
import * as Effect from "effect/Effect"
|
||||
import { AppFileSystem } from "@opencode-ai/core/filesystem"
|
||||
|
||||
@@ -45,11 +43,3 @@ export const directories = Effect.fn("ConfigPaths.directories")(function* (direc
|
||||
export function fileInDirectory(dir: string, name: string) {
|
||||
return [path.join(dir, `${name}.json`), path.join(dir, `${name}.jsonc`)]
|
||||
}
|
||||
|
||||
/** Read a config file, returning undefined for missing files and throwing JsonError for other failures. */
|
||||
export async function readFile(filepath: string) {
|
||||
return Filesystem.readText(filepath).catch((err: NodeJS.ErrnoException) => {
|
||||
if (err.code === "ENOENT") return
|
||||
throw new JsonError({ path: filepath }, { cause: err })
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
import { Schema } from "effect"
|
||||
import { ProjectID } from "@/project/schema"
|
||||
import { WorkspaceID } from "./schema"
|
||||
import { zod } from "@/util/effect-zod"
|
||||
import { type DeepMutable, withStatics } from "@/util/schema"
|
||||
import type { DeepMutable } from "@/util/schema"
|
||||
|
||||
export const WorkspaceInfo = Schema.Struct({
|
||||
id: WorkspaceID,
|
||||
@@ -12,16 +11,14 @@ export const WorkspaceInfo = Schema.Struct({
|
||||
directory: Schema.NullOr(Schema.String),
|
||||
extra: Schema.NullOr(Schema.Unknown),
|
||||
projectID: ProjectID,
|
||||
})
|
||||
.annotate({ identifier: "Workspace" })
|
||||
.pipe(withStatics((s) => ({ zod: zod(s) })))
|
||||
}).annotate({ identifier: "Workspace" })
|
||||
export type WorkspaceInfo = DeepMutable<Schema.Schema.Type<typeof WorkspaceInfo>>
|
||||
|
||||
export const WorkspaceAdapterEntry = Schema.Struct({
|
||||
type: Schema.String,
|
||||
name: Schema.String,
|
||||
description: Schema.String,
|
||||
}).pipe(withStatics((s) => ({ zod: zod(s) })))
|
||||
})
|
||||
export type WorkspaceAdapterEntry = Schema.Schema.Type<typeof WorkspaceAdapterEntry>
|
||||
|
||||
export type Target =
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
import { Context, Effect, FiberMap, Layer, Schema, Stream } from "effect"
|
||||
import { Context, Effect, FiberMap, Iterable, Layer, Schema, Stream } from "effect"
|
||||
import { FetchHttpClient, HttpBody, HttpClient, HttpClientError, HttpClientRequest } from "effect/unstable/http"
|
||||
import { Database } from "@/storage/db"
|
||||
import { asc } from "drizzle-orm"
|
||||
import { eq } from "drizzle-orm"
|
||||
import { inArray } from "drizzle-orm"
|
||||
import { Project } from "@/project/project"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Auth } from "@/auth"
|
||||
@@ -20,14 +21,14 @@ import { getAdapter } from "./adapters"
|
||||
import { type WorkspaceInfo, WorkspaceInfo as WorkspaceInfoSchema } from "./types"
|
||||
import { WorkspaceID } from "./schema"
|
||||
import { Session } from "@/session/session"
|
||||
import { SessionPrompt } from "@/session/prompt"
|
||||
import { SessionTable } from "@/session/session.sql"
|
||||
import { SessionID } from "@/session/schema"
|
||||
import { errorData } from "@/util/error"
|
||||
import { waitEvent } from "./util"
|
||||
import { WorkspaceContext } from "./workspace-context"
|
||||
import { EffectBridge } from "@/effect/bridge"
|
||||
import { NonNegativeInt, withStatics } from "@/util/schema"
|
||||
import { zod as effectZod, zodObject } from "@/util/effect-zod"
|
||||
import { NonNegativeInt } from "@/util/schema"
|
||||
|
||||
export const Info = WorkspaceInfoSchema
|
||||
export type Info = WorkspaceInfo
|
||||
@@ -38,13 +39,6 @@ export const ConnectionStatus = Schema.Struct({
|
||||
})
|
||||
export type ConnectionStatus = Schema.Schema.Type<typeof ConnectionStatus>
|
||||
|
||||
const Restore = Schema.Struct({
|
||||
workspaceID: WorkspaceID,
|
||||
sessionID: SessionID,
|
||||
total: NonNegativeInt,
|
||||
step: NonNegativeInt,
|
||||
})
|
||||
|
||||
export const Event = {
|
||||
Ready: BusEvent.define(
|
||||
"workspace.ready",
|
||||
@@ -58,7 +52,6 @@ export const Event = {
|
||||
message: Schema.String,
|
||||
}),
|
||||
),
|
||||
Restore: BusEvent.define("workspace.restore", Restore),
|
||||
Status: BusEvent.define("workspace.status", ConnectionStatus),
|
||||
}
|
||||
|
||||
@@ -84,15 +77,15 @@ export const CreateInput = Schema.Struct({
|
||||
type: Info.fields.type,
|
||||
branch: Info.fields.branch,
|
||||
projectID: ProjectID,
|
||||
extra: Info.fields.extra,
|
||||
}).pipe(withStatics((s) => ({ zod: effectZod(s), zodObject: zodObject(s) })))
|
||||
extra: Schema.optional(Info.fields.extra),
|
||||
})
|
||||
export type CreateInput = Schema.Schema.Type<typeof CreateInput>
|
||||
|
||||
export const SessionRestoreInput = Schema.Struct({
|
||||
workspaceID: WorkspaceID,
|
||||
export const SessionWarpInput = Schema.Struct({
|
||||
workspaceID: Schema.NullOr(WorkspaceID),
|
||||
sessionID: SessionID,
|
||||
}).pipe(withStatics((s) => ({ zod: effectZod(s), zodObject: zodObject(s) })))
|
||||
export type SessionRestoreInput = Schema.Schema.Type<typeof SessionRestoreInput>
|
||||
})
|
||||
export type SessionWarpInput = Schema.Schema.Type<typeof SessionWarpInput>
|
||||
|
||||
export class SyncHttpError extends Schema.TaggedErrorClass<SyncHttpError>()("WorkspaceSyncHttpError", {
|
||||
message: Schema.String,
|
||||
@@ -116,8 +109,8 @@ export class SessionEventsNotFoundError extends Schema.TaggedErrorClass<SessionE
|
||||
},
|
||||
) {}
|
||||
|
||||
export class SessionRestoreHttpError extends Schema.TaggedErrorClass<SessionRestoreHttpError>()(
|
||||
"WorkspaceSessionRestoreHttpError",
|
||||
export class SessionWarpHttpError extends Schema.TaggedErrorClass<SessionWarpHttpError>()(
|
||||
"WorkspaceSessionWarpHttpError",
|
||||
{
|
||||
message: Schema.String,
|
||||
workspaceID: WorkspaceID,
|
||||
@@ -138,17 +131,17 @@ export class SyncAbortedError extends Schema.TaggedErrorClass<SyncAbortedError>(
|
||||
}) {}
|
||||
|
||||
type CreateError = Auth.AuthError
|
||||
type SessionRestoreError =
|
||||
type SessionWarpError =
|
||||
| WorkspaceNotFoundError
|
||||
| SessionEventsNotFoundError
|
||||
| SessionRestoreHttpError
|
||||
| SessionWarpHttpError
|
||||
| HttpClientError.HttpClientError
|
||||
type WaitForSyncError = SyncTimeoutError | SyncAbortedError
|
||||
type SyncLoopError = SyncHttpError | HttpClientError.HttpClientError
|
||||
|
||||
export interface Interface {
|
||||
readonly create: (input: CreateInput) => Effect.Effect<Info, CreateError>
|
||||
readonly sessionRestore: (input: SessionRestoreInput) => Effect.Effect<{ total: number }, SessionRestoreError>
|
||||
readonly sessionWarp: (input: SessionWarpInput) => Effect.Effect<void, SessionWarpError>
|
||||
readonly list: (project: Project.Info) => Effect.Effect<Info[]>
|
||||
readonly get: (id: WorkspaceID) => Effect.Effect<Info | undefined>
|
||||
readonly remove: (id: WorkspaceID) => Effect.Effect<Info | undefined>
|
||||
@@ -169,6 +162,7 @@ export const layer = Layer.effect(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
const session = yield* Session.Service
|
||||
const prompt = yield* SessionPrompt.Service
|
||||
const http = yield* HttpClient.HttpClient
|
||||
const sync = yield* SyncEvent.Service
|
||||
const connections = new Map<WorkspaceID, ConnectionStatus>()
|
||||
@@ -461,7 +455,7 @@ export const layer = Layer.effect(
|
||||
const id = WorkspaceID.ascending(input.id)
|
||||
const adapter = getAdapter(input.projectID, input.type)
|
||||
const config = yield* EffectBridge.fromPromise(() =>
|
||||
adapter.configure({ ...input, id, name: Slug.create(), directory: null }),
|
||||
adapter.configure({ ...input, id, name: Slug.create(), directory: null, extra: input.extra ?? null }),
|
||||
)
|
||||
|
||||
const info: Info = {
|
||||
@@ -518,29 +512,93 @@ export const layer = Layer.effect(
|
||||
return info
|
||||
})
|
||||
|
||||
const sessionRestore = Effect.fn("Workspace.sessionRestore")(function* (input: SessionRestoreInput) {
|
||||
const sessionWarp = Effect.fn("Workspace.sessionWarp")(function* (input: SessionWarpInput) {
|
||||
return yield* Effect.gen(function* () {
|
||||
log.info("session restore requested", {
|
||||
log.info("session warp requested", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
})
|
||||
|
||||
const space = yield* get(input.workspaceID)
|
||||
const current = yield* db((db) =>
|
||||
db
|
||||
.select({ workspaceID: SessionTable.workspace_id })
|
||||
.from(SessionTable)
|
||||
.where(eq(SessionTable.id, input.sessionID))
|
||||
.get(),
|
||||
)
|
||||
|
||||
if (current?.workspaceID) {
|
||||
const previous = yield* get(current.workspaceID)
|
||||
if (previous) {
|
||||
const adapter = getAdapter(previous.projectID, previous.type)
|
||||
const target = yield* EffectBridge.fromPromise(() => adapter.target(previous))
|
||||
|
||||
if (target.type === "remote") {
|
||||
yield* syncHistory(previous, target.url, target.headers).pipe(
|
||||
Effect.catch((error) =>
|
||||
Effect.sync(() => {
|
||||
log.warn("session warp final source sync failed", {
|
||||
workspaceID: previous.id,
|
||||
sessionID: input.sessionID,
|
||||
error: errorData(error),
|
||||
})
|
||||
}),
|
||||
),
|
||||
)
|
||||
} else {
|
||||
yield* prompt.cancel(input.sessionID)
|
||||
}
|
||||
|
||||
// "claim" this session so any future events coming from
|
||||
// the old workspace are ignored
|
||||
SyncEvent.claim(input.sessionID, input.workspaceID ?? Instance.project.id)
|
||||
}
|
||||
}
|
||||
|
||||
if (input.workspaceID === null) {
|
||||
yield* Effect.sync(() =>
|
||||
SyncEvent.run(Session.Event.Updated, {
|
||||
sessionID: input.sessionID,
|
||||
info: {
|
||||
workspaceID: null,
|
||||
},
|
||||
}),
|
||||
)
|
||||
|
||||
log.info("session warp complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
target: "local",
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const workspaceID = input.workspaceID
|
||||
const space = yield* get(workspaceID)
|
||||
if (!space)
|
||||
return yield* new WorkspaceNotFoundError({
|
||||
message: `Workspace not found: ${input.workspaceID}`,
|
||||
workspaceID: input.workspaceID,
|
||||
message: `Workspace not found: ${workspaceID}`,
|
||||
workspaceID,
|
||||
})
|
||||
|
||||
const adapter = getAdapter(space.projectID, space.type)
|
||||
const target = yield* EffectBridge.fromPromise(() => adapter.target(space))
|
||||
|
||||
yield* sync.run(Session.Event.Updated, {
|
||||
sessionID: input.sessionID,
|
||||
info: {
|
||||
if (target.type === "local") {
|
||||
yield* sync.run(Session.Event.Updated, {
|
||||
sessionID: input.sessionID,
|
||||
info: {
|
||||
workspaceID: input.workspaceID,
|
||||
},
|
||||
})
|
||||
|
||||
log.info("session warp complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
},
|
||||
})
|
||||
sessionID: input.sessionID,
|
||||
target: target.directory,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
const rows = yield* db((db) =>
|
||||
db
|
||||
@@ -562,130 +620,95 @@ export const layer = Layer.effect(
|
||||
sessionID: input.sessionID,
|
||||
})
|
||||
|
||||
const size = 10
|
||||
// TODO: look into using effect APIs to process this in chunks
|
||||
const sets = Array.from({ length: Math.ceil(rows.length / size) }, (_, i) =>
|
||||
rows.slice(i * size, (i + 1) * size),
|
||||
)
|
||||
const total = sets.length
|
||||
const batches = Iterable.chunksOf(rows, 10)
|
||||
const total = Iterable.size(batches)
|
||||
|
||||
log.info("session restore prepared", {
|
||||
log.info("session warp prepared", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
workspaceType: space.type,
|
||||
directory: space.directory,
|
||||
target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory,
|
||||
target: String(route(target.url, "/sync/replay")),
|
||||
events: rows.length,
|
||||
batches: total,
|
||||
first: rows[0]?.seq,
|
||||
last: rows.at(-1)?.seq,
|
||||
})
|
||||
|
||||
yield* Effect.sync(() =>
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: input.workspaceID,
|
||||
payload: {
|
||||
type: Event.Restore.type,
|
||||
properties: {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
total,
|
||||
step: 0,
|
||||
},
|
||||
},
|
||||
}),
|
||||
)
|
||||
|
||||
for (const [i, events] of sets.entries()) {
|
||||
log.info("session restore batch starting", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
events: events.length,
|
||||
first: events[0]?.seq,
|
||||
last: events.at(-1)?.seq,
|
||||
target: target.type === "remote" ? String(route(target.url, "/sync/replay")) : target.directory,
|
||||
})
|
||||
|
||||
if (target.type === "local") {
|
||||
yield* sync.replayAll(events)
|
||||
log.info("session restore batch replayed locally", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
events: events.length,
|
||||
})
|
||||
} else {
|
||||
const url = route(target.url, "/sync/replay")
|
||||
const res = yield* http.execute(
|
||||
HttpClientRequest.post(url, {
|
||||
headers: new Headers(target.headers),
|
||||
body: HttpBody.jsonUnsafe({
|
||||
directory: space.directory ?? "",
|
||||
events,
|
||||
yield* Effect.forEach(
|
||||
batches,
|
||||
(events, i) =>
|
||||
Effect.gen(function* () {
|
||||
const response = yield* http.execute(
|
||||
HttpClientRequest.post(route(target.url, "/sync/replay"), {
|
||||
headers: new Headers(target.headers),
|
||||
body: HttpBody.jsonUnsafe({
|
||||
directory: space.directory ?? "",
|
||||
events,
|
||||
}),
|
||||
}),
|
||||
}),
|
||||
)
|
||||
)
|
||||
|
||||
if (res.status < 200 || res.status >= 300) {
|
||||
const body = yield* res.text
|
||||
log.error("session restore batch failed", {
|
||||
if (response.status < 200 || response.status >= 300) {
|
||||
const body = yield* response.text
|
||||
log.error("session warp batch failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: response.status,
|
||||
body,
|
||||
})
|
||||
return yield* new SessionWarpHttpError({
|
||||
message: `Failed to warp session ${input.sessionID} into workspace ${workspaceID}: HTTP ${response.status} ${body}`,
|
||||
workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
status: response.status,
|
||||
body,
|
||||
})
|
||||
}
|
||||
|
||||
log.info("session warp batch posted", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: res.status,
|
||||
body,
|
||||
status: response.status,
|
||||
})
|
||||
return yield* new SessionRestoreHttpError({
|
||||
message: `Failed to replay session ${input.sessionID} into workspace ${input.workspaceID}: HTTP ${res.status} ${body}`,
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
status: res.status,
|
||||
body,
|
||||
})
|
||||
}
|
||||
|
||||
log.info("session restore batch posted", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
step: i + 1,
|
||||
total,
|
||||
status: res.status,
|
||||
})
|
||||
}
|
||||
|
||||
yield* Effect.sync(() =>
|
||||
GlobalBus.emit("event", {
|
||||
directory: "global",
|
||||
workspace: input.workspaceID,
|
||||
payload: {
|
||||
type: Event.Restore.type,
|
||||
properties: {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
total,
|
||||
step: i + 1,
|
||||
},
|
||||
},
|
||||
}),
|
||||
)
|
||||
{ discard: true },
|
||||
)
|
||||
|
||||
const response = yield* http.execute(
|
||||
HttpClientRequest.post(route(target.url, "/sync/steal"), {
|
||||
headers: new Headers(target.headers),
|
||||
body: HttpBody.jsonUnsafe({ sessionID: input.sessionID }),
|
||||
}),
|
||||
)
|
||||
if (response.status < 200 || response.status >= 300) {
|
||||
const body = yield* response.text
|
||||
log.error("session warp steal failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
status: response.status,
|
||||
body,
|
||||
})
|
||||
return yield* new SessionWarpHttpError({
|
||||
message: `Failed to steal session ${input.sessionID} into workspace ${workspaceID}: HTTP ${response.status} ${body}`,
|
||||
workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
status: response.status,
|
||||
body,
|
||||
})
|
||||
}
|
||||
|
||||
log.info("session restore complete", {
|
||||
log.info("session warp complete", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
batches: total,
|
||||
})
|
||||
|
||||
return { total }
|
||||
}).pipe(
|
||||
Effect.tapError((err) =>
|
||||
Effect.sync(() =>
|
||||
log.error("session restore failed", {
|
||||
log.error("session warp failed", {
|
||||
workspaceID: input.workspaceID,
|
||||
sessionID: input.sessionID,
|
||||
error: errorData(err),
|
||||
@@ -814,7 +837,7 @@ export const layer = Layer.effect(
|
||||
|
||||
return Service.of({
|
||||
create,
|
||||
sessionRestore,
|
||||
sessionWarp,
|
||||
list,
|
||||
get,
|
||||
remove,
|
||||
@@ -830,6 +853,7 @@ export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Auth.defaultLayer),
|
||||
Layer.provide(Session.defaultLayer),
|
||||
Layer.provide(SyncEvent.defaultLayer),
|
||||
Layer.provide(SessionPrompt.defaultLayer),
|
||||
Layer.provide(FetchHttpClient.layer),
|
||||
)
|
||||
|
||||
|
||||
@@ -46,6 +46,7 @@ import { Vcs } from "@/project/vcs"
|
||||
import { Workspace } from "@/control-plane/workspace"
|
||||
import { Worktree } from "@/worktree"
|
||||
import { Pty } from "@/pty"
|
||||
import { PtyTicket } from "@/pty/ticket"
|
||||
import { Installation } from "@/installation"
|
||||
import { ShareNext } from "@/share/share-next"
|
||||
import { SessionShare } from "@/share/session"
|
||||
@@ -98,6 +99,7 @@ export const AppLayer = Layer.mergeAll(
|
||||
Workspace.defaultLayer,
|
||||
Worktree.appLayer,
|
||||
Pty.defaultLayer,
|
||||
PtyTicket.defaultLayer,
|
||||
Installation.defaultLayer,
|
||||
ShareNext.defaultLayer,
|
||||
SessionShare.defaultLayer,
|
||||
|
||||
@@ -24,6 +24,7 @@ const fail = (err: unknown) =>
|
||||
text: () => "",
|
||||
stdout: Buffer.alloc(0),
|
||||
stderr: Buffer.from(err instanceof Error ? err.message : String(err)),
|
||||
truncated: false,
|
||||
}) satisfies Result
|
||||
|
||||
export type Kind = "added" | "deleted" | "modified"
|
||||
@@ -45,16 +46,28 @@ export type Stat = {
|
||||
readonly deletions: number
|
||||
}
|
||||
|
||||
export type Patch = {
|
||||
readonly text: string
|
||||
readonly truncated: boolean
|
||||
}
|
||||
|
||||
export interface PatchOptions {
|
||||
readonly context?: number
|
||||
readonly maxOutputBytes?: number
|
||||
}
|
||||
|
||||
export interface Result {
|
||||
readonly exitCode: number
|
||||
readonly text: () => string
|
||||
readonly stdout: Buffer
|
||||
readonly stderr: Buffer
|
||||
readonly truncated: boolean
|
||||
}
|
||||
|
||||
export interface Options {
|
||||
readonly cwd: string
|
||||
readonly env?: Record<string, string>
|
||||
readonly maxOutputBytes?: number
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
@@ -68,6 +81,10 @@ export interface Interface {
|
||||
readonly status: (cwd: string) => Effect.Effect<Item[]>
|
||||
readonly diff: (cwd: string, ref: string) => Effect.Effect<Item[]>
|
||||
readonly stats: (cwd: string, ref: string) => Effect.Effect<Stat[]>
|
||||
readonly patch: (cwd: string, ref: string, file: string, options?: PatchOptions) => Effect.Effect<Patch>
|
||||
readonly patchAll: (cwd: string, ref: string, options?: PatchOptions) => Effect.Effect<Patch>
|
||||
readonly patchUntracked: (cwd: string, file: string, options?: PatchOptions) => Effect.Effect<Patch>
|
||||
readonly statUntracked: (cwd: string, file: string) => Effect.Effect<Stat | undefined>
|
||||
}
|
||||
|
||||
const kind = (code: string): Kind => {
|
||||
@@ -96,15 +113,31 @@ export const layer = Layer.effect(
|
||||
stderr: "pipe",
|
||||
})
|
||||
const handle = yield* spawner.spawn(proc)
|
||||
const [stdout, stderr] = yield* Effect.all(
|
||||
[Stream.mkString(Stream.decodeText(handle.stdout)), Stream.mkString(Stream.decodeText(handle.stderr))],
|
||||
{ concurrency: 2 },
|
||||
)
|
||||
const collect = (stream: typeof handle.stdout) =>
|
||||
Stream.runFold(
|
||||
stream,
|
||||
() => ({ chunks: [] as Uint8Array[], bytes: 0, truncated: false }),
|
||||
(acc, chunk) => {
|
||||
if (opts.maxOutputBytes === undefined) {
|
||||
acc.chunks.push(chunk)
|
||||
acc.bytes += chunk.length
|
||||
return acc
|
||||
}
|
||||
|
||||
const remaining = opts.maxOutputBytes - acc.bytes
|
||||
if (remaining > 0) acc.chunks.push(remaining >= chunk.length ? chunk : chunk.slice(0, remaining))
|
||||
acc.bytes += chunk.length
|
||||
acc.truncated = acc.truncated || acc.bytes > opts.maxOutputBytes
|
||||
return acc
|
||||
},
|
||||
).pipe(Effect.map((x) => ({ buffer: Buffer.concat(x.chunks), truncated: x.truncated })))
|
||||
const [stdout, stderr] = yield* Effect.all([collect(handle.stdout), collect(handle.stderr)], { concurrency: 2 })
|
||||
return {
|
||||
exitCode: yield* handle.exitCode,
|
||||
text: () => stdout,
|
||||
stdout: Buffer.from(stdout),
|
||||
stderr: Buffer.from(stderr),
|
||||
text: () => stdout.buffer.toString("utf8"),
|
||||
stdout: stdout.buffer,
|
||||
stderr: stderr.buffer,
|
||||
truncated: stdout.truncated || stderr.truncated,
|
||||
} satisfies Result
|
||||
},
|
||||
Effect.scoped,
|
||||
@@ -240,6 +273,61 @@ export const layer = Layer.effect(
|
||||
})
|
||||
})
|
||||
|
||||
const patch = Effect.fn("Git.patch")(function* (cwd: string, ref: string, file: string, options?: PatchOptions) {
|
||||
const result = yield* run(
|
||||
["diff", "--patch", "--no-ext-diff", "--no-renames", `--unified=${options?.context ?? 3}`, ref, "--", file],
|
||||
{ cwd, maxOutputBytes: options?.maxOutputBytes },
|
||||
)
|
||||
return { text: result.truncated ? "" : result.text(), truncated: result.truncated } satisfies Patch
|
||||
})
|
||||
|
||||
const patchAll = Effect.fn("Git.patchAll")(function* (cwd: string, ref: string, options?: PatchOptions) {
|
||||
const result = yield* run(
|
||||
["diff", "--patch", "--no-ext-diff", "--no-renames", `--unified=${options?.context ?? 3}`, ref, "--", "."],
|
||||
{ cwd, maxOutputBytes: options?.maxOutputBytes },
|
||||
)
|
||||
return { text: result.text(), truncated: result.truncated } satisfies Patch
|
||||
})
|
||||
|
||||
const patchUntracked = Effect.fn("Git.patchUntracked")(function* (
|
||||
cwd: string,
|
||||
file: string,
|
||||
options?: PatchOptions,
|
||||
) {
|
||||
const result = yield* run(
|
||||
[
|
||||
"diff",
|
||||
"--no-index",
|
||||
"--patch",
|
||||
"--no-ext-diff",
|
||||
"--no-renames",
|
||||
`--unified=${options?.context ?? 3}`,
|
||||
"--",
|
||||
"/dev/null",
|
||||
file,
|
||||
],
|
||||
{ cwd, maxOutputBytes: options?.maxOutputBytes },
|
||||
)
|
||||
return { text: result.truncated ? "" : result.text(), truncated: result.truncated } satisfies Patch
|
||||
})
|
||||
|
||||
const statUntracked = Effect.fn("Git.statUntracked")(function* (cwd: string, file: string) {
|
||||
const result = yield* run(["diff", "--no-index", "--numstat", "--", "/dev/null", file], {
|
||||
cwd,
|
||||
maxOutputBytes: 4096,
|
||||
})
|
||||
if (result.truncated) return
|
||||
const parts = result.text().split("\t")
|
||||
if (parts.length < 2) return
|
||||
const additions = parts[0] === "-" ? 0 : Number.parseInt(parts[0] || "0", 10)
|
||||
const deletions = parts[1] === "-" ? 0 : Number.parseInt(parts[1] || "0", 10)
|
||||
return {
|
||||
file,
|
||||
additions: Number.isFinite(additions) ? additions : 0,
|
||||
deletions: Number.isFinite(deletions) ? deletions : 0,
|
||||
} satisfies Stat
|
||||
})
|
||||
|
||||
return Service.of({
|
||||
run,
|
||||
branch,
|
||||
@@ -251,6 +339,10 @@ export const layer = Layer.effect(
|
||||
status,
|
||||
diff,
|
||||
stats,
|
||||
patch,
|
||||
patchAll,
|
||||
patchUntracked,
|
||||
statUntracked,
|
||||
})
|
||||
}),
|
||||
)
|
||||
|
||||
@@ -13,6 +13,7 @@ const prefixes = {
|
||||
tool: "tool",
|
||||
workspace: "wrk",
|
||||
entry: "ent",
|
||||
account: "act",
|
||||
} as const
|
||||
|
||||
export function schema(prefix: keyof typeof prefixes) {
|
||||
|
||||
@@ -14,7 +14,14 @@ const ISSUER = "https://auth.openai.com"
|
||||
const CODEX_API_ENDPOINT = "https://chatgpt.com/backend-api/codex/responses"
|
||||
const OAUTH_PORT = 1455
|
||||
const OAUTH_POLLING_SAFETY_MARGIN_MS = 3000
|
||||
const ALLOWED_MODELS = new Set(["gpt-5.5", "gpt-5.2", "gpt-5.3-codex", "gpt-5.4", "gpt-5.4-mini"])
|
||||
const ALLOWED_MODELS = new Set([
|
||||
"gpt-5.5",
|
||||
"gpt-5.2",
|
||||
"gpt-5.3-codex",
|
||||
"gpt-5.3-codex-spark",
|
||||
"gpt-5.4",
|
||||
"gpt-5.4-mini",
|
||||
])
|
||||
|
||||
interface PkceCodes {
|
||||
verifier: string
|
||||
|
||||
@@ -10,6 +10,7 @@ import { Bus } from "../bus"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
import { createOpencodeClient } from "@opencode-ai/sdk"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { ServerAuth } from "@/server/auth"
|
||||
import { CodexAuthPlugin } from "./codex"
|
||||
import { Session } from "@/session/session"
|
||||
import { NamedError } from "@opencode-ai/core/util/error"
|
||||
@@ -124,11 +125,7 @@ export const layer = Layer.effect(
|
||||
const client = createOpencodeClient({
|
||||
baseUrl: "http://localhost:4096",
|
||||
directory: ctx.directory,
|
||||
headers: Flag.OPENCODE_SERVER_PASSWORD
|
||||
? {
|
||||
Authorization: `Basic ${Buffer.from(`${Flag.OPENCODE_SERVER_USERNAME ?? "opencode"}:${Flag.OPENCODE_SERVER_PASSWORD}`).toString("base64")}`,
|
||||
}
|
||||
: undefined,
|
||||
headers: ServerAuth.headers(),
|
||||
fetch: async (...args) => Server.Default().app.fetch(...args),
|
||||
})
|
||||
const cfg = yield* config.get()
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
import { Effect, Layer, Context, Schema, Stream, Scope } from "effect"
|
||||
import { formatPatch, structuredPatch } from "diff"
|
||||
import path from "path"
|
||||
import { Bus } from "@/bus"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { InstanceState } from "@/effect/instance-state"
|
||||
import { AppFileSystem } from "@opencode-ai/core/filesystem"
|
||||
import { FileWatcher } from "@/file/watcher"
|
||||
import { Git } from "@/git"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
@@ -12,20 +10,11 @@ import { zod } from "@/util/effect-zod"
|
||||
import { NonNegativeInt, withStatics } from "@/util/schema"
|
||||
|
||||
const log = Log.create({ service: "vcs" })
|
||||
const PATCH_CONTEXT_LINES = 2_147_483_647
|
||||
const MAX_PATCH_BYTES = 10_000_000
|
||||
const MAX_TOTAL_PATCH_BYTES = 10_000_000
|
||||
|
||||
const count = (text: string) => {
|
||||
if (!text) return 0
|
||||
if (!text.endsWith("\n")) return text.split("\n").length
|
||||
return text.slice(0, -1).split("\n").length
|
||||
}
|
||||
|
||||
const work = Effect.fnUntraced(function* (fs: AppFileSystem.Interface, cwd: string, file: string) {
|
||||
const full = path.join(cwd, file)
|
||||
if (!(yield* fs.exists(full).pipe(Effect.orDie))) return ""
|
||||
const buf = yield* fs.readFile(full).pipe(Effect.catch(() => Effect.succeed(new Uint8Array())))
|
||||
if (Buffer.from(buf).includes(0)) return ""
|
||||
return Buffer.from(buf).toString("utf8")
|
||||
})
|
||||
const emptyPatch = (file: string) => formatPatch(structuredPatch(file, file, "", "", "", "", { context: 0 }))
|
||||
|
||||
const nums = (list: Git.Stat[]) =>
|
||||
new Map(list.map((item) => [item.file, { additions: item.additions, deletions: item.deletions }] as const))
|
||||
@@ -38,59 +27,170 @@ const merge = (...lists: Git.Item[][]) => {
|
||||
return [...out.values()]
|
||||
}
|
||||
|
||||
const emptyBatch = () => ({ patches: new Map<string, string>(), capped: false })
|
||||
|
||||
const parseQuotedPath = (value: string) => {
|
||||
let out = ""
|
||||
for (let idx = 1; idx < value.length; idx++) {
|
||||
const char = value[idx]
|
||||
if (char === '"') return { value: out, end: idx + 1 }
|
||||
if (char !== "\\") {
|
||||
out += char
|
||||
continue
|
||||
}
|
||||
|
||||
const next = value[++idx]
|
||||
if (next === "t") out += "\t"
|
||||
else if (next === "n") out += "\n"
|
||||
else if (next === "r") out += "\r"
|
||||
else if (next === '"' || next === "\\") out += next
|
||||
else out += next ?? ""
|
||||
}
|
||||
}
|
||||
|
||||
const parsePathToken = (value: string) => {
|
||||
if (!value.startsWith('"')) return value.split("\t")[0]
|
||||
return parseQuotedPath(value)?.value ?? value
|
||||
}
|
||||
|
||||
const fileFromDiffPath = (value: string | undefined) => {
|
||||
if (!value || value === "/dev/null") return
|
||||
const file = parsePathToken(value)
|
||||
if (file.startsWith("a/") || file.startsWith("b/")) return file.slice(2)
|
||||
return file
|
||||
}
|
||||
|
||||
const fileFromGitHeader = (header: string) => {
|
||||
if (header.startsWith('"')) {
|
||||
const first = parseQuotedPath(header)
|
||||
const second = first ? header.slice(first.end).trimStart() : undefined
|
||||
if (!second) return
|
||||
if (!second.startsWith('"')) return fileFromDiffPath(second)
|
||||
return fileFromDiffPath(parseQuotedPath(second)?.value)
|
||||
}
|
||||
|
||||
const separator = header.indexOf(" b/")
|
||||
if (separator === -1) return
|
||||
return fileFromDiffPath(header.slice(separator + 1))
|
||||
}
|
||||
|
||||
const fileFromPatchChunk = (chunk: string) => {
|
||||
const next = /^\+\+\+ (.+)$/m.exec(chunk)?.[1]
|
||||
const before = /^--- (.+)$/m.exec(chunk)?.[1]
|
||||
const file = fileFromDiffPath(next) ?? fileFromDiffPath(before)
|
||||
if (file) return file
|
||||
|
||||
const header = /^diff --git (.+)$/m.exec(chunk)?.[1]
|
||||
return fileFromGitHeader(header ?? "")
|
||||
}
|
||||
|
||||
const splitGitPatch = (patch: Git.Patch) => {
|
||||
const starts = [...patch.text.matchAll(/(?:^|\n)diff --git /g)].map((match) =>
|
||||
match[0].startsWith("\n") ? match.index + 1 : match.index,
|
||||
)
|
||||
const chunks = starts.map((start, index) => patch.text.slice(start, starts[index + 1] ?? patch.text.length))
|
||||
if (!patch.truncated) return chunks
|
||||
return chunks.slice(0, -1)
|
||||
}
|
||||
|
||||
const batchPatches = Effect.fnUntraced(function* (git: Git.Interface, cwd: string, ref: string, list: Git.Item[]) {
|
||||
if (list.length === 0) return { patches: new Map<string, string>(), capped: false }
|
||||
|
||||
const result = yield* git.patchAll(cwd, ref, {
|
||||
context: PATCH_CONTEXT_LINES,
|
||||
maxOutputBytes: MAX_TOTAL_PATCH_BYTES,
|
||||
})
|
||||
if (result.truncated) log.warn("batched patch exceeded byte limit", { max: MAX_TOTAL_PATCH_BYTES })
|
||||
|
||||
return {
|
||||
patches: splitGitPatch(result).reduce((acc, patch, index) => {
|
||||
const file = fileFromPatchChunk(patch) ?? list[index]?.file
|
||||
if (!file) return acc
|
||||
acc.set(file, (acc.get(file) ?? "") + patch)
|
||||
return acc
|
||||
}, new Map<string, string>()),
|
||||
capped: result.truncated,
|
||||
}
|
||||
})
|
||||
|
||||
const nativePatch = Effect.fnUntraced(function* (
|
||||
git: Git.Interface,
|
||||
cwd: string,
|
||||
ref: string | undefined,
|
||||
item: Git.Item,
|
||||
) {
|
||||
const result =
|
||||
item.code === "??" || !ref
|
||||
? yield* git.patchUntracked(cwd, item.file, { context: PATCH_CONTEXT_LINES, maxOutputBytes: MAX_PATCH_BYTES })
|
||||
: yield* git.patch(cwd, ref, item.file, { context: PATCH_CONTEXT_LINES, maxOutputBytes: MAX_PATCH_BYTES })
|
||||
if (!result.truncated && result.text) return result.text
|
||||
|
||||
if (result.truncated) log.warn("patch exceeded byte limit", { file: item.file, max: MAX_PATCH_BYTES })
|
||||
return emptyPatch(item.file)
|
||||
})
|
||||
|
||||
const totalPatch = (file: string, patch: string, total: number) => {
|
||||
if (total + Buffer.byteLength(patch) <= MAX_TOTAL_PATCH_BYTES) return { patch, capped: false }
|
||||
log.warn("total patch budget exceeded", { file, max: MAX_TOTAL_PATCH_BYTES })
|
||||
return { patch: emptyPatch(file), capped: true }
|
||||
}
|
||||
|
||||
const patchForItem = Effect.fnUntraced(function* (
|
||||
git: Git.Interface,
|
||||
cwd: string,
|
||||
ref: string | undefined,
|
||||
item: Git.Item,
|
||||
batch: { patches: Map<string, string>; capped: boolean },
|
||||
capped: boolean,
|
||||
) {
|
||||
if (capped) return emptyPatch(item.file)
|
||||
|
||||
const batched = batch.patches.get(item.file)
|
||||
if (batched !== undefined) return batched
|
||||
if (item.code !== "??" && batch.capped) return emptyPatch(item.file)
|
||||
return yield* nativePatch(git, cwd, ref, item)
|
||||
})
|
||||
|
||||
const files = Effect.fnUntraced(function* (
|
||||
fs: AppFileSystem.Interface,
|
||||
git: Git.Interface,
|
||||
cwd: string,
|
||||
ref: string | undefined,
|
||||
list: Git.Item[],
|
||||
map: Map<string, { additions: number; deletions: number }>,
|
||||
batch: { patches: Map<string, string>; capped: boolean },
|
||||
) {
|
||||
const base = ref ? yield* git.prefix(cwd) : ""
|
||||
const patch = (file: string, before: string, after: string) =>
|
||||
formatPatch(structuredPatch(file, file, before, after, "", "", { context: Number.MAX_SAFE_INTEGER }))
|
||||
const next = yield* Effect.forEach(
|
||||
list,
|
||||
(item) =>
|
||||
Effect.gen(function* () {
|
||||
const before = item.status === "added" || !ref ? "" : yield* git.show(cwd, ref, item.file, base)
|
||||
const after = item.status === "deleted" ? "" : yield* work(fs, cwd, item.file)
|
||||
const stat = map.get(item.file)
|
||||
return {
|
||||
file: item.file,
|
||||
patch: patch(item.file, before, after),
|
||||
additions: stat?.additions ?? (item.status === "added" ? count(after) : 0),
|
||||
deletions: stat?.deletions ?? (item.status === "deleted" ? count(before) : 0),
|
||||
status: item.status,
|
||||
} satisfies FileDiff
|
||||
}),
|
||||
{ concurrency: 8 },
|
||||
)
|
||||
return next.toSorted((a, b) => a.file.localeCompare(b.file))
|
||||
const next: FileDiff[] = []
|
||||
let total = 0
|
||||
let capped = false
|
||||
|
||||
for (const item of list.toSorted((a, b) => a.file.localeCompare(b.file))) {
|
||||
const stat = map.get(item.file) ?? (item.status === "added" ? yield* git.statUntracked(cwd, item.file) : undefined)
|
||||
const patch = yield* patchForItem(git, cwd, ref, item, batch, capped)
|
||||
const result: { patch: string; capped: boolean } = capped
|
||||
? { patch, capped: true }
|
||||
: totalPatch(item.file, patch, total)
|
||||
capped = capped || result.capped
|
||||
if (!capped) {
|
||||
total += Buffer.byteLength(result.patch)
|
||||
capped = total >= MAX_TOTAL_PATCH_BYTES
|
||||
}
|
||||
next.push({
|
||||
file: item.file,
|
||||
patch: result.patch,
|
||||
additions: stat?.additions ?? 0,
|
||||
deletions: stat?.deletions ?? 0,
|
||||
status: item.status,
|
||||
})
|
||||
}
|
||||
|
||||
return next
|
||||
})
|
||||
|
||||
const track = Effect.fnUntraced(function* (
|
||||
fs: AppFileSystem.Interface,
|
||||
git: Git.Interface,
|
||||
cwd: string,
|
||||
ref: string | undefined,
|
||||
) {
|
||||
if (!ref) return yield* files(fs, git, cwd, ref, yield* git.status(cwd), new Map())
|
||||
const [list, stats] = yield* Effect.all([git.status(cwd), git.stats(cwd, ref)], { concurrency: 2 })
|
||||
return yield* files(fs, git, cwd, ref, list, nums(stats))
|
||||
})
|
||||
|
||||
const compare = Effect.fnUntraced(function* (
|
||||
fs: AppFileSystem.Interface,
|
||||
git: Git.Interface,
|
||||
cwd: string,
|
||||
ref: string,
|
||||
) {
|
||||
const diffAgainstRef = Effect.fnUntraced(function* (git: Git.Interface, cwd: string, ref: string) {
|
||||
const [list, stats, extra] = yield* Effect.all([git.diff(cwd, ref), git.stats(cwd, ref), git.status(cwd)], {
|
||||
concurrency: 3,
|
||||
})
|
||||
return yield* files(
|
||||
fs,
|
||||
git,
|
||||
cwd,
|
||||
ref,
|
||||
@@ -99,9 +199,15 @@ const compare = Effect.fnUntraced(function* (
|
||||
extra.filter((item) => item.code === "??"),
|
||||
),
|
||||
nums(stats),
|
||||
yield* batchPatches(git, cwd, ref, list),
|
||||
)
|
||||
})
|
||||
|
||||
const track = Effect.fnUntraced(function* (git: Git.Interface, cwd: string, ref: string | undefined) {
|
||||
if (!ref) return yield* files(git, cwd, ref, yield* git.status(cwd), new Map(), emptyBatch())
|
||||
return yield* diffAgainstRef(git, cwd, ref)
|
||||
})
|
||||
|
||||
export const Mode = Schema.Literals(["git", "branch"]).pipe(withStatics((s) => ({ zod: zod(s) })))
|
||||
export type Mode = Schema.Schema.Type<typeof Mode>
|
||||
|
||||
@@ -147,10 +253,9 @@ interface State {
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/Vcs") {}
|
||||
|
||||
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Git.Service | Bus.Service> = Layer.effect(
|
||||
export const layer: Layer.Layer<Service, never, Git.Service | Bus.Service> = Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const git = yield* Git.Service
|
||||
const bus = yield* Bus.Service
|
||||
const scope = yield* Scope.Scope
|
||||
@@ -204,23 +309,19 @@ export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Git.Serv
|
||||
const ctx = yield* InstanceState.context
|
||||
if (ctx.project.vcs !== "git") return []
|
||||
if (mode === "git") {
|
||||
return yield* track(fs, git, ctx.directory, (yield* git.hasHead(ctx.directory)) ? "HEAD" : undefined)
|
||||
return yield* track(git, ctx.directory, (yield* git.hasHead(ctx.directory)) ? "HEAD" : undefined)
|
||||
}
|
||||
|
||||
if (!value.root) return []
|
||||
if (value.current && value.current === value.root.name) return []
|
||||
const ref = yield* git.mergeBase(ctx.directory, value.root.ref)
|
||||
if (!ref) return []
|
||||
return yield* compare(fs, git, ctx.directory, ref)
|
||||
return yield* diffAgainstRef(git, ctx.directory, ref)
|
||||
}),
|
||||
})
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer = layer.pipe(
|
||||
Layer.provide(Git.defaultLayer),
|
||||
Layer.provide(AppFileSystem.defaultLayer),
|
||||
Layer.provide(Bus.layer),
|
||||
)
|
||||
export const defaultLayer = layer.pipe(Layer.provide(Git.defaultLayer), Layer.provide(Bus.layer))
|
||||
|
||||
export * as Vcs from "./vcs"
|
||||
|
||||
@@ -138,6 +138,14 @@ function useLanguageModel(sdk: any) {
|
||||
return sdk.responses === undefined && sdk.chat === undefined
|
||||
}
|
||||
|
||||
function selectAzureLanguageModel(sdk: any, modelID: string, useChat: boolean) {
|
||||
if (useChat && sdk.chat) return sdk.chat(modelID)
|
||||
if (sdk.responses) return sdk.responses(modelID)
|
||||
if (sdk.messages) return sdk.messages(modelID)
|
||||
if (sdk.chat) return sdk.chat(modelID)
|
||||
return sdk.languageModel(modelID)
|
||||
}
|
||||
|
||||
function custom(dep: CustomDep): Record<string, CustomLoader> {
|
||||
return {
|
||||
anthropic: () =>
|
||||
@@ -222,12 +230,7 @@ function custom(dep: CustomDep): Record<string, CustomLoader> {
|
||||
return {
|
||||
autoload: false,
|
||||
async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
|
||||
if (useLanguageModel(sdk)) return sdk.languageModel(modelID)
|
||||
if (options?.["useCompletionUrls"]) {
|
||||
return sdk.chat(modelID)
|
||||
} else {
|
||||
return sdk.responses(modelID)
|
||||
}
|
||||
return selectAzureLanguageModel(sdk, modelID, Boolean(options?.["useCompletionUrls"]))
|
||||
},
|
||||
options: {
|
||||
resourceName: resource,
|
||||
@@ -247,12 +250,7 @@ function custom(dep: CustomDep): Record<string, CustomLoader> {
|
||||
return {
|
||||
autoload: false,
|
||||
async getModel(sdk: any, modelID: string, options?: Record<string, any>) {
|
||||
if (useLanguageModel(sdk)) return sdk.languageModel(modelID)
|
||||
if (options?.["useCompletionUrls"]) {
|
||||
return sdk.chat(modelID)
|
||||
} else {
|
||||
return sdk.responses(modelID)
|
||||
}
|
||||
return selectAzureLanguageModel(sdk, modelID, Boolean(options?.["useCompletionUrls"]))
|
||||
},
|
||||
options: {
|
||||
baseURL: resourceName ? `https://${resourceName}.cognitiveservices.azure.com/openai` : undefined,
|
||||
|
||||
68
packages/opencode/src/pty/ticket.ts
Normal file
68
packages/opencode/src/pty/ticket.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
export * as PtyTicket from "./ticket"
|
||||
|
||||
import { WorkspaceID } from "@/control-plane/schema"
|
||||
import { InstanceRef, WorkspaceRef } from "@/effect/instance-ref"
|
||||
import { PtyID } from "@/pty/schema"
|
||||
import { PositiveInt } from "@/util/schema"
|
||||
import { Cache, Context, Duration, Effect, Layer, Schema } from "effect"
|
||||
|
||||
const DEFAULT_TTL = Duration.seconds(60)
|
||||
const CAPACITY = 10_000
|
||||
|
||||
export const ConnectToken = Schema.Struct({
|
||||
ticket: Schema.String,
|
||||
expires_in: PositiveInt,
|
||||
})
|
||||
|
||||
export type Scope = {
|
||||
readonly ptyID: PtyID
|
||||
readonly directory?: string
|
||||
readonly workspaceID?: WorkspaceID
|
||||
}
|
||||
|
||||
export interface Interface {
|
||||
issue(input: Scope): Effect.Effect<typeof ConnectToken.Type>
|
||||
consume(input: Scope & { readonly ticket: string }): Effect.Effect<boolean>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/PtyTicket") {}
|
||||
|
||||
function matches(record: Scope, input: Scope) {
|
||||
return (
|
||||
record.ptyID === input.ptyID && record.directory === input.directory && record.workspaceID === input.workspaceID
|
||||
)
|
||||
}
|
||||
|
||||
// Tickets are inserted via Cache.set and removed atomically via invalidateWhen. The lookup is
|
||||
// never invoked; it dies if it ever is, which would signal a misuse of the Service interface.
|
||||
const noLookup = () => Effect.die("PtyTicket cache must be used via set/invalidateWhen, never get")
|
||||
|
||||
// Visible for tests so the TTL can be shortened. Production uses `layer` with the default TTL.
|
||||
export const make = (ttl: Duration.Input = DEFAULT_TTL) =>
|
||||
Effect.gen(function* () {
|
||||
const cache = yield* Cache.make<string, Scope>({ capacity: CAPACITY, lookup: noLookup, timeToLive: ttl })
|
||||
const expiresIn = Math.max(1, Math.round(Duration.toSeconds(Duration.fromInputUnsafe(ttl))))
|
||||
return Service.of({
|
||||
issue: Effect.fn("PtyTicket.issue")(function* (input) {
|
||||
const ticket = crypto.randomUUID()
|
||||
yield* Cache.set(cache, ticket, input)
|
||||
return { ticket, expires_in: expiresIn }
|
||||
}),
|
||||
consume: Effect.fn("PtyTicket.consume")(function* (input) {
|
||||
return yield* Cache.invalidateWhen(cache, input.ticket, (stored) => matches(stored, input))
|
||||
}),
|
||||
})
|
||||
})
|
||||
|
||||
export const layer = Layer.effect(Service, make())
|
||||
|
||||
export const defaultLayer = layer
|
||||
|
||||
export const scope = Effect.gen(function* () {
|
||||
const instance = yield* InstanceRef
|
||||
const workspaceID = yield* WorkspaceRef
|
||||
return {
|
||||
directory: instance?.directory,
|
||||
workspaceID,
|
||||
}
|
||||
})
|
||||
@@ -1,44 +0,0 @@
|
||||
import type { Hono } from "hono"
|
||||
import { createBunWebSocket } from "hono/bun"
|
||||
import type { Adapter, FetchApp, Opts } from "./adapter"
|
||||
|
||||
function listen(app: FetchApp, opts: Opts, websocket?: ReturnType<typeof createBunWebSocket>["websocket"]) {
|
||||
const start = (port: number) => {
|
||||
try {
|
||||
if (websocket) {
|
||||
return Bun.serve({ fetch: app.fetch, hostname: opts.hostname, idleTimeout: 0, websocket, port })
|
||||
}
|
||||
return Bun.serve({ fetch: app.fetch, hostname: opts.hostname, idleTimeout: 0, port })
|
||||
} catch {
|
||||
return
|
||||
}
|
||||
}
|
||||
const server = opts.port === 0 ? (start(4096) ?? start(0)) : start(opts.port)
|
||||
if (!server) {
|
||||
throw new Error(`Failed to start server on port ${opts.port}`)
|
||||
}
|
||||
if (!server.port) {
|
||||
throw new Error(`Failed to resolve server address for port ${opts.port}`)
|
||||
}
|
||||
return {
|
||||
port: server.port,
|
||||
stop(close?: boolean) {
|
||||
return Promise.resolve(server.stop(close))
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const adapter: Adapter = {
|
||||
create(app: Hono) {
|
||||
const ws = createBunWebSocket()
|
||||
return {
|
||||
upgradeWebSocket: ws.upgradeWebSocket,
|
||||
listen: (opts) => Promise.resolve(listen(app, opts, ws.websocket)),
|
||||
}
|
||||
},
|
||||
createFetch(app) {
|
||||
return {
|
||||
listen: (opts) => Promise.resolve(listen(app, opts)),
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,73 +0,0 @@
|
||||
import { createAdaptorServer, type ServerType } from "@hono/node-server"
|
||||
import { createNodeWebSocket } from "@hono/node-ws"
|
||||
import type { Hono } from "hono"
|
||||
import type { Adapter, FetchApp, Opts } from "./adapter"
|
||||
|
||||
async function listen(app: FetchApp, opts: Opts, inject?: (server: ServerType) => void) {
|
||||
const start = (port: number) =>
|
||||
new Promise<ServerType>((resolve, reject) => {
|
||||
const server = createAdaptorServer({ fetch: app.fetch })
|
||||
inject?.(server)
|
||||
const fail = (err: Error) => {
|
||||
cleanup()
|
||||
reject(err)
|
||||
}
|
||||
const ready = () => {
|
||||
cleanup()
|
||||
resolve(server)
|
||||
}
|
||||
const cleanup = () => {
|
||||
server.off("error", fail)
|
||||
server.off("listening", ready)
|
||||
}
|
||||
server.once("error", fail)
|
||||
server.once("listening", ready)
|
||||
server.listen(port, opts.hostname)
|
||||
})
|
||||
|
||||
const server = opts.port === 0 ? await start(4096).catch(() => start(0)) : await start(opts.port)
|
||||
const addr = server.address()
|
||||
if (!addr || typeof addr === "string") {
|
||||
throw new Error(`Failed to resolve server address for port ${opts.port}`)
|
||||
}
|
||||
|
||||
let closing: Promise<void> | undefined
|
||||
return {
|
||||
port: addr.port,
|
||||
stop(close?: boolean) {
|
||||
closing ??= new Promise<void>((resolve, reject) => {
|
||||
server.close((err) => {
|
||||
if (err) {
|
||||
reject(err)
|
||||
return
|
||||
}
|
||||
resolve()
|
||||
})
|
||||
if (close) {
|
||||
if ("closeAllConnections" in server && typeof server.closeAllConnections === "function") {
|
||||
server.closeAllConnections()
|
||||
}
|
||||
if ("closeIdleConnections" in server && typeof server.closeIdleConnections === "function") {
|
||||
server.closeIdleConnections()
|
||||
}
|
||||
}
|
||||
})
|
||||
return closing
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
export const adapter: Adapter = {
|
||||
create(app: Hono) {
|
||||
const ws = createNodeWebSocket({ app })
|
||||
return {
|
||||
upgradeWebSocket: ws.upgradeWebSocket,
|
||||
listen: (opts) => listen(app, opts, ws.injectWebSocket),
|
||||
}
|
||||
},
|
||||
createFetch(app) {
|
||||
return {
|
||||
listen: (opts) => listen(app, opts),
|
||||
}
|
||||
},
|
||||
}
|
||||
@@ -1,26 +0,0 @@
|
||||
import type { Hono } from "hono"
|
||||
import type { UpgradeWebSocket } from "hono/ws"
|
||||
|
||||
export type FetchApp = {
|
||||
fetch(request: Request): Response | Promise<Response>
|
||||
}
|
||||
|
||||
export type Opts = {
|
||||
port: number
|
||||
hostname: string
|
||||
}
|
||||
|
||||
export type Listener = {
|
||||
port: number
|
||||
stop: (close?: boolean) => Promise<void>
|
||||
}
|
||||
|
||||
export interface Runtime {
|
||||
upgradeWebSocket: UpgradeWebSocket
|
||||
listen(opts: Opts): Promise<Listener>
|
||||
}
|
||||
|
||||
export interface Adapter {
|
||||
create(app: Hono): Runtime
|
||||
createFetch(app: FetchApp): Omit<Runtime, "upgradeWebSocket">
|
||||
}
|
||||
48
packages/opencode/src/server/auth.ts
Normal file
48
packages/opencode/src/server/auth.ts
Normal file
@@ -0,0 +1,48 @@
|
||||
export * as ServerAuth from "./auth"
|
||||
|
||||
import { ConfigService } from "@/effect/config-service"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { Config as EffectConfig, Context, Option, Redacted } from "effect"
|
||||
|
||||
export type Credentials = {
|
||||
password?: string
|
||||
username?: string
|
||||
}
|
||||
|
||||
export type DecodedCredentials = {
|
||||
readonly username: string
|
||||
readonly password: Redacted.Redacted
|
||||
}
|
||||
|
||||
export class Config extends ConfigService.Service<Config>()("@opencode/ServerAuthConfig", {
|
||||
password: EffectConfig.string("OPENCODE_SERVER_PASSWORD").pipe(EffectConfig.option),
|
||||
username: EffectConfig.string("OPENCODE_SERVER_USERNAME").pipe(EffectConfig.withDefault("opencode")),
|
||||
}) {}
|
||||
|
||||
export type Info = Context.Service.Shape<typeof Config>
|
||||
|
||||
export function required(config: Info) {
|
||||
return Option.isSome(config.password) && config.password.value !== ""
|
||||
}
|
||||
|
||||
export function authorized(credentials: DecodedCredentials, config: Info) {
|
||||
return (
|
||||
Option.isSome(config.password) &&
|
||||
credentials.username === config.username &&
|
||||
Redacted.value(credentials.password) === config.password.value
|
||||
)
|
||||
}
|
||||
|
||||
export function header(credentials?: Credentials) {
|
||||
const password = credentials?.password ?? Flag.OPENCODE_SERVER_PASSWORD
|
||||
if (!password) return undefined
|
||||
|
||||
const username = credentials?.username ?? Flag.OPENCODE_SERVER_USERNAME ?? "opencode"
|
||||
return `Basic ${Buffer.from(`${username}:${password}`).toString("base64")}`
|
||||
}
|
||||
|
||||
export function headers(credentials?: Credentials) {
|
||||
const authorization = header(credentials)
|
||||
if (!authorization) return undefined
|
||||
return { Authorization: authorization }
|
||||
}
|
||||
@@ -1,32 +0,0 @@
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { InstallationChannel, InstallationVersion } from "@opencode-ai/core/installation/version"
|
||||
|
||||
export type Backend = "effect-httpapi" | "hono"
|
||||
|
||||
export type Selection = {
|
||||
backend: Backend
|
||||
reason: "env" | "stable" | "explicit"
|
||||
}
|
||||
|
||||
export type Attributes = ReturnType<typeof attributes>
|
||||
|
||||
export function select(): Selection {
|
||||
if (Flag.OPENCODE_EXPERIMENTAL_HTTPAPI) return { backend: "effect-httpapi", reason: "env" }
|
||||
return { backend: "hono", reason: "stable" }
|
||||
}
|
||||
|
||||
export function attributes(selection: Selection): Record<string, string> {
|
||||
return {
|
||||
"opencode.server.backend": selection.backend,
|
||||
"opencode.server.backend.reason": selection.reason,
|
||||
"opencode.installation.channel": InstallationChannel,
|
||||
"opencode.installation.version": InstallationVersion,
|
||||
}
|
||||
}
|
||||
|
||||
export function force(selection: Selection, backend: Backend): Selection {
|
||||
return {
|
||||
backend,
|
||||
reason: selection.backend === backend ? selection.reason : "explicit",
|
||||
}
|
||||
}
|
||||
@@ -1,7 +1,13 @@
|
||||
import { Context } from "effect"
|
||||
|
||||
const opencodeOrigin = /^https:\/\/([a-z0-9-]+\.)*opencode\.ai$/
|
||||
|
||||
export type CorsOptions = { readonly cors?: ReadonlyArray<string> }
|
||||
|
||||
export const CorsConfig = Context.Reference<CorsOptions | undefined>("@opencode/ServerCorsConfig", {
|
||||
defaultValue: () => undefined,
|
||||
})
|
||||
|
||||
export function isAllowedCorsOrigin(input: string | undefined, opts?: CorsOptions) {
|
||||
if (!input) return true
|
||||
if (input.startsWith("http://localhost:")) return true
|
||||
@@ -12,3 +18,17 @@ export function isAllowedCorsOrigin(input: string | undefined, opts?: CorsOption
|
||||
if (opencodeOrigin.test(input)) return true
|
||||
return opts?.cors?.includes(input) ?? false
|
||||
}
|
||||
|
||||
export function isAllowedRequestOrigin(input: string | undefined, host: string | undefined, opts?: CorsOptions) {
|
||||
if (!input) return true
|
||||
if (host && sameHost(input, host)) return true
|
||||
return isAllowedCorsOrigin(input, opts)
|
||||
}
|
||||
|
||||
function sameHost(origin: string, host: string) {
|
||||
try {
|
||||
return new URL(origin).host === host
|
||||
} catch {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,36 +0,0 @@
|
||||
import { resolver } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { NotFoundError } from "@/storage/storage"
|
||||
|
||||
export const ERRORS = {
|
||||
400: {
|
||||
description: "Bad request",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(
|
||||
z
|
||||
.object({
|
||||
data: z.any(),
|
||||
errors: z.array(z.record(z.string(), z.any())),
|
||||
success: z.literal(false),
|
||||
})
|
||||
.meta({
|
||||
ref: "BadRequestError",
|
||||
}),
|
||||
),
|
||||
},
|
||||
},
|
||||
},
|
||||
404: {
|
||||
description: "Not found",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(NotFoundError.Schema),
|
||||
},
|
||||
},
|
||||
},
|
||||
} as const
|
||||
|
||||
export function errors(...codes: number[]) {
|
||||
return Object.fromEntries(codes.map((code) => [code, ERRORS[code as keyof typeof ERRORS]]))
|
||||
}
|
||||
35
packages/opencode/src/server/httpapi-server.node.ts
Normal file
35
packages/opencode/src/server/httpapi-server.node.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { NodeHttpServer } from "@effect/platform-node"
|
||||
import { Effect, Layer } from "effect"
|
||||
import { createServer } from "node:http"
|
||||
import { Service } from "./httpapi-server"
|
||||
|
||||
export { Service }
|
||||
|
||||
export const name = "node-http-server"
|
||||
|
||||
export type Opts = { port: number; hostname: string }
|
||||
|
||||
export const layer = (opts: Opts) => {
|
||||
const server = createServer()
|
||||
const serverRef = { closeStarted: false, forceStop: false }
|
||||
const close = server.close.bind(server)
|
||||
// Keep shutdown owned by NodeHttpServer, but honor listener.stop(true) by
|
||||
// force-closing active HTTP sockets when its finalizer calls server.close().
|
||||
server.close = ((callback?: Parameters<typeof server.close>[0]) => {
|
||||
serverRef.closeStarted = true
|
||||
const result = close(callback)
|
||||
if (serverRef.forceStop) server.closeAllConnections()
|
||||
return result
|
||||
}) as typeof server.close
|
||||
return Layer.mergeAll(
|
||||
NodeHttpServer.layer(() => server, { port: opts.port, host: opts.hostname, gracefulShutdownTimeout: "1 second" }),
|
||||
Layer.succeed(Service)(
|
||||
Service.of({
|
||||
closeAll: Effect.sync(() => {
|
||||
serverRef.forceStop = true
|
||||
if (serverRef.closeStarted) server.closeAllConnections()
|
||||
}),
|
||||
}),
|
||||
),
|
||||
)
|
||||
}
|
||||
9
packages/opencode/src/server/httpapi-server.ts
Normal file
9
packages/opencode/src/server/httpapi-server.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { Context, Effect } from "effect"
|
||||
|
||||
export interface Interface {
|
||||
readonly closeAll: Effect.Effect<void>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/HttpApiServer") {}
|
||||
|
||||
export * as HttpApiServer from "./httpapi-server"
|
||||
@@ -1,86 +0,0 @@
|
||||
import { Provider } from "@/provider/provider"
|
||||
import { NamedError } from "@opencode-ai/core/util/error"
|
||||
import { NotFoundError } from "@/storage/storage"
|
||||
import { Session } from "@/session/session"
|
||||
import type { ContentfulStatusCode } from "hono/utils/http-status"
|
||||
import type { ErrorHandler, MiddlewareHandler } from "hono"
|
||||
import { HTTPException } from "hono/http-exception"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
import { Flag } from "@opencode-ai/core/flag/flag"
|
||||
import { basicAuth } from "hono/basic-auth"
|
||||
import { cors } from "hono/cors"
|
||||
import { compress } from "hono/compress"
|
||||
import * as ServerBackend from "./backend"
|
||||
import { isAllowedCorsOrigin, type CorsOptions } from "./cors"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
export const ErrorMiddleware: ErrorHandler = (err, c) => {
|
||||
log.error("failed", {
|
||||
error: err,
|
||||
})
|
||||
if (err instanceof NamedError) {
|
||||
let status: ContentfulStatusCode
|
||||
if (err instanceof NotFoundError) status = 404
|
||||
else if (err instanceof Provider.ModelNotFoundError) status = 400
|
||||
else if (err.name === "ProviderAuthValidationFailed") status = 400
|
||||
else if (err.name.startsWith("Worktree")) status = 400
|
||||
else status = 500
|
||||
return c.json(err.toObject(), { status })
|
||||
}
|
||||
if (err instanceof Session.BusyError) {
|
||||
return c.json(new NamedError.Unknown({ message: err.message }).toObject(), { status: 400 })
|
||||
}
|
||||
if (err instanceof HTTPException) return err.getResponse()
|
||||
const message = err instanceof Error && err.stack ? err.stack : err.toString()
|
||||
return c.json(new NamedError.Unknown({ message }).toObject(), {
|
||||
status: 500,
|
||||
})
|
||||
}
|
||||
|
||||
export const AuthMiddleware: MiddlewareHandler = (c, next) => {
|
||||
// Allow CORS preflight requests to succeed without auth.
|
||||
// Browser clients sending Authorization headers will preflight with OPTIONS.
|
||||
if (c.req.method === "OPTIONS") return next()
|
||||
const password = Flag.OPENCODE_SERVER_PASSWORD
|
||||
if (!password) return next()
|
||||
const username = Flag.OPENCODE_SERVER_USERNAME ?? "opencode"
|
||||
|
||||
if (c.req.query("auth_token")) c.req.raw.headers.set("authorization", `Basic ${c.req.query("auth_token")}`)
|
||||
|
||||
return basicAuth({ username, password })(c, next)
|
||||
}
|
||||
|
||||
export function LoggerMiddleware(backendAttributes: ServerBackend.Attributes): MiddlewareHandler {
|
||||
return async (c, next) => {
|
||||
const skip = c.req.path === "/log"
|
||||
if (skip) return next()
|
||||
const attributes = {
|
||||
method: c.req.method,
|
||||
path: c.req.path,
|
||||
...backendAttributes,
|
||||
}
|
||||
log.info("request", attributes)
|
||||
const timer = log.time("request", attributes)
|
||||
await next()
|
||||
timer.stop()
|
||||
}
|
||||
}
|
||||
|
||||
export function CorsMiddleware(opts?: CorsOptions): MiddlewareHandler {
|
||||
return cors({
|
||||
maxAge: 86_400,
|
||||
origin(input) {
|
||||
if (isAllowedCorsOrigin(input, opts)) return input
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const zipped = compress()
|
||||
export const CompressionMiddleware: MiddlewareHandler = (c, next) => {
|
||||
const path = c.req.path
|
||||
const method = c.req.method
|
||||
if (path === "/event" || path === "/global/event") return next()
|
||||
if (method === "POST" && /\/session\/[^/]+\/(message|prompt_async)$/.test(path)) return next()
|
||||
return zipped(c, next)
|
||||
}
|
||||
@@ -1,149 +0,0 @@
|
||||
import { Hono } from "hono"
|
||||
import type { UpgradeWebSocket } from "hono/ws"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
import * as Fence from "./fence"
|
||||
import type { WorkspaceID } from "@/control-plane/schema"
|
||||
import { Workspace } from "@/control-plane/workspace"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import { ProxyUtil } from "./proxy-util"
|
||||
import { Effect, Stream } from "effect"
|
||||
import { FetchHttpClient, HttpBody, HttpClient, HttpClientRequest } from "effect/unstable/http"
|
||||
|
||||
type Msg = string | ArrayBuffer | Uint8Array
|
||||
|
||||
function send(ws: { send(data: string | ArrayBuffer | Uint8Array): void }, data: any) {
|
||||
if (data instanceof Blob) {
|
||||
return data.arrayBuffer().then((x) => ws.send(x))
|
||||
}
|
||||
return ws.send(data)
|
||||
}
|
||||
|
||||
const app = (upgrade: UpgradeWebSocket) =>
|
||||
new Hono().get(
|
||||
"/__workspace_ws",
|
||||
upgrade((c) => {
|
||||
const url = c.req.header("x-opencode-proxy-url")
|
||||
const queue: Msg[] = []
|
||||
let remote: WebSocket | undefined
|
||||
return {
|
||||
onOpen(_, ws) {
|
||||
if (!url) {
|
||||
ws.close(1011, "missing proxy target")
|
||||
return
|
||||
}
|
||||
remote = new WebSocket(url, ProxyUtil.websocketProtocols(c.req.raw))
|
||||
remote.binaryType = "arraybuffer"
|
||||
remote.onopen = () => {
|
||||
for (const item of queue) remote?.send(item)
|
||||
queue.length = 0
|
||||
}
|
||||
remote.onmessage = (event) => {
|
||||
void send(ws, event.data)
|
||||
}
|
||||
remote.onerror = () => {
|
||||
ws.close(1011, "proxy error")
|
||||
}
|
||||
remote.onclose = (event) => {
|
||||
ws.close(event.code, event.reason)
|
||||
}
|
||||
},
|
||||
onMessage(event) {
|
||||
const data = event.data
|
||||
if (typeof data !== "string" && !(data instanceof Uint8Array) && !(data instanceof ArrayBuffer)) return
|
||||
if (remote?.readyState === WebSocket.OPEN) {
|
||||
remote.send(data)
|
||||
return
|
||||
}
|
||||
queue.push(data)
|
||||
},
|
||||
onClose(event) {
|
||||
remote?.close(event.code, event.reason)
|
||||
},
|
||||
}
|
||||
}),
|
||||
)
|
||||
|
||||
const log = Log.create({ service: "server-proxy" })
|
||||
|
||||
function statusText(response: unknown) {
|
||||
return (response as { source?: Response }).source?.statusText
|
||||
}
|
||||
|
||||
export function httpEffect(url: string | URL, extra: HeadersInit | undefined, req: Request, workspaceID: WorkspaceID) {
|
||||
return Effect.gen(function* () {
|
||||
const syncing = yield* Workspace.Service.use((workspace) => workspace.isSyncing(workspaceID))
|
||||
if (!syncing) {
|
||||
return new Response(`broken sync connection for workspace: ${workspaceID}`, {
|
||||
status: 503,
|
||||
headers: {
|
||||
"content-type": "text/plain; charset=utf-8",
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
const response = yield* HttpClient.execute(
|
||||
HttpClientRequest.make(req.method as never)(url, {
|
||||
headers: ProxyUtil.headers(req, extra),
|
||||
body:
|
||||
req.method === "GET" || req.method === "HEAD"
|
||||
? HttpBody.empty
|
||||
: HttpBody.raw(req.body, {
|
||||
contentType: req.headers.get("content-type") ?? undefined,
|
||||
contentLength: req.headers.get("content-length")
|
||||
? Number(req.headers.get("content-length"))
|
||||
: undefined,
|
||||
}),
|
||||
}),
|
||||
)
|
||||
const next = new Headers(response.headers as HeadersInit)
|
||||
const sync = Fence.parse(next)
|
||||
next.delete("content-encoding")
|
||||
next.delete("content-length")
|
||||
|
||||
if (sync) yield* Fence.waitEffect(workspaceID, sync, req.signal)
|
||||
const body = yield* Stream.toReadableStreamEffect(response.stream.pipe(Stream.catchCause(() => Stream.empty)))
|
||||
return new Response(body, {
|
||||
status: response.status,
|
||||
statusText: statusText(response),
|
||||
headers: next,
|
||||
})
|
||||
}).pipe(
|
||||
Effect.provide(FetchHttpClient.layer),
|
||||
Effect.catch(() => Effect.succeed(new Response(null, { status: 500 }))),
|
||||
)
|
||||
}
|
||||
|
||||
export function http(url: string | URL, extra: HeadersInit | undefined, req: Request, workspaceID: WorkspaceID) {
|
||||
return AppRuntime.runPromise(httpEffect(url, extra, req, workspaceID))
|
||||
}
|
||||
|
||||
export function websocket(
|
||||
upgrade: UpgradeWebSocket,
|
||||
target: string | URL,
|
||||
extra: HeadersInit | undefined,
|
||||
req: Request,
|
||||
env: unknown,
|
||||
) {
|
||||
const proxy = new URL(req.url)
|
||||
proxy.pathname = "/__workspace_ws"
|
||||
proxy.search = ""
|
||||
const next = new Headers(req.headers)
|
||||
next.set("x-opencode-proxy-url", ProxyUtil.websocketTargetURL(target))
|
||||
for (const [key, value] of new Headers(extra).entries()) {
|
||||
next.set(key, value)
|
||||
}
|
||||
log.info("proxy websocket", {
|
||||
request: req.url,
|
||||
target: String(target),
|
||||
})
|
||||
return app(upgrade).fetch(
|
||||
new Request(proxy, {
|
||||
method: req.method,
|
||||
headers: next,
|
||||
signal: req.signal,
|
||||
}),
|
||||
env as never,
|
||||
)
|
||||
}
|
||||
|
||||
export * as ServerProxy from "./proxy"
|
||||
@@ -1,160 +0,0 @@
|
||||
import { Auth } from "@/auth"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
import { Effect } from "effect"
|
||||
import { ProviderID } from "@/provider/schema"
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, resolver, validator, openAPIRouteHandler } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { errors } from "../../error"
|
||||
|
||||
export function ControlPlaneRoutes(): Hono {
|
||||
const app = new Hono()
|
||||
return app
|
||||
.put(
|
||||
"/auth/:providerID",
|
||||
describeRoute({
|
||||
summary: "Set auth credentials",
|
||||
description: "Set authentication credentials",
|
||||
operationId: "auth.set",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully set authentication credentials",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
providerID: ProviderID.zod,
|
||||
}),
|
||||
),
|
||||
validator("json", Auth.Info.zod),
|
||||
async (c) => {
|
||||
const providerID = c.req.valid("param").providerID
|
||||
const info = c.req.valid("json")
|
||||
await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
yield* auth.set(providerID, info)
|
||||
}),
|
||||
)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.delete(
|
||||
"/auth/:providerID",
|
||||
describeRoute({
|
||||
summary: "Remove auth credentials",
|
||||
description: "Remove authentication credentials",
|
||||
operationId: "auth.remove",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Successfully removed authentication credentials",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"param",
|
||||
z.object({
|
||||
providerID: ProviderID.zod,
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const providerID = c.req.valid("param").providerID
|
||||
await AppRuntime.runPromise(
|
||||
Effect.gen(function* () {
|
||||
const auth = yield* Auth.Service
|
||||
yield* auth.remove(providerID)
|
||||
}),
|
||||
)
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
.get(
|
||||
"/doc",
|
||||
openAPIRouteHandler(app, {
|
||||
documentation: {
|
||||
info: {
|
||||
title: "opencode",
|
||||
version: "0.0.3",
|
||||
description: "opencode api",
|
||||
},
|
||||
openapi: "3.1.1",
|
||||
},
|
||||
}),
|
||||
)
|
||||
.use(
|
||||
validator(
|
||||
"query",
|
||||
z.object({
|
||||
directory: z.string().optional(),
|
||||
workspace: z.string().optional(),
|
||||
}),
|
||||
),
|
||||
)
|
||||
.post(
|
||||
"/log",
|
||||
describeRoute({
|
||||
summary: "Write log",
|
||||
description: "Write a log entry to the server logs with specified level and metadata.",
|
||||
operationId: "app.log",
|
||||
responses: {
|
||||
200: {
|
||||
description: "Log entry written successfully",
|
||||
content: {
|
||||
"application/json": {
|
||||
schema: resolver(z.boolean()),
|
||||
},
|
||||
},
|
||||
},
|
||||
...errors(400),
|
||||
},
|
||||
}),
|
||||
validator(
|
||||
"json",
|
||||
z.object({
|
||||
service: z.string().meta({ description: "Service name for the log entry" }),
|
||||
level: z.enum(["debug", "info", "error", "warn"]).meta({ description: "Log level" }),
|
||||
message: z.string().meta({ description: "Log message" }),
|
||||
extra: z
|
||||
.record(z.string(), z.any())
|
||||
.optional()
|
||||
.meta({ description: "Additional metadata for the log entry" }),
|
||||
}),
|
||||
),
|
||||
async (c) => {
|
||||
const { service, level, message, extra } = c.req.valid("json")
|
||||
const logger = Log.create({ service })
|
||||
|
||||
switch (level) {
|
||||
case "debug":
|
||||
logger.debug(message, extra)
|
||||
break
|
||||
case "info":
|
||||
logger.info(message, extra)
|
||||
break
|
||||
case "error":
|
||||
logger.error(message, extra)
|
||||
break
|
||||
case "warn":
|
||||
logger.warn(message, extra)
|
||||
break
|
||||
}
|
||||
|
||||
return c.json(true)
|
||||
},
|
||||
)
|
||||
}
|
||||
@@ -1,210 +0,0 @@
|
||||
import { Hono } from "hono"
|
||||
import { describeRoute, resolver, validator } from "hono-openapi"
|
||||
import z from "zod"
|
||||
import { Effect } from "effect"
|
||||
import { listAdapters } from "@/control-plane/adapters"
|
||||
import { Workspace } from "@/control-plane/workspace"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import { WorkspaceAdapterEntry } from "@/control-plane/types"
|
||||
import { zodObject } from "@/util/effect-zod"
|
||||
import { Instance } from "@/project/instance"
|
||||
import { errors } from "../../error"
|
||||
import { lazy } from "@/util/lazy"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
import { errorData } from "@/util/error"
|
||||
|
||||
const log = Log.create({ service: "server.workspace" })
|
||||
|
||||
// Hono sub-router for the experimental workspace API: adapter discovery,
// workspace CRUD, connection status, and session replay. Wrapped in lazy()
// so the router is only constructed on first access.
export const WorkspaceRoutes = lazy(() =>
  new Hono()
    // GET /adapter — enumerate the workspace adapters available to the current project.
    .get(
      "/adapter",
      describeRoute({
        summary: "List workspace adapters",
        description: "List all available workspace adapters for the current project.",
        operationId: "experimental.workspace.adapter.list",
        responses: {
          200: {
            description: "Workspace adapters",
            content: {
              "application/json": {
                schema: resolver(z.array(zodObject(WorkspaceAdapterEntry))),
              },
            },
          },
        },
      }),
      async (c) => {
        return c.json(await listAdapters(Instance.project.id))
      },
    )
    // POST / — create a workspace in the current project.
    .post(
      "/",
      describeRoute({
        summary: "Create workspace",
        description: "Create a workspace for the current project.",
        operationId: "experimental.workspace.create",
        responses: {
          200: {
            description: "Workspace created",
            content: {
              "application/json": {
                schema: resolver(Workspace.Info.zod),
              },
            },
          },
          ...errors(400),
        },
      }),
      // projectID is omitted from the request schema; it is always taken from
      // the current Instance so callers cannot create workspaces elsewhere.
      validator(
        "json",
        Workspace.CreateInput.zodObject.omit({
          projectID: true,
        }),
      ),
      async (c) => {
        const body = c.req.valid("json") as Omit<Workspace.CreateInput, "projectID">
        const workspace = await AppRuntime.runPromise(
          Workspace.Service.use((svc) =>
            svc.create({
              projectID: Instance.project.id,
              ...body,
            }),
          ),
        )
        return c.json(workspace)
      },
    )
    // GET / — list all workspaces.
    .get(
      "/",
      describeRoute({
        summary: "List workspaces",
        description: "List all workspaces.",
        operationId: "experimental.workspace.list",
        responses: {
          200: {
            description: "Workspaces",
            content: {
              "application/json": {
                schema: resolver(z.array(Workspace.Info.zod)),
              },
            },
          },
        },
      }),
      async (c) => {
        return c.json(await AppRuntime.runPromise(Workspace.Service.use((svc) => svc.list(Instance.project))))
      },
    )
    // GET /status — connection status, restricted to this project's workspaces.
    .get(
      "/status",
      describeRoute({
        summary: "Workspace status",
        description: "Get connection status for workspaces in the current project.",
        operationId: "experimental.workspace.status",
        responses: {
          200: {
            description: "Workspace status",
            content: {
              "application/json": {
                schema: resolver(z.array(zodObject(Workspace.ConnectionStatus))),
              },
            },
          },
        },
      }),
      async (c) => {
        // Fetch the project's workspace list and the status list together,
        // then keep only status entries whose workspaceID belongs to this project.
        const result = await AppRuntime.runPromise(
          Workspace.Service.use((svc) => Effect.all([svc.list(Instance.project), svc.status()])),
        )
        const ids = new Set(result[0].map((item) => item.id))
        return c.json(result[1].filter((item) => ids.has(item.workspaceID)))
      },
    )
    // DELETE /:id — remove a workspace by id.
    .delete(
      "/:id",
      describeRoute({
        summary: "Remove workspace",
        description: "Remove an existing workspace.",
        operationId: "experimental.workspace.remove",
        responses: {
          200: {
            description: "Workspace removed",
            content: {
              "application/json": {
                // Optional: the schema allows an empty result from remove().
                schema: resolver(Workspace.Info.zod.optional()),
              },
            },
          },
          ...errors(400),
        },
      }),
      validator(
        "param",
        z.object({
          id: zodObject(Workspace.Info).shape.id,
        }),
      ),
      async (c) => {
        const { id } = c.req.valid("param")
        return c.json(await AppRuntime.runPromise(Workspace.Service.use((svc) => svc.remove(id))))
      },
    )
    // POST /:id/session-restore — replay a session's sync events into a workspace.
    .post(
      "/:id/session-restore",
      describeRoute({
        summary: "Restore session into workspace",
        description: "Replay a session's sync events into the target workspace in batches.",
        operationId: "experimental.workspace.sessionRestore",
        responses: {
          200: {
            description: "Session replay started",
            content: {
              "application/json": {
                schema: resolver(
                  z.object({
                    total: z.number().int().min(0),
                  }),
                ),
              },
            },
          },
          ...errors(400),
        },
      }),
      validator("param", z.object({ id: zodObject(Workspace.Info).shape.id })),
      // workspaceID comes from the path parameter, so it is omitted from the body schema.
      validator("json", Workspace.SessionRestoreInput.zodObject.omit({ workspaceID: true })),
      async (c) => {
        const { id } = c.req.valid("param")
        const body = c.req.valid("json") as Omit<Workspace.SessionRestoreInput, "workspaceID">
        log.info("session restore route requested", {
          workspaceID: id,
          sessionID: body.sessionID,
          directory: Instance.directory,
        })
        try {
          const result = await AppRuntime.runPromise(
            Workspace.Service.use((svc) =>
              svc.sessionRestore({
                workspaceID: id,
                ...body,
              }),
            ),
          )
          log.info("session restore route complete", {
            workspaceID: id,
            sessionID: body.sessionID,
            total: result.total,
          })
          return c.json(result)
        } catch (err) {
          // Log with request context before rethrowing so the failure is
          // traceable; the error still reaches the standard error handler.
          log.error("session restore route failed", {
            workspaceID: id,
            sessionID: body.sessionID,
            error: errorData(err),
          })
          throw err
        }
      },
    ),
)
|
||||
@@ -1,286 +0,0 @@
|
||||
import { Hono, type Context } from "hono"
|
||||
import { describeRoute, resolver, validator } from "hono-openapi"
|
||||
import { streamSSE } from "hono/streaming"
|
||||
import { Effect } from "effect"
|
||||
import z from "zod"
|
||||
import { BusEvent } from "@/bus/bus-event"
|
||||
import { SyncEvent } from "@/sync"
|
||||
import { GlobalBus } from "@/bus/global"
|
||||
import { Bus } from "@/bus"
|
||||
import { AppRuntime } from "@/effect/app-runtime"
|
||||
import { AsyncQueue } from "@/util/queue"
|
||||
import { Installation } from "@/installation"
|
||||
import { InstallationVersion } from "@opencode-ai/core/installation/version"
|
||||
import * as Log from "@opencode-ai/core/util/log"
|
||||
import { lazy } from "../../util/lazy"
|
||||
import { Config } from "@/config/config"
|
||||
import { errors } from "../error"
|
||||
import { disposeAllInstancesAndEmitGlobalDisposed } from "../global-lifecycle"
|
||||
|
||||
const log = Log.create({ service: "server" })
|
||||
|
||||
async function streamEvents(c: Context, subscribe: (q: AsyncQueue<string | null>) => () => void) {
|
||||
return streamSSE(c, async (stream) => {
|
||||
const q = new AsyncQueue<string | null>()
|
||||
let done = false
|
||||
|
||||
q.push(
|
||||
JSON.stringify({
|
||||
payload: {
|
||||
id: Bus.createID(),
|
||||
type: "server.connected",
|
||||
properties: {},
|
||||
},
|
||||
}),
|
||||
)
|
||||
|
||||
// Send heartbeat every 10s to prevent stalled proxy streams.
|
||||
const heartbeat = setInterval(() => {
|
||||
q.push(
|
||||
JSON.stringify({
|
||||
payload: {
|
||||
id: Bus.createID(),
|
||||
type: "server.heartbeat",
|
||||
properties: {},
|
||||
},
|
||||
}),
|
||||
)
|
||||
}, 10_000)
|
||||
|
||||
const stop = () => {
|
||||
if (done) return
|
||||
done = true
|
||||
clearInterval(heartbeat)
|
||||
unsub()
|
||||
q.push(null)
|
||||
log.info("global event disconnected")
|
||||
}
|
||||
|
||||
const unsub = subscribe(q)
|
||||
|
||||
stream.onAbort(stop)
|
||||
|
||||
try {
|
||||
for await (const data of q) {
|
||||
if (data === null) return
|
||||
await stream.writeSSE({ data })
|
||||
}
|
||||
} finally {
|
||||
stop()
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
// Hono sub-router for server-global endpoints: health, the global SSE event
// stream, global config read/update, instance disposal, and self-upgrade.
// Wrapped in lazy() so the router is only constructed on first access.
export const GlobalRoutes = lazy(() =>
  new Hono()
    // GET /health — liveness probe plus the installed version string.
    .get(
      "/health",
      describeRoute({
        summary: "Get health",
        description: "Get health information about the OpenCode server.",
        operationId: "global.health",
        responses: {
          200: {
            description: "Health information",
            content: {
              "application/json": {
                schema: resolver(z.object({ healthy: z.literal(true), version: z.string() })),
              },
            },
          },
        },
      }),
      async (c) => {
        return c.json({ healthy: true, version: InstallationVersion })
      },
    )
    // GET /event — SSE stream of global bus + sync events.
    .get(
      "/event",
      describeRoute({
        summary: "Get global events",
        description: "Subscribe to global events from the OpenCode system using server-sent events.",
        operationId: "global.event",
        responses: {
          200: {
            description: "Event stream",
            content: {
              "text/event-stream": {
                schema: resolver(
                  z
                    .object({
                      directory: z.string(),
                      project: z.string().optional(),
                      workspace: z.string().optional(),
                      payload: z.union([...BusEvent.payloads(), ...SyncEvent.payloads()]),
                    })
                    .meta({
                      ref: "GlobalEvent",
                    }),
                ),
              },
            },
          },
        },
      }),
      async (c) => {
        log.info("global event connected")
        // Headers that discourage intermediaries from buffering or
        // transforming the long-lived event stream.
        c.header("Cache-Control", "no-cache, no-transform")
        c.header("X-Accel-Buffering", "no")
        c.header("X-Content-Type-Options", "nosniff")

        // Forward every GlobalBus "event" onto the SSE queue; the returned
        // closure detaches the listener when the stream ends.
        return streamEvents(c, (q) => {
          async function handler(event: any) {
            q.push(JSON.stringify(event))
          }
          GlobalBus.on("event", handler)
          return () => GlobalBus.off("event", handler)
        })
      },
    )
    // GET /config — read the global configuration.
    .get(
      "/config",
      describeRoute({
        summary: "Get global configuration",
        description: "Retrieve the current global OpenCode configuration settings and preferences.",
        operationId: "global.config.get",
        responses: {
          200: {
            description: "Get global config info",
            content: {
              "application/json": {
                schema: resolver(Config.Info.zod),
              },
            },
          },
        },
      }),
      async (c) => {
        return c.json(await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.getGlobal())))
      },
    )
    // PATCH /config — update the global configuration.
    .patch(
      "/config",
      describeRoute({
        summary: "Update global configuration",
        description: "Update global OpenCode configuration settings and preferences.",
        operationId: "global.config.update",
        responses: {
          200: {
            description: "Successfully updated global config",
            content: {
              "application/json": {
                schema: resolver(Config.Info.zod),
              },
            },
          },
          ...errors(400),
        },
      }),
      validator("json", Config.Info.zod),
      async (c) => {
        const config = c.req.valid("json")
        const result = await AppRuntime.runPromise(Config.Service.use((cfg) => cfg.updateGlobal(config)))
        if (result.changed) {
          // Config actually changed: dispose all instances fire-and-forget so
          // they pick up the new config; failures are deliberately swallowed.
          void AppRuntime.runPromise(disposeAllInstancesAndEmitGlobalDisposed({ swallowErrors: true })).catch(
            () => undefined,
          )
        }
        return c.json(result.info)
      },
    )
    // POST /dispose — tear down every instance and emit the global-disposed event.
    .post(
      "/dispose",
      describeRoute({
        summary: "Dispose instance",
        description: "Clean up and dispose all OpenCode instances, releasing all resources.",
        operationId: "global.dispose",
        responses: {
          200: {
            description: "Global disposed",
            content: {
              "application/json": {
                schema: resolver(z.boolean()),
              },
            },
          },
        },
      }),
      async (c) => {
        await AppRuntime.runPromise(disposeAllInstancesAndEmitGlobalDisposed())
        return c.json(true)
      },
    )
    // POST /upgrade — upgrade the opencode installation to `target` (or latest).
    .post(
      "/upgrade",
      describeRoute({
        summary: "Upgrade opencode",
        description: "Upgrade opencode to the specified version or latest if not specified.",
        operationId: "global.upgrade",
        responses: {
          200: {
            description: "Upgrade result",
            content: {
              "application/json": {
                schema: resolver(
                  z.union([
                    z.object({
                      success: z.literal(true),
                      version: z.string(),
                    }),
                    z.object({
                      success: z.literal(false),
                      error: z.string(),
                    }),
                  ]),
                ),
              },
            },
          },
          ...errors(400),
        },
      }),
      validator(
        "json",
        z.object({
          target: z.string().optional(),
        }),
      ),
      async (c) => {
        // The Effect program resolves to a discriminated result carrying the
        // HTTP status: 400 for unknown install method, 500 for upgrade
        // failure, 200 on success.
        const result = await AppRuntime.runPromise(
          Installation.Service.use((svc) =>
            Effect.gen(function* () {
              const method = yield* svc.method()
              if (method === "unknown") {
                return { success: false as const, status: 400 as const, error: "Unknown installation method" }
              }

              // Fall back to the latest published version when no explicit target is given.
              const target = c.req.valid("json").target || (yield* svc.latest(method))
              const result = yield* Effect.catch(
                svc.upgrade(method, target).pipe(Effect.as({ success: true as const, version: target })),
                (err) =>
                  Effect.succeed({
                    success: false as const,
                    status: 500 as const,
                    error: err instanceof Error ? err.message : String(err),
                  }),
              )
              if (!result.success) return result
              return { ...result, status: 200 as const }
            }),
          ),
        )
        if (!result.success) {
          return c.json({ success: false, error: result.error }, result.status)
        }
        const target = result.version
        // Broadcast the update so connected clients learn the new version.
        GlobalBus.emit("event", {
          directory: "global",
          payload: {
            type: Installation.Event.Updated.type,
            properties: { version: target },
          },
        })
        return c.json({ success: true, version: target })
      },
    ),
)
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user