mirror of
https://fastgit.cc/https://github.com/anomalyco/opencode
synced 2026-04-30 13:51:48 +08:00
feat: unwrap BashArity, Discovery namespaces to flat exports + barrel
This commit is contained in:
@@ -19,7 +19,7 @@ import { Provider } from "@/provider"
|
||||
import { ProviderAuth } from "@/provider"
|
||||
import { Agent } from "@/agent/agent"
|
||||
import { Skill } from "@/skill"
|
||||
import { Discovery } from "@/skill/discovery"
|
||||
import { Discovery } from "@/skill"
|
||||
import { Question } from "@/question"
|
||||
import { Permission } from "@/permission"
|
||||
import { Todo } from "@/session/todo"
|
||||
|
||||
@@ -1,15 +1,14 @@
|
||||
export namespace BashArity {
|
||||
export function prefix(tokens: string[]) {
|
||||
for (let len = tokens.length; len > 0; len--) {
|
||||
const prefix = tokens.slice(0, len).join(" ")
|
||||
const arity = ARITY[prefix]
|
||||
if (arity !== undefined) return tokens.slice(0, arity)
|
||||
}
|
||||
if (tokens.length === 0) return []
|
||||
return tokens.slice(0, 1)
|
||||
export function prefix(tokens: string[]) {
|
||||
for (let len = tokens.length; len > 0; len--) {
|
||||
const prefix = tokens.slice(0, len).join(" ")
|
||||
const arity = ARITY[prefix]
|
||||
if (arity !== undefined) return tokens.slice(0, arity)
|
||||
}
|
||||
if (tokens.length === 0) return []
|
||||
return tokens.slice(0, 1)
|
||||
}
|
||||
|
||||
/* Generated with following prompt:
|
||||
/* Generated with following prompt:
|
||||
You are generating a dictionary of command-prefix arities for bash-style commands.
|
||||
This dictionary is used to identify the "human-understandable command" from an input shell command.

### **RULES (follow strictly)**

1. Each entry maps a **command prefix string → number**, representing how many **tokens** define the command.
|
||||
2. **Flags NEVER count as tokens**. Only subcommands count.
|
||||
@@ -22,142 +21,141 @@ This dictionary is used to identify the "human-understandable command" from an i
|
||||
* `npm run dev` → `npm run dev` (because `npm run` has arity 3)
|
||||
* `python script.py` → `python script.py` (default: whole input, not in dictionary)

### **Now generate the dictionary.**
|
||||
*/
|
||||
const ARITY: Record<string, number> = {
|
||||
cat: 1, // cat file.txt
|
||||
cd: 1, // cd /path/to/dir
|
||||
chmod: 1, // chmod 755 script.sh
|
||||
chown: 1, // chown user:group file.txt
|
||||
cp: 1, // cp source.txt dest.txt
|
||||
echo: 1, // echo "hello world"
|
||||
env: 1, // env
|
||||
export: 1, // export PATH=/usr/bin
|
||||
grep: 1, // grep pattern file.txt
|
||||
kill: 1, // kill 1234
|
||||
killall: 1, // killall process
|
||||
ln: 1, // ln -s source target
|
||||
ls: 1, // ls -la
|
||||
mkdir: 1, // mkdir new-dir
|
||||
mv: 1, // mv old.txt new.txt
|
||||
ps: 1, // ps aux
|
||||
pwd: 1, // pwd
|
||||
rm: 1, // rm file.txt
|
||||
rmdir: 1, // rmdir empty-dir
|
||||
sleep: 1, // sleep 5
|
||||
source: 1, // source ~/.bashrc
|
||||
tail: 1, // tail -f log.txt
|
||||
touch: 1, // touch file.txt
|
||||
unset: 1, // unset VAR
|
||||
which: 1, // which node
|
||||
aws: 3, // aws s3 ls
|
||||
az: 3, // az storage blob list
|
||||
bazel: 2, // bazel build
|
||||
brew: 2, // brew install node
|
||||
bun: 2, // bun install
|
||||
"bun run": 3, // bun run dev
|
||||
"bun x": 3, // bun x vite
|
||||
cargo: 2, // cargo build
|
||||
"cargo add": 3, // cargo add tokio
|
||||
"cargo run": 3, // cargo run main
|
||||
cdk: 2, // cdk deploy
|
||||
cf: 2, // cf push app
|
||||
cmake: 2, // cmake build
|
||||
composer: 2, // composer require laravel
|
||||
consul: 2, // consul members
|
||||
"consul kv": 3, // consul kv get config/app
|
||||
crictl: 2, // crictl ps
|
||||
deno: 2, // deno run server.ts
|
||||
"deno task": 3, // deno task dev
|
||||
doctl: 3, // doctl kubernetes cluster list
|
||||
docker: 2, // docker run nginx
|
||||
"docker builder": 3, // docker builder prune
|
||||
"docker compose": 3, // docker compose up
|
||||
"docker container": 3, // docker container ls
|
||||
"docker image": 3, // docker image prune
|
||||
"docker network": 3, // docker network inspect
|
||||
"docker volume": 3, // docker volume ls
|
||||
eksctl: 2, // eksctl get clusters
|
||||
"eksctl create": 3, // eksctl create cluster
|
||||
firebase: 2, // firebase deploy
|
||||
flyctl: 2, // flyctl deploy
|
||||
gcloud: 3, // gcloud compute instances list
|
||||
gh: 3, // gh pr list
|
||||
git: 2, // git checkout main
|
||||
"git config": 3, // git config user.name
|
||||
"git remote": 3, // git remote add origin
|
||||
"git stash": 3, // git stash pop
|
||||
go: 2, // go build
|
||||
gradle: 2, // gradle build
|
||||
helm: 2, // helm install mychart
|
||||
heroku: 2, // heroku logs
|
||||
hugo: 2, // hugo new site blog
|
||||
ip: 2, // ip link show
|
||||
"ip addr": 3, // ip addr show
|
||||
"ip link": 3, // ip link set eth0 up
|
||||
"ip netns": 3, // ip netns exec foo bash
|
||||
"ip route": 3, // ip route add default via 1.1.1.1
|
||||
kind: 2, // kind delete cluster
|
||||
"kind create": 3, // kind create cluster
|
||||
kubectl: 2, // kubectl get pods
|
||||
"kubectl kustomize": 3, // kubectl kustomize overlays/dev
|
||||
"kubectl rollout": 3, // kubectl rollout restart deploy/api
|
||||
kustomize: 2, // kustomize build .
|
||||
make: 2, // make build
|
||||
mc: 2, // mc ls myminio
|
||||
"mc admin": 3, // mc admin info myminio
|
||||
minikube: 2, // minikube start
|
||||
mongosh: 2, // mongosh test
|
||||
mysql: 2, // mysql -u root
|
||||
mvn: 2, // mvn compile
|
||||
ng: 2, // ng generate component home
|
||||
npm: 2, // npm install
|
||||
"npm exec": 3, // npm exec vite
|
||||
"npm init": 3, // npm init vue
|
||||
"npm run": 3, // npm run dev
|
||||
"npm view": 3, // npm view react version
|
||||
nvm: 2, // nvm use 18
|
||||
nx: 2, // nx build
|
||||
openssl: 2, // openssl genrsa 2048
|
||||
"openssl req": 3, // openssl req -new -key key.pem
|
||||
"openssl x509": 3, // openssl x509 -in cert.pem
|
||||
pip: 2, // pip install numpy
|
||||
pipenv: 2, // pipenv install flask
|
||||
pnpm: 2, // pnpm install
|
||||
"pnpm dlx": 3, // pnpm dlx create-next-app
|
||||
"pnpm exec": 3, // pnpm exec vite
|
||||
"pnpm run": 3, // pnpm run dev
|
||||
poetry: 2, // poetry add requests
|
||||
podman: 2, // podman run alpine
|
||||
"podman container": 3, // podman container ls
|
||||
"podman image": 3, // podman image prune
|
||||
psql: 2, // psql -d mydb
|
||||
pulumi: 2, // pulumi up
|
||||
"pulumi stack": 3, // pulumi stack output
|
||||
pyenv: 2, // pyenv install 3.11
|
||||
python: 2, // python -m venv env
|
||||
rake: 2, // rake db:migrate
|
||||
rbenv: 2, // rbenv install 3.2.0
|
||||
"redis-cli": 2, // redis-cli ping
|
||||
rustup: 2, // rustup update
|
||||
serverless: 2, // serverless invoke
|
||||
sfdx: 3, // sfdx force:org:list
|
||||
skaffold: 2, // skaffold dev
|
||||
sls: 2, // sls deploy
|
||||
sst: 2, // sst deploy
|
||||
swift: 2, // swift build
|
||||
systemctl: 2, // systemctl restart nginx
|
||||
terraform: 2, // terraform apply
|
||||
"terraform workspace": 3, // terraform workspace select prod
|
||||
tmux: 2, // tmux new -s dev
|
||||
turbo: 2, // turbo run build
|
||||
ufw: 2, // ufw allow 22
|
||||
vault: 2, // vault login
|
||||
"vault auth": 3, // vault auth list
|
||||
"vault kv": 3, // vault kv get secret/api
|
||||
vercel: 2, // vercel deploy
|
||||
volta: 2, // volta install node
|
||||
wp: 2, // wp plugin install
|
||||
yarn: 2, // yarn add react
|
||||
"yarn dlx": 3, // yarn dlx create-react-app
|
||||
"yarn run": 3, // yarn run dev
|
||||
}
|
||||
const ARITY: Record<string, number> = {
|
||||
cat: 1, // cat file.txt
|
||||
cd: 1, // cd /path/to/dir
|
||||
chmod: 1, // chmod 755 script.sh
|
||||
chown: 1, // chown user:group file.txt
|
||||
cp: 1, // cp source.txt dest.txt
|
||||
echo: 1, // echo "hello world"
|
||||
env: 1, // env
|
||||
export: 1, // export PATH=/usr/bin
|
||||
grep: 1, // grep pattern file.txt
|
||||
kill: 1, // kill 1234
|
||||
killall: 1, // killall process
|
||||
ln: 1, // ln -s source target
|
||||
ls: 1, // ls -la
|
||||
mkdir: 1, // mkdir new-dir
|
||||
mv: 1, // mv old.txt new.txt
|
||||
ps: 1, // ps aux
|
||||
pwd: 1, // pwd
|
||||
rm: 1, // rm file.txt
|
||||
rmdir: 1, // rmdir empty-dir
|
||||
sleep: 1, // sleep 5
|
||||
source: 1, // source ~/.bashrc
|
||||
tail: 1, // tail -f log.txt
|
||||
touch: 1, // touch file.txt
|
||||
unset: 1, // unset VAR
|
||||
which: 1, // which node
|
||||
aws: 3, // aws s3 ls
|
||||
az: 3, // az storage blob list
|
||||
bazel: 2, // bazel build
|
||||
brew: 2, // brew install node
|
||||
bun: 2, // bun install
|
||||
"bun run": 3, // bun run dev
|
||||
"bun x": 3, // bun x vite
|
||||
cargo: 2, // cargo build
|
||||
"cargo add": 3, // cargo add tokio
|
||||
"cargo run": 3, // cargo run main
|
||||
cdk: 2, // cdk deploy
|
||||
cf: 2, // cf push app
|
||||
cmake: 2, // cmake build
|
||||
composer: 2, // composer require laravel
|
||||
consul: 2, // consul members
|
||||
"consul kv": 3, // consul kv get config/app
|
||||
crictl: 2, // crictl ps
|
||||
deno: 2, // deno run server.ts
|
||||
"deno task": 3, // deno task dev
|
||||
doctl: 3, // doctl kubernetes cluster list
|
||||
docker: 2, // docker run nginx
|
||||
"docker builder": 3, // docker builder prune
|
||||
"docker compose": 3, // docker compose up
|
||||
"docker container": 3, // docker container ls
|
||||
"docker image": 3, // docker image prune
|
||||
"docker network": 3, // docker network inspect
|
||||
"docker volume": 3, // docker volume ls
|
||||
eksctl: 2, // eksctl get clusters
|
||||
"eksctl create": 3, // eksctl create cluster
|
||||
firebase: 2, // firebase deploy
|
||||
flyctl: 2, // flyctl deploy
|
||||
gcloud: 3, // gcloud compute instances list
|
||||
gh: 3, // gh pr list
|
||||
git: 2, // git checkout main
|
||||
"git config": 3, // git config user.name
|
||||
"git remote": 3, // git remote add origin
|
||||
"git stash": 3, // git stash pop
|
||||
go: 2, // go build
|
||||
gradle: 2, // gradle build
|
||||
helm: 2, // helm install mychart
|
||||
heroku: 2, // heroku logs
|
||||
hugo: 2, // hugo new site blog
|
||||
ip: 2, // ip link show
|
||||
"ip addr": 3, // ip addr show
|
||||
"ip link": 3, // ip link set eth0 up
|
||||
"ip netns": 3, // ip netns exec foo bash
|
||||
"ip route": 3, // ip route add default via 1.1.1.1
|
||||
kind: 2, // kind delete cluster
|
||||
"kind create": 3, // kind create cluster
|
||||
kubectl: 2, // kubectl get pods
|
||||
"kubectl kustomize": 3, // kubectl kustomize overlays/dev
|
||||
"kubectl rollout": 3, // kubectl rollout restart deploy/api
|
||||
kustomize: 2, // kustomize build .
|
||||
make: 2, // make build
|
||||
mc: 2, // mc ls myminio
|
||||
"mc admin": 3, // mc admin info myminio
|
||||
minikube: 2, // minikube start
|
||||
mongosh: 2, // mongosh test
|
||||
mysql: 2, // mysql -u root
|
||||
mvn: 2, // mvn compile
|
||||
ng: 2, // ng generate component home
|
||||
npm: 2, // npm install
|
||||
"npm exec": 3, // npm exec vite
|
||||
"npm init": 3, // npm init vue
|
||||
"npm run": 3, // npm run dev
|
||||
"npm view": 3, // npm view react version
|
||||
nvm: 2, // nvm use 18
|
||||
nx: 2, // nx build
|
||||
openssl: 2, // openssl genrsa 2048
|
||||
"openssl req": 3, // openssl req -new -key key.pem
|
||||
"openssl x509": 3, // openssl x509 -in cert.pem
|
||||
pip: 2, // pip install numpy
|
||||
pipenv: 2, // pipenv install flask
|
||||
pnpm: 2, // pnpm install
|
||||
"pnpm dlx": 3, // pnpm dlx create-next-app
|
||||
"pnpm exec": 3, // pnpm exec vite
|
||||
"pnpm run": 3, // pnpm run dev
|
||||
poetry: 2, // poetry add requests
|
||||
podman: 2, // podman run alpine
|
||||
"podman container": 3, // podman container ls
|
||||
"podman image": 3, // podman image prune
|
||||
psql: 2, // psql -d mydb
|
||||
pulumi: 2, // pulumi up
|
||||
"pulumi stack": 3, // pulumi stack output
|
||||
pyenv: 2, // pyenv install 3.11
|
||||
python: 2, // python -m venv env
|
||||
rake: 2, // rake db:migrate
|
||||
rbenv: 2, // rbenv install 3.2.0
|
||||
"redis-cli": 2, // redis-cli ping
|
||||
rustup: 2, // rustup update
|
||||
serverless: 2, // serverless invoke
|
||||
sfdx: 3, // sfdx force:org:list
|
||||
skaffold: 2, // skaffold dev
|
||||
sls: 2, // sls deploy
|
||||
sst: 2, // sst deploy
|
||||
swift: 2, // swift build
|
||||
systemctl: 2, // systemctl restart nginx
|
||||
terraform: 2, // terraform apply
|
||||
"terraform workspace": 3, // terraform workspace select prod
|
||||
tmux: 2, // tmux new -s dev
|
||||
turbo: 2, // turbo run build
|
||||
ufw: 2, // ufw allow 22
|
||||
vault: 2, // vault login
|
||||
"vault auth": 3, // vault auth list
|
||||
"vault kv": 3, // vault kv get secret/api
|
||||
vercel: 2, // vercel deploy
|
||||
volta: 2, // volta install node
|
||||
wp: 2, // wp plugin install
|
||||
yarn: 2, // yarn add react
|
||||
"yarn dlx": 3, // yarn dlx create-react-app
|
||||
"yarn run": 3, // yarn run dev
|
||||
}
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
export * as Permission from "./permission"
|
||||
export * as BashArity from "./arity"
|
||||
|
||||
@@ -6,111 +6,109 @@ import { AppFileSystem } from "@opencode-ai/shared/filesystem"
|
||||
import { Global } from "../global"
|
||||
import { Log } from "../util"
|
||||
|
||||
export namespace Discovery {
|
||||
const skillConcurrency = 4
|
||||
const fileConcurrency = 8
|
||||
const skillConcurrency = 4
|
||||
const fileConcurrency = 8
|
||||
|
||||
class IndexSkill extends Schema.Class<IndexSkill>("IndexSkill")({
|
||||
name: Schema.String,
|
||||
files: Schema.Array(Schema.String),
|
||||
}) {}
|
||||
class IndexSkill extends Schema.Class<IndexSkill>("IndexSkill")({
|
||||
name: Schema.String,
|
||||
files: Schema.Array(Schema.String),
|
||||
}) {}
|
||||
|
||||
class Index extends Schema.Class<Index>("Index")({
|
||||
skills: Schema.Array(IndexSkill),
|
||||
}) {}
|
||||
class Index extends Schema.Class<Index>("Index")({
|
||||
skills: Schema.Array(IndexSkill),
|
||||
}) {}
|
||||
|
||||
export interface Interface {
|
||||
readonly pull: (url: string) => Effect.Effect<string[]>
|
||||
}
|
||||
|
||||
export class Service extends Context.Service<Service, Interface>()("@opencode/SkillDiscovery") {}
|
||||
|
||||
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Path.Path | HttpClient.HttpClient> =
|
||||
Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const log = Log.create({ service: "skill-discovery" })
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const path = yield* Path.Path
|
||||
const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient))
|
||||
const cache = path.join(Global.Path.cache, "skills")
|
||||
|
||||
const download = Effect.fn("Discovery.download")(function* (url: string, dest: string) {
|
||||
if (yield* fs.exists(dest).pipe(Effect.orDie)) return true
|
||||
|
||||
return yield* HttpClientRequest.get(url).pipe(
|
||||
http.execute,
|
||||
Effect.flatMap((res) => res.arrayBuffer),
|
||||
Effect.flatMap((body) => fs.writeWithDirs(dest, new Uint8Array(body))),
|
||||
Effect.as(true),
|
||||
Effect.catch((err) =>
|
||||
Effect.sync(() => {
|
||||
log.error("failed to download", { url, err })
|
||||
return false
|
||||
}),
|
||||
),
|
||||
)
|
||||
})
|
||||
|
||||
const pull = Effect.fn("Discovery.pull")(function* (url: string) {
|
||||
const base = url.endsWith("/") ? url : `${url}/`
|
||||
const index = new URL("index.json", base).href
|
||||
const host = base.slice(0, -1)
|
||||
|
||||
log.info("fetching index", { url: index })
|
||||
|
||||
const data = yield* HttpClientRequest.get(index).pipe(
|
||||
HttpClientRequest.acceptJson,
|
||||
http.execute,
|
||||
Effect.flatMap(HttpClientResponse.schemaBodyJson(Index)),
|
||||
Effect.catch((err) =>
|
||||
Effect.sync(() => {
|
||||
log.error("failed to fetch index", { url: index, err })
|
||||
return null
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
if (!data) return []
|
||||
|
||||
const list = data.skills.filter((skill) => {
|
||||
if (!skill.files.includes("SKILL.md")) {
|
||||
log.warn("skill entry missing SKILL.md", { url: index, skill: skill.name })
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
const dirs = yield* Effect.forEach(
|
||||
list,
|
||||
(skill) =>
|
||||
Effect.gen(function* () {
|
||||
const root = path.join(cache, skill.name)
|
||||
|
||||
yield* Effect.forEach(
|
||||
skill.files,
|
||||
(file) => download(new URL(file, `${host}/${skill.name}/`).href, path.join(root, file)),
|
||||
{
|
||||
concurrency: fileConcurrency,
|
||||
},
|
||||
)
|
||||
|
||||
const md = path.join(root, "SKILL.md")
|
||||
return (yield* fs.exists(md).pipe(Effect.orDie)) ? root : null
|
||||
}),
|
||||
{ concurrency: skillConcurrency },
|
||||
)
|
||||
|
||||
return dirs.filter((dir): dir is string => dir !== null)
|
||||
})
|
||||
|
||||
return Service.of({ pull })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer: Layer.Layer<Service> = layer.pipe(
|
||||
Layer.provide(FetchHttpClient.layer),
|
||||
Layer.provide(AppFileSystem.defaultLayer),
|
||||
Layer.provide(NodePath.layer),
|
||||
)
|
||||
export interface Interface {
|
||||
readonly pull: (url: string) => Effect.Effect<string[]>
|
||||
}
|
||||
|
||||
/** Effect context tag identifying the skill-discovery service implementation. */
export class Service extends Context.Service<Service, Interface>()("@opencode/SkillDiscovery") {}
|
||||
|
||||
export const layer: Layer.Layer<Service, never, AppFileSystem.Service | Path.Path | HttpClient.HttpClient> =
|
||||
Layer.effect(
|
||||
Service,
|
||||
Effect.gen(function* () {
|
||||
const log = Log.create({ service: "skill-discovery" })
|
||||
const fs = yield* AppFileSystem.Service
|
||||
const path = yield* Path.Path
|
||||
const http = HttpClient.filterStatusOk(withTransientReadRetry(yield* HttpClient.HttpClient))
|
||||
const cache = path.join(Global.Path.cache, "skills")
|
||||
|
||||
const download = Effect.fn("Discovery.download")(function* (url: string, dest: string) {
|
||||
if (yield* fs.exists(dest).pipe(Effect.orDie)) return true
|
||||
|
||||
return yield* HttpClientRequest.get(url).pipe(
|
||||
http.execute,
|
||||
Effect.flatMap((res) => res.arrayBuffer),
|
||||
Effect.flatMap((body) => fs.writeWithDirs(dest, new Uint8Array(body))),
|
||||
Effect.as(true),
|
||||
Effect.catch((err) =>
|
||||
Effect.sync(() => {
|
||||
log.error("failed to download", { url, err })
|
||||
return false
|
||||
}),
|
||||
),
|
||||
)
|
||||
})
|
||||
|
||||
const pull = Effect.fn("Discovery.pull")(function* (url: string) {
|
||||
const base = url.endsWith("/") ? url : `${url}/`
|
||||
const index = new URL("index.json", base).href
|
||||
const host = base.slice(0, -1)
|
||||
|
||||
log.info("fetching index", { url: index })
|
||||
|
||||
const data = yield* HttpClientRequest.get(index).pipe(
|
||||
HttpClientRequest.acceptJson,
|
||||
http.execute,
|
||||
Effect.flatMap(HttpClientResponse.schemaBodyJson(Index)),
|
||||
Effect.catch((err) =>
|
||||
Effect.sync(() => {
|
||||
log.error("failed to fetch index", { url: index, err })
|
||||
return null
|
||||
}),
|
||||
),
|
||||
)
|
||||
|
||||
if (!data) return []
|
||||
|
||||
const list = data.skills.filter((skill) => {
|
||||
if (!skill.files.includes("SKILL.md")) {
|
||||
log.warn("skill entry missing SKILL.md", { url: index, skill: skill.name })
|
||||
return false
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
||||
const dirs = yield* Effect.forEach(
|
||||
list,
|
||||
(skill) =>
|
||||
Effect.gen(function* () {
|
||||
const root = path.join(cache, skill.name)
|
||||
|
||||
yield* Effect.forEach(
|
||||
skill.files,
|
||||
(file) => download(new URL(file, `${host}/${skill.name}/`).href, path.join(root, file)),
|
||||
{
|
||||
concurrency: fileConcurrency,
|
||||
},
|
||||
)
|
||||
|
||||
const md = path.join(root, "SKILL.md")
|
||||
return (yield* fs.exists(md).pipe(Effect.orDie)) ? root : null
|
||||
}),
|
||||
{ concurrency: skillConcurrency },
|
||||
)
|
||||
|
||||
return dirs.filter((dir): dir is string => dir !== null)
|
||||
})
|
||||
|
||||
return Service.of({ pull })
|
||||
}),
|
||||
)
|
||||
|
||||
export const defaultLayer: Layer.Layer<Service> = layer.pipe(
|
||||
Layer.provide(FetchHttpClient.layer),
|
||||
Layer.provide(AppFileSystem.defaultLayer),
|
||||
Layer.provide(NodePath.layer),
|
||||
)
|
||||
|
||||
@@ -1 +1,2 @@
|
||||
export * as Skill from "./skill"
|
||||
export * as Discovery from "./discovery"
|
||||
|
||||
@@ -15,7 +15,7 @@ import { Config } from "../config"
|
||||
import { ConfigMarkdown } from "../config"
|
||||
import { Glob } from "@opencode-ai/shared/util/glob"
|
||||
import { Log } from "../util"
|
||||
import { Discovery } from "./discovery"
|
||||
import * as Discovery from "./discovery"
|
||||
|
||||
const log = Log.create({ service: "skill" })
|
||||
const EXTERNAL_DIRS = [".claude", ".agents"]
|
||||
|
||||
@@ -14,7 +14,7 @@ import { fileURLToPath } from "url"
|
||||
import { Flag } from "@/flag/flag"
|
||||
import { Shell } from "@/shell/shell"
|
||||
|
||||
import { BashArity } from "@/permission/arity"
|
||||
import { BashArity } from "@/permission"
|
||||
import * as Truncate from "./truncate"
|
||||
import { Plugin } from "@/plugin"
|
||||
import { Effect, Stream } from "effect"
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { test, expect } from "bun:test"
|
||||
import { BashArity } from "../../src/permission/arity"
|
||||
import { BashArity } from "../../src/permission"
|
||||
|
||||
test("arity 1 - unknown commands default to first token", () => {
|
||||
expect(BashArity.prefix(["unknown", "command", "subcommand"])).toEqual(["unknown"])
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { describe, test, expect, beforeAll, afterAll } from "bun:test"
|
||||
import { Effect } from "effect"
|
||||
import { Discovery } from "../../src/skill/discovery"
|
||||
import { Discovery } from "../../src/skill"
|
||||
import { Global } from "../../src/global"
|
||||
import { Filesystem } from "../../src/util"
|
||||
import { rm } from "fs/promises"
|
||||
|
||||
Reference in New Issue
Block a user