From ec1dfc992b8a0d936d034e6e9231710f32ad392d Mon Sep 17 00:00:00 2001 From: Towry Wang Date: Tue, 17 Jun 2025 10:00:43 +0800 Subject: [PATCH 1/3] chore: update homebrew packages and ai tools - Add block-goose-cli to homebrew brews - Comment out goose-cli in ai tools list Review notes: - Changes are straightforward package management updates - No functional impact detected - Consider documenting reason for commenting out goose-cli if temporary --- conf/llm/goose/config.yaml | 7 +++++-- nix/darwin/apps.nix | 1 + nix/hm/ai.nix | 2 +- 3 files changed, 7 insertions(+), 3 deletions(-) diff --git a/conf/llm/goose/config.yaml b/conf/llm/goose/config.yaml index 608c8d11..65e53748 100644 --- a/conf/llm/goose/config.yaml +++ b/conf/llm/goose/config.yaml @@ -107,6 +107,9 @@ extensions: name: brave-search timeout: 300 type: stdio +GOOSE_MODEL: anthropic/claude-sonnet-4 +experiments: + GOOSE_SMART_APPROVE: true +GOOSE_PLANNER_MODEL: anthropic/claude-opus-4 GOOSE_MODE: auto -GOOSE_PROVIDER: github_copilot -GOOSE_MODEL: gpt-4o +GOOSE_PROVIDER: openrouter diff --git a/nix/darwin/apps.nix b/nix/darwin/apps.nix index c184f446..6cbfee07 100644 --- a/nix/darwin/apps.nix +++ b/nix/darwin/apps.nix @@ -14,6 +14,7 @@ homebrew = { brews = [ "asdf" + "block-goose-cli" ]; casks = [ # "font-maple-mono" diff --git a/nix/hm/ai.nix b/nix/hm/ai.nix index 6c4c3a1a..55207905 100644 --- a/nix/hm/ai.nix +++ b/nix/hm/ai.nix @@ -14,7 +14,7 @@ in ollama aider-chat github-mcp-server - goose-cli + # goose-cli ]; programs.fish = { From 5a886f6bf8b7bf6c7473df1b386200dd4913bae7 Mon Sep 17 00:00:00 2001 From: Towry Wang Date: Tue, 17 Jun 2025 10:24:10 +0800 Subject: [PATCH 2/3] feat: add opencode configuration and update dependencies Review notes: - New opencode.json configuration looks complete with multiple agent and MCP server definitions - Sensible model choices and token limits specified - Good separation of different server types and their configurations - Consider adding comments explaining the purpose of each agent type - API keys should be properly documented in project README - Dependency updates appear to be routine version bumps with no breaking changes - No obvious security concerns in the configuration - Recommend adding validation for required environment variables in future iterations --- .gitignore | 1 + conf/llm/aichat/config.yaml | 45 +++++++++-------------- conf/llm/opencode/opencode.json | 65 +++++++++++++++++++++++++++++++++ flake.lock | 12 +++--- nix/hm/ai.nix | 13 +++++-- nix/hm/git.nix | 5 ++- nix/overlay.nix | 4 ++ nix/pkgs/opencode.nix | 45 +++++++++++++++++++++++ 8 files changed, 152 insertions(+), 38 deletions(-) create mode 100644 conf/llm/opencode/opencode.json create mode 100644 nix/pkgs/opencode.nix diff --git a/.gitignore b/.gitignore index 0d77ea8d..d75c6875 100644 --- a/.gitignore +++ b/.gitignore @@ -14,3 +14,4 @@ sync.lock nix/foo.nix ./result .aider* +.opencode/ diff --git a/conf/llm/aichat/config.yaml b/conf/llm/aichat/config.yaml index dc459d78..c4dd1613 100644 --- a/conf/llm/aichat/config.yaml +++ b/conf/llm/aichat/config.yaml @@ -1,35 +1,24 @@ -# model: ollama:llama3.2:3b -model: deepseek:deepseek-chat +# model: deepseek:deepseek-chat +model: openrouter:anthropic/claude-sonnet-4 keybindings: vi function_calling: true -rag_embedding_model: nomic-embed-text:latest +stream: true +save: true +wrap: auto +wrap_code: true +save_shell_history: true +compress_threshold: 256000 +save_session: true +# Text prompt used for creating a concise summary of session message +summarize_prompt: 'This 
conversation became quite long and we will run out of context. Please summarize this discussion to use as info for a future prompt. Include relevant information to help you continue the conversation.' +# Text prompt used for including the summary of the entire session +summary_prompt: 'This is a summary of the previous conversation. It was cut off because it got too long. This is the summary. Please continue the conversation. If you have questions, feel free to ask!' + clients: - type: openai-compatible - name: ollama - api_base: http://127.0.0.1:11434/v1 - models: - - name: llama3.2:3b - max_input_tokens: 128000 - supports_function_calling: true - - - name: phi4:14b - max_input_tokens: 16000 - supports_function_calling: true - - - name: dolphin3:8b - max_input_tokens: 130000 - supports_function_calling: true - - - name: smallthinker:3b - max_input_tokens: 16384 - supports_function_calling: true - - - name: qwq:32b - max_input_tokens: 33000 - supports_function_calling: true - - - name: nomic-embed-text:latest - type: embedding + name: openrouter + api_base: https://openrouter.ai/api/v1 + api_key: @OPENROUTER_API_KEY@ - type: openai-compatible name: deepseek diff --git a/conf/llm/opencode/opencode.json b/conf/llm/opencode/opencode.json new file mode 100644 index 00000000..7d0fb079 --- /dev/null +++ b/conf/llm/opencode/opencode.json @@ -0,0 +1,65 @@ +{ + "$schema": "https://raw.githubusercontent.com/opencode-ai/opencode/refs/heads/main/opencode-schema.json", + "providers": { + "openrouter": { + "apiKey": "@OPENROUTER_API_KEY@", + "disabled": false + } + }, + "agents": { + "coder": { + "model": "anthropic/claude-sonnet-4", + "maxTokens": 5000 + }, + "task": { + "model": "anthropic/claude-sonnet-4", + "maxTokens": 5000 + }, + "title": { + "model": "anthropic/claude-3.7-sonnet", + "maxTokens": 80 + } + }, + "mcpServers": { + "context7": { + "type": "stdio", + "command": "bunx", + "args": ["@upstash/context7-mcp"] + }, + "memory": { + "type": "stdio", + "command": "bunx", + "args": ["@modelcontextprotocol/server-memory"] + }, + "playwright": { + "type": "stdio", + "command": "bunx", + "args": ["@executeautomation/playwright-mcp-server"] + }, + "deepwiki": { + "type": "stdio", + "command": "bunx", + "args": ["mcp-deepwiki@latest"] + }, + "filesystem": { + "type": "stdio", + "command": "bunx", + "args": ["@modelcontextprotocol/server-filesystem", "/Users/towry/workspace"] + }, + "github": { + "type": "stdio", + "command": "github-mcp-server", + "args": ["stdio", "--toolsets", "all"], + "env": ["GITHUB_PERSONAL_ACCESS_TOKEN"] + }, + "brave-search": { + "type": "stdio", + "command": "bunx", + "args": ["@modelcontextprotocol/server-brave-search"], + "env": ["BRAVE_API_KEY"] + } + }, + "debug": false, + "debugLSP": false, + "autoCompact": true +} diff --git a/flake.lock b/flake.lock index 267581ee..f955d562 100644 --- a/flake.lock +++ b/flake.lock @@ -464,10 +464,10 @@ "rust-overlay": "rust-overlay" }, "locked": { - "lastModified": 1749082306, - "narHash": "sha256-KTRRRq8XnVP6HSGNBzaDl5vzLHI4mufo28UZsxoF81Q=", + "lastModified": 1749774239, + "narHash": "sha256-MBktcY0rpOBUc6DNQ20OKFBzAgaM3Ix+O39ckxPEmIw=", "ref": "refs/heads/main", - "rev": "3023d1c594a24b22cbc12f4154f19271e3204734", + "rev": "0a9ab49dc5e59d9d6032477d0f4e916a24c1ca21", "shallow": true, "type": "git", "url": "ssh://git@github.com/pze/jj.git" @@ -531,10 +531,10 @@ ] }, "locked": { - "lastModified": 1746769616, - "narHash": "sha256-ArSsDx5M4tn6hDnHIuC3xDuL3gVvoZcSiTvHpqiIIHc=", + "lastModified": 1750127002, + "narHash": 
"sha256-Bj+cEyU2G1t523IbWVrpe4KLF80LgBrsFFqKI0t5gu0=", "ref": "refs/heads/main", - "rev": "33bcbe9a4037d07cb27f837a5074483fbd49dbfa", + "rev": "2c4db01e15c8c3c2e3be10f9257d0b6a03f66cac", "shallow": true, "type": "git", "url": "ssh://git@github.com/towry/nix-priv.git" diff --git a/nix/hm/ai.nix b/nix/hm/ai.nix index 55207905..9d8c3532 100644 --- a/nix/hm/ai.nix +++ b/nix/hm/ai.nix @@ -1,7 +1,7 @@ { pkgs, config, ... }: let - configDir = + aichatConfigDir = if pkgs.stdenv.isDarwin then "${config.home.homeDirectory}/Library/Application Support/aichat" else @@ -15,6 +15,7 @@ in aider-chat github-mcp-server # goose-cli + opencode ]; programs.fish = { @@ -48,14 +49,15 @@ in }; home.file = { - "${configDir}/roles" = { + "${aichatConfigDir}/roles" = { # link to ../../conf/llm/aichat/roles dir source = ../../conf/llm/aichat/roles; recursive = true; }; - "${configDir}/config.yaml" = { + "${aichatConfigDir}/config.yaml" = { source = pkgs.replaceVars ../../conf/llm/aichat/config.yaml { DEEPSEEK_API_KEY = pkgs.nix-priv.keys.deepseek.apiKey; + OPENROUTER_API_KEY = pkgs.nix-priv.keys.openrouter.apiKey; }; }; ".aider.conf.yml" = { @@ -63,5 +65,10 @@ in }; }; + "${config.xdg.configHome}/opencode/.opencode.json" = { + source = pkgs.replaceVars ../../conf/llm/opencode/opencode.json { + OPENROUTER_API_KEY = pkgs.nix-priv.keys.openrouter.apiKey; + }; + }; }; } diff --git a/nix/hm/git.nix b/nix/hm/git.nix index ba5f5a23..e70a96ad 100644 --- a/nix/hm/git.nix +++ b/nix/hm/git.nix @@ -203,7 +203,9 @@ in default = "current"; autoSetupRemote = true; }; - sequence = { editor = "interactive-rebase-tool"; }; + sequence = { + editor = "interactive-rebase-tool"; + }; merge = { autostash = false; tool = "nvim"; @@ -320,6 +322,7 @@ in "target/" ".aider.*" ".aider*" + ".opencode/" ]; }; } diff --git a/nix/overlay.nix b/nix/overlay.nix index 200be14d..e8075ddf 100644 --- a/nix/overlay.nix +++ b/nix/overlay.nix @@ -23,6 +23,10 @@ pkgs = final; system = final.system; }; + opencode = final.callPackage ./pkgs/opencode.nix { + pkgs = final; + system = final.system; + }; # git-fuzzy = final.callPackage ./pkgs/git-fuzzy.nix {}; uclanr = final.callPackage ./pkgs/uclanr.nix { pkgs = final; diff --git a/nix/pkgs/opencode.nix b/nix/pkgs/opencode.nix new file mode 100644 index 00000000..8f56e906 --- /dev/null +++ b/nix/pkgs/opencode.nix @@ -0,0 +1,45 @@ +{ + version ? "0.0.53", + system, + pkgs ? 
(import <nixpkgs> { }),
+}:
+let
+  url-map = {
+    x86_64-darwin = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-mac-x86_64.tar.gz";
+    aarch64-darwin = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-mac-arm64.tar.gz";
+    x86_64-linux = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-linux-x86_64.tar.gz";
+    aarch64-linux = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-linux-arm64.tar.gz";
+  };
+  sha256-map = {
+    x86_64-darwin = "d7e798df5404ab49f72539772fc886cce3478965d59e0f8fb51809daa508106f";
+    aarch64-darwin = "dd2c49d5ed1ff4d98787666d41a772bdcc4595eb02805563293450ff85896ac6";
+    x86_64-linux = "1d2b57f9a6ede223c50e865409289bb7e263620bb9b18151f3776974f08ea39f";
+    aarch64-linux = "c2f8c9fe365815a13e5789291c133fb6e6312d95e4c5ad7cd5ee4823fd57c68e";
+  };
+  tarfile = builtins.fetchurl {
+    url = url-map.${system};
+    sha256 = sha256-map.${system};
+  };
+in
+with pkgs;
+stdenv.mkDerivation {
+  pname = "opencode";
+  inherit version;
+  src = ./.;
+  unpackPhase = ''
+    tar -xzf ${tarfile}
+  '';
+  installPhase = ''
+    mkdir -p $out/bin
+    mv opencode $out/bin/
+    chmod +x $out/bin/opencode
+  '';
+  nativeBuildInputs = [
+    makeWrapper
+  ];
+  meta = {
+    homepage = "https://github.com/opencode-ai/opencode";
+    description = "OpenCode AI CLI tool";
+    license = lib.licenses.mit;
+  };
+}

From 25da124e211335fba38185ac709f974cb7eda74d Mon Sep 17 00:00:00 2001
From: Towry Wang
Date: Tue, 17 Jun 2025 11:40:03 +0800
Subject: [PATCH 3/3] chore: update LLM model configurations to claude-sonnet-4

---
 conf/llm/aichat/roles/gen-prompt.md | 2 +-
 conf/llm/aichat/roles/git-branch.md | 2 +-
 conf/llm/aichat/roles/git-commit.md | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/conf/llm/aichat/roles/gen-prompt.md b/conf/llm/aichat/roles/gen-prompt.md
index 4997d984..4db6863a 100644
--- a/conf/llm/aichat/roles/gen-prompt.md
+++ b/conf/llm/aichat/roles/gen-prompt.md
@@ -1,5 +1,5 @@
 ---
-model: deepseek:deepseek-chat
+model: openrouter:anthropic/claude-sonnet-4
 temperature: 0.1
 top_p: 0.2
 ---
diff --git a/conf/llm/aichat/roles/git-branch.md b/conf/llm/aichat/roles/git-branch.md
index 5ed2f3fb..6e7ff4d8 100644
--- a/conf/llm/aichat/roles/git-branch.md
+++ b/conf/llm/aichat/roles/git-branch.md
@@ -1,5 +1,5 @@
 ---
-model: deepseek:deepseek-chat
+model: openrouter:anthropic/claude-sonnet-4
 temperature: 0
 top_p: 0.1
 ---
diff --git a/conf/llm/aichat/roles/git-commit.md b/conf/llm/aichat/roles/git-commit.md
index 107fa818..0ac83554 100644
--- a/conf/llm/aichat/roles/git-commit.md
+++ b/conf/llm/aichat/roles/git-commit.md
@@ -1,5 +1,5 @@
 ---
-model: deepseek:deepseek-chat
+model: openrouter:anthropic/claude-sonnet-4
 temperature: 0
 top_p: 0.3
 ---
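
Review note on the validation suggestion in patch 2: the sketch below is one way the opencode.json key substitution in nix/hm/ai.nix could fail evaluation early when the OpenRouter key is empty, rather than silently writing a config with a blank key. It is an illustration only and not part of this series; pkgs.replaceVars and pkgs.nix-priv.keys.openrouter.apiKey are the attributes already used in the patch, while the assert/assertMsg guard is an assumed addition.

  "${config.xdg.configHome}/opencode/.opencode.json" = {
    source =
      let
        # Assumed guard: abort with a clear message if the private key from
        # nix-priv is missing, instead of substituting an empty value.
        openrouterKey = pkgs.nix-priv.keys.openrouter.apiKey;
      in
      assert pkgs.lib.asserts.assertMsg (openrouterKey != "") "openrouter apiKey is not set in nix-priv";
      pkgs.replaceVars ../../conf/llm/opencode/opencode.json {
        OPENROUTER_API_KEY = openrouterKey;
      };
  };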