1 change: 1 addition & 0 deletions .gitignore
@@ -14,3 +14,4 @@ sync.lock
nix/foo.nix
./result
.aider*
.opencode/
45 changes: 17 additions & 28 deletions conf/llm/aichat/config.yaml
@@ -1,35 +1,24 @@
# model: ollama:llama3.2:3b
model: deepseek:deepseek-chat
# model: deepseek:deepseek-chat
model: openrouter:anthropic/claude-sonnet-4
keybindings: vi
function_calling: true
rag_embedding_model: nomic-embed-text:latest
stream: true
save: true
wrap: auto
wrap_code: true
save_shell_history: true
compress_threshold: 256000
save_session: true
# Text prompt used for creating a concise summary of session message
summarize_prompt: '<system>This conversation became quite long and we will run out of context. Please summarize this discussion to use as info for a future prompt. Include relevant information to help you continue the conversation.</system>'
# Text prompt used for including the summary of the entire session
summary_prompt: '<system>This is a summary of the previous conversation. It was cut off because it got too long. This is the summary. Please continue the conversation. If you have questions, feel free to ask!</system>'

clients:
- type: openai-compatible
name: ollama
api_base: http://127.0.0.1:11434/v1
models:
- name: llama3.2:3b
max_input_tokens: 128000
supports_function_calling: true

- name: phi4:14b
max_input_tokens: 16000
supports_function_calling: true

- name: dolphin3:8b
max_input_tokens: 130000
supports_function_calling: true

- name: smallthinker:3b
max_input_tokens: 16384
supports_function_calling: true

- name: qwq:32b
max_input_tokens: 33000
supports_function_calling: true

- name: nomic-embed-text:latest
type: embedding
name: openrouter
api_base: https://openrouter.ai/api/v1
api_key: @OPENROUTER_API_KEY@

- type: openai-compatible
name: deepseek
2 changes: 1 addition & 1 deletion conf/llm/aichat/roles/gen-prompt.md
@@ -1,5 +1,5 @@
---
model: deepseek:deepseek-chat
model: openrouter:anthropic/claude-sonnet-4
temperature: 0.1
top_p: 0.2
---
2 changes: 1 addition & 1 deletion conf/llm/aichat/roles/git-branch.md
@@ -1,5 +1,5 @@
---
model: deepseek:deepseek-chat
model: openrouter:anthropic/claude-sonnet-4
temperature: 0
top_p: 0.1
---
2 changes: 1 addition & 1 deletion conf/llm/aichat/roles/git-commit.md
@@ -1,5 +1,5 @@
---
model: deepseek:deepseek-chat
model: openrouter:anthropic/claude-sonnet-4
temperature: 0
top_p: 0.3
---
7 changes: 5 additions & 2 deletions conf/llm/goose/config.yaml
@@ -107,6 +107,9 @@ extensions:
name: brave-search
timeout: 300
type: stdio
GOOSE_MODEL: anthropic/claude-sonnet-4
experiments:
GOOSE_SMART_APPROVE: true
GOOSE_PLANNER_MODEL: anthropic/claude-opus-4
GOOSE_MODE: auto
GOOSE_PROVIDER: github_copilot
GOOSE_MODEL: gpt-4o
GOOSE_PROVIDER: openrouter
65 changes: 65 additions & 0 deletions conf/llm/opencode/opencode.json
@@ -0,0 +1,65 @@
{
"$schema": "https://raw.githubusercontent.com/opencode-ai/opencode/refs/heads/main/opencode-schema.json",
"providers": {
"openrouter": {
"apiKey": "@OPENROUTER_API_KEY@",
"disabled": false
}
},
"agents": {
"coder": {
"model": "anthropic/claude-sonnet-4",
"maxTokens": 5000
},
"task": {
"model": "anthropic/claude-sonnet-4",
"maxTokens": 5000
},
"title": {
"model": "anthropic/claude-3.7-sonnet",
"maxTokens": 80
}
},
"mcpServers": {
"context7": {
"type": "stdio",
"command": "bunx",
"args": ["@upstash/context7-mcp"]
},
"memory": {
"type": "stdio",
"command": "bunx",
"args": ["@modelcontextprotocol/server-memory"]
},
"playwright": {
"type": "stdio",
"command": "bunx",
"args": ["@executeautomation/playwright-mcp-server"]
},
"deepwiki": {
"type": "stdio",
"command": "bunx",
"args": ["mcp-deepwiki@latest"]
},
"filesystem": {
"type": "stdio",
"command": "bunx",
"args": ["@modelcontextprotocol/server-filesystem", "/Users/towry/workspace"]
},
"github": {
"type": "stdio",
"command": "github-mcp-server",
"args": ["stdio", "--toolsets", "all"],
"env": ["GITHUB_PERSONAL_ACCESS_TOKEN"]
},
"brave-search": {
"type": "stdio",
"command": "bunx",
"args": ["@modelcontextprotocol/server-brave-search"],
"env": ["BRAVE_API_KEY"]
}
},
"debug": false,
"debugLSP": false,
"autoCompact": true
}
12 changes: 6 additions & 6 deletions flake.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions nix/darwin/apps.nix
@@ -14,6 +14,7 @@
homebrew = {
brews = [
"asdf"
"block-goose-cli"
];
casks = [
# "font-maple-mono"
15 changes: 11 additions & 4 deletions nix/hm/ai.nix
@@ -1,7 +1,7 @@
{ pkgs, config, ... }:

let
configDir =
aichatConfigDir =
if pkgs.stdenv.isDarwin then
"${config.home.homeDirectory}/Library/Application Support/aichat"
else
@@ -14,7 +14,8 @@ in
ollama
aider-chat
github-mcp-server
goose-cli
# goose-cli
opencode
];

programs.fish = {
@@ -48,20 +49,26 @@
};

home.file = {
"${configDir}/roles" = {
"${aichatConfigDir}/roles" = {
# link to ../../conf/llm/aichat/roles dir
source = ../../conf/llm/aichat/roles;
recursive = true;
};
"${configDir}/config.yaml" = {
"${aichatConfigDir}/config.yaml" = {
source = pkgs.replaceVars ../../conf/llm/aichat/config.yaml {
DEEPSEEK_API_KEY = pkgs.nix-priv.keys.deepseek.apiKey;
OPENROUTER_API_KEY = pkgs.nix-priv.keys.openrouter.apiKey;
};
};
".aider.conf.yml" = {
text = builtins.toJSON {

};
};
"${config.xdg.configHome}/opencode/.opencode.json" = {
source = pkgs.replaceVars ../../conf/llm/opencode/opencode.json {
OPENROUTER_API_KEY = pkgs.nix-priv.keys.openrouter.apiKey;
};
};
};
}
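
The @OPENROUTER_API_KEY@ and @DEEPSEEK_API_KEY@ placeholders in the config templates above are filled in at build time by pkgs.replaceVars, as used in the hunk above. A minimal sketch of the same pattern, with a hypothetical template file and token name:

  pkgs.replaceVars ./template.json {
    # every @SOME_TOKEN@ occurrence in template.json is replaced with this value
    SOME_TOKEN = "value";
  }
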
5 changes: 4 additions & 1 deletion nix/hm/git.nix
@@ -203,7 +203,9 @@ in
default = "current";
autoSetupRemote = true;
};
sequence = { editor = "interactive-rebase-tool"; };
sequence = {
editor = "interactive-rebase-tool";
};
merge = {
autostash = false;
tool = "nvim";
@@ -320,6 +322,7 @@ in
"target/"
".aider.*"
".aider*"
".opencode/"
];
};
}
4 changes: 4 additions & 0 deletions nix/overlay.nix
@@ -23,6 +23,10 @@
pkgs = final;
system = final.system;
};
opencode = final.callPackage ./pkgs/opencode.nix {
pkgs = final;
system = final.system;
};
# git-fuzzy = final.callPackage ./pkgs/git-fuzzy.nix {};
uclanr = final.callPackage ./pkgs/uclanr.nix {
pkgs = final;
45 changes: 45 additions & 0 deletions nix/pkgs/opencode.nix
@@ -0,0 +1,45 @@
{
version ? "0.0.53",
system,
pkgs ? (import <nixpkgs> { }),
}:
let
url-map = {
x86_64-darwin = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-mac-x86_64.tar.gz";
aarch64-darwin = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-mac-arm64.tar.gz";
x86_64-linux = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-linux-x86_64.tar.gz";
aarch64-linux = "https://github.com/opencode-ai/opencode/releases/download/v${version}/opencode-linux-arm64.tar.gz";
};
sha256-map = {
x86_64-darwin = "d7e798df5404ab49f72539772fc886cce3478965d59e0f8fb51809daa508106f";
aarch64-darwin = "dd2c49d5ed1ff4d98787666d41a772bdcc4595eb02805563293450ff85896ac6";
x86_64-linux = "1d2b57f9a6ede223c50e865409289bb7e263620bb9b18151f3776974f08ea39f";
aarch64-linux = "c2f8c9fe365815a13e5789291c133fb6e6312d95e4c5ad7cd5ee4823fd57c68e";
};
tarfile = builtins.fetchurl {
url = url-map.${system};
sha256 = sha256-map.${system};
};
in
with pkgs;
stdenv.mkDerivation {
pname = "opencode";
inherit version;
src = ./.;
unpackPhase = ''
tar -xzf ${tarfile}
'';
installPhase = ''
mkdir -p $out/bin
mv opencode $out/bin/
chmod +x $out/bin/opencode
'';
nativeBuildInputs = [
makeWrapper
];
meta = {
homepage = "https://github.com/opencode-ai/opencode";
description = "OpenCode AI CLI tool";
license = lib.licenses.mit;
};
}
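
A minimal sketch of evaluating the new package file on its own, outside the overlay (the standalone.nix name is illustrative; it assumes an aarch64-darwin host and a <nixpkgs> channel on NIX_PATH, and the matching url-map/sha256-map key would be used on other platforms):

  # standalone.nix — sketch only; mirrors how nix/overlay.nix calls the file above
  import ./nix/pkgs/opencode.nix {
    system = "aarch64-darwin";
    # version and pkgs fall back to their defaults: "0.0.53" and import <nixpkgs> { }
  }

Running nix-build on such a file would fetch the matching release tarball and install the prebuilt binary into $out/bin, as the installPhase above does.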