# config_github.yaml
---
# MCP (Model Context Protocol) servers available to the coding agent.
mcp_servers:
  - mcp_server_name: "github"
    # argv used to launch the server; "stdio" selects the stdio transport.
    command:
      - "/app/github-mcp-server.cmd"
      - "stdio"
    env:
      GITHUB_TOOLSETS: "all"  # expose every GitHub toolset to the agent
    # Tool-call cheat sheet injected into the LLM system prompt.
    system_prompt: |
      ### github mcp tools
      * `github/get_issue` → `{ "owner": string, "repo": string, "issue_number": int }`
      * `github/get_file_contents` → `{ "owner": string, "repo": string, "path": string, "ref": string }`
      * `github/create_or_update_file` → `{ "owner": string, "repo": string, "path": string, "content": string, "branch": string, "message": string }`
      * `github/create_pull_request` → `{ "owner": string, "repo": string, "title": string, "body": string, "head": string, "base": string }`
      * `github/update_issue` → `{ "owner": string, "repo": string, "issue_number": int, "remove_labels"?: [string], "add_labels"?: [string] }`

# LLM backend selection and per-provider settings.
llm:
  provider: "lmstudio"  # one of: "lmstudio" | "ollama" | "openai"
  lmstudio:
    # NOTE(review): no URL scheme here, unlike the ollama endpoint below —
    # confirm the client prepends "http://" before use.
    base_url: "host.docker.internal:1234"
    context_length: 32768
    model: "qwen3-30b-a3b-mlx"
  ollama:
    endpoint: "http://host.docker.internal:11434"
    model: "qwen3-30b-a3b-mlx"
    max_token: 32768
  openai:
    api_key_env: "OPENAI_API_KEY"  # name of the env var holding the key (not the key itself)
    model: "gpt-4o"
    max_token: 32768
  # Presumably an upper bound on LLM processing runs — confirm against the consumer.
  max_llm_process_num: 1000

# GitHub issue polling: which issues the bot picks up and how it labels them.
github:
  owner: "notfolder"
  bot_label: "coding agent"                    # presumably marks issues for pickup — confirm
  processing_label: "coding agent processing"  # presumably applied while the bot works — confirm
  done_label: "coding agent done"              # presumably applied when finished — confirm
  # Issue search query. Fixed: the original repeated "sort:updated-desc" twice;
  # the duplicate qualifier is redundant and has been removed.
  query: 'is:issue state:open archived:false author:@me sort:updated-desc'

scheduling:
  interval: 300  # polling interval in seconds