From 15fa6cef49a424f28c21a0d79bf2a3a3f42f4f1a Mon Sep 17 00:00:00 2001
From: Jon Michael Aanes <jonjmaa@gmail.com>
Date: Mon, 9 Jun 2025 01:23:05 +0200
Subject: [PATCH] Update documentation for dual AI assistant support
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Extended usage section in module docstring to cover both Aider and Claude Code integration:
- Clear explanation of automatic model routing based on model names
- Comprehensive command line examples for both assistants
- Updated Python API examples with new function signatures
- Environment configuration organized by assistant type
- Model examples categorized by routing destination

Users now have complete guidance on using either Aider or Claude Code with appropriate model selection.

🤖 Generated with [Claude Code](https://claude.ai/code)

Co-Authored-By: Claude <noreply@anthropic.com>
---
 aider_gitea/__init__.py | 91 +++++++++++++++++++++++++++++++++++------
 1 file changed, 78 insertions(+), 13 deletions(-)

diff --git a/aider_gitea/__init__.py b/aider_gitea/__init__.py
index 8fd49ec..cd20be4 100644
--- a/aider_gitea/__init__.py
+++ b/aider_gitea/__init__.py
@@ -1,15 +1,19 @@
 """Aider Gitea.
 
-A code automation tool that integrates Gitea with Aider to automatically solve issues.
+A code automation tool that integrates Gitea with AI assistants to automatically solve issues.
 
 This program monitors your [Gitea](https://about.gitea.com/) repository for issues with the 'aider' label.
 When such an issue is found, it:
 
 1. Creates a new branch.
-2. Invokes [Aider](https://aider.chat/) to solve the issue using a Large-Language Model.
+2. Invokes an AI assistant (Aider or Claude Code) to solve the issue using a Large-Language Model.
 3. Runs tests and code quality checks.
 4. Creates a pull request with the solution.
 
+The tool automatically selects the appropriate AI assistant based on the specified model:
+- **Aider**: Used for non-Anthropic models (e.g., GPT, Ollama, Gemini)
+- **Claude Code**: Used for Anthropic models (e.g., Claude, Sonnet, Haiku, Opus)
+
 Inspired by [the AI workflows](https://github.com/oscoreio/ai-workflows/)
 project.
 
@@ -25,48 +29,109 @@ have the following permissions:
 ### Command Line
 
 ```bash
-# Run with default settings
-python -m aider_gitea
+# Run with default settings (defaults to Aider)
+python -m aider_gitea
+
+# Use Claude Code with Anthropic models
+python -m aider_gitea --aider-model claude-3-sonnet
+python -m aider_gitea --aider-model claude-3-haiku
+python -m aider_gitea --aider-model anthropic/claude-3-opus
+
+# Use Aider with various models
+python -m aider_gitea --aider-model gpt-4
+python -m aider_gitea --aider-model ollama/llama3
+python -m aider_gitea --aider-model gemini-pro
 
 # Specify custom repository and owner
-python -m aider_gitea --owner myorg --repo myproject
+python -m aider_gitea --owner myorg --repo myproject --aider-model claude-3-sonnet
 
 # Use a custom Gitea URL
-python -m aider_gitea --gitea-url https://gitea.example.com
+python -m aider_gitea --gitea-url https://gitea.example.com --aider-model gpt-4
 
 # Specify a different base branch
-python -m aider_gitea --base-branch develop
+python -m aider_gitea --base-branch develop --aider-model claude-3-haiku
 ```
 
+### AI Assistant Selection
+
+The tool automatically routes to the appropriate AI assistant based on the model name:
+
+**Claude Code Integration (Anthropic Models):**
+- Model names containing: `claude`, `anthropic`, `sonnet`, `haiku`, `opus`
+- Examples: `claude-3-sonnet`, `claude-3-haiku`, `anthropic/claude-3-opus`
+- Requires: `ANTHROPIC_API_KEY` environment variable
+
+**Aider Integration (All Other Models):**
+- Any model not matching Anthropic patterns
+- Examples: `gpt-4`, `ollama/llama3`, `gemini-pro`, `mistral-7b`
+- Requires: `LLM_API_KEY` environment variable
+
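+In code, this routing amounts to a keyword check on the model name. The helper below is only an illustrative sketch of that rule (the actual selection happens inside `create_code_solver`), reusing the keyword list documented above:
+
+```python
+def _is_anthropic_model(model: str) -> bool:
+    # Sketch only: mirrors the documented keyword list, not the package's real code.
+    anthropic_markers = ('claude', 'anthropic', 'sonnet', 'haiku', 'opus')
+    return any(marker in model.lower() for marker in anthropic_markers)
+
+_is_anthropic_model('anthropic/claude-3-opus')  # True  -> Claude Code
+_is_anthropic_model('ollama/llama3')            # False -> Aider
+```
+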
 ### Python API
 
 ```python
-from aider_gitea import solve_issue_in_repository
+from aider_gitea import RepositoryConfig, create_code_solver, solve_issue_in_repository
 from pathlib import Path
 
-# Solve an issue programmatically
-args = argparse.Namespace(
+# Solve an issue programmatically with automatic AI assistant selection
+repository_config = RepositoryConfig(
     gitea_url="https://gitea.example.com",
     owner="myorg",
     repo="myproject",
     base_branch="main"
 )
 
+# Set the model to control which AI assistant is used
+import aider_gitea
+aider_gitea.CODE_MODEL = "claude-3-sonnet"  # Will use Claude Code
+# aider_gitea.CODE_MODEL = "gpt-4"          # Will use Aider
+
+code_solver = create_code_solver()  # Automatically selects based on CODE_MODEL
+gitea_client = ...  # placeholder: your authenticated Gitea API client instance
+
 solve_issue_in_repository(
-    args,
+    repository_config,
     Path("/path/to/repo"),
     "issue-123-fix-bug",
     "Fix critical bug",
     "The application crashes when processing large files",
-    "123"
+    "123",
+    gitea_client,
+    code_solver
 )
 ```
 
 ### Environment Configuration
 
 The tool uses environment variables for sensitive information:
+
+**Required for all setups:**
 - `GITEA_TOKEN`: Your Gitea API token
-- `LLM_API_KEY`: API key for the language model used by Aider
+
+**For Aider (non-Anthropic models):**
+- `LLM_API_KEY`: API key for the language model (OpenAI, Ollama, etc.)
+
+**For Claude Code (Anthropic models):**
+- `ANTHROPIC_API_KEY`: Your Anthropic API key for Claude models
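+
+As a minimal shell setup for the Claude Code path (the token values below are placeholders), this might look like:
+
+```bash
+export GITEA_TOKEN="<your Gitea API token>"
+export ANTHROPIC_API_KEY="<your Anthropic API key>"
+python -m aider_gitea --aider-model claude-3-sonnet
+```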
+
+### Model Examples
+
+**Anthropic Models (→ Claude Code):**
+```bash
+--aider-model claude-3-sonnet
+--aider-model claude-3-haiku
+--aider-model claude-3-opus
+--aider-model anthropic/claude-3-sonnet
+```
+
+**Non-Anthropic Models (→ Aider):**
+```bash
+--aider-model gpt-4
+--aider-model gpt-3.5-turbo
+--aider-model ollama/llama3
+--aider-model ollama/codellama
+--aider-model gemini-pro
+--aider-model mistral-7b
+```
 ```
 """