Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
## Why `aider-ce`?

`aider-ce` (aka `cecli`, pronounced like "Cecily") is a community-driven fork of the [Aider](https://aider.chat/) AI pair programming tool.
`aider-ce` (aka `cecli`, probably pronounced like "Cecily") is a community-driven fork of the [Aider](https://aider.chat/) AI pair programming tool.
Aider is a fantastic piece of software with a wonderful community, but it has been painfully slow in receiving updates in the quickly evolving AI tooling space.

We aim to foster an open, collaborative ecosystem where new features, experiments, and improvements can be developed and shared rapidly. We believe in genuine FOSS principles and actively welcome contributors of all skill levels.
Expand Down
2 changes: 1 addition & 1 deletion aider/__init__.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
from packaging import version

__version__ = "0.91.1.dev"
__version__ = "0.91.2.dev"
safe_version = __version__

try:
Expand Down
27 changes: 25 additions & 2 deletions aider/coders/agent_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,6 @@
view_files_with_symbol,
)

from .agent_prompts import AgentPrompts
from .base_coder import ChatChunks, Coder
from .editblock_coder import do_replace, find_original_update_blocks, find_similar_lines

Expand All @@ -81,7 +80,7 @@ class AgentCoder(Coder):
"""Mode where the LLM autonomously manages which files are in context."""

edit_format = "agent"
gpt_prompts = AgentPrompts()
prompt_format = "agent"

def __init__(self, *args, **kwargs):
# Dictionary to track recently removed files
Expand Down Expand Up @@ -876,6 +875,10 @@ def format_chat_chunks(self):
tool_context = self._generate_tool_context(repetitive_tools)
if tool_context:
post_message_blocks.append(tool_context)
else:
write_context = self._generate_write_context()
if write_context:
post_message_blocks.append(write_context)

if static_blocks:
for block in static_blocks:
Expand Down Expand Up @@ -1934,6 +1937,26 @@ def _generate_tool_context(self, repetitive_tools):
context_parts.append("</context>")
return "\n".join(context_parts)

def _generate_write_context(self):
if self.last_round_tools:
last_round_has_write = any(
tool.lower() in self.write_tools for tool in self.last_round_tools
)
if last_round_has_write:
context_parts = [
'<context name="tool_usage_history">',
"A file was just edited.",
(
" Do not just modify comments"
" and/or logging statements with placeholder information."
),
"Make sure that something of value was done.</context>",
]

return "\n".join(context_parts)

return ""

async def _apply_edits_from_response(self):
"""
Parses and applies SEARCH/REPLACE edits found in self.partial_response_content.
Expand Down
92 changes: 0 additions & 92 deletions aider/coders/agent_prompts.py

This file was deleted.

3 changes: 1 addition & 2 deletions aider/coders/architect_coder.py
Original file line number Diff line number Diff line change
@@ -1,14 +1,13 @@
import asyncio

from ..commands import SwitchCoder
from .architect_prompts import ArchitectPrompts
from .ask_coder import AskCoder
from .base_coder import Coder


class ArchitectCoder(AskCoder):
edit_format = "architect"
gpt_prompts = ArchitectPrompts()
prompt_format = "architect"
auto_accept_architect = False

async def reply_completed(self):
Expand Down
40 changes: 0 additions & 40 deletions aider/coders/architect_prompts.py

This file was deleted.

3 changes: 1 addition & 2 deletions aider/coders/ask_coder.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
from .ask_prompts import AskPrompts
from .base_coder import Coder


class AskCoder(Coder):
"""Ask questions about code without making any changes."""

edit_format = "ask"
gpt_prompts = AskPrompts()
prompt_format = "ask"
35 changes: 0 additions & 35 deletions aider/coders/ask_prompts.py

This file was deleted.

45 changes: 44 additions & 1 deletion aider/coders/base_coder.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,8 @@
from prompt_toolkit.patch_stdout import patch_stdout
from rich.console import Console

from aider import __version__, models, prompts, urls, utils
import aider.prompts.utils.system as prompts
from aider import __version__, models, urls, utils
from aider.commands import Commands, SwitchCoder
from aider.exceptions import LiteLLMExceptions
from aider.helpers import coroutines
Expand All @@ -58,6 +59,7 @@
from aider.utils import format_tokens, is_image_file

from ..dump import dump # noqa: F401
from ..prompts.utils.prompt_registry import registry
from .chat_chunks import ChatChunks


Expand Down Expand Up @@ -155,6 +157,8 @@ class Coder:
# Weak reference to TUI app instance (when running in TUI mode)
tui = None

_prompt_cache = {}

@classmethod
async def create(
self,
Expand Down Expand Up @@ -556,6 +560,45 @@ def __init__(
self.io.tool_output("JSON Schema:")
self.io.tool_output(json.dumps(self.functions, indent=4))

@property
def gpt_prompts(self):
    """Prompts for this coder, resolved from the prompt registry.

    Every coder class must declare a ``prompt_format`` class attribute
    naming its prompt set. The registry dict for that name is wrapped in an
    attribute-access namespace and memoized in ``Coder._prompt_cache``,
    which is shared by all coder subclasses and keyed by format name.

    Raises:
        AttributeError: if the class has no ``prompt_format`` attribute,
            or if ``prompt_format`` is ``None``.
    """
    # Local import so the file-level import block stays untouched.
    from types import SimpleNamespace

    cls = self.__class__

    # Every coder class MUST have a prompt_format attribute.
    # NOTE(review): raising AttributeError from inside a property means
    # hasattr()/getattr(..., default) silently report the attribute as
    # missing instead of surfacing the misconfiguration; the exception
    # type is kept as AttributeError for backward compatibility.
    if not hasattr(cls, "prompt_format"):
        raise AttributeError(
            f"Coder class {cls.__name__} must have a 'prompt_format' attribute. "
            "Add 'prompt_format = \"<format_name>\"' to the class definition."
        )

    if cls.prompt_format is None:
        raise AttributeError(
            f"Coder class {cls.__name__} has prompt_format=None. "
            "It must have a valid prompt format name."
        )

    prompt_name = cls.prompt_format

    # EAFP fast path: any coder instance may already have built this entry.
    try:
        return Coder._prompt_cache[prompt_name]
    except KeyError:
        pass

    # SimpleNamespace gives the registry dict attribute access, replacing
    # the throwaway class the original defined on every cache miss.
    prompt_obj = SimpleNamespace(**registry.get_prompt(prompt_name))
    Coder._prompt_cache[prompt_name] = prompt_obj
    return prompt_obj

def get_announcements(self):
lines = []
lines.append(f"cecli v{__version__}")
Expand Down
Loading
Loading