diff --git a/README.md b/README.md
index 4dc0208..340af72 100644
--- a/README.md
+++ b/README.md
@@ -27,14 +27,6 @@
 
 ---
 
-> ⚠️ **Alpha Release**
->
-> Codius is currently in alpha.
-> Features may be incomplete, unstable, or subject to change.
-> Use with care in production environments — and help shape it by providing feedback or contributing!
-
----
-
 ## Why Codius?
 
 > Free domain-driven designers from boilerplate and let them focus on modeling.
diff --git a/poetry.lock b/poetry.lock
index eafa661..a813e25 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,4 +1,4 @@
-# This file is automatically @generated by Poetry 2.1.3 and should not be changed by hand.
+# This file is automatically @generated by Poetry 2.1.4 and should not be changed by hand.
 
 [[package]]
 name = "annotated-types"
@@ -1208,14 +1208,14 @@ wcwidth = "*"
 
 [[package]]
 name = "py-dependency-injection"
-version = "1.0.0rc1"
-description = "A dependency injection library for Python."
+version = "1.0.0rc2"
+description = "A dependency injection library for Python - inspired by ASP.NET Core."
 optional = false
-python-versions = ">=3.7"
+python-versions = ">=3.9"
 groups = ["main"]
 files = [
-    {file = "py-dependency-injection-1.0.0rc1.tar.gz", hash = "sha256:a55b170229adb514a8a2a651b2173a19e587296db8a219877ca1979c5d434060"},
-    {file = "py_dependency_injection-1.0.0rc1-py3-none-any.whl", hash = "sha256:a2f3c10c47aaa3e66c703791715c5e3c54f363e3bc91991324e07760b6b793da"},
+    {file = "py_dependency_injection-1.0.0rc2-py3-none-any.whl", hash = "sha256:eb8b81b4e79e873c8cf4d36774be2d72c832ce808fbf17550137abf726e52971"},
+    {file = "py_dependency_injection-1.0.0rc2.tar.gz", hash = "sha256:27454e797f708b7a40a183effc452950fdd11acac43981959c202f5dca6e1da6"},
 ]
 
 [[package]]
@@ -2319,4 +2319,4 @@ cffi = ["cffi (>=1.11)"]
 [metadata]
 lock-version = "2.1"
 python-versions = ">=3.9,!=3.13.*"
-content-hash = "978d15eae9a30897603e83a31dff82b4b5a3eaa4e3c7ece65edd50b071c5aaf1"
+content-hash = "144e31b57d88e4fea4e299f73ea79c28d889d15c2ce106942b976a20474dd5a2"
diff --git a/pyproject.toml b/pyproject.toml
index e15136f..abfa5b0 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -26,7 +26,6 @@ dependencies = [
     "jinja2",
     "langgraph",
     "langchain-openai",
-    "py-dependency-injection >=1.0.0b3,<2.0.0",
     "python-dateutil",
     "python-dotenv",
     "pyyaml",
@@ -34,7 +33,8 @@ dependencies = [
     "rich",
     "tree-sitter==0.23.2",
     "tree-sitter-c-sharp",
-    "typing-extensions"
+    "typing-extensions",
+    "py-dependency-injection (>=1.0.0rc2,<2.0.0)",
 ]
 
 [tool.poetry.group.dev]
diff --git a/src/codius/di.py b/src/codius/di.py
index 9f256fd..297ddb2 100644
--- a/src/codius/di.py
+++ b/src/codius/di.py
@@ -3,9 +3,11 @@
 from dependency_injection.container import DependencyContainer
 
 from codius.domain.model.config.config import Config
+from codius.domain.model.config.llm_provider import LlmProvider
 from codius.domain.model.port.llm_port import LlmPort
 from codius.domain.services.config_service import ConfigService
 from codius.domain.services.session_service import SessionService
+from codius.infrastructure.adapter.llm.ollama.ollama_llm_adapter import OllamaLlmAdapter
 from codius.infrastructure.adapter.llm.openai.openai_llm_adapter import OpenAiLlmAdapter
 from codius.infrastructure.repository.session_repository import SessionRepository
 
@@ -52,4 +54,18 @@ def register_services(config: Config, args: argparse.Namespace):
     container.register_scoped(OpenDddConventionService)
     container.register_scoped(TreeSitterService)
     container.register_scoped(LlmService)
-    container.register_scoped(LlmPort, OpenAiLlmAdapter)
+
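+    # Resolve the LLM adapter class from the configured provider; providers
+    # without an entry in this mapping are rejected at startup.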
+    adapter_by_provider = {
+        LlmProvider.OPENAI: OpenAiLlmAdapter,
+        LlmProvider.OLLAMA: OllamaLlmAdapter,
+    }
+
+    provider = config.llm.provider
+    adapter_cls = adapter_by_provider.get(provider)
+
+    if adapter_cls is None:
+        raise RuntimeError(f"Unsupported LLM provider in config: {provider}")
+
+    container.register_scoped(LlmPort, adapter_cls)
diff --git a/src/codius/domain/model/config/config.py b/src/codius/domain/model/config/config.py
index bed4b26..a35fdfd 100644
--- a/src/codius/domain/model/config/config.py
+++ b/src/codius/domain/model/config/config.py
@@ -9,6 +9,14 @@
         "openai": {
             "model": "gpt-4o",
             "api_key": "sk-... # Replace with your OpenAI API key"
+        },
+        "anthropic": {
+            "model": "claude-3-opus",
+            "api_key": "xyz-... # Replace with your Anthropic API key"
+        },
+        "ollama": {
+            "server_url": "http://localhost:11434",
+            "model": "gpt-oss:20b"
         }
     },
     "approval_mode": ApprovalMode.SUGGEST.value,
diff --git a/src/codius/domain/model/config/llm_provider.py b/src/codius/domain/model/config/llm_provider.py
index ac4a6bb..3c2f0d1 100644
--- a/src/codius/domain/model/config/llm_provider.py
+++ b/src/codius/domain/model/config/llm_provider.py
@@ -7,3 +7,4 @@ class LlmProvider(str, Enum):
     GOOGLE = "google"
     MISTRAL = "mistral"
     GROQ = "groq"
+    OLLAMA = "ollama"
diff --git a/src/codius/domain/model/config/ollama/__init__.py b/src/codius/domain/model/config/ollama/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/codius/domain/model/config/ollama/ollama_llm_model.py b/src/codius/domain/model/config/ollama/ollama_llm_model.py
new file mode 100644
index 0000000..b6f85aa
--- /dev/null
+++ b/src/codius/domain/model/config/ollama/ollama_llm_model.py
@@ -0,0 +1,8 @@
+from enum import Enum
+
+
+class OllamaModel(str, Enum):
+    GPT_OSS_20B = "gpt-oss:20b"
+    GPT_OSS_120B = "gpt-oss:120b"
+    LLAMA_31_8B = "llama3.1:8b-instruct-q4_K_M"
+    MISTRAL_7B = "mistral:7b"
diff --git a/src/codius/domain/model/intents/intent_type.py b/src/codius/domain/model/intents/intent_type.py
index 1437659..88f859f 100644
--- a/src/codius/domain/model/intents/intent_type.py
+++ b/src/codius/domain/model/intents/intent_type.py
@@ -1,5 +1,8 @@
 from enum import Enum
 
+from codius.infrastructure.services.code_scanner.model.building_block_type import \
+    BuildingBlockType
+
 
 class IntentType(str, Enum):
     ADD_AGGREGATE = "add_aggregate"
@@ -22,3 +25,16 @@ class IntentType(str, Enum):
     REMOVE_REPOSITORY_METHOD = "remove_repository_method"
 
     UNSURE = "unsure"
+
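+    # Intent names encode their target building block as a name prefix
+    # (ADD_AGGREGATE_*, REMOVE_VALUE_OBJECT_*, ...), so the mapping keys off self.name.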
+    @property
+    def building_block(self) -> BuildingBlockType:
+        if self.name.startswith(("ADD_AGGREGATE", "REMOVE_AGGREGATE")):
+            return BuildingBlockType.AGGREGATE_ROOT
+        elif self.name.startswith(("ADD_VALUE_OBJECT", "REMOVE_VALUE_OBJECT")):
+            return BuildingBlockType.VALUE_OBJECT
+        elif self.name.startswith(("ADD_REPOSITORY", "REMOVE_REPOSITORY")):
+            return BuildingBlockType.REPOSITORY
+        else:
+            raise NotImplementedError(f"Building block not mapped for intent {self}")
diff --git a/src/codius/domain/model/prompts/distill_intent_prompt.py b/src/codius/domain/model/prompts/distill_intent_prompt.py
index 3949d69..265acde 100644
--- a/src/codius/domain/model/prompts/distill_intent_prompt.py
+++ b/src/codius/domain/model/prompts/distill_intent_prompt.py
@@ -22,6 +22,8 @@
 from codius.domain.model.intents.value_object.add_value_object_property_intent import AddValueObjectPropertyIntent
 from codius.domain.model.intents.value_object.remove_value_object_property_intent import RemoveValueObjectPropertyIntent
 from codius.domain.model.intents.repository.add_repository_intent import AddRepositoryIntent
+from codius.infrastructure.services.code_scanner.model.building_block_type import \
+    BuildingBlockType
 
 
 @dataclass(frozen=True)
@@ -49,6 +51,14 @@ def as_prompt(self) -> str:
             RemoveRepositoryMethodIntent,
         ]
 
+        all_blocks = {bb.value for bb in BuildingBlockType}
+        supported_blocks = {intent.building_block.value for intent in IntentType
+                            if intent != IntentType.UNSURE}
+        unsupported_blocks = all_blocks - supported_blocks
+
+        supported_blocks_text = "\n".join(f"- {b}" for b in sorted(supported_blocks))
+        unsupported_blocks_text = "\n".join(f"- {b}" for b in sorted(unsupported_blocks))
+
         example_blocks = "\n".join(
             f"### {cls.intent.value}\n```json\n{cls.to_example_json()}\n```"
             for cls in example_intents
@@ -88,6 +98,30 @@ def as_prompt(self) -> str:
 **Database Providers**:
 {database_text}
 
+---
+
+### Known DDD Building Blocks
+
+These are the known building blocks in Domain-Driven Design:
+
+**Supported:**
+{supported_blocks_text}
+
+**Not yet supported:**
+{unsupported_blocks_text}
+
+If the user refers to a known DDD building block that is **not yet supported**, return:
+
+```json
+{{ "intent": "unsupported", "building_block": "<building_block>" }}
+```
+
+If the user's intent is unclear, respond only with:
+
+```json
+{{ "intent": "unsure" }}
+```
+
 ### Instructions
 
 - Break complex modeling instructions into small, **granular intents**.
@@ -109,9 +143,4 @@ def as_prompt(self) -> str:
 
 ### Examples:
 {example_blocks}
-
-If the user's intent is unclear or unsupported, respond only with:
-
-```json
-{{ "intent": "unsure" }}
 """
diff --git a/src/codius/domain/services/config_service.py b/src/codius/domain/services/config_service.py
index 42d0760..2a79440 100644
--- a/src/codius/domain/services/config_service.py
+++ b/src/codius/domain/services/config_service.py
@@ -7,10 +7,12 @@
 from codius.domain.model.config.approval_mode import ApprovalMode
 from codius.domain.model.config.config import Config
 from codius.domain.model.config.llm_provider import LlmProvider
+from codius.domain.model.config.ollama.ollama_llm_model import OllamaModel
 from codius.domain.model.config.openai.openai_llm_model import OpenAiModel
 
 from codius.infrastructure.adapter.llm.anthropic.anthropic_config import AnthropicConfig
 from codius.infrastructure.adapter.llm.llm_config import LlmConfig
+from codius.infrastructure.adapter.llm.ollama.ollama_config import OllamaConfig
 from codius.infrastructure.adapter.llm.openai.openai_config import OpenAiConfig
 from codius.infrastructure.services.project_metadata_service import ProjectMetadataService
 
@@ -55,13 +57,6 @@ def parse_structured(cls, raw: dict) -> Config:
 
         llm_config = LlmConfig(provider=provider)
 
-        openai = llm_section.get("openai", {})
-        raw_model = openai.get("model") or "gpt-4o"
-        llm_config.openai = OpenAiConfig(
-            model=OpenAiModel(raw_model),
-            api_key=openai.get("api_key", "")
-        )
-
         anthropic = llm_section.get("anthropic", {})
         raw_model = anthropic.get("model") or "claude-3-opus"
         llm_config.anthropic = AnthropicConfig(
@@ -69,6 +64,21 @@ def parse_structured(cls, raw: dict) -> Config:
             api_key=anthropic.get("api_key", "")
         )
 
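+        # Ollama is self-hosted: only a server URL and model are required (no API key).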
+        ollama = llm_section.get("ollama", {})
+        raw_model = ollama.get("model") or OllamaModel.GPT_OSS_20B.value
+        llm_config.ollama = OllamaConfig(
+            model=OllamaModel(raw_model),
+            server_url=ollama.get("server_url", "http://localhost:11434")
+        )
+
+        openai = llm_section.get("openai", {})
+        raw_model = openai.get("model") or "gpt-4o"
+        llm_config.openai = OpenAiConfig(
+            model=OpenAiModel(raw_model),
+            api_key=openai.get("api_key", "")
+        )
+
         log_level = raw.get("log_level", "warning").lower()
         allowed_levels = {"debug", "info", "warning", "error", "critical"}
         if log_level not in allowed_levels:
diff --git a/src/codius/graph/nodes/handle_unsupported_intent.py b/src/codius/graph/nodes/handle_unsupported_intent.py
new file mode 100644
index 0000000..578f3ef
--- /dev/null
+++ b/src/codius/graph/nodes/handle_unsupported_intent.py
@@ -0,0 +1,31 @@
+def handle_unsupported_intent(state: dict) -> dict:
+    from codius.domain.model.intents.intent_type import IntentType
+
+    unsupported_blocks = {
+        intent.get("building_block")
+        for intent in state.get("intent", [])
+        if intent.get("intent") == "unsupported" and intent.get("building_block")
+    }
+    block_list = ", ".join(sorted(unsupported_blocks))
+
+    # Dynamically get supported building blocks
+    supported_blocks = {
+        intent.building_block.value
+        for intent in IntentType
+        if intent != IntentType.UNSURE
+    }
+    supported_blocks_list = "\n".join(f"- {b}" for b in sorted(supported_blocks))
+
+    state["final_output"] = (
+        f"⚠️ The assistant understood your request, but the following building block(s) aren't supported yet:\n"
+        f"- {block_list}\n\n"
+        "We're shipping updates frequently, so it might already be available in a newer version.\n\n"
+        "👉 Try updating Codius:\n"
+        "```bash\npip install --upgrade codius\n```\n"
+        "Then rerun your request.\n\n"
+        "In the meantime, you can try working with one of the currently supported building blocks:\n"
+        f"{supported_blocks_list}\n\n"
+        "💡 Need help or want to request this feature? Let us know on GitHub!"
+    )
+
+    return state
diff --git a/src/codius/graph/routers/intent_router.py b/src/codius/graph/routers/intent_router.py
index f182dfe..ec9f7d7 100644
--- a/src/codius/graph/routers/intent_router.py
+++ b/src/codius/graph/routers/intent_router.py
@@ -16,6 +16,8 @@ def route_by_intent(state: dict) -> str:
 
     if intent_type == "error":
         return "error"
+    elif intent_type == "unsupported":
+        return "unsupported"
     elif intent_type and intent_type not in {"none", "greeting", "unsure"}:
         return "valid"
 
diff --git a/src/codius/infrastructure/adapter/llm/llm_config.py b/src/codius/infrastructure/adapter/llm/llm_config.py
index 0fc4825..c0af889 100644
--- a/src/codius/infrastructure/adapter/llm/llm_config.py
+++ b/src/codius/infrastructure/adapter/llm/llm_config.py
@@ -6,6 +6,7 @@
 from codius.infrastructure.adapter.llm.google.google_config import GoogleConfig
 from codius.infrastructure.adapter.llm.groq.groq_config import GroqConfig
 from codius.infrastructure.adapter.llm.mistral.mistral_config import MistralConfig
+from codius.infrastructure.adapter.llm.ollama.ollama_config import OllamaConfig
 from codius.infrastructure.adapter.llm.openai.openai_config import OpenAiConfig
 
 
@@ -17,3 +18,4 @@ class LlmConfig:
     google: Optional[GoogleConfig] = None
     mistral: Optional[MistralConfig] = None
     groq: Optional[GroqConfig] = None
+    ollama: Optional[OllamaConfig] = None
diff --git a/src/codius/infrastructure/adapter/llm/ollama/__init__.py b/src/codius/infrastructure/adapter/llm/ollama/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/src/codius/infrastructure/adapter/llm/ollama/ollama_config.py b/src/codius/infrastructure/adapter/llm/ollama/ollama_config.py
new file mode 100644
index 0000000..4063575
--- /dev/null
+++ b/src/codius/infrastructure/adapter/llm/ollama/ollama_config.py
@@ -0,0 +1,9 @@
+from dataclasses import dataclass
+
+
+@dataclass
+class OllamaConfig:
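+    """Connection settings for a local Ollama server, e.g. http://localhost:11434."""
+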
+    server_url: str
+    model: str
diff --git a/src/codius/infrastructure/adapter/llm/ollama/ollama_llm_adapter.py b/src/codius/infrastructure/adapter/llm/ollama/ollama_llm_adapter.py
new file mode 100644
index 0000000..9b166f1
--- /dev/null
+++ b/src/codius/infrastructure/adapter/llm/ollama/ollama_llm_adapter.py
@@ -0,0 +1,55 @@
+import logging
+import time
+
+import requests
+
+from codius.domain.model.config.config import Config
+from codius.domain.model.port.llm_port import LlmPort
+
+logger = logging.getLogger(__name__)
+
+
+class OllamaLlmAdapter(LlmPort):
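+    """LlmPort adapter that talks to a local Ollama server over its HTTP API."""
+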
+    def __init__(self, config: Config):
+        cfg = config.llm.ollama
+        if not cfg:
+            raise RuntimeError("Ollama config not found. Add `llm.ollama` in config.")
+        self.base_url = cfg.server_url.rstrip("/")
+        self.model = cfg.model
+        self.options = {}
+        self.session = requests.Session()
+        self.timeout = 300
+
+    def call_prompt(self, prompt: str) -> str:
+        start = time.time()
+        payload = {
+            "model": self.model,
+            "prompt": prompt,
+            "options": self.options,
+            "stream": False,
+        }
+        r = self.session.post(f"{self.base_url}/api/generate", json=payload, timeout=self.timeout)
+        r.raise_for_status()
+        data = r.json()
+        content = data.get("response", "")
+        logger.debug("Ollama /generate returned %d chars in %d ms", len(content), int((time.time() - start) * 1000))
+        return content
+
+    def call_chat(self, messages: list[dict]) -> str:
+        # messages format: [{"role": "system|user|assistant", "content": "..."}, ...]
+        start = time.time()
+        payload = {
+            "model": self.model,
+            "messages": messages,
+            "options": self.options,
+            "stream": False,
+        }
+        r = self.session.post(f"{self.base_url}/api/chat", json=payload, timeout=self.timeout)
+        r.raise_for_status()
+        data = r.json()
+        # final message is in data["message"]["content"] per Ollama docs
+        msg = (data.get("message") or {}).get("content", "")
+        logger.debug("Ollama /chat returned %d chars in %d ms", len(msg), int((time.time() - start) * 1000))
+        return msg
diff --git a/src/codius/infrastructure/services/graph_service.py b/src/codius/infrastructure/services/graph_service.py
index 02d1135..b26c4c6 100644
--- a/src/codius/infrastructure/services/graph_service.py
+++ b/src/codius/infrastructure/services/graph_service.py
@@ -9,6 +9,7 @@
 from codius.graph.nodes.generate_code import generate_code
 from codius.graph.nodes.handle_intent_error import handle_intent_error
 from codius.graph.nodes.handle_unclear_intent import handle_unclear_intent
+from codius.graph.nodes.handle_unsupported_intent import handle_unsupported_intent
 from codius.graph.nodes.plan_changes import plan_changes
 from codius.graph.nodes.revise_intent import revise_intent
 from codius.graph.routers.approval_router import route_by_user_approval
@@ -69,6 +70,7 @@ def _build_graph(self):
         # Add nodes
         graph.add_node("DistillIntent", distill_intent)
         graph.add_node("HandleUnclearIntent", handle_unclear_intent)
+        graph.add_node("HandleUnsupportedIntent", handle_unsupported_intent)
         graph.add_node("HandleIntentError", handle_intent_error)
         graph.add_node("ExtractProjectMetadata", extract_project_metadata)
         graph.add_node("ExtractBuildingBlocks", extract_building_blocks)
@@ -84,7 +86,8 @@ def _build_graph(self):
         graph.add_conditional_edges("DistillIntent", route_by_intent, {
             "valid": "ExtractProjectMetadata",
             "unclear": "HandleUnclearIntent",
-            "error": "HandleIntentError"
+            "error": "HandleIntentError",
+            "unsupported": "HandleUnsupportedIntent",
         })
         graph.add_conditional_edges("Preview", route_by_user_approval, {
             "apply": "ApplyChanges",
diff --git a/src/codius/ui/commands/slash_commands.py b/src/codius/ui/commands/slash_commands.py
index b80a7a8..f2a9734 100644
--- a/src/codius/ui/commands/slash_commands.py
+++ b/src/codius/ui/commands/slash_commands.py
@@ -12,6 +12,8 @@
 from codius.di import container
 from codius.domain.model.config.anthropic.anthropic_llm_model import AnthropicModel
 from codius.domain.model.config.config import Config
+from codius.domain.model.config.llm_provider import LlmProvider
+from codius.domain.model.config.ollama.ollama_llm_model import OllamaModel
 from codius.domain.model.config.openai.openai_llm_model import OpenAiModel
 from codius.domain.services.config_service import ConfigService
 from codius.domain.services.session_service import SessionService
@@ -22,8 +24,9 @@
 from codius.utils import format_timestamp
 
 MODEL_CHOICES = {
-    "openai": list(OpenAiModel),
-    "anthropic": list(AnthropicModel),
+    LlmProvider.OPENAI: list(OpenAiModel),
+    LlmProvider.ANTHROPIC: list(AnthropicModel),
+    LlmProvider.OLLAMA: list(OllamaModel),
 }
 
 console = Console()
@@ -149,18 +152,21 @@ def handle_slash_command(command: str):
 
         provider, model = all_combinations[int(selection) - 1]
 
-        # Prompt for API key if missing
-        llm_section = getattr(config.llm, provider, None)
-        if llm_section is None:
-            raise ValueError(f"LLM config for provider '{provider}' not found.")
+        # Prompt for API key if missing (not needed for local Ollama)
+        if provider != LlmProvider.OLLAMA:
+            llm_section = getattr(config.llm, provider.value, None)
+            if llm_section is None:
+                raise ValueError(f"LLM config for provider '{provider.value}' not found.")
 
-        current_key = llm_section.api_key
+            current_key = llm_section.api_key
 
-        if not current_key:
-            key = getpass(
-                f"Enter API key for {provider} (leave blank to skip): ").strip()
-            if key:
-                config_service.set_config_value(f"llm.{provider}.api_key", key)
+            if not current_key:
+                key = getpass(
+                    f"Enter API key for {provider.value} (leave blank to skip): ").strip()
+                if key:
+                    config_service.set_config_value(f"llm.{provider.value}.api_key", key)
 
         # Set provider and model
-        config_service.set_config_value("llm.provider", provider)
+        config_service.set_config_value("llm.provider", provider.value)
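+        # NOTE: .value is used above because formatting a str/Enum mixin
+        # directly renders as "LlmProvider.X" on Python 3.11+.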