Merged
28 commits
62c24f5 fix: dummy change to trigger please release patch bump (runemalm, Jul 11, 2025)
81c7135 Merge master. (runemalm, Jul 15, 2025)
8f66244 chore(ci): add cz toml file (runemalm, Jul 15, 2025)
4e1059a chore(ci): replace bump version package with custom script (runemalm, Jul 15, 2025)
ae3f6e2 chore: install typing-extensions missing after uninstalled package (runemalm, Jul 15, 2025)
a16a2f1 feat: add support for adding value object intents (runemalm, Jul 15, 2025)
27b9833 Merge branch 'master' into develop (runemalm, Jul 15, 2025)
7ce2f15 chore: add missing permission to release workflow (runemalm, Jul 15, 2025)
d9e6383 Merge branch 'master' into develop (runemalm, Jul 15, 2025)
0c3a73b Merge branch 'master' into develop (runemalm, Jul 15, 2025)
49f739f chore: fix bump_version.py script (look at all commits since last rel… (runemalm, Jul 15, 2025)
37c26cf chore: fix badge in readme (runemalm, Jul 15, 2025)
2825dbd Add target to test install released codius package with a specific py… (runemalm, Jul 19, 2025)
8a0be1b Move makefile targets. (runemalm, Jul 19, 2025)
f8a38d0 chore: replace pipenv with poetry (runemalm, Jul 19, 2025)
806799a chore: regenerate poetry.lock after pyproject.toml changes (runemalm, Jul 19, 2025)
f9e12fd chore: install missing pyfakefs dev package (runemalm, Jul 19, 2025)
7a7a88b chore: sync version to pyproject.toml on bump (runemalm, Jul 20, 2025)
acf9441 chore: fix dependency issues (runemalm, Jul 20, 2025)
6a5a263 chore: move plan step example json to plan_examples.py (runemalm, Jul 20, 2025)
3c5fa92 fix: deletion flow lead to crash (runemalm, Jul 20, 2025)
8d87baa Merge master. (runemalm, Jul 20, 2025)
86e995f fix: crash on requesting to delete building blocks (runemalm, Jul 20, 2025)
ea83007 Merge branch 'master' into develop (runemalm, Jul 20, 2025)
3ce1358 chore: remove alpha note from readme (runemalm, Jul 20, 2025)
88ed58b chore: show message to user if building block requested is not suppor… (runemalm, Jul 24, 2025)
857d257 feat: add ollama provider and the new gpt oss-models (runemalm, Aug 10, 2025)
5a84eea chore: upgrade py-dependency-injection from 1.0.0-rc.1 -> 1.0.0-rc.2 (runemalm, Aug 10, 2025)
8 changes: 0 additions & 8 deletions README.md
@@ -27,14 +27,6 @@

---

> ⚠️ **Alpha Release**
>
> Codius is currently in alpha.
> Features may be incomplete, unstable, or subject to change.
> Use with care in production environments — and help shape it by providing feedback or contributing!

---

## Why Codius?

> Free domain-driven designers from boilerplate and let them focus on modeling.
14 changes: 7 additions & 7 deletions poetry.lock

Some generated files are not rendered by default.

4 changes: 2 additions & 2 deletions pyproject.toml
@@ -26,15 +26,15 @@ dependencies = [
"jinja2",
"langgraph",
"langchain-openai",
"py-dependency-injection >=1.0.0b3,<2.0.0",
"python-dateutil",
"python-dotenv",
"pyyaml",
"prompt-toolkit",
"rich",
"tree-sitter==0.23.2",
"tree-sitter-c-sharp",
"typing-extensions"
"typing-extensions",
"py-dependency-injection (>=1.0.0rc2,<2.0.0)",
]

[tool.poetry.group.dev]
16 changes: 15 additions & 1 deletion src/codius/di.py
@@ -3,9 +3,11 @@
from dependency_injection.container import DependencyContainer

from codius.domain.model.config.config import Config
from codius.domain.model.config.llm_provider import LlmProvider
from codius.domain.model.port.llm_port import LlmPort
from codius.domain.services.config_service import ConfigService
from codius.domain.services.session_service import SessionService
from codius.infrastructure.adapter.llm.ollama.ollama_llm_adapter import OllamaLlmAdapter

from codius.infrastructure.adapter.llm.openai.openai_llm_adapter import OpenAiLlmAdapter
from codius.infrastructure.repository.session_repository import SessionRepository
@@ -52,4 +54,16 @@ def register_services(config: Config, args: argparse.Namespace):
container.register_scoped(OpenDddConventionService)
container.register_scoped(TreeSitterService)
container.register_scoped(LlmService)
container.register_scoped(LlmPort, OpenAiLlmAdapter)

adapter_by_provider = {
LlmProvider.OPENAI: OpenAiLlmAdapter,
LlmProvider.OLLAMA: OllamaLlmAdapter,
}

provider = config.llm.provider
adapter_cls = adapter_by_provider.get(provider)

if adapter_cls is None:
raise RuntimeError(f"Unsupported LLM provider in config: {provider}")

container.register_scoped(LlmPort, adapter_cls)
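With this change the adapter is resolved from configuration instead of being hard-wired to OpenAI. A config along these lines would select the new Ollama adapter — a sketch only; the key names follow the parsing logic in this PR, and the URL and model values are placeholders:

```python
# Sketch: the shape of config that routes LlmPort to OllamaLlmAdapter.
# Keys mirror this PR's config parser; values are placeholders.
config_snippet = {
    "llm": {
        "provider": "ollama",
        "ollama": {
            "server_url": "http://localhost:11434",
            "model": "gpt-oss:20b",
        },
    },
}
```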
8 changes: 8 additions & 0 deletions src/codius/domain/model/config/config.py
@@ -9,6 +9,14 @@
"openai": {
"model": "gpt-4o",
"api_key": "sk-... # Replace with your OpenAI API key"
},
"anthropic": {
"model": "claude-3-opus",
"api_key": "xyz-... # Replace with your Anthropic API key"
},
"ollama": {
"base_url": "http://localhost:11434",
"model": "gpt-oss:20b"
}
},
"approval_mode": ApprovalMode.SUGGEST.value,
1 change: 1 addition & 0 deletions src/codius/domain/model/config/llm_provider.py
@@ -7,3 +7,4 @@ class LlmProvider(str, Enum):
GOOGLE = "google"
MISTRAL = "mistral"
GROQ = "groq"
OLLAMA = "ollama"
Empty file.
8 changes: 8 additions & 0 deletions src/codius/domain/model/config/ollama/ollama_llm_model.py
@@ -0,0 +1,8 @@
from enum import Enum


class OllamaModel(str, Enum):
GPT_OSS_20B = "gpt-oss:20b"
GPT_OSS_120B = "gpt-oss:120b"
LLAMA_31_8B = "llama3.1:8b-instruct-q4_K_M"
MISTRAL_7B = "mistral:7b"
17 changes: 17 additions & 0 deletions src/codius/domain/model/intents/intent_type.py
@@ -1,5 +1,8 @@
from enum import Enum

from codius.infrastructure.services.code_scanner.model.building_block_type import \
BuildingBlockType


class IntentType(str, Enum):
ADD_AGGREGATE = "add_aggregate"
@@ -22,3 +25,17 @@ class IntentType(str, Enum):
REMOVE_REPOSITORY_METHOD = "remove_repository_method"

UNSURE = "unsure"

@property
def building_block(self) -> BuildingBlockType:
if self.name.startswith("ADD_AGGREGATE") or self.name.startswith(
"REMOVE_AGGREGATE"):
return BuildingBlockType.AGGREGATE_ROOT
elif self.name.startswith("ADD_VALUE_OBJECT") or self.name.startswith(
"REMOVE_VALUE_OBJECT"):
return BuildingBlockType.VALUE_OBJECT
elif self.name.startswith("ADD_REPOSITORY") or self.name.startswith(
"REMOVE_REPOSITORY"):
return BuildingBlockType.REPOSITORY
else:
raise NotImplementedError(f"Building block not mapped for intent {self}")
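A minimal sketch of the new property in use, assuming the enum members referenced in the diff above (the assertion exercises the ADD_AGGREGATE branch):

```python
from codius.domain.model.intents.intent_type import IntentType
from codius.infrastructure.services.code_scanner.model.building_block_type import (
    BuildingBlockType,
)

# ADD_AGGREGATE* / REMOVE_AGGREGATE* intents map to the aggregate-root block;
# intents with no mapping (e.g. UNSURE) raise NotImplementedError.
assert IntentType.ADD_AGGREGATE.building_block is BuildingBlockType.AGGREGATE_ROOT
```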
38 changes: 33 additions & 5 deletions src/codius/domain/model/prompts/distill_intent_prompt.py
@@ -22,6 +22,8 @@
from codius.domain.model.intents.value_object.add_value_object_property_intent import AddValueObjectPropertyIntent
from codius.domain.model.intents.value_object.remove_value_object_property_intent import RemoveValueObjectPropertyIntent
from codius.domain.model.intents.repository.add_repository_intent import AddRepositoryIntent
from codius.infrastructure.services.code_scanner.model.building_block_type import \
BuildingBlockType


@dataclass(frozen=True)
@@ -49,6 +51,14 @@ def as_prompt(self) -> str:
RemoveRepositoryMethodIntent,
]

all_blocks = {bb.value for bb in BuildingBlockType}
supported_blocks = {intent.building_block.value for intent in IntentType if
intent != IntentType.UNSURE}
unsupported_blocks = all_blocks - supported_blocks

supported_blocks_text = "\n".join(f"- {b}" for b in sorted(supported_blocks))
unsupported_blocks_text = "\n".join(f"- {b}" for b in sorted(unsupported_blocks))

example_blocks = "\n".join(
f"### {cls.intent.value}\n```json\n{cls.to_example_json()}\n```"
for cls in example_intents
@@ -88,6 +98,29 @@ def as_prompt(self) -> str:
**Database Providers**:
{database_text}

---

### Known DDD Building Blocks

These are the known building blocks in Domain-Driven Design:

**Supported:**
{supported_blocks_text}

**Not yet supported:**
{unsupported_blocks_text}

If the user refers to a known DDD building block that is **not yet supported**, return:

```json
{{ "intent": "unsupported", "building_block": "<block_name>" }}
```

If the user's intent is unclear or unsupported, respond only with:

```json
{{ "intent": "unsure" }}

### Instructions

- Break complex modeling instructions into small, **granular intents**.
@@ -109,9 +142,4 @@ def as_prompt(self) -> str:
### Examples:

{example_blocks}

If the user's intent is unclear or unsupported, respond only with:

```json
{{ "intent": "unsure" }}
"""
23 changes: 16 additions & 7 deletions src/codius/domain/services/config_service.py
@@ -7,10 +7,12 @@
from codius.domain.model.config.approval_mode import ApprovalMode
from codius.domain.model.config.config import Config
from codius.domain.model.config.llm_provider import LlmProvider
from codius.domain.model.config.ollama.ollama_llm_model import OllamaModel
from codius.domain.model.config.openai.openai_llm_model import OpenAiModel

from codius.infrastructure.adapter.llm.anthropic.anthropic_config import AnthropicConfig
from codius.infrastructure.adapter.llm.llm_config import LlmConfig
from codius.infrastructure.adapter.llm.ollama.ollama_config import OllamaConfig
from codius.infrastructure.adapter.llm.openai.openai_config import OpenAiConfig
from codius.infrastructure.services.project_metadata_service import ProjectMetadataService

@@ -55,20 +57,27 @@ def parse_structured(cls, raw: dict) -> Config:

llm_config = LlmConfig(provider=provider)

openai = llm_section.get("openai", {})
raw_model = openai.get("model") or "gpt-4o"
llm_config.openai = OpenAiConfig(
model=OpenAiModel(raw_model),
api_key=openai.get("api_key", "")
)

anthropic = llm_section.get("anthropic", {})
raw_model = anthropic.get("model") or "claude-3-opus"
llm_config.anthropic = AnthropicConfig(
model=AnthropicModel(raw_model),
api_key=anthropic.get("api_key", "")
)

ollama = llm_section.get("ollama", {})
raw_model = ollama.get("model") or OllamaModel.GPT_OSS_20B.value
llm_config.ollama = OllamaConfig(
model=OllamaModel(raw_model),
server_url=ollama.get("server_url", "")
)

openai = llm_section.get("openai", {})
raw_model = openai.get("model") or "gpt-4o"
llm_config.openai = OpenAiConfig(
model=OpenAiModel(raw_model),
api_key=openai.get("api_key", "")
)

log_level = raw.get("log_level", "warning").lower()
allowed_levels = {"debug", "info", "warning", "error", "critical"}
if log_level not in allowed_levels:
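Worth noting: because OllamaModel is a plain Enum, `OllamaModel(raw_model)` accepts only the model strings declared in the enum and raises ValueError for anything else. A quick sketch of that behavior (model strings follow the enum added in this PR):

```python
from codius.domain.model.config.ollama.ollama_llm_model import OllamaModel

OllamaModel("gpt-oss:20b")  # -> OllamaModel.GPT_OSS_20B
OllamaModel("qwen2:7b")     # raises ValueError: not a declared Ollama model
```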
34 changes: 34 additions & 0 deletions src/codius/graph/nodes/handle_unsupported_intent.py
@@ -0,0 +1,34 @@
from typing import cast


def handle_unsupported_intent(state: dict) -> dict:
from codius.domain.model.intents.intent_type import IntentType

unsupported_blocks = {
intent.get("building_block")
for intent in state.get("intent", [])
if intent.get("intent") == "unsupported"
}
block_list = ", ".join(sorted(unsupported_blocks))

# Dynamically get supported building blocks
supported_blocks = {
intent.building_block.value
for intent in IntentType
if intent != IntentType.UNSURE
}
supported_blocks_list = "\n".join(f"- {b}" for b in sorted(supported_blocks))

state["final_output"] = (
f"⚠️ The assistant understood your request, but the following building block(s) aren't supported yet:\n"
f"- {block_list}\n\n"
"We're shipping updates frequently, so it might already be available in a newer version.\n\n"
"👉 Try updating Codius:\n"
"```bash\npip install --upgrade codius\n```\n"
"Then rerun your request.\n\n"
"In the meantime, you can try working with one of the currently supported building blocks:\n"
f"{supported_blocks_list}\n\n"
"💡 Need help or want to request this feature? Let us know on GitHub!"
)

return state
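An illustrative input/output for this node — the state shape follows the code above, but the building-block name here is a placeholder, not necessarily one the scanner emits:

```python
# Hypothetical state produced by the intent-distillation step.
state = {"intent": [{"intent": "unsupported", "building_block": "domain_service"}]}

state = handle_unsupported_intent(state)
print(state["final_output"])  # warning naming "domain_service" plus the supported blocks
```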
2 changes: 2 additions & 0 deletions src/codius/graph/routers/intent_router.py
@@ -16,6 +16,8 @@ def route_by_intent(state: dict) -> str:

if intent_type == "error":
return "error"
elif intent_type == "unsupported":
return "unsupported"
elif intent_type and intent_type not in {"none", "greeting", "unsure"}:
return "valid"

2 changes: 2 additions & 0 deletions src/codius/infrastructure/adapter/llm/llm_config.py
@@ -6,6 +6,7 @@
from codius.infrastructure.adapter.llm.google.google_config import GoogleConfig
from codius.infrastructure.adapter.llm.groq.groq_config import GroqConfig
from codius.infrastructure.adapter.llm.mistral.mistral_config import MistralConfig
from codius.infrastructure.adapter.llm.ollama.ollama_config import OllamaConfig
from codius.infrastructure.adapter.llm.openai.openai_config import OpenAiConfig


@@ -17,3 +18,4 @@ class LlmConfig:
google: Optional[GoogleConfig] = None
mistral: Optional[MistralConfig] = None
groq: Optional[GroqConfig] = None
ollama: Optional[OllamaConfig] = None
Empty file.
7 changes: 7 additions & 0 deletions src/codius/infrastructure/adapter/llm/ollama/ollama_config.py
@@ -0,0 +1,7 @@
from dataclasses import dataclass


@dataclass
class OllamaConfig:
server_url: str
model: str
48 changes: 48 additions & 0 deletions src/codius/infrastructure/adapter/llm/ollama/ollama_llm_adapter.py
@@ -0,0 +1,48 @@
import logging, time, requests
from codius.domain.model.config.config import Config
from codius.domain.model.port.llm_port import LlmPort

logger = logging.getLogger(__name__)

class OllamaLlmAdapter(LlmPort):
def __init__(self, config: Config):
cfg = config.llm.ollama
if not cfg:
raise RuntimeError("Ollama config not found. Add `llm.ollama` in config.")
self.base_url = cfg.server_url.rstrip("/")
self.model = cfg.model
self.options = {}
self.session = requests.Session()
self.timeout = 300

def call_prompt(self, prompt: str) -> str:
start = time.time()
payload = {
"model": self.model,
"prompt": prompt,
"options": self.options,
"stream": False,
}
r = self.session.post(f"{self.base_url}/api/generate", json=payload, timeout=self.timeout)
r.raise_for_status()
data = r.json()
content = data.get("response", "")
logger.debug("Ollama /generate returned %d chars in %d ms", len(content), int((time.time()-start)*1000))
return content

def call_chat(self, messages: list[dict]) -> str:
# messages format: [{"role": "system|user|assistant", "content": "..."}, ...]
start = time.time()
payload = {
"model": self.model,
"messages": messages,
"options": self.options,
"stream": False,
}
r = self.session.post(f"{self.base_url}/api/chat", json=payload, timeout=self.timeout)
r.raise_for_status()
data = r.json()
# final message is in data["message"]["content"] per Ollama docs
msg = (data.get("message") or {}).get("content", "")
logger.debug("Ollama /chat returned %d chars in %d ms", len(msg), int((time.time()-start)*1000))
return msg
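For reference, the non-streaming HTTP call the adapter wraps can be reproduced standalone. This assumes a local Ollama server is running and the model has been pulled; the model name is a placeholder:

```python
import requests

# Same /api/generate call that OllamaLlmAdapter.call_prompt makes.
resp = requests.post(
    "http://localhost:11434/api/generate",
    json={"model": "gpt-oss:20b", "prompt": "Say hello.", "stream": False},
    timeout=300,
)
resp.raise_for_status()
print(resp.json().get("response", ""))
```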