Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
12 changes: 12 additions & 0 deletions bugbug/tools/test_generation/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from bugbug.tools.test_generation.agent import TestGenerationTool
from bugbug.tools.test_generation.data_types import TestGenerationResult

# Public API of the test-generation tool package: the tool entry point
# and its result container.
__all__ = [
    "TestGenerationResult",
    "TestGenerationTool",
]
114 changes: 114 additions & 0 deletions bugbug/tools/test_generation/agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,114 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

"""Test case and test step generation tool implementation."""

from __future__ import annotations

from typing import Any

from langchain.agents import create_agent
from langchain.chat_models import BaseChatModel, init_chat_model
from langchain.messages import HumanMessage

from bugbug.tools.base import GenerativeModelTool
from bugbug.tools.core.llms import DEFAULT_OPENAI_MODEL
from bugbug.tools.test_generation.data_types import TestGenerationResult
from bugbug.tools.test_generation.prompts import (
TEST_CASES_PROMPT_TEMPLATE,
TEST_STEPS_PROMPT_TEMPLATE,
)


def _message_content_to_text(content: Any) -> str:
if isinstance(content, str):
return content

if isinstance(content, list):
return "".join(
item.get("text", "")
for item in content
if isinstance(item, dict) and item.get("type") == "text"
)

return str(content)


class TestGenerationTool(GenerativeModelTool):
    """Generate QA test cases, and optionally test steps, for a feature.

    The tool wraps a LangChain agent around a chat model and drives it
    with two prompt templates: one that proposes missed test cases and
    one that expands test cases into step-by-step instructions.
    """

    def __init__(
        self,
        llm: BaseChatModel,
        target_software: str = "Mozilla Firefox",
    ) -> None:
        """Wrap *llm* in an agent targeting *target_software*."""
        self.target_software = target_software
        self.agent = create_agent(llm)

    @classmethod
    def create(cls, **kwargs):
        """Factory method to instantiate the tool with default dependencies."""
        # Fall back to the default OpenAI model unless the caller supplied one.
        kwargs.setdefault("llm", init_chat_model(DEFAULT_OPENAI_MODEL))
        return cls(**kwargs)

    def _invoke_llm(self, prompt: str) -> str:
        """Send *prompt* to the agent and return the final reply as text."""
        messages = [HumanMessage(prompt)]
        response = self.agent.invoke({"messages": messages})
        final_message = response["messages"][-1]
        return _message_content_to_text(final_message.content).strip()

    def generate_test_cases(
        self,
        feature_description: str,
        test_scope: str,
        qa_test_cases: str = "",
    ) -> str:
        """Generate missed test cases for a feature.

        Args:
            feature_description: Free-text description of the feature.
            test_scope: The feature's scope of testing.
            qa_test_cases: Existing test cases; "N/A" is substituted when
                empty so the prompt always has something to show.

        Returns:
            The model's reply: a plain-text list of new test cases.
        """
        fields = {
            "target_software": self.target_software,
            "feature_description": feature_description,
            "test_scope": test_scope,
            "qa_test_cases": qa_test_cases if qa_test_cases else "N/A",
        }
        return self._invoke_llm(TEST_CASES_PROMPT_TEMPLATE.format(**fields))

    def generate_test_steps(
        self,
        feature_description: str,
        test_cases: str,
    ) -> str:
        """Generate detailed test steps for each test case.

        Args:
            feature_description: Free-text description of the feature.
            test_cases: The test cases to expand into steps.

        Returns:
            The model's reply: steps for each test case, as plain text.
        """
        fields = {
            "target_software": self.target_software,
            "feature_description": feature_description,
            "test_cases": test_cases,
        }
        return self._invoke_llm(TEST_STEPS_PROMPT_TEMPLATE.format(**fields))

    def run(
        self,
        feature_description: str,
        test_scope: str,
        qa_test_cases: str = "",
        generate_steps: bool = True,
    ) -> TestGenerationResult:
        """Generate test cases and optionally generate steps for them.

        Args:
            feature_description: Free-text description of the feature.
            test_scope: The feature's scope of testing.
            qa_test_cases: Existing test cases, if any.
            generate_steps: When true, also expand the generated cases
                into steps (skipped if no cases were produced).

        Returns:
            A :class:`TestGenerationResult` with the generated cases and,
            when requested and available, their steps.
        """
        cases = self.generate_test_cases(
            feature_description,
            test_scope,
            qa_test_cases,
        )

        # Only ask for steps when there are cases to expand.
        steps = (
            self.generate_test_steps(feature_description, cases)
            if generate_steps and cases
            else None
        )

        return TestGenerationResult(test_cases=cases, test_steps=steps)
12 changes: 12 additions & 0 deletions bugbug/tools/test_generation/data_types.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

from dataclasses import dataclass


@dataclass(frozen=True)
class TestGenerationResult:
    """Immutable container for the output of a test-generation run."""

    # Generated test cases, as plain text produced by the model.
    test_cases: str
    # Generated steps for those cases; None when step generation was
    # skipped or no test cases were produced.
    # NOTE(review): `str | None` is evaluated at class-creation time here
    # (this module has no `from __future__ import annotations`), so it
    # requires Python 3.10+ — confirm against the project's minimum version.
    test_steps: str | None = None
60 changes: 60 additions & 0 deletions bugbug/tools/test_generation/prompts.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

# Prompt for proposing up to 15 *new* (missed) test cases for a feature.
# str.format placeholders: {target_software}, {feature_description},
# {test_scope}, {qa_test_cases} (existing cases, or "N/A" when none).
TEST_CASES_PROMPT_TEMPLATE = """You are an expert Quality Assurance Engineer with expertise in designing high level test cases for features of the {target_software} web browser.
You are given a feature's description, its scope of testing, and a list of already existing test cases.
Using the knowledge and information you are given, generate no more than 15 test cases that have been missed for the feature.

-- This is the feature's description --:
{feature_description}

-- These are the existing test cases so far for the feature --:
{qa_test_cases}

-- This is the feature's scope of testing --:
{test_scope}

-- Here are some tips for success --:
1. Thoroughly understand the feature from the description, scope of testing and the existing test cases.
2. Alter the wording while generating test cases.
3. Check to see if each generated case is relevant to the feature.
4. Check to see if each generated case is within the scope of testing.
5. Check to see if each generated case is dissimilar to any existing test cases.

The test cases should be presented in a numbered list, with each entry being a single, concise test case.
Avoid using a title and markdown formatting."""


# Prompt for expanding test cases into numbered step-by-step instructions.
# str.format placeholders: {target_software}, {feature_description},
# {test_cases}.  Includes two few-shot examples of the expected format.
TEST_STEPS_PROMPT_TEMPLATE = """You are an expert Quality Assurance Engineer with expertise in designing detailed test steps for test cases of features of the {target_software} web browser.
You are given a feature's description and a list of test cases.
Using the knowledge and information you are given, generate test steps for each test case.

-- This is the feature's description --:
{feature_description}

-- These are the test cases for the feature --:
{test_cases}

-- Here are some tips for success --:
1. Thoroughly understand the feature from the description and the test cases.
2. For each test case, generate clear and concise steps to execute the test case.
3. Each test case should have its own set of steps.
4. Present the steps in a numbered list under each test case.
Avoid using a title and markdown formatting.

-- Here are some examples --:
Test Case 1: Ensure that Rich suggestions entries match the design
Test Steps:
1. Launch Firefox.
2. Start typing a popular keyword inside the Address Bar.
3. Observe the Rich entities icon and description.

Test Case 2: Search-shortcut - Ensure that Rich entities are accessible via keyboard
Test Steps:
1. Launch Firefox.
2. Observe the Address Bar.
3. Click inside the Address Bar, select the google search shortcut.
4. Press 'Down' arrow key.
5. Navigate through the Rich entities using Up/Down arrow keys."""