Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
[project]
name = "GitAuto"
version = "1.57.0"
version = "1.58.0"
requires-python = ">=3.14"
dependencies = [
"annotated-doc==0.0.4",
Expand Down
11 changes: 11 additions & 0 deletions services/git/get_local_head_sha.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
from utils.command.run_subprocess import run_subprocess
from utils.error.handle_exceptions import handle_exceptions
from utils.logging.logging_config import logger


@handle_exceptions(default_return_value="", raise_on_error=False)
def get_local_head_sha(clone_dir: str):
    """Return the SHA of the current HEAD commit in the local clone at clone_dir.

    Runs `git rev-parse HEAD` via run_subprocess; on any error the
    handle_exceptions decorator swallows it and returns "" instead.
    """
    completed = run_subprocess(args=["git", "rev-parse", "HEAD"], cwd=clone_dir)
    head_sha = completed.stdout.strip()
    logger.info("Local HEAD in %s: %s", clone_dir, head_sha)
    return head_sha
66 changes: 66 additions & 0 deletions services/git/test_get_local_head_sha.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
# pyright: reportUnusedVariable=false
import subprocess
from unittest.mock import patch

import pytest

from services.git.get_local_head_sha import get_local_head_sha


def test_returns_empty_string_when_subprocess_fails():
    # handle_exceptions should swallow the raised error and yield the "" default.
    with patch("services.git.get_local_head_sha.run_subprocess") as mocked_run:
        mocked_run.side_effect = ValueError("not a git repo")
        assert get_local_head_sha(clone_dir="/nonexistent") == ""


def test_returns_stripped_sha_from_stdout():
    # Stub out run_subprocess so no real git process is spawned.
    class StubResult:
        stdout = "abc123def456\n"

    target = "services.git.get_local_head_sha.run_subprocess"
    with patch(target, return_value=StubResult()) as mocked_run:
        # Trailing newline from git must be stripped off the returned SHA.
        assert get_local_head_sha(clone_dir="/tmp/fake") == "abc123def456"
        mocked_run.assert_called_once_with(
            args=["git", "rev-parse", "HEAD"], cwd="/tmp/fake"
        )


@pytest.mark.integration
def test_sociable_reads_real_head_sha(local_repo):
    """get_local_head_sha must agree with `git rev-parse HEAD` run directly."""
    _clone_url, work_dir = local_repo
    proc = subprocess.run(
        ["git", "rev-parse", "HEAD"],
        cwd=work_dir,
        check=True,
        capture_output=True,
        text=True,
    )
    assert get_local_head_sha(clone_dir=work_dir) == proc.stdout.strip()


@pytest.mark.integration
def test_sociable_head_sha_changes_after_commit(local_repo):
    """Committing a new file must advance the SHA reported by get_local_head_sha."""
    _clone_url, work_dir = local_repo
    before = get_local_head_sha(clone_dir=work_dir)

    # Stage and commit a fresh file so HEAD moves to a new commit.
    with open(f"{work_dir}/new_file.txt", "w", encoding="utf-8") as handle:
        handle.write("new content\n")
    for cmd in (
        ["git", "add", "new_file.txt"],
        ["git", "commit", "-m", "add new file"],
    ):
        subprocess.run(cmd, cwd=work_dir, check=True, capture_output=True)

    after = get_local_head_sha(clone_dir=work_dir)
    assert after != before
    assert after != ""
    assert before != ""
28 changes: 28 additions & 0 deletions services/webhook/check_suite_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@
from services.git.create_empty_commit import create_empty_commit
from services.git.get_clone_dir import get_clone_dir
from services.git.get_clone_url import get_clone_url
from services.git.get_local_head_sha import get_local_head_sha
from services.git.clone_repo_and_install_dependencies import (
clone_repo_and_install_dependencies,
)
Expand Down Expand Up @@ -60,6 +61,7 @@
from services.supabase.circleci_tokens.get_circleci_token import get_circleci_token
from services.supabase.codecov_tokens.get_codecov_token import get_codecov_token
from services.supabase.create_user_request import create_user_request
from services.supabase.llm_requests.get_total_cost_for_pr import get_total_cost_for_pr
from services.supabase.repositories.get_repository import get_repository
from services.supabase.usage.check_older_active_test_failure import (
check_older_active_test_failure_request,
Expand Down Expand Up @@ -928,6 +930,7 @@ async def handle_check_suite(
)

cost_cap_usd = get_credit_price(model_id) * COST_CAP_RATIO
initial_head_sha = base_args["latest_commit_sha"]
concurrent_push_detected = False

for iteration in range(MAX_ITERATIONS):
Expand Down Expand Up @@ -1041,6 +1044,26 @@ async def handle_check_suite(
final_result = await verify_task_is_complete(base_args=base_args)
is_completed = final_result.success

# Decide whether to create the retrigger empty commit.
# If the agent made no code changes on this run AND we're at/over the PR's cost cap, creating an empty commit would retrigger CI on the same failing code, fire this handler again, re-bail on cost cap (cost is PR-cumulative), commit again, loop forever.
# When commits were pushed this run, retrigger CI — those changes might fix it.
final_head_sha = get_local_head_sha(clone_dir=clone_dir)
has_change_commits = bool(final_head_sha) and final_head_sha != initial_head_sha
total_cost_usd = get_total_cost_for_pr(
owner_name=owner_name, repo_name=repo_name, pr_number=pr_number
)
cost_cap_reached = total_cost_usd >= cost_cap_usd
logger.info(
"Retrigger decision on PR #%s: initial_head=%s final_head=%s has_change_commits=%s total_cost=$%.4f cost_cap=$%.4f cost_cap_reached=%s",
pr_number,
initial_head_sha,
final_head_sha,
has_change_commits,
total_cost_usd,
cost_cap_usd,
cost_cap_reached,
)

# Concurrent push bail: skip final empty commit (racer's push already triggers CI), post accurate message
if concurrent_push_detected:
logger.info(
Expand All @@ -1049,6 +1072,11 @@ async def handle_check_suite(
)
bail_msg = f"Another commit landed on `{head_branch}` while I was working on `{check_run_name}`. Their push triggers CI on its own, so I'm stopping here — your push stands."
update_comment(body=bail_msg, base_args=base_args)
elif cost_cap_reached and not has_change_commits:
logger.info(
"check_suite: cost cap reached with no change commits on PR #%s; skipping final empty commit to prevent infinite retry loop",
pr_number,
)
# If stopped due to trigger being disabled, post reason and skip empty commit
elif completion_reason:
logger.info(
Expand Down
138 changes: 134 additions & 4 deletions services/webhook/test_check_suite_handler.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
from constants.messages import CHECK_RUN_FAILED_MESSAGE
from services.agents.verify_task_is_complete import VerifyTaskIsCompleteResult
from services.chat_with_agent import AgentResult
from services.webhook import check_suite_handler
from services.webhook.check_suite_handler import handle_check_suite
from utils.logs.label_log_source import label_log_source

Expand Down Expand Up @@ -796,6 +797,14 @@ async def test_handle_check_suite_with_existing_retry_pair(


@pytest.mark.asyncio
@patch(
"services.webhook.check_suite_handler.get_local_head_sha",
return_value="abc123",
)
@patch(
"services.webhook.check_suite_handler.get_total_cost_for_pr",
return_value=999.99,
)
@patch("services.webhook.check_suite_handler.get_failed_check_runs_from_check_suite")
@patch("services.webhook.check_suite_handler.get_installation_access_token")
@patch("services.webhook.check_suite_handler.get_repository")
Expand Down Expand Up @@ -828,7 +837,7 @@ async def test_handle_check_suite_with_closed_pr(
_mock_ensure_php,
_mock_start_async,
_mock_update_usage,
_mock_create_empty_commit,
mock_create_empty_commit,
mock_should_bail,
_mock_update_retry_hashes,
mock_get_retry_hashes,
Expand All @@ -844,6 +853,8 @@ async def test_handle_check_suite_with_closed_pr(
mock_get_repo,
mock_get_token,
mock_get_failed_runs,
_mock_get_total_cost_for_pr,
_mock_get_local_head_sha,
mock_check_run_payload,
):
"""Test handling when the PR is closed during processing."""
Expand Down Expand Up @@ -897,8 +908,20 @@ async def test_handle_check_suite_with_closed_pr(
# Verify comment was updated (post-loop update)
mock_update_comment.assert_called()

# Regression: cost cap reached (mocked $999.99 >= cap) with no change commits (HEAD == initial SHA) must skip the final empty commit.
# Otherwise CI would be retriggered → re-enter this handler → bail on cost cap again → loop forever.
mock_create_empty_commit.assert_not_called()


@pytest.mark.asyncio
@patch(
"services.webhook.check_suite_handler.get_local_head_sha",
return_value="abc123",
)
@patch(
"services.webhook.check_suite_handler.get_total_cost_for_pr",
return_value=999.99,
)
@patch("services.webhook.check_suite_handler.get_failed_check_runs_from_check_suite")
@patch("services.webhook.check_suite_handler.get_installation_access_token")
@patch("services.webhook.check_suite_handler.get_repository")
Expand Down Expand Up @@ -931,7 +954,7 @@ async def test_handle_check_suite_with_deleted_branch(
_mock_ensure_php,
_mock_start_async,
_mock_update_usage,
_mock_create_empty_commit,
mock_create_empty_commit,
mock_should_bail,
_mock_update_retry_hashes,
mock_get_retry_hashes,
Expand All @@ -947,6 +970,8 @@ async def test_handle_check_suite_with_deleted_branch(
mock_get_repo,
mock_get_token,
mock_get_failed_runs,
_mock_get_total_cost_for_pr,
_mock_get_local_head_sha,
mock_check_run_payload,
):
"""Test handling when the branch is deleted during processing."""
Expand Down Expand Up @@ -1000,6 +1025,113 @@ async def test_handle_check_suite_with_deleted_branch(
# Verify comment was updated (post-loop update)
mock_update_comment.assert_called()

# Regression: cost cap reached (mocked $999.99 >= cap) with no change commits (HEAD == initial SHA) must skip the final empty commit.
# Otherwise CI would be retriggered → re-enter this handler → bail on cost cap again → loop forever.
mock_create_empty_commit.assert_not_called()


@pytest.mark.asyncio
@patch(
    "services.webhook.check_suite_handler.get_local_head_sha",
    return_value="different_head_sha",
)
@patch(
    "services.webhook.check_suite_handler.get_total_cost_for_pr",
    return_value=999.99,
)
@patch("services.webhook.check_suite_handler.get_failed_check_runs_from_check_suite")
@patch("services.webhook.check_suite_handler.get_installation_access_token")
@patch("services.webhook.check_suite_handler.get_repository")
@patch("services.webhook.check_suite_handler.slack_notify")
@patch("services.webhook.check_suite_handler.get_pr_comments")
@patch("services.webhook.check_suite_handler.create_comment")
@patch("services.webhook.check_suite_handler.create_user_request")
@patch("services.webhook.check_suite_handler.cancel_workflow_runs")
@patch("services.webhook.check_suite_handler.get_pull_request")
@patch("services.webhook.check_suite_handler.get_pull_request_files")
@patch("services.webhook.check_suite_handler.get_workflow_run_logs")
@patch("services.webhook.check_suite_handler.update_comment")
@patch("services.webhook.check_suite_handler.get_retry_error_hashes")
@patch("services.webhook.check_suite_handler.update_retry_error_hashes")
@patch("services.webhook.check_suite_handler.should_bail", return_value=True)
@patch("services.webhook.check_suite_handler.create_empty_commit")
@patch("services.webhook.check_suite_handler.update_usage")
@patch("services.webhook.check_suite_handler.ensure_node_packages")
@patch("services.webhook.check_suite_handler.ensure_php_packages")
@patch(
    "services.webhook.check_suite_handler.get_head_commit_count_behind_base",
    return_value=0,
)
@patch("services.webhook.check_suite_handler.git_merge_base_into_pr")
@patch("services.webhook.check_suite_handler.clone_repo_and_install_dependencies")
async def test_cost_cap_with_change_commits_still_retriggers_ci(
    # NOTE: @patch injects mocks bottom-up (innermost decorator first).
    # Parameter names below are fixed to match that order; the previous
    # names mislabeled positions 2/3 (swapped) and 5 (not start_async).
    _mock_prepare_repo,
    _mock_merge_base,
    _mock_get_behind,
    _mock_ensure_php,
    _mock_ensure_node,
    _mock_update_usage,
    mock_create_empty_commit,
    _mock_should_bail,
    _mock_update_retry_hashes,
    mock_get_retry_hashes,
    _mock_update_comment,
    mock_get_logs,
    mock_get_changes,
    mock_get_pr,
    _mock_cancel_workflow_runs,
    mock_create_user_request,
    mock_create_comment,
    mock_get_pr_comments,
    _mock_slack_notify,
    mock_get_repo,
    mock_get_token,
    mock_get_failed_runs,
    _mock_get_total_cost_for_pr,
    _mock_get_local_head_sha,
    mock_check_run_payload,
):
    """Cost cap reached but HEAD moved (agent pushed change commits this run) →
    create the final empty commit to retrigger CI. The pushed fixes might pass CI."""
    # Give this run a unique check-suite id so it doesn't collide with other tests.
    payload = mock_check_run_payload.copy()
    payload["check_suite"] = payload["check_suite"].copy()
    payload["check_suite"]["id"] = random.randint(1000000, 9999999)

    # Minimal happy-path stubs so the handler reaches the retrigger decision.
    mock_get_token.return_value = "ghs_test_token_for_testing"
    mock_get_failed_runs.return_value = [
        {
            "details_url": "https://github.com/test-owner/test-repo/actions/runs/12345/job/67890",
            "name": "test",
            "head_sha": "abc123",
        }
    ]
    mock_get_repo.return_value = {"trigger_on_test_failure": True}
    mock_get_pr_comments.return_value = []
    mock_create_comment.return_value = "http://comment-url"
    mock_create_user_request.return_value = "usage-id-123"
    mock_get_pr.return_value = {
        "title": "Low Test Coverage: src/main.py",
        "body": "Test PR description",
        "user": {"login": "test-user"},
        "base": {"ref": "main"},
    }
    mock_get_changes.return_value = [
        {
            "filename": "src/main.py",
            "status": "modified",
            "additions": 10,
            "deletions": 5,
        }
    ]
    mock_get_logs.return_value = "Test failure log content"
    mock_get_retry_hashes.return_value = []

    await handle_check_suite(payload)

    # HEAD "different_head_sha" != initial "abc123" → has_change_commits=True.
    # Even with cost cap reached, we must still retrigger CI because the commits pushed during this run might fix the failure. Skipping would lose that signal.
    mock_create_empty_commit.assert_called_once()


@pytest.mark.asyncio
@patch("services.webhook.check_suite_handler.get_failed_check_runs_from_check_suite")
Expand Down Expand Up @@ -1771,8 +1903,6 @@ def test_patch_truncation_in_changed_files():
def test_check_suite_handler_imports_label_log_source():
# Smoke test: the handler must import label_log_source or the runtime-provenance tagging isn't actually wired.
# Without this, a future refactor could silently drop the import and logs would lose their `[log source: ...]` header — the exact regression that produced Foxquilt PR #203.
from services.webhook import check_suite_handler # pylint: disable=import-outside-toplevel

assert hasattr(check_suite_handler, "label_log_source")


Expand Down
2 changes: 1 addition & 1 deletion uv.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

Loading