mirror of
https://github.com/All-Hands-AI/OpenHands.git
synced 2026-01-08 22:38:05 -05:00
Forgejo integration (#11111)
Co-authored-by: johba <admin@noreply.localhost> Co-authored-by: openhands <openhands@all-hands.dev> Co-authored-by: johba <johba@harb.eth> Co-authored-by: enyst <engel.nyst@gmail.com> Co-authored-by: Graham Neubig <neubig@gmail.com> Co-authored-by: MrGeorgen <65063405+MrGeorgen@users.noreply.github.com> Co-authored-by: MrGeorgen <moinl6162@gmail.com>
This commit is contained in:
@@ -109,6 +109,30 @@ def get_provider_tokens():
|
||||
bitbucket_token = SecretStr(os.environ['BITBUCKET_TOKEN'])
|
||||
provider_tokens[ProviderType.BITBUCKET] = ProviderToken(token=bitbucket_token)
|
||||
|
||||
# Forgejo support (e.g., Codeberg or self-hosted Forgejo)
|
||||
if 'FORGEJO_TOKEN' in os.environ:
|
||||
forgejo_token = SecretStr(os.environ['FORGEJO_TOKEN'])
|
||||
# If a base URL is provided, extract the domain to use as host override
|
||||
forgejo_base_url = os.environ.get('FORGEJO_BASE_URL', '').strip()
|
||||
host: str | None = None
|
||||
if forgejo_base_url:
|
||||
# Normalize by stripping protocol and any path (e.g., /api/v1)
|
||||
url = forgejo_base_url
|
||||
if url.startswith(('http://', 'https://')):
|
||||
try:
|
||||
from urllib.parse import urlparse
|
||||
|
||||
parsed = urlparse(url)
|
||||
host = parsed.netloc or None
|
||||
except Exception:
|
||||
pass
|
||||
if host is None:
|
||||
host = url.replace('https://', '').replace('http://', '')
|
||||
host = host.split('/')[0].strip('/') if host else None
|
||||
provider_tokens[ProviderType.FORGEJO] = ProviderToken(
|
||||
token=forgejo_token, host=host
|
||||
)
|
||||
|
||||
# Wrap provider tokens in Secrets if any tokens were found
|
||||
secret_store = (
|
||||
Secrets(provider_tokens=provider_tokens) if provider_tokens else None # type: ignore[arg-type]
|
||||
|
||||
0
openhands/integrations/forgejo/__init__.py
Normal file
0
openhands/integrations/forgejo/__init__.py
Normal file
56
openhands/integrations/forgejo/forgejo_service.py
Normal file
56
openhands/integrations/forgejo/forgejo_service.py
Normal file
@@ -0,0 +1,56 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from pydantic import SecretStr
|
||||
|
||||
from openhands.integrations.forgejo.service import (
|
||||
ForgejoBranchesMixin,
|
||||
ForgejoFeaturesMixin,
|
||||
ForgejoMixinBase,
|
||||
ForgejoPRsMixin,
|
||||
ForgejoReposMixin,
|
||||
ForgejoResolverMixin,
|
||||
)
|
||||
from openhands.integrations.service_types import GitService
|
||||
from openhands.utils.import_utils import get_impl
|
||||
|
||||
|
||||
class ForgejoService(
    ForgejoBranchesMixin,
    ForgejoFeaturesMixin,
    ForgejoPRsMixin,
    ForgejoReposMixin,
    ForgejoResolverMixin,
    ForgejoMixinBase,
    GitService,
):
    """Assembled Forgejo service combining mixins by feature area.

    Each mixin contributes one slice of the Git-provider surface (branches,
    microagent features, pull requests, repositories, resolver helpers);
    ``ForgejoMixinBase`` supplies the shared HTTP/auth plumbing they all use.
    """

    def __init__(
        self,
        user_id: str | None = None,
        external_auth_id: str | None = None,
        external_auth_token: SecretStr | None = None,
        token: SecretStr | None = None,
        external_token_manager: bool = False,
        base_domain: str | None = None,
        base_url: str | None = None,
    ) -> None:
        # NOTE(review): this calls ForgejoMixinBase.__init__ directly rather
        # than super().__init__(), so any __init__ on GitService (or other
        # bases later in the MRO) is deliberately skipped — confirm that is
        # intended if the base hierarchy changes.
        ForgejoMixinBase.__init__(
            self,
            user_id=user_id,
            external_auth_id=external_auth_id,
            external_auth_token=external_auth_token,
            token=token,
            external_token_manager=external_token_manager,
            base_domain=base_domain,
            base_url=base_url,
        )
|
||||
|
||||
|
||||
# Allow deployments to substitute a custom service implementation via an
# environment variable; defaults to the class defined above. get_impl()
# resolves the dotted path and validates it against ForgejoService.
forgejo_service_cls = os.environ.get(
    'OPENHANDS_FORGEJO_SERVICE_CLS',
    'openhands.integrations.forgejo.forgejo_service.ForgejoService',
)
ForgejoServiceImpl = get_impl(ForgejoService, forgejo_service_cls)
|
||||
15
openhands/integrations/forgejo/service/__init__.py
Normal file
15
openhands/integrations/forgejo/service/__init__.py
Normal file
@@ -0,0 +1,15 @@
|
||||
from .base import ForgejoMixinBase
|
||||
from .branches import ForgejoBranchesMixin
|
||||
from .features import ForgejoFeaturesMixin
|
||||
from .prs import ForgejoPRsMixin
|
||||
from .repos import ForgejoReposMixin
|
||||
from .resolver import ForgejoResolverMixin
|
||||
|
||||
# Public re-exports of the Forgejo service mixins; the assembled service in
# forgejo_service.py imports all of these from this package.
__all__ = [
    'ForgejoMixinBase',
    'ForgejoBranchesMixin',
    'ForgejoFeaturesMixin',
    'ForgejoPRsMixin',
    'ForgejoReposMixin',
    'ForgejoResolverMixin',
]
|
||||
219
openhands/integrations/forgejo/service/base.py
Normal file
219
openhands/integrations/forgejo/service/base.py
Normal file
@@ -0,0 +1,219 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
from typing import Any
|
||||
from urllib.parse import urlparse
|
||||
|
||||
import httpx
|
||||
from pydantic import SecretStr
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.protocols.http_client import HTTPClient
|
||||
from openhands.integrations.service_types import (
|
||||
BaseGitService,
|
||||
OwnerType,
|
||||
ProviderType,
|
||||
Repository,
|
||||
RequestMethod,
|
||||
UnknownException,
|
||||
User,
|
||||
)
|
||||
from openhands.utils.http_session import httpx_verify_option
|
||||
|
||||
|
||||
class ForgejoMixinBase(BaseGitService, HTTPClient):
    """Common functionality shared by Forgejo service mixins.

    Handles token resolution (explicit argument or ``FORGEJO_TOKEN`` env
    var), API base-URL resolution (argument, ``FORGEJO_BASE_URL``, or the
    Codeberg default), authenticated HTTP requests, and parsing of common
    Forgejo API payloads into project model objects.
    """

    # Codeberg is the canonical public Forgejo instance, used when no
    # base URL / domain is configured anywhere.
    DEFAULT_BASE_URL = 'https://codeberg.org/api/v1'
    DEFAULT_DOMAIN = 'codeberg.org'

    # Empty SecretStr sentinel so `if not self.token` works before a token
    # is assigned; `refresh` gates the retry-on-expired-token path in
    # _make_request.
    token: SecretStr = SecretStr('')
    refresh = False

    def __init__(
        self,
        user_id: str | None = None,
        external_auth_id: str | None = None,
        external_auth_token: SecretStr | None = None,
        token: SecretStr | None = None,
        external_token_manager: bool = False,
        base_domain: str | None = None,
        base_url: str | None = None,
    ) -> None:
        self.user_id = user_id
        self.external_auth_id = external_auth_id
        self.external_auth_token = external_auth_token
        self.external_token_manager = external_token_manager

        # Explicit token wins; otherwise fall back to the environment.
        if token:
            self.token = token
        else:
            env_token = os.environ.get('FORGEJO_TOKEN')
            if env_token:
                self.token = SecretStr(env_token)

        env_base_url = os.environ.get('FORGEJO_BASE_URL')
        self.BASE_URL = self._resolve_base_url(base_url, base_domain, env_base_url)
        self.base_url = self.BASE_URL  # Backwards compatibility for existing usage
        # Derive the bare host and a browsable web URL from the API URL.
        parsed = urlparse(self.BASE_URL)
        self.base_domain = parsed.netloc or self.DEFAULT_DOMAIN
        self.web_base_url = f'https://{self.base_domain}'.rstrip('/')

    @property
    def provider(self) -> str:
        """Provider identifier string, e.g. ``'forgejo'``."""
        return ProviderType.FORGEJO.value

    async def get_latest_token(self) -> SecretStr | None:
        """Return the current token; no refresh flow is implemented here."""
        return self.token

    async def _get_headers(self) -> dict[str, Any]:
        """Build auth headers for the Forgejo API (``token`` auth scheme)."""
        if not self.token:
            latest_token = await self.get_latest_token()
            if latest_token:
                self.token = latest_token

        return {
            'Authorization': f'token {self.token.get_secret_value() if self.token else ""}',
            'Accept': 'application/json',
        }

    async def _make_request(
        self,
        url: str,
        params: dict | None = None,
        method: RequestMethod = RequestMethod.GET,
    ) -> tuple[Any, dict]:
        """Perform an authenticated request.

        Returns ``(body, headers_out)`` where ``body`` is parsed JSON when
        the response is JSON (otherwise raw text) and ``headers_out``
        contains only the pagination-relevant response headers.

        Raises the project exceptions produced by ``handle_http_status_error``
        / ``handle_http_error``.
        """
        try:
            async with httpx.AsyncClient(verify=httpx_verify_option()) as client:
                headers = await self._get_headers()
                response = await self.execute_request(
                    client=client,
                    url=url,
                    headers=headers,
                    params=params,
                    method=method,
                )

                # Optional single retry after refreshing the token; only
                # active when self.refresh is True (default False).
                if self.refresh and self._has_token_expired(response.status_code):
                    await self.get_latest_token()
                    headers = await self._get_headers()
                    response = await self.execute_request(
                        client=client,
                        url=url,
                        headers=headers,
                        params=params,
                        method=method,
                    )

                response.raise_for_status()
                # Surface only pagination headers to callers.
                headers_out: dict[str, str] = {}
                for header in ('Link', 'X-Total-Count', 'X-Total'):
                    if header in response.headers:
                        headers_out[header] = response.headers[header]

                content_type = response.headers.get('Content-Type', '')
                if 'application/json' in content_type:
                    return response.json(), headers_out
                return response.text, headers_out

        except httpx.HTTPStatusError as err:
            raise self.handle_http_status_error(err)
        except httpx.HTTPError as err:
            raise self.handle_http_error(err)

    def _resolve_base_url(
        self,
        explicit_base_url: str | None,
        base_domain: str | None,
        env_base_url: str | None,
    ) -> str:
        """Pick the API base URL from the first usable candidate.

        Precedence: explicit base_url > base_domain > FORGEJO_BASE_URL env >
        default. A bare domain gets ``https://`` prepended; a URL without an
        ``/api/`` segment gets ``/api/v1`` appended.
        """
        for candidate in (
            explicit_base_url,
            base_domain,
            env_base_url,
            self.DEFAULT_BASE_URL,
        ):
            if not candidate:
                continue

            normalized = candidate.strip().rstrip('/')
            if not normalized:
                continue

            if normalized.startswith(('http://', 'https://')):
                url = normalized
            else:
                url = f'https://{normalized}'

            if '/api/' in url:
                return url

            return f'{url}/api/v1'

        # Unreachable in practice (DEFAULT_BASE_URL is always truthy), kept
        # as a defensive fallback.
        return self.DEFAULT_BASE_URL

    async def get_user(self) -> User:  # type: ignore[override]
        """Fetch the authenticated user from ``GET /user``."""
        url = f'{self.BASE_URL}/user'
        response, _ = await self._make_request(url)

        return User(
            id=str(response.get('id', '')),
            login=response.get('username', ''),
            avatar_url=response.get('avatar_url', ''),
            name=response.get('full_name'),
            email=response.get('email'),
            company=response.get('organization'),
        )

    def _parse_repository(
        self, repo: dict, link_header: str | None = None
    ) -> Repository:
        """Convert a Forgejo repository payload into a Repository model."""
        owner = repo.get('owner') or {}
        owner_type = (
            OwnerType.ORGANIZATION
            if (owner.get('type') or '').lower() == 'organization'
            else OwnerType.USER
        )

        return Repository(
            id=str(repo.get('id', '')),
            full_name=repo.get('full_name', ''),
            stargazers_count=repo.get('stars_count'),
            git_provider=ProviderType.FORGEJO,
            is_public=not repo.get('private', False),
            link_header=link_header,
            # Forgejo payloads expose 'updated_at'; 'pushed_at' is a
            # best-effort fallback.
            pushed_at=repo.get('updated_at') or repo.get('pushed_at'),
            owner_type=owner_type,
            main_branch=repo.get('default_branch'),
        )

    def _split_repo(self, repository: str) -> tuple[str, str]:
        """Split ``owner/repo`` (or a full repo URL) into ``(owner, repo)``.

        Extra path segments beyond owner/repo are ignored.

        Raises:
            ValueError: if fewer than two path segments are present.
        """
        repo_path = repository.strip()
        if repo_path.startswith(('http://', 'https://')):
            parsed = urlparse(repo_path)
            repo_path = parsed.path.lstrip('/')

        parts = [part for part in repo_path.split('/') if part]
        if len(parts) < 2:
            raise ValueError(f'Invalid repository format: {repository}')

        return parts[0], parts[1]

    def _build_repo_api_url(self, owner: str, repo: str, *segments: str) -> str:
        """Build ``{BASE_URL}/repos/{owner}/{repo}[/segments...]``."""
        base = f'{self.BASE_URL}/repos/{owner}/{repo}'
        if segments:
            base = f'{base}/{"/".join(segments)}'
        return base

    def _map_sort(self, sort: str) -> str:
        """Map generic sort keys to Forgejo's sort parameter values.

        Unknown keys fall back to 'updated'.
        """
        sort_map = {
            'pushed': 'updated',
            'updated': 'updated',
            'created': 'created',
            'full_name': 'name',
        }
        return sort_map.get(sort, 'updated')

    def handle_http_error(self, e: httpx.HTTPError) -> UnknownException:  # type: ignore[override]
        """Log a transport-level HTTP error and wrap it for callers."""
        logger.warning(f'HTTP error on {self.provider} API: {type(e).__name__} : {e}')
        return UnknownException(f'HTTP error {type(e).__name__} : {e}')
|
||||
74
openhands/integrations/forgejo/service/branches.py
Normal file
74
openhands/integrations/forgejo/service/branches.py
Normal file
@@ -0,0 +1,74 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
|
||||
from openhands.integrations.service_types import Branch, PaginatedBranchesResponse
|
||||
|
||||
|
||||
class ForgejoBranchesMixin(ForgejoMixinBase):
    """Branch-related operations for Forgejo."""

    async def get_branches(self, repository: str) -> list[Branch]:  # type: ignore[override]
        """Return every branch of *repository* by walking all pages."""
        branches: list[Branch] = []
        page = 1
        per_page = 100  # Forgejo's maximum page size, minimizes round trips

        while True:
            paginated = await self.get_paginated_branches(repository, page, per_page)
            branches.extend(paginated.branches)
            if not paginated.has_next_page:
                break
            page += 1

        return branches

    async def get_paginated_branches(
        self, repository: str, page: int = 1, per_page: int = 30
    ) -> PaginatedBranchesResponse:  # type: ignore[override]
        """Fetch one page of branches via ``GET /repos/{owner}/{repo}/branches``."""
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'branches')
        params = {
            'page': str(page),
            'limit': str(per_page),
        }

        response, headers = await self._make_request(url, params)
        # Defensive: a non-list body (e.g. an error object) yields no branches.
        branch_items = response if isinstance(response, list) else []

        branches: list[Branch] = []
        for branch in branch_items:
            commit_info = branch.get('commit') or {}
            # Forgejo variants expose the SHA under different keys; try each.
            commit_sha = (
                commit_info.get('id')
                or commit_info.get('sha')
                or commit_info.get('commit', {}).get('sha')
            )
            branches.append(
                Branch(
                    name=branch.get('name', ''),
                    commit_sha=commit_sha or '',
                    protected=branch.get('protected', False),
                    # Not present in the branch payload handled here.
                    last_push_date=None,
                )
            )

        # Pagination state from response headers: Link rel="next" signals
        # another page; X-Total-Count / X-Total carry the total if provided.
        link_header = headers.get('Link', '')
        total_count_header = headers.get('X-Total-Count') or headers.get('X-Total')
        total_count = int(total_count_header) if total_count_header else None
        has_next_page = 'rel="next"' in link_header

        return PaginatedBranchesResponse(
            branches=branches,
            has_next_page=has_next_page,
            current_page=page,
            per_page=per_page,
            total_count=total_count,
        )

    async def search_branches(
        self, repository: str, query: str, per_page: int = 30
    ) -> list[Branch]:  # type: ignore[override]
        """Case-insensitive substring search over all branch names.

        Fetches every branch and filters client-side (Forgejo's branch
        endpoint is not searched server-side here), truncating to *per_page*.
        """
        all_branches = await self.get_branches(repository)
        lowered = query.lower()
        return [branch for branch in all_branches if lowered in branch.name.lower()][
            :per_page
        ]
|
||||
123
openhands/integrations/forgejo/service/features.py
Normal file
123
openhands/integrations/forgejo/service/features.py
Normal file
@@ -0,0 +1,123 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import base64
|
||||
from typing import Any
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
|
||||
from openhands.integrations.service_types import (
|
||||
MicroagentContentResponse,
|
||||
MicroagentResponse,
|
||||
ProviderType,
|
||||
ResourceNotFoundError,
|
||||
SuggestedTask,
|
||||
)
|
||||
|
||||
|
||||
class ForgejoFeaturesMixin(ForgejoMixinBase):
    """Microagent and feature helpers for Forgejo."""

    async def _get_cursorrules_url(self, repository: str) -> str:
        """URL of the repo's ``.cursorrules`` file via the contents API."""
        owner, repo = self._split_repo(repository)
        return self._build_repo_api_url(owner, repo, 'contents', '.cursorrules')

    async def _get_microagents_directory_url(
        self, repository: str, microagents_path: str
    ) -> str:
        """URL listing the microagents directory via the contents API."""
        owner, repo = self._split_repo(repository)
        normalized_path = microagents_path.strip('/')
        return self._build_repo_api_url(owner, repo, 'contents', normalized_path)

    def _get_microagents_directory_params(self, microagents_path: str) -> dict | None:
        # Forgejo's contents endpoint needs no extra query parameters.
        return None

    def _is_valid_microagent_file(self, item: dict[str, Any] | None) -> bool:
        """True for file entries named ``*.md`` or ``*.cursorrules``."""
        if not isinstance(item, dict):
            return False
        if item.get('type') != 'file':
            return False
        name = item.get('name', '')
        return isinstance(name, str) and (
            name.endswith('.md') or name.endswith('.cursorrules')
        )

    def _get_file_name_from_item(self, item: dict[str, Any] | None) -> str:
        """Safely extract the ``name`` field from a contents entry."""
        if not isinstance(item, dict):
            return ''
        name = item.get('name')
        return name if isinstance(name, str) else ''

    def _get_file_path_from_item(
        self, item: dict[str, Any] | None, microagents_path: str
    ) -> str:
        """Join the microagents directory and the entry's file name."""
        file_name = self._get_file_name_from_item(item)
        if not microagents_path:
            return file_name
        return f'{microagents_path.strip("/")}/{file_name}'

    async def get_microagents(self, repository: str) -> list[MicroagentResponse]:  # type: ignore[override]
        """List microagent files in the repo's microagents directory.

        Scan failures are logged and treated as "no files" so that a missing
        or unreadable directory never breaks the caller; a ``.cursorrules``
        file at the repo root is appended if present.
        """
        microagents_path = self._determine_microagents_path(repository)
        microagents: list[MicroagentResponse] = []

        try:
            directory_url = await self._get_microagents_directory_url(
                repository, microagents_path
            )
            items, _ = await self._make_request(directory_url)
        except ResourceNotFoundError:
            items = []
        except Exception as exc:
            # Fail gracefully if the directory cannot be inspected
            self._log_microagent_warning(repository, str(exc))
            items = []

        if isinstance(items, list):
            for item in items:
                if self._is_valid_microagent_file(item):
                    file_name = self._get_file_name_from_item(item)
                    file_path = self._get_file_path_from_item(item, microagents_path)
                    microagents.append(
                        self._create_microagent_response(file_name, file_path)
                    )

        cursorrules = await self._check_cursorrules_file(repository)
        if cursorrules:
            microagents.append(cursorrules)

        return microagents

    async def get_microagent_content(
        self, repository: str, file_path: str
    ) -> MicroagentContentResponse:  # type: ignore[override]
        """Fetch and decode a microagent file's content.

        Content is base64-decoded when the payload says so (the contents
        API default); decode or parse failures degrade to raw/empty content
        rather than raising.
        """
        owner, repo = self._split_repo(repository)
        normalized_path = file_path.lstrip('/')
        url = self._build_repo_api_url(owner, repo, 'contents', normalized_path)

        response, _ = await self._make_request(url)
        content = response.get('content') or ''
        encoding = (response.get('encoding') or 'base64').lower()

        if encoding == 'base64':
            try:
                decoded = base64.b64decode(content).decode('utf-8')
            except Exception:
                # Malformed base64 or non-UTF-8 bytes: treat as empty.
                decoded = ''
        else:
            decoded = content

        try:
            return self._parse_microagent_content(decoded, file_path)
        except Exception:
            # Unparseable microagent: return the raw text with no triggers.
            return MicroagentContentResponse(
                content=decoded,
                path=file_path,
                triggers=[],
                git_provider=ProviderType.FORGEJO.value,
            )

    async def get_suggested_tasks(self) -> list[SuggestedTask]:  # type: ignore[override]
        # Suggested tasks are not yet implemented for Forgejo.
        return []

    def _log_microagent_warning(self, repository: str, message: str) -> None:
        """Debug-log a non-fatal problem seen while scanning microagents."""
        logger.debug(f'Forgejo microagent scan warning for {repository}: {message}')
|
||||
84
openhands/integrations/forgejo/service/prs.py
Normal file
84
openhands/integrations/forgejo/service/prs.py
Normal file
@@ -0,0 +1,84 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
|
||||
from openhands.integrations.service_types import RequestMethod, UnknownException
|
||||
|
||||
|
||||
class ForgejoPRsMixin(ForgejoMixinBase):
    """Pull request helpers for Forgejo."""

    async def create_pull_request(self, data: dict[str, Any] | None = None) -> dict:
        """Create a PR via ``POST /repos/{owner}/{repo}/pulls``.

        *data* may locate the repo either as ``repository`` ('owner/repo' or
        URL) or as separate ``owner``/``repo`` keys; those keys are popped
        and the remainder is sent as the request payload. The response is
        normalized so Forgejo's ``index``/``url`` fields also appear under
        the GitHub-style ``number``/``html_url`` keys callers expect.

        Raises:
            ValueError: if no owner/repo can be determined.
            UnknownException: if the API response is not a JSON object.
        """
        payload: dict[str, Any] = dict(data or {})

        repository = payload.pop('repository', None)
        owner = payload.pop('owner', None)
        repo_name = payload.pop('repo', None)

        if repository and isinstance(repository, str):
            owner, repo_name = self._split_repo(repository)
        else:
            # Fall back to the service's user_id as owner when not given.
            owner = str(owner or self.user_id or '').strip()
            repo_name = str(repo_name or '').strip()

        if not owner or not repo_name:
            raise ValueError(
                'Repository information is required to create a pull request'
            )

        url = self._build_repo_api_url(owner, repo_name, 'pulls')
        # NOTE(review): the payload is passed through _make_request's
        # `params` argument; presumably execute_request sends it as the
        # request body for POST — confirm against the HTTPClient protocol.
        response, _ = await self._make_request(
            url,
            payload,
            method=RequestMethod.POST,
        )

        if not isinstance(response, dict):
            raise UnknownException('Unexpected response creating Forgejo pull request')

        if 'number' not in response and 'index' in response:
            response['number'] = response['index']

        if 'html_url' not in response and 'url' in response:
            response['html_url'] = response['url']

        return response

    async def request_reviewers(
        self, repository: str, pr_number: int, reviewers: list[str]
    ) -> None:
        """Best-effort reviewer request; failures are logged, never raised."""
        if not reviewers:
            return

        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(
            owner, repo, 'pulls', str(pr_number), 'requested_reviewers'
        )

        try:
            await self._make_request(
                url,
                {'reviewers': reviewers},
                method=RequestMethod.POST,
            )
        except Exception as exc:  # pragma: no cover - log and continue
            logger.warning(
                'Failed to request Forgejo reviewers %s for %s/%s PR #%s: %s',
                reviewers,
                owner,
                repo,
                pr_number,
                exc,
            )

    async def get_pr_details(self, repository: str, pr_number: int) -> dict:  # type: ignore[override]
        """Fetch the raw PR payload from ``GET .../pulls/{pr_number}``."""
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number))
        response, _ = await self._make_request(url)
        return response

    async def is_pr_open(self, repository: str, pr_number: int) -> bool:  # type: ignore[override]
        """True when the PR's ``state`` field is ``'open'`` (case-insensitive)."""
        pr_details = await self.get_pr_details(repository, pr_number)
        return (pr_details.get('state') or '').lower() == 'open'
|
||||
109
openhands/integrations/forgejo/service/repos.py
Normal file
109
openhands/integrations/forgejo/service/repos.py
Normal file
@@ -0,0 +1,109 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
|
||||
from openhands.integrations.service_types import Repository
|
||||
from openhands.server.types import AppMode
|
||||
|
||||
|
||||
class ForgejoReposMixin(ForgejoMixinBase):
    """Repository operations for Forgejo."""

    async def search_repositories(
        self,
        query: str,
        per_page: int,
        sort: str,
        order: str,
        public: bool,
        app_mode: AppMode,
    ) -> list[Repository]:  # type: ignore[override]
        """Search repositories via ``GET /repos/search``.

        When *public* is True, private repos are filtered out client-side.
        *app_mode* is accepted for interface parity but unused here.
        """
        url = f'{self.BASE_URL}/repos/search'
        params = {
            'q': query,
            'limit': per_page,
            'sort': sort,
            'order': order,
            # 'source' excludes forks/mirrors from the results.
            'mode': 'source',
        }

        response, _ = await self._make_request(url, params)
        # The search endpoint wraps results in a {'data': [...]} envelope.
        repos = response.get('data', []) if isinstance(response, dict) else []
        if public:
            repos = [repo for repo in repos if not repo.get('private', False)]
        return [self._parse_repository(repo) for repo in repos]

    async def get_all_repositories(
        self, sort: str, app_mode: AppMode
    ) -> list[Repository]:  # type: ignore[override]
        """Collect the user's repositories across pages, capped at 1000."""
        max_repos = 1000  # hard cap to bound total API calls and memory
        per_page = 100
        collected: list[dict] = []
        page = 1
        last_link_header: str | None = None

        url = f'{self.BASE_URL}/user/repos'
        forgejo_sort = self._map_sort(sort)

        while len(collected) < max_repos:
            params = {
                'page': str(page),
                'limit': str(per_page),
                'sort': forgejo_sort,
            }
            response, headers = await self._make_request(url, params)
            last_link_header = headers.get('Link')

            page_repos = response if isinstance(response, list) else []
            if not page_repos:
                break

            collected.extend(page_repos)
            # Stop when the Link header no longer advertises a next page.
            if 'rel="next"' not in (last_link_header or ''):
                break

            page += 1

        collected = collected[:max_repos]
        return [
            self._parse_repository(repo, link_header=last_link_header)
            for repo in collected
        ]

    async def get_paginated_repos(
        self,
        page: int,
        per_page: int,
        sort: str,
        installation_id: str | None,
        query: str | None = None,
    ) -> list[Repository]:  # type: ignore[override]
        """Fetch one page of the user's repos, optionally name-filtered.

        *installation_id* is a GitHub-App concept with no Forgejo
        equivalent and is ignored. *query* filters client-side on the
        repo's full name (case-insensitive substring).
        """
        _ = installation_id
        url = f'{self.BASE_URL}/user/repos'
        params = {
            'page': str(page),
            'limit': str(per_page),
            'sort': self._map_sort(sort),
        }

        response, headers = await self._make_request(url, params)
        repos = response if isinstance(response, list) else []

        if query:
            lowered = query.lower()
            repos = [
                repo
                for repo in repos
                if lowered in (repo.get('full_name') or '').lower()
            ]

        link_header = headers.get('Link')
        return [self._parse_repository(repo, link_header=link_header) for repo in repos]

    async def get_repository_details_from_repo_name(
        self, repository: str
    ) -> Repository:  # type: ignore[override]
        """Fetch a single repository by 'owner/repo' name (or URL)."""
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo)
        response, headers = await self._make_request(url)
        link_header = headers.get('Link')
        return self._parse_repository(response, link_header=link_header)
|
||||
137
openhands/integrations/forgejo/service/resolver.py
Normal file
137
openhands/integrations/forgejo/service/resolver.py
Normal file
@@ -0,0 +1,137 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections import defaultdict
|
||||
from datetime import datetime
|
||||
from typing import cast
|
||||
|
||||
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
|
||||
from openhands.integrations.service_types import Comment
|
||||
from openhands.resolver.interfaces.issue import ReviewThread
|
||||
|
||||
|
||||
class ForgejoResolverMixin(ForgejoMixinBase):
    """Lightweight helpers used by resolver flows for Forgejo.

    Fixes over the original draft:
    - ``_parse_datetime`` now always returns a timezone-aware (UTC)
      datetime. Previously the fallback was a *naive* epoch while parsed
      values were aware, so sorting comments with mixed timestamps raised
      ``TypeError: can't compare offset-naive and offset-aware datetimes``.
    - Issue- and PR-comment listing shared a copy-pasted body; it is now
      factored into ``_list_comments``.
    """

    async def get_issue_title_and_body(
        self, repository: str, issue_number: int
    ) -> tuple[str, str]:
        """Return ``(title, body)`` for an issue; missing fields become ''."""
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'issues', str(issue_number))
        response, _ = await self._make_request(url)
        title = response.get('title') or ''
        body = response.get('body') or response.get('content') or ''
        return title, body

    async def _list_comments(
        self, repository: str, kind: str, number: int, max_comments: int
    ) -> list[Comment]:
        """Fetch comments for an issue or PR, oldest-first, newest kept.

        *kind* is the API path segment ('issues' or 'pulls'). Requests the
        newest comments (``order=desc``) capped at the API page limit of
        50, then sorts ascending and keeps the last *max_comments*.
        """
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, kind, str(number), 'comments')
        per_page = min(max_comments, 50)
        params = {
            'page': '1',
            'limit': str(per_page),
            'order': 'desc',
        }

        response, _ = await self._make_request(url, params)
        raw_comments = response if isinstance(response, list) else []

        comments = [
            comment
            for comment in (self._to_comment(payload) for payload in raw_comments)
            if comment is not None
        ]
        # Safe now that _parse_datetime always yields aware datetimes.
        comments.sort(key=lambda c: c.created_at)
        return comments[-max_comments:]

    async def get_issue_comments(
        self,
        repository: str,
        issue_number: int,
        max_comments: int = 20,
    ) -> list[Comment]:
        """Return up to *max_comments* newest issue comments, oldest-first."""
        return await self._list_comments(
            repository, 'issues', issue_number, max_comments
        )

    async def get_pr_comments(
        self,
        repository: str,
        pr_number: int,
        max_comments: int = 50,
    ) -> list[Comment]:
        """Return up to *max_comments* newest PR comments, oldest-first."""
        return await self._list_comments(repository, 'pulls', pr_number, max_comments)

    async def get_pr_review_threads(
        self,
        repository: str,
        pr_number: int,
        max_threads: int = 10,
    ) -> list[ReviewThread]:
        """Group PR review comments by file path into ReviewThread objects.

        Comments without a ``path`` are grouped under 'general'. Returns at
        most *max_threads* threads.
        """
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number), 'comments')
        params = {'page': '1', 'limit': '100', 'order': 'asc'}

        response, _ = await self._make_request(url, params)
        raw_comments = response if isinstance(response, list) else []

        grouped: dict[str, list[str]] = defaultdict(list)
        files: dict[str, set[str]] = defaultdict(set)

        for payload in raw_comments:
            if not isinstance(payload, dict):
                continue
            path = cast(str, payload.get('path') or 'general')
            body = cast(str, payload.get('body') or '')
            grouped[path].append(body)
            if payload.get('path'):
                files[path].add(cast(str, payload['path']))

        threads: list[ReviewThread] = []
        for path, messages in grouped.items():
            comment_text = '\n---\n'.join(messages)
            file_list = sorted(files.get(path, {path}))
            threads.append(ReviewThread(comment=comment_text, files=file_list))

        return threads[:max_threads]

    def _to_comment(self, payload: dict | None) -> Comment | None:
        """Convert a raw comment payload into a Comment, or None if invalid."""
        if not isinstance(payload, dict):
            return None
        body = payload.get('body') or ''
        author = (payload.get('user') or {}).get('login') or 'unknown'
        created_at = self._parse_datetime(payload.get('created_at'))
        updated_at = self._parse_datetime(payload.get('updated_at'))

        return Comment(
            id=str(payload.get('id', 'unknown')),
            body=body,
            author=author,
            created_at=created_at,
            updated_at=updated_at,
            # NOTE(review): 'void' does not look like a documented Forgejo
            # comment field — possibly a typo for a system/automatic flag.
            # Preserved as-is; verify against the Forgejo API schema.
            system=payload.get('void', False),
        )

    def _parse_datetime(self, value: str | None) -> datetime:
        """Parse an ISO-8601 timestamp into an aware UTC datetime.

        Missing or malformed input maps to the UTC epoch; naive parses are
        assumed UTC. Always returning aware datetimes keeps comment sorting
        from mixing naive and aware values.
        """
        from datetime import timezone  # file-level import only brings datetime

        epoch = datetime.fromtimestamp(0, tz=timezone.utc)
        if not value:
            return epoch
        try:
            parsed = datetime.fromisoformat(value.replace('Z', '+00:00'))
        except ValueError:
            return epoch
        if parsed.tzinfo is None:
            parsed = parsed.replace(tzinfo=timezone.utc)
        return parsed
|
||||
@@ -22,6 +22,7 @@ from openhands.integrations.azure_devops.azure_devops_service import (
|
||||
AzureDevOpsServiceImpl,
|
||||
)
|
||||
from openhands.integrations.bitbucket.bitbucket_service import BitBucketServiceImpl
|
||||
from openhands.integrations.forgejo.forgejo_service import ForgejoServiceImpl
|
||||
from openhands.integrations.github.github_service import GithubServiceImpl
|
||||
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
|
||||
from openhands.integrations.service_types import (
|
||||
@@ -105,6 +106,7 @@ class ProviderHandler:
|
||||
ProviderType.GITHUB: 'github.com',
|
||||
ProviderType.GITLAB: 'gitlab.com',
|
||||
ProviderType.BITBUCKET: 'bitbucket.org',
|
||||
ProviderType.FORGEJO: 'codeberg.org',
|
||||
ProviderType.AZURE_DEVOPS: 'dev.azure.com',
|
||||
}
|
||||
|
||||
@@ -126,6 +128,7 @@ class ProviderHandler:
|
||||
ProviderType.GITHUB: GithubServiceImpl,
|
||||
ProviderType.GITLAB: GitLabServiceImpl,
|
||||
ProviderType.BITBUCKET: BitBucketServiceImpl,
|
||||
ProviderType.FORGEJO: ForgejoServiceImpl,
|
||||
ProviderType.AZURE_DEVOPS: AzureDevOpsServiceImpl,
|
||||
}
|
||||
|
||||
@@ -672,6 +675,14 @@ class ProviderHandler:
|
||||
if provider != ProviderType.AZURE_DEVOPS:
|
||||
domain = self.provider_tokens[provider].host or domain
|
||||
|
||||
# Normalize domain to prevent double protocols or path segments
|
||||
if domain:
|
||||
domain = domain.strip()
|
||||
domain = domain.replace('https://', '').replace('http://', '')
|
||||
# Remove any trailing path like /api/v3 or /api/v4
|
||||
if '/' in domain:
|
||||
domain = domain.split('/')[0]
|
||||
|
||||
# Try to use token if available, otherwise use public URL
|
||||
if self.provider_tokens and provider in self.provider_tokens:
|
||||
git_token = self.provider_tokens[provider].token
|
||||
@@ -747,7 +758,7 @@ class ProviderHandler:
|
||||
f'https://user:***@{clean_domain}/{repo_name}.git'
|
||||
)
|
||||
else:
|
||||
# GitHub
|
||||
# GitHub, Forgejo
|
||||
remote_url = f'https://{token_value}@{domain}/{repo_name}.git'
|
||||
else:
|
||||
remote_url = f'https://{domain}/{repo_name}.git'
|
||||
|
||||
@@ -21,6 +21,7 @@ class ProviderType(Enum):
|
||||
GITHUB = 'github'
|
||||
GITLAB = 'gitlab'
|
||||
BITBUCKET = 'bitbucket'
|
||||
FORGEJO = 'forgejo'
|
||||
AZURE_DEVOPS = 'azure_devops'
|
||||
ENTERPRISE_SSO = 'enterprise_sso'
|
||||
|
||||
|
||||
@@ -5,6 +5,7 @@ from openhands.integrations.azure_devops.azure_devops_service import (
|
||||
AzureDevOpsServiceImpl as AzureDevOpsService,
|
||||
)
|
||||
from openhands.integrations.bitbucket.bitbucket_service import BitBucketService
|
||||
from openhands.integrations.forgejo.forgejo_service import ForgejoService
|
||||
from openhands.integrations.github.github_service import GitHubService
|
||||
from openhands.integrations.gitlab.gitlab_service import GitLabService
|
||||
from openhands.integrations.provider import ProviderType
|
||||
@@ -48,6 +49,17 @@ async def validate_provider_token(
|
||||
except Exception as e:
|
||||
gitlab_error = e
|
||||
|
||||
# Try Forgejo if a base_domain was provided (custom instances may not contain
|
||||
# the substring 'forgejo' or 'codeberg')
|
||||
forgejo_error = None
|
||||
if base_domain:
|
||||
try:
|
||||
forgejo_service = ForgejoService(token=token, base_domain=base_domain)
|
||||
await forgejo_service.get_user()
|
||||
return ProviderType.FORGEJO
|
||||
except Exception as e:
|
||||
forgejo_error = e
|
||||
|
||||
# Try Bitbucket next
|
||||
bitbucket_error = None
|
||||
try:
|
||||
@@ -67,7 +79,7 @@ async def validate_provider_token(
|
||||
azure_devops_error = e
|
||||
|
||||
logger.debug(
|
||||
f'Failed to validate token: {github_error} \n {gitlab_error} \n {bitbucket_error} \n {azure_devops_error}'
|
||||
f'Failed to validate token: {github_error} \n {gitlab_error} \n {forgejo_error} \n {bitbucket_error} \n {azure_devops_error}'
|
||||
)
|
||||
|
||||
return None
|
||||
|
||||
480
openhands/resolver/interfaces/forgejo.py
Normal file
480
openhands/resolver/interfaces/forgejo.py
Normal file
@@ -0,0 +1,480 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
from urllib.parse import quote
|
||||
|
||||
import httpx
|
||||
|
||||
from openhands.core.logger import openhands_logger as logger
|
||||
from openhands.resolver.interfaces.issue import (
|
||||
Issue,
|
||||
IssueHandlerInterface,
|
||||
ReviewThread,
|
||||
)
|
||||
from openhands.resolver.utils import extract_issue_references
|
||||
|
||||
|
||||
class ForgejoIssueHandler(IssueHandlerInterface):
    """Issue handler implementation for Forgejo-based providers (e.g. Codeberg)."""

    # All Forgejo/Gitea REST endpoints live under this path prefix.
    API_PREFIX = '/api/v1'
||||
def __init__(
    self,
    owner: str,
    repo: str,
    token: str,
    username: str | None = None,
    base_domain: str = 'codeberg.org',
):
    """Initialize the handler for one repository.

    Args:
        owner: Repository owner (user or organization).
        repo: Repository name.
        token: API token used for authentication.
        username: Optional username embedded in credentialed URLs.
        base_domain: Forgejo instance host (defaults to Codeberg).
    """
    self.owner = owner
    self.repo = repo
    self.token = token
    self.username = username
    self.base_domain = base_domain
    # Derived values; these read the attributes assigned above.
    self.base_url = self.get_base_url()
    self.download_url = self.get_download_url()
    self.clone_url = self.get_clone_url()
    self.headers = self.get_headers()
|
||||
|
||||
def _api_root(self) -> str:
    """Absolute REST API root for the configured instance."""
    return 'https://{}{}'.format(self.base_domain, self.API_PREFIX)
|
||||
|
||||
@staticmethod
def _to_int(value: Any) -> int:
    """Coerce *value* to ``int``, returning 0 when conversion fails."""
    try:
        result = int(value)
    except (TypeError, ValueError):
        result = 0
    return result
|
||||
|
||||
def set_owner(self, owner: str) -> None:
    """Repoint the handler at a different owner and refresh derived URLs."""
    self.owner = owner
    # The cached URLs embed the owner, so they must be recomputed.
    self.base_url = self.get_base_url()
    self.download_url = self.get_download_url()
|
||||
|
||||
def get_headers(self) -> dict[str, str]:
    """HTTP headers carrying token authentication for API requests."""
    headers = {'Accept': 'application/json'}
    headers['Authorization'] = f'token {self.token}'
    return headers
|
||||
|
||||
def get_base_url(self) -> str:
    """Repository-scoped API base, e.g. https://host/api/v1/repos/owner/repo."""
    return '/'.join([self._api_root(), 'repos', self.owner, self.repo])
|
||||
|
||||
def get_authorize_url(self) -> str:
    """Instance root URL with embedded credentials for authenticated git use."""
    if self.username:
        user = self.username
    else:
        # Placeholder username accepted by Forgejo for token-only auth.
        user = 'x-auth-token'
    return f'https://{user}:{self.token}@{self.base_domain}/'
|
||||
|
||||
def get_branch_url(self, branch_name: str) -> str:
    """API URL for a single branch; the branch name is fully URL-escaped."""
    # safe='' escapes '/' too, so 'feat/x' becomes 'feat%2Fx'.
    return '{}/branches/{}'.format(self.get_base_url(), quote(branch_name, safe=''))
|
||||
|
||||
def get_download_url(self) -> str:
    """Issues collection endpoint for this repository."""
    return self.get_base_url() + '/issues'
|
||||
|
||||
def get_clone_url(self) -> str:
    """HTTPS clone URL with embedded credentials."""
    if self.username:
        credential = f'{self.username}:{self.token}'
    else:
        # Placeholder username accepted for token-only git access.
        credential = f'x-access-token:{self.token}'
    return f'https://{credential}@{self.base_domain}/{self.owner}/{self.repo}.git'
|
||||
|
||||
def get_graphql_url(self) -> str:
    """Return '' — Forgejo does not expose a GraphQL endpoint."""
    return ''
|
||||
|
||||
def get_compare_url(self, branch_name: str) -> str:
    """Web UI compare page for *branch_name*."""
    return 'https://{0}/{1}/{2}/compare/{3}'.format(
        self.base_domain, self.owner, self.repo, branch_name
    )
|
||||
|
||||
def download_issues(self) -> list[Any]:
    """Fetch every open issue for the repository, following pagination.

    Returns:
        All open-issue payload dicts across all pages.

    Raises:
        ValueError: If a page is not a list of dicts.
        httpx.HTTPStatusError: If any page request fails.
    """
    collected: list[Any] = []
    page = 1
    while True:
        response = httpx.get(
            self.download_url,
            headers=self.headers,
            params={'state': 'open', 'limit': '50', 'page': str(page)},
        )
        response.raise_for_status()
        batch = response.json()

        # An empty page marks the end of pagination.
        if not batch:
            return collected

        well_formed = isinstance(batch, list) and all(
            isinstance(item, dict) for item in batch
        )
        if not well_formed:
            raise ValueError(
                'Expected list of dictionaries from Forgejo issues API.'
            )

        collected.extend(batch)
        page += 1
|
||||
|
||||
def get_issue_comments(
    self, issue_number: int, comment_id: int | None = None
) -> list[str] | None:
    """Fetch comment bodies for an issue, following pagination.

    Args:
        issue_number: Number of the issue whose comments to fetch.
        comment_id: When given, return only that comment's body
            (as a one-element list) instead of all bodies.

    Returns:
        A list of comment body strings, or ``None`` when nothing matched.
    """
    url = f'{self.get_download_url()}/{issue_number}/comments'
    page = 1
    params = {'limit': '50', 'page': str(page)}
    all_comments: list[str] = []

    while True:
        response = httpx.get(url, headers=self.headers, params=params)
        response.raise_for_status()
        comments = response.json()

        # An empty page marks the end of pagination.
        if not comments:
            break

        if comment_id is not None:
            # Look for the single requested comment on this page.
            matching_comment = next(
                (
                    comment['body']
                    for comment in comments
                    if self._to_int(comment.get('id')) == comment_id
                ),
                None,
            )
            # NOTE(review): a falsy (empty) body never matches here, so an
            # empty target comment keeps the search going — confirm intended.
            if matching_comment:
                return [matching_comment]
        else:
            # Collect every non-empty comment body.
            all_comments.extend(
                comment['body'] for comment in comments if comment.get('body')
            )

        page += 1
        params = {'limit': '50', 'page': str(page)}

    return all_comments if all_comments else None
|
||||
|
||||
def get_pull_url(self, pr_number: int) -> str:
    """Web URL of a pull request."""
    parts = (self.base_domain, self.owner, self.repo, 'pulls', str(pr_number))
    return 'https://' + '/'.join(parts)
|
||||
|
||||
def get_branch_name(self, base_branch_name: str) -> str:
    """Return *base_branch_name*, or the first free '-tryN' variant of it."""
    candidate = base_branch_name
    suffix = 1
    # Probe the remote until a branch name that does not exist yet is found.
    while self.branch_exists(candidate):
        suffix += 1
        candidate = f'{base_branch_name}-try{suffix}'
    return candidate
|
||||
|
||||
def get_default_branch_name(self) -> str:
    """Return the repository's default branch as reported by the API."""
    response = httpx.get(self.get_base_url(), headers=self.headers)
    response.raise_for_status()
    repo_info = response.json()
    # str() guards against the key being absent (would otherwise be None).
    return str(repo_info.get('default_branch'))
|
||||
|
||||
def branch_exists(self, branch_name: str) -> bool:
    """Check whether *branch_name* exists on the remote repository."""
    status = httpx.get(
        self.get_branch_url(branch_name), headers=self.headers
    ).status_code
    found = status == 200
    logger.info(f'Branch {branch_name} exists: {found}')
    return found
|
||||
|
||||
def reply_to_comment(self, pr_number: int, comment_id: str, reply: str) -> None:
    """Post *reply* as a regular comment referencing *comment_id*.

    Forgejo has no threaded-reply API, so the original comment id is
    embedded in the message body instead.
    """
    self.send_comment_msg(
        pr_number, f'OpenHands reply to comment {comment_id}\n\n{reply}'
    )
|
||||
|
||||
def create_pull_request(self, data: dict[str, Any] | None = None) -> dict[str, Any]:
    """Create a pull request from *data* and return the API response.

    Args:
        data: PR payload (title/body/head/base); ``None`` posts an empty dict.

    Returns:
        The created PR as a dict, normalized so that ``number`` and
        ``html_url`` keys are present when the API provides equivalents.

    Raises:
        RuntimeError: On HTTP 403 (token lacks write access).
        httpx.HTTPStatusError: On any other error response.
    """
    payload = data or {}
    response = httpx.post(
        f'{self.get_base_url()}/pulls', headers=self.headers, json=payload
    )
    if response.status_code == 403:
        raise RuntimeError(
            'Failed to create pull request due to missing permissions. '
            'Ensure the token has write access to the repository.'
        )
    response.raise_for_status()
    pr_data = response.json()
    # Forgejo reports the PR number under 'index'; callers expect 'number'.
    pr_data.setdefault('number', pr_data.get('index'))
    if 'html_url' not in pr_data and 'url' in pr_data:
        pr_data['html_url'] = pr_data['url']
    return dict(pr_data)
|
||||
|
||||
def request_reviewers(self, reviewer: str, pr_number: int) -> None:
    """Ask *reviewer* to review the pull request; failures are only logged."""
    endpoint = f'{self.get_base_url()}/pulls/{pr_number}/requested_reviewers'
    response = httpx.post(
        endpoint,
        headers=self.headers,
        json={'reviewers': [reviewer]},
    )
    accepted = (200, 201, 204)
    if response.status_code not in accepted:
        logger.warning(
            f'Failed to request review from {reviewer}: {response.status_code} {response.text}'
        )
|
||||
|
||||
def send_comment_msg(self, issue_number: int, msg: str) -> None:
    """Post *msg* as a comment on an issue or PR; errors are logged, not raised."""
    response = httpx.post(
        f'{self.get_download_url()}/{issue_number}/comments',
        headers=self.headers,
        json={'body': msg},
    )
    if response.status_code not in (200, 201):
        logger.error(
            f'Failed to post comment: {response.status_code} {response.text}'
        )
|
||||
|
||||
def get_context_from_external_issues_references(
    self,
    closing_issues: list[str],
    closing_issue_numbers: list[int],
    issue_body: str,
    review_comments: list[str] | None,
    review_threads: list[ReviewThread],
    thread_comments: list[str] | None,
) -> list[str]:
    """Append bodies of referenced-but-not-closing issues to *closing_issues*.

    Scans the issue body, review comments, review threads and thread
    comments for issue references, fetches each referenced issue that is
    not already in *closing_issue_numbers*, and appends its body.

    Returns:
        The (mutated) *closing_issues* list.
    """
    new_references: list[int] = []

    if issue_body:
        new_references.extend(extract_issue_references(issue_body))

    if review_comments:
        for comment in review_comments:
            new_references.extend(extract_issue_references(comment))

    if review_threads:
        for thread in review_threads:
            new_references.extend(extract_issue_references(thread.comment))

    if thread_comments:
        for thread_comment in thread_comments:
            new_references.extend(extract_issue_references(thread_comment))

    # Drop duplicates and references already counted as closing issues.
    unique_ids = set(new_references).difference(closing_issue_numbers)

    for issue_number in unique_ids:
        try:
            response = httpx.get(
                f'{self.get_download_url()}/{issue_number}',
                headers=self.headers,
            )
            response.raise_for_status()
            issue_data = response.json()
            body = issue_data.get('body', '')
            if body:
                closing_issues.append(body)
        except httpx.HTTPError as exc:
            # Best effort: an unreachable issue must not abort resolution.
            logger.warning(f'Failed to fetch issue {issue_number}: {exc}')

    return closing_issues
|
||||
|
||||
def get_pull_url_for_issue(self, issue_number: int) -> str:
    """Web URL of an issue (named for interface parity with PR handling)."""
    parts = (self.base_domain, self.owner, self.repo, 'issues', str(issue_number))
    return 'https://' + '/'.join(parts)
|
||||
|
||||
def get_converted_issues(
    self, issue_numbers: list[int] | None = None, comment_id: int | None = None
) -> list[Issue]:
    """Convert the requested Forgejo issues into ``Issue`` objects.

    Args:
        issue_numbers: Issue numbers to convert; required and non-empty.
        comment_id: Optional comment id forwarded to the comment fetch.

    Returns:
        One ``Issue`` per requested issue that has a number and a title.

    Raises:
        ValueError: If no issue numbers were supplied.
    """
    if not issue_numbers:
        raise ValueError('Unspecified issue numbers')

    all_issues = self.download_issues()
    logger.info(f'Limiting resolving to issues {issue_numbers}.')
    # Forgejo payloads may carry the number under 'number' or 'index'.
    filtered = [
        issue
        for issue in all_issues
        if self._to_int(issue.get('number') or issue.get('index')) in issue_numbers
    ]

    converted: list[Issue] = []
    for issue in filtered:
        if any(issue.get(key) is None for key in ['number', 'title']):
            logger.warning(
                f'Skipping issue {issue} as it is missing number or title.'
            )
            continue

        issue_number = self._to_int(issue.get('number') or issue.get('index'))
        body = issue.get('body') or ''
        thread_comments = self.get_issue_comments(issue_number, comment_id)

        # Plain issues have no review data; those fields stay None.
        issue_details = Issue(
            owner=self.owner,
            repo=self.repo,
            number=issue_number,
            title=issue['title'],
            body=body,
            thread_comments=thread_comments,
            review_comments=None,
            review_threads=None,
        )
        converted.append(issue_details)

    return converted
|
||||
|
||||
|
||||
class ForgejoPRHandler(ForgejoIssueHandler):
    """Pull-request variant of the Forgejo handler.

    Inherits all issue behaviour but points ``download_url`` at the
    ``/pulls`` collection instead of ``/issues``.
    """

    def __init__(
        self,
        owner: str,
        repo: str,
        token: str,
        username: str | None = None,
        base_domain: str = 'codeberg.org',
    ):
        super().__init__(owner, repo, token, username, base_domain)
        # The base class set this to the issues endpoint; PRs live under /pulls.
        self.download_url = f'{self.get_base_url()}/pulls'
|
||||
|
||||
def download_pr_metadata(
    self, pull_number: int, comment_id: int | None = None
) -> tuple[list[str], list[int], list[str] | None, list[ReviewThread], list[str]]:
    """Collect context for one pull request.

    Args:
        pull_number: Number of the pull request.
        comment_id: Optional comment id forwarded to the comment fetch.

    Returns:
        Tuple of (closing issue bodies, closing issue numbers, review
        comments, review threads, thread ids). The last two are always
        empty here — no review-thread data is fetched for Forgejo.
    """
    closing_issues: list[str] = []
    closing_issue_numbers: list[int] = []

    try:
        response = httpx.get(
            f'{self.get_base_url()}/pulls/{pull_number}', headers=self.headers
        )
        response.raise_for_status()
        pr_data = response.json()
        body = pr_data.get('body') or ''
        # Issue references in the PR body are treated as closing references.
        closing_refs = extract_issue_references(body)
        closing_issue_numbers.extend(closing_refs)
        if body:
            closing_issues.append(body)
    except httpx.HTTPError as exc:
        # Best effort: missing metadata should not abort resolution.
        logger.warning(f'Failed to fetch PR metadata for {pull_number}: {exc}')

    review_comments = self.get_pr_comments(pull_number, comment_id)
    review_threads: list[ReviewThread] = []
    thread_ids: list[str] = []

    return (
        closing_issues,
        closing_issue_numbers,
        review_comments,
        review_threads,
        thread_ids,
    )
|
||||
|
||||
def get_pr_comments(
    self, pr_number: int, comment_id: int | None = None
) -> list[str] | None:
    """Fetch non-system comment bodies for a pull request, with pagination.

    Args:
        pr_number: Number of the pull request.
        comment_id: When given, return only that comment's body
            (as a one-element list).

    Returns:
        Comment body strings, or ``None`` when nothing matched.
    """
    url = f'{self.get_base_url()}/pulls/{pr_number}/comments'
    page = 1
    params = {'limit': '50', 'page': str(page)}
    collected: list[str] = []

    while True:
        response = httpx.get(url, headers=self.headers, params=params)
        response.raise_for_status()
        comments = response.json()

        # An empty page marks the end of pagination.
        if not comments:
            break

        # Drop auto-generated system comments (merge/label events, ...).
        filtered = [
            comment for comment in comments if not comment.get('is_system', False)
        ]

        if comment_id is not None:
            # Look for the single requested comment on this page.
            matching = next(
                (
                    comment['body']
                    for comment in filtered
                    if self._to_int(comment.get('id')) == comment_id
                ),
                None,
            )
            # NOTE(review): a falsy (empty) body never matches — confirm intended.
            if matching:
                return [matching]
        else:
            collected.extend(
                comment['body'] for comment in filtered if comment.get('body')
            )

        page += 1
        params = {'limit': '50', 'page': str(page)}

    return collected if collected else None
|
||||
|
||||
def get_context_from_external_issues_references(
    self,
    closing_issues: list[str],
    closing_issue_numbers: list[int],
    issue_body: str,
    review_comments: list[str] | None,
    review_threads: list[ReviewThread],
    thread_comments: list[str] | None,
) -> list[str]:
    """Delegate to the issue-handler implementation unchanged.

    NOTE(review): this override adds nothing over the inherited method and
    could be removed; kept as-is to preserve the class surface.
    """
    return super().get_context_from_external_issues_references(
        closing_issues,
        closing_issue_numbers,
        issue_body,
        review_comments,
        review_threads,
        thread_comments,
    )
|
||||
|
||||
def get_converted_issues(
    self, issue_numbers: list[int] | None = None, comment_id: int | None = None
) -> list[Issue]:
    """Convert the requested pull requests into ``Issue`` objects.

    Args:
        issue_numbers: PR numbers to convert; required and non-empty.
        comment_id: Optional comment id used to narrow fetched comments.

    Returns:
        One ``Issue`` per requested PR that has both a number and a title.

    Raises:
        ValueError: If no PR numbers were supplied.
        httpx.HTTPStatusError: If the PR listing request fails.
    """
    if not issue_numbers:
        raise ValueError('Unspecified issue numbers')

    # Fetch all PRs, following pagination. The previous implementation read
    # only the first (default-sized) page, silently missing PRs on busy
    # repositories; this mirrors the pagination used by download_issues.
    all_prs: list[Any] = []
    page = 1
    while True:
        response = httpx.get(
            self.download_url,
            headers=self.headers,
            params={'limit': '50', 'page': str(page)},
        )
        response.raise_for_status()
        batch = response.json()
        if not batch:
            break
        all_prs.extend(batch)
        page += 1

    logger.info(f'Limiting resolving to PRs {issue_numbers}.')
    # Forgejo payloads may carry the number under 'number' or 'index'.
    filtered = [
        pr
        for pr in all_prs
        if self._to_int(pr.get('number') or pr.get('index')) in issue_numbers
    ]

    converted: list[Issue] = []
    for pr in filtered:
        if any(pr.get(key) is None for key in ['number', 'title']):
            logger.warning(f'Skipping PR {pr} as it is missing number or title.')
            continue

        body = pr.get('body') or ''
        pr_number = self._to_int(pr.get('number') or pr.get('index', 0))
        (
            closing_issues,
            closing_issue_numbers,
            review_comments,
            review_threads,
            thread_ids,
        ) = self.download_pr_metadata(pr_number, comment_id)
        # The source branch is nested under head.ref in the PR payload.
        head_branch = (pr.get('head') or {}).get('ref')
        thread_comments = self.get_pr_comments(pr_number, comment_id)

        # Pull in bodies of externally referenced issues for extra context.
        closing_issues = self.get_context_from_external_issues_references(
            closing_issues,
            closing_issue_numbers,
            body,
            review_comments,
            review_threads,
            thread_comments,
        )

        issue_details = Issue(
            owner=self.owner,
            repo=self.repo,
            number=pr_number,
            title=pr['title'],
            body=body,
            closing_issues=closing_issues,
            review_comments=review_comments,
            review_threads=review_threads,
            thread_ids=thread_ids,
            head_branch=head_branch,
            thread_comments=thread_comments,
        )

        converted.append(issue_details)

    return converted
|
||||
@@ -5,6 +5,10 @@ from openhands.resolver.interfaces.bitbucket import (
|
||||
BitbucketIssueHandler,
|
||||
BitbucketPRHandler,
|
||||
)
|
||||
from openhands.resolver.interfaces.forgejo import (
|
||||
ForgejoIssueHandler,
|
||||
ForgejoPRHandler,
|
||||
)
|
||||
from openhands.resolver.interfaces.github import GithubIssueHandler, GithubPRHandler
|
||||
from openhands.resolver.interfaces.gitlab import GitlabIssueHandler, GitlabPRHandler
|
||||
from openhands.resolver.interfaces.issue_definitions import (
|
||||
@@ -69,6 +73,17 @@ class IssueHandlerFactory:
|
||||
),
|
||||
self.llm_config,
|
||||
)
|
||||
elif self.platform == ProviderType.FORGEJO:
|
||||
return ServiceContextIssue(
|
||||
ForgejoIssueHandler(
|
||||
self.owner,
|
||||
self.repo,
|
||||
self.token,
|
||||
self.username,
|
||||
self.base_domain,
|
||||
),
|
||||
self.llm_config,
|
||||
)
|
||||
elif self.platform == ProviderType.AZURE_DEVOPS:
|
||||
# Parse owner as organization/project
|
||||
parts = self.owner.split('/')
|
||||
@@ -125,6 +140,17 @@ class IssueHandlerFactory:
|
||||
),
|
||||
self.llm_config,
|
||||
)
|
||||
elif self.platform == ProviderType.FORGEJO:
|
||||
return ServiceContextPR(
|
||||
ForgejoPRHandler(
|
||||
self.owner,
|
||||
self.repo,
|
||||
self.token,
|
||||
self.username,
|
||||
self.base_domain,
|
||||
),
|
||||
self.llm_config,
|
||||
)
|
||||
elif self.platform == ProviderType.AZURE_DEVOPS:
|
||||
# Parse owner as organization/project
|
||||
parts = self.owner.split('/')
|
||||
|
||||
@@ -54,6 +54,7 @@ class IssueResolver:
|
||||
|
||||
def __init__(self, args: Namespace) -> None:
|
||||
"""Initialize the IssueResolver with the given parameters.
|
||||
|
||||
Params initialized:
|
||||
owner: Owner of the repo.
|
||||
repo: Repository name.
|
||||
@@ -82,6 +83,7 @@ class IssueResolver:
|
||||
or os.getenv('GITLAB_TOKEN')
|
||||
or os.getenv('BITBUCKET_TOKEN')
|
||||
or os.getenv('AZURE_DEVOPS_TOKEN')
|
||||
or os.getenv('FORGEJO_TOKEN')
|
||||
)
|
||||
username = args.username if args.username else os.getenv('GIT_USERNAME')
|
||||
if not username:
|
||||
|
||||
@@ -13,6 +13,7 @@ from openhands.integrations.service_types import ProviderType
|
||||
from openhands.llm.llm import LLM
|
||||
from openhands.resolver.interfaces.azure_devops import AzureDevOpsIssueHandler
|
||||
from openhands.resolver.interfaces.bitbucket import BitbucketIssueHandler
|
||||
from openhands.resolver.interfaces.forgejo import ForgejoIssueHandler
|
||||
from openhands.resolver.interfaces.github import GithubIssueHandler
|
||||
from openhands.resolver.interfaces.gitlab import GitlabIssueHandler
|
||||
from openhands.resolver.interfaces.issue import Issue
|
||||
@@ -26,6 +27,10 @@ from openhands.resolver.utils import identify_token
|
||||
from openhands.utils.async_utils import GENERAL_TIMEOUT, call_async_from_sync
|
||||
from openhands.utils.environment import get_effective_llm_base_url
|
||||
|
||||
PR_SIGNATURE = (
|
||||
'Automatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌'
|
||||
)
|
||||
|
||||
|
||||
def apply_patch(repo_dir: str, patch: str) -> None:
|
||||
"""Apply a patch to a repository.
|
||||
@@ -248,7 +253,7 @@ def send_pull_request(
|
||||
git_user_name: str = 'openhands',
|
||||
git_user_email: str = 'openhands@all-hands.dev',
|
||||
) -> str:
|
||||
"""Send a pull request to a GitHub, GitLab, Bitbucket, or Azure DevOps repository.
|
||||
"""Send a pull request to a GitHub, GitLab, Bitbucket, Forgejo, or Azure DevOps repository.
|
||||
|
||||
Args:
|
||||
issue: The issue to send the pull request for
|
||||
@@ -262,21 +267,22 @@ def send_pull_request(
|
||||
target_branch: The target branch to create the pull request against (defaults to repository default branch)
|
||||
reviewer: The username of the reviewer to assign
|
||||
pr_title: Custom title for the pull request (optional)
|
||||
base_domain: The base domain for the git server (defaults to "github.com" for GitHub, "gitlab.com" for GitLab, "bitbucket.org" for Bitbucket, and "dev.azure.com" for Azure DevOps)
|
||||
base_domain: The base domain for the git server (defaults to "github.com" for GitHub, "gitlab.com" for GitLab, "bitbucket.org" for Bitbucket, "codeberg.org" for Forgejo, and "dev.azure.com" for Azure DevOps)
|
||||
git_user_name: Git username to configure when creating commits
|
||||
git_user_email: Git email to configure when creating commits
|
||||
"""
|
||||
if pr_type not in ['branch', 'draft', 'ready']:
|
||||
raise ValueError(f'Invalid pr_type: {pr_type}')
|
||||
|
||||
# Determine default base_domain based on platform
|
||||
if base_domain is None:
|
||||
if platform == ProviderType.GITHUB:
|
||||
base_domain = 'github.com'
|
||||
elif platform == ProviderType.GITLAB:
|
||||
base_domain = 'gitlab.com'
|
||||
elif platform == ProviderType.AZURE_DEVOPS:
|
||||
base_domain = 'dev.azure.com'
|
||||
else: # platform == ProviderType.BITBUCKET
|
||||
base_domain = 'bitbucket.org'
|
||||
base_domain = {
|
||||
ProviderType.GITHUB: 'github.com',
|
||||
ProviderType.GITLAB: 'gitlab.com',
|
||||
ProviderType.BITBUCKET: 'bitbucket.org',
|
||||
ProviderType.FORGEJO: 'codeberg.org',
|
||||
ProviderType.AZURE_DEVOPS: 'dev.azure.com',
|
||||
}.get(platform, 'github.com')
|
||||
|
||||
# Create the appropriate handler based on platform
|
||||
handler = None
|
||||
@@ -297,6 +303,11 @@ def send_pull_request(
|
||||
),
|
||||
None,
|
||||
)
|
||||
elif platform == ProviderType.FORGEJO:
|
||||
handler = ServiceContextIssue(
|
||||
ForgejoIssueHandler(issue.owner, issue.repo, token, username, base_domain),
|
||||
None,
|
||||
)
|
||||
elif platform == ProviderType.AZURE_DEVOPS:
|
||||
# For Azure DevOps, owner is "organization/project"
|
||||
organization, project = issue.owner.split('/')
|
||||
@@ -360,11 +371,11 @@ def send_pull_request(
|
||||
pr_body = f'This pull request fixes #{issue.number}.'
|
||||
if additional_message:
|
||||
pr_body += f'\n\n{additional_message}'
|
||||
pr_body += '\n\nAutomatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌'
|
||||
pr_body += f'\n\n{PR_SIGNATURE}'
|
||||
|
||||
# For cross repo pull request, we need to send head parameter like fork_owner:branch as per git documentation here : https://docs.github.com/en/rest/pulls/pulls?apiVersion=2022-11-28#create-a-pull-request
|
||||
# head parameter usage : The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace head with a user like this: username:branch.
|
||||
if fork_owner and platform == ProviderType.GITHUB:
|
||||
if fork_owner and platform in (ProviderType.GITHUB, ProviderType.FORGEJO):
|
||||
head_branch = f'{fork_owner}:{branch_name}'
|
||||
else:
|
||||
head_branch = branch_name
|
||||
@@ -374,17 +385,40 @@ def send_pull_request(
|
||||
url = handler.get_compare_url(branch_name)
|
||||
else:
|
||||
# Prepare the PR for the GitHub API
|
||||
data = {
|
||||
'title': final_pr_title,
|
||||
('body' if platform == ProviderType.GITHUB else 'description'): pr_body,
|
||||
(
|
||||
'head' if platform == ProviderType.GITHUB else 'source_branch'
|
||||
): head_branch,
|
||||
(
|
||||
'base' if platform == ProviderType.GITHUB else 'target_branch'
|
||||
): base_branch,
|
||||
'draft': pr_type == 'draft',
|
||||
}
|
||||
if platform == ProviderType.GITHUB:
|
||||
data = {
|
||||
'title': final_pr_title,
|
||||
'body': pr_body,
|
||||
'head': head_branch,
|
||||
'base': base_branch,
|
||||
'draft': pr_type == 'draft',
|
||||
}
|
||||
elif platform == ProviderType.GITLAB:
|
||||
data = {
|
||||
'title': final_pr_title,
|
||||
'description': pr_body,
|
||||
'source_branch': head_branch,
|
||||
'target_branch': base_branch,
|
||||
'draft': pr_type == 'draft',
|
||||
}
|
||||
elif platform == ProviderType.BITBUCKET:
|
||||
data = {
|
||||
'title': final_pr_title,
|
||||
'description': pr_body,
|
||||
'source_branch': head_branch,
|
||||
'target_branch': base_branch,
|
||||
'draft': pr_type == 'draft',
|
||||
}
|
||||
elif platform == ProviderType.FORGEJO:
|
||||
data = {
|
||||
'title': final_pr_title,
|
||||
'body': pr_body,
|
||||
'head': head_branch,
|
||||
'base': base_branch,
|
||||
'draft': pr_type == 'draft',
|
||||
}
|
||||
else:
|
||||
raise ValueError(f'Unsupported platform for PR creation: {platform}')
|
||||
|
||||
pr_data = handler.create_pull_request(data)
|
||||
url = pr_data['html_url']
|
||||
@@ -429,13 +463,13 @@ def update_existing_pull_request(
|
||||
|
||||
# Determine default base_domain based on platform
|
||||
if base_domain is None:
|
||||
base_domain = (
|
||||
'github.com'
|
||||
if platform == ProviderType.GITHUB
|
||||
else 'gitlab.com'
|
||||
if platform == ProviderType.GITLAB
|
||||
else 'dev.azure.com'
|
||||
)
|
||||
base_domain = {
|
||||
ProviderType.GITHUB: 'github.com',
|
||||
ProviderType.GITLAB: 'gitlab.com',
|
||||
ProviderType.AZURE_DEVOPS: 'dev.azure.com',
|
||||
ProviderType.BITBUCKET: 'bitbucket.org',
|
||||
ProviderType.FORGEJO: 'codeberg.org',
|
||||
}.get(platform, 'github.com')
|
||||
|
||||
handler = None
|
||||
if platform == ProviderType.GITHUB:
|
||||
@@ -443,6 +477,11 @@ def update_existing_pull_request(
|
||||
GithubIssueHandler(issue.owner, issue.repo, token, username, base_domain),
|
||||
llm_config,
|
||||
)
|
||||
elif platform == ProviderType.GITLAB:
|
||||
handler = ServiceContextIssue(
|
||||
GitlabIssueHandler(issue.owner, issue.repo, token, username, base_domain),
|
||||
llm_config,
|
||||
)
|
||||
elif platform == ProviderType.AZURE_DEVOPS:
|
||||
# For Azure DevOps, owner is "organization/project"
|
||||
organization, project = issue.owner.split('/')
|
||||
@@ -450,11 +489,20 @@ def update_existing_pull_request(
|
||||
AzureDevOpsIssueHandler(token, organization, project, issue.repo),
|
||||
llm_config,
|
||||
)
|
||||
else: # platform == ProviderType.GITLAB
|
||||
elif platform == ProviderType.BITBUCKET:
|
||||
handler = ServiceContextIssue(
|
||||
GitlabIssueHandler(issue.owner, issue.repo, token, username, base_domain),
|
||||
BitbucketIssueHandler(
|
||||
issue.owner, issue.repo, token, username, base_domain
|
||||
),
|
||||
llm_config,
|
||||
)
|
||||
elif platform == ProviderType.FORGEJO:
|
||||
handler = ServiceContextIssue(
|
||||
ForgejoIssueHandler(issue.owner, issue.repo, token, username, base_domain),
|
||||
llm_config,
|
||||
)
|
||||
else:
|
||||
raise ValueError(f'Unsupported platform: {platform}')
|
||||
|
||||
branch_name = issue.head_branch
|
||||
|
||||
@@ -503,7 +551,10 @@ def update_existing_pull_request(
|
||||
comment_message = response.choices[0].message.content.strip()
|
||||
|
||||
except (json.JSONDecodeError, TypeError):
|
||||
comment_message = f'A new OpenHands update is available, but failed to parse or summarize the changes:\n{additional_message}'
|
||||
comment_message = (
|
||||
'A new OpenHands update is available, but failed to parse or summarize '
|
||||
f'the changes:\n{additional_message}'
|
||||
)
|
||||
|
||||
# Post a comment on the PR
|
||||
if comment_message:
|
||||
@@ -727,10 +778,11 @@ def main() -> None:
|
||||
or os.getenv('GITHUB_TOKEN')
|
||||
or os.getenv('GITLAB_TOKEN')
|
||||
or os.getenv('AZURE_DEVOPS_TOKEN')
|
||||
or os.getenv('FORGEJO_TOKEN')
|
||||
)
|
||||
if not token:
|
||||
raise ValueError(
|
||||
'token is not set, set via --token or GITHUB_TOKEN, GITLAB_TOKEN, or AZURE_DEVOPS_TOKEN environment variable.'
|
||||
'token is not set, set via --token or GITHUB_TOKEN, GITLAB_TOKEN, AZURE_DEVOPS_TOKEN, or FORGEJO_TOKEN environment variable.'
|
||||
)
|
||||
username = my_args.username if my_args.username else os.getenv('GIT_USERNAME')
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
from types import MappingProxyType
|
||||
from typing import cast
|
||||
from typing import Annotated, cast
|
||||
|
||||
from fastapi import APIRouter, Depends, Query, status
|
||||
from fastapi.responses import JSONResponse
|
||||
@@ -67,7 +67,7 @@ async def get_user_installations(
|
||||
@app.get('/repositories', response_model=list[Repository])
|
||||
async def get_user_repositories(
|
||||
sort: str = 'pushed',
|
||||
selected_provider: ProviderType | None = None,
|
||||
selected_provider: Annotated[ProviderType | None, Query()] = None,
|
||||
page: int | None = None,
|
||||
per_page: int | None = None,
|
||||
installation_id: str | None = None,
|
||||
@@ -137,7 +137,7 @@ async def search_repositories(
|
||||
per_page: int = 5,
|
||||
sort: str = 'stars',
|
||||
order: str = 'desc',
|
||||
selected_provider: ProviderType | None = None,
|
||||
selected_provider: Annotated[ProviderType | None, Query()] = None,
|
||||
provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens),
|
||||
access_token: SecretStr | None = Depends(get_access_token),
|
||||
user_id: str | None = Depends(get_user_id),
|
||||
@@ -171,7 +171,7 @@ async def search_branches(
|
||||
repository: str,
|
||||
query: str,
|
||||
per_page: int = 30,
|
||||
selected_provider: ProviderType | None = None,
|
||||
selected_provider: Annotated[ProviderType | None, Query()] = None,
|
||||
provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens),
|
||||
access_token: SecretStr | None = Depends(get_access_token),
|
||||
user_id: str | None = Depends(get_user_id),
|
||||
@@ -243,6 +243,7 @@ async def get_repository_branches(
|
||||
repository: str,
|
||||
page: int = 1,
|
||||
per_page: int = 30,
|
||||
selected_provider: Annotated[ProviderType | None, Query()] = None,
|
||||
provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens),
|
||||
access_token: SecretStr | None = Depends(get_access_token),
|
||||
user_id: str | None = Depends(get_user_id),
|
||||
@@ -253,6 +254,7 @@ async def get_repository_branches(
|
||||
repository: The repository name in the format 'owner/repo'
|
||||
page: Page number for pagination (default: 1)
|
||||
per_page: Number of branches per page (default: 30)
|
||||
selected_provider: Optional provider hint to avoid trying other providers
|
||||
|
||||
Returns:
|
||||
A paginated response with branches for the repository
|
||||
@@ -263,7 +265,10 @@ async def get_repository_branches(
|
||||
)
|
||||
try:
|
||||
branches_response: PaginatedBranchesResponse = await client.get_branches(
|
||||
repository, page=page, per_page=per_page
|
||||
repository,
|
||||
specified_provider=selected_provider,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
)
|
||||
return branches_response
|
||||
|
||||
|
||||
Reference in New Issue
Block a user