mirror of https://github.com/All-Hands-AI/OpenHands.git
synced 2026-01-08 22:38:05 -05:00

Forgejo integration (#11111)

Co-authored-by: johba <admin@noreply.localhost>
Co-authored-by: openhands <openhands@all-hands.dev>
Co-authored-by: johba <johba@harb.eth>
Co-authored-by: enyst <engel.nyst@gmail.com>
Co-authored-by: Graham Neubig <neubig@gmail.com>
Co-authored-by: MrGeorgen <65063405+MrGeorgen@users.noreply.github.com>
Co-authored-by: MrGeorgen <moinl6162@gmail.com>
@@ -298,6 +298,7 @@ describe("Form submission", () => {
        gitlab: { token: "", host: "" },
        bitbucket: { token: "", host: "" },
        azure_devops: { token: "", host: "" },
        forgejo: { token: "", host: "" },
      });
    });

@@ -320,6 +321,7 @@ describe("Form submission", () => {
        gitlab: { token: "test-token", host: "" },
        bitbucket: { token: "", host: "" },
        azure_devops: { token: "", host: "" },
        forgejo: { token: "", host: "" },
      });
    });

@@ -342,6 +344,7 @@ describe("Form submission", () => {
        gitlab: { token: "", host: "" },
        bitbucket: { token: "test-token", host: "" },
        azure_devops: { token: "", host: "" },
        forgejo: { token: "", host: "" },
      });
    });

@@ -364,6 +367,7 @@ describe("Form submission", () => {
        gitlab: { token: "", host: "" },
        bitbucket: { token: "", host: "" },
        azure_devops: { token: "test-token", host: "" },
        forgejo: { token: "", host: "" },
      });
    });

@@ -131,9 +131,18 @@ class GitService {
    repository: string,
    page: number = 1,
    perPage: number = 30,
    selectedProvider?: Provider,
  ): Promise<PaginatedBranchesResponse> {
    const { data } = await openHands.get<PaginatedBranchesResponse>(
      `/api/user/repository/branches?repository=${encodeURIComponent(repository)}&page=${page}&per_page=${perPage}`,
      `/api/user/repository/branches`,
      {
        params: {
          repository,
          page,
          per_page: perPage,
          selected_provider: selectedProvider,
        },
      },
    );

    return data;
@@ -0,0 +1,64 @@
import { useTranslation } from "react-i18next";
import { I18nKey } from "#/i18n/declaration";
import { SettingsInput } from "../settings-input";
import { KeyStatusIcon } from "../key-status-icon";
import { cn } from "#/utils/utils";

interface ForgejoTokenInputProps {
  onChange: (value: string) => void;
  onForgejoHostChange: (value: string) => void;
  isForgejoTokenSet: boolean;
  name: string;
  forgejoHostSet: string | null | undefined;
  className?: string;
}

export function ForgejoTokenInput({
  onChange,
  onForgejoHostChange,
  isForgejoTokenSet,
  name,
  forgejoHostSet,
  className,
}: ForgejoTokenInputProps) {
  const { t } = useTranslation();

  return (
    <div className={cn("flex flex-col gap-6", className)}>
      <SettingsInput
        testId={name}
        name={name}
        onChange={onChange}
        label={t(I18nKey.FORGEJO$TOKEN_LABEL)}
        type="password"
        className="w-full max-w-[680px]"
        placeholder={isForgejoTokenSet ? "<hidden>" : ""}
        startContent={
          isForgejoTokenSet && (
            <KeyStatusIcon
              testId="forgejo-set-token-indicator"
              isSet={isForgejoTokenSet}
            />
          )
        }
      />

      <SettingsInput
        onChange={onForgejoHostChange || (() => {})}
        name="forgejo-host-input"
        testId="forgejo-host-input"
        label={t(I18nKey.FORGEJO$HOST_LABEL)}
        type="text"
        className="w-full max-w-[680px]"
        placeholder="codeberg.org"
        defaultValue={forgejoHostSet || undefined}
        startContent={
          forgejoHostSet &&
          forgejoHostSet.trim() !== "" && (
            <KeyStatusIcon testId="forgejo-set-host-indicator" isSet />
          )
        }
      />
    </div>
  );
}
@@ -31,13 +31,7 @@ interface ConversationSubscriptionsContextType {
  subscribeToConversation: (options: {
    conversationId: string;
    sessionApiKey: string | null;
    providersSet: (
      | "github"
      | "gitlab"
      | "bitbucket"
      | "azure_devops"
      | "enterprise_sso"
    )[];
    providersSet: import("#/types/settings").Provider[];
    baseUrl: string;
    socketPath?: string;
    onEvent?: (event: unknown, conversationId: string) => void;

@@ -141,13 +135,7 @@ export function ConversationSubscriptionsProvider({
    (options: {
      conversationId: string;
      sessionApiKey: string | null;
      providersSet: (
        | "github"
        | "gitlab"
        | "bitbucket"
        | "azure_devops"
        | "enterprise_sso"
      )[];
      providersSet: import("#/types/settings").Provider[];
      baseUrl: string;
      socketPath?: string;
      onEvent?: (event: unknown, conversationId: string) => void;

@@ -20,7 +20,7 @@ export function useBranchData(
    isLoading,
    isFetchingNextPage,
    isError,
  } = useRepositoryBranchesPaginated(repository);
  } = useRepositoryBranchesPaginated(repository, 30, provider);

  // Search branches when user types
  const { data: searchData, isLoading: isSearchLoading } = useSearchBranches(
@@ -1,13 +1,22 @@
import { useQuery, useInfiniteQuery } from "@tanstack/react-query";
import GitService from "#/api/git-service/git-service.api";
import { Branch, PaginatedBranchesResponse } from "#/types/git";
import { Provider } from "#/types/settings";

export const useRepositoryBranches = (repository: string | null) =>
export const useRepositoryBranches = (
  repository: string | null,
  selectedProvider?: Provider,
) =>
  useQuery<Branch[]>({
    queryKey: ["repository", repository, "branches"],
    queryKey: ["repository", repository, "branches", selectedProvider],
    queryFn: async () => {
      if (!repository) return [];
      const response = await GitService.getRepositoryBranches(repository);
      const response = await GitService.getRepositoryBranches(
        repository,
        1,
        30,
        selectedProvider,
      );
      // Ensure we return an array even if the response is malformed
      return Array.isArray(response.branches) ? response.branches : [];
    },

@@ -18,9 +27,17 @@ export const useRepositoryBranches = (repository: string | null) =>
export const useRepositoryBranchesPaginated = (
  repository: string | null,
  perPage: number = 30,
  selectedProvider?: Provider,
) =>
  useInfiniteQuery<PaginatedBranchesResponse, Error>({
    queryKey: ["repository", repository, "branches", "paginated", perPage],
    queryKey: [
      "repository",
      repository,
      "branches",
      "paginated",
      perPage,
      selectedProvider,
    ],
    queryFn: async ({ pageParam = 1 }) => {
      if (!repository) {
        return {

@@ -35,6 +52,7 @@ export const useRepositoryBranchesPaginated = (
        repository,
        pageParam as number,
        perPage,
        selectedProvider,
      );
    },
    enabled: !!repository,

@@ -82,7 +82,7 @@ export const useCreateConversationAndSubscribeMultiple = () => {
      subscribeToConversation({
        conversationId,
        sessionApiKey,
        providersSet: providers,
        providersSet: providers as Provider[],
        baseUrl,
        socketPath: conversationData.socketPath,
        onEvent: conversationData.onEventCallback,
@@ -14,6 +14,8 @@ export enum I18nKey {
  MICROAGENT$WHAT_TO_REMEMBER = "MICROAGENT$WHAT_TO_REMEMBER",
  MICROAGENT$ADD_TRIGGERS = "MICROAGENT$ADD_TRIGGERS",
  MICROAGENT$WAIT_FOR_RUNTIME = "MICROAGENT$WAIT_FOR_RUNTIME",
  FORGEJO$TOKEN_LABEL = "FORGEJO$TOKEN_LABEL",
  FORGEJO$HOST_LABEL = "FORGEJO$HOST_LABEL",
  MICROAGENT$ADDING_CONTEXT = "MICROAGENT$ADDING_CONTEXT",
  MICROAGENT$VIEW_CONVERSATION = "MICROAGENT$VIEW_CONVERSATION",
  MICROAGENT$SUCCESS_PR_READY = "MICROAGENT$SUCCESS_PR_READY",
@@ -223,6 +223,38 @@
    "de": "Bitte warten Sie, bis die Laufzeitumgebung aktiv ist.",
    "uk": "Будь ласка, зачекайте, поки середовище виконання стане активним."
  },
  "FORGEJO$TOKEN_LABEL": {
    "en": "Forgejo Personal Access Token",
    "ja": "Forgejo 個人用アクセス トークン",
    "zh-CN": "Forgejo 个人访问令牌",
    "zh-TW": "Forgejo 個人存取權杖",
    "ko-KR": "Forgejo 개인 액세스 토큰",
    "no": "Forgejo personlig tilgangstoken",
    "it": "Token di accesso personale Forgejo",
    "pt": "Token de acesso pessoal do Forgejo",
    "es": "Token de acceso personal de Forgejo",
    "ar": "رمز الوصول الشخصي لـ Forgejo",
    "fr": "Jeton d'accès personnel Forgejo",
    "tr": "Forgejo kişisel erişim belirteci",
    "de": "Forgejo persönliches Zugriffstoken",
    "uk": "Персональний токен доступу Forgejo"
  },
  "FORGEJO$HOST_LABEL": {
    "en": "Forgejo Host (domain)",
    "ja": "Forgejo ホスト (ドメイン)",
    "zh-CN": "Forgejo 主机(域名)",
    "zh-TW": "Forgejo 主機(網域)",
    "ko-KR": "Forgejo 호스트(도메인)",
    "no": "Forgejo vert (domene)",
    "it": "Host Forgejo (dominio)",
    "pt": "Host do Forgejo (domínio)",
    "es": "Host de Forgejo (dominio)",
    "ar": "مضيف Forgejo (نطاق)",
    "fr": "Hôte Forgejo (domaine)",
    "tr": "Forgejo ana makinesi (alan adı)",
    "de": "Forgejo Host (Domain)",
    "uk": "Хост Forgejo (домен)"
  },
  "MICROAGENT$ADDING_CONTEXT": {
    "en": "OpenHands is adding this new context to your respository. We'll let you know when the pull request is ready.",
    "ja": "OpenHandsはこの新しいコンテキストをあなたのリポジトリに追加しています。プルリクエストの準備ができたらお知らせします。",
@@ -8,6 +8,7 @@ import { GitHubTokenInput } from "#/components/features/settings/git-settings/gi
import { GitLabTokenInput } from "#/components/features/settings/git-settings/gitlab-token-input";
import { BitbucketTokenInput } from "#/components/features/settings/git-settings/bitbucket-token-input";
import { AzureDevOpsTokenInput } from "#/components/features/settings/git-settings/azure-devops-token-input";
import { ForgejoTokenInput } from "#/components/features/settings/git-settings/forgejo-token-input";
import { ConfigureGitHubRepositoriesAnchor } from "#/components/features/settings/git-settings/configure-github-repositories-anchor";
import { InstallSlackAppAnchor } from "#/components/features/settings/git-settings/install-slack-app-anchor";
import { I18nKey } from "#/i18n/declaration";

@@ -40,6 +41,8 @@ function GitSettingsScreen() {
    React.useState(false);
  const [azureDevOpsTokenInputHasValue, setAzureDevOpsTokenInputHasValue] =
    React.useState(false);
  const [forgejoTokenInputHasValue, setForgejoTokenInputHasValue] =
    React.useState(false);

  const [githubHostInputHasValue, setGithubHostInputHasValue] =
    React.useState(false);

@@ -49,17 +52,21 @@ function GitSettingsScreen() {
    React.useState(false);
  const [azureDevOpsHostInputHasValue, setAzureDevOpsHostInputHasValue] =
    React.useState(false);
  const [forgejoHostInputHasValue, setForgejoHostInputHasValue] =
    React.useState(false);

  const existingGithubHost = settings?.provider_tokens_set.github;
  const existingGitlabHost = settings?.provider_tokens_set.gitlab;
  const existingBitbucketHost = settings?.provider_tokens_set.bitbucket;
  const existingAzureDevOpsHost = settings?.provider_tokens_set.azure_devops;
  const existingForgejoHost = settings?.provider_tokens_set.forgejo;

  const isSaas = config?.APP_MODE === "saas";
  const isGitHubTokenSet = providers.includes("github");
  const isGitLabTokenSet = providers.includes("gitlab");
  const isBitbucketTokenSet = providers.includes("bitbucket");
  const isAzureDevOpsTokenSet = providers.includes("azure_devops");
  const isForgejoTokenSet = providers.includes("forgejo");

  const formAction = async (formData: FormData) => {
    const disconnectButtonClicked =

@@ -70,18 +77,36 @@ function GitSettingsScreen() {
      return;
    }

    const githubToken = formData.get("github-token-input")?.toString() || "";
    const gitlabToken = formData.get("gitlab-token-input")?.toString() || "";
    const bitbucketToken =
      formData.get("bitbucket-token-input")?.toString() || "";
    const azureDevOpsToken =
      formData.get("azure-devops-token-input")?.toString() || "";
    const githubHost = formData.get("github-host-input")?.toString() || "";
    const gitlabHost = formData.get("gitlab-host-input")?.toString() || "";
    const bitbucketHost =
      formData.get("bitbucket-host-input")?.toString() || "";
    const azureDevOpsHost =
      formData.get("azure-devops-host-input")?.toString() || "";
    const githubToken = (
      formData.get("github-token-input")?.toString() || ""
    ).trim();
    const gitlabToken = (
      formData.get("gitlab-token-input")?.toString() || ""
    ).trim();
    const bitbucketToken = (
      formData.get("bitbucket-token-input")?.toString() || ""
    ).trim();
    const azureDevOpsToken = (
      formData.get("azure-devops-token-input")?.toString() || ""
    ).trim();
    const forgejoToken = (
      formData.get("forgejo-token-input")?.toString() || ""
    ).trim();
    const githubHost = (
      formData.get("github-host-input")?.toString() || ""
    ).trim();
    const gitlabHost = (
      formData.get("gitlab-host-input")?.toString() || ""
    ).trim();
    const bitbucketHost = (
      formData.get("bitbucket-host-input")?.toString() || ""
    ).trim();
    const azureDevOpsHost = (
      formData.get("azure-devops-host-input")?.toString() || ""
    ).trim();
    const forgejoHost = (
      formData.get("forgejo-host-input")?.toString() || ""
    ).trim();

    // Create providers object with all tokens
    const providerTokens: Record<string, { token: string; host: string }> = {

@@ -89,6 +114,7 @@ function GitSettingsScreen() {
      gitlab: { token: gitlabToken, host: gitlabHost },
      bitbucket: { token: bitbucketToken, host: bitbucketHost },
      azure_devops: { token: azureDevOpsToken, host: azureDevOpsHost },
      forgejo: { token: forgejoToken, host: forgejoHost },
    };

    saveGitProviders(

@@ -108,10 +134,12 @@ function GitSettingsScreen() {
          setGitlabTokenInputHasValue(false);
          setBitbucketTokenInputHasValue(false);
          setAzureDevOpsTokenInputHasValue(false);
          setForgejoTokenInputHasValue(false);
          setGithubHostInputHasValue(false);
          setGitlabHostInputHasValue(false);
          setBitbucketHostInputHasValue(false);
          setAzureDevOpsHostInputHasValue(false);
          setForgejoHostInputHasValue(false);
        },
      },
    );

@@ -122,10 +150,12 @@ function GitSettingsScreen() {
    !gitlabTokenInputHasValue &&
    !bitbucketTokenInputHasValue &&
    !azureDevOpsTokenInputHasValue &&
    !forgejoTokenInputHasValue &&
    !githubHostInputHasValue &&
    !gitlabHostInputHasValue &&
    !bitbucketHostInputHasValue &&
    !azureDevOpsHostInputHasValue;
    !azureDevOpsHostInputHasValue &&
    !forgejoHostInputHasValue;
  const shouldRenderExternalConfigureButtons = isSaas && config.APP_SLUG;
  const shouldRenderProjectManagementIntegrations =
    config?.FEATURE_FLAGS?.ENABLE_JIRA ||

@@ -226,6 +256,20 @@ function GitSettingsScreen() {
                azureDevOpsHostSet={existingAzureDevOpsHost}
              />
            )}

            {!isSaas && (
              <ForgejoTokenInput
                name="forgejo-token-input"
                isForgejoTokenSet={isForgejoTokenSet}
                onChange={(value) => {
                  setForgejoTokenInputHasValue(!!value);
                }}
                onForgejoHostChange={(value) => {
                  setForgejoHostInputHasValue(!!value);
                }}
                forgejoHostSet={existingForgejoHost}
              />
            )}
          </div>
        </div>
      )}

@@ -244,7 +288,8 @@ function GitSettingsScreen() {
            !isGitHubTokenSet &&
            !isGitLabTokenSet &&
            !isBitbucketTokenSet &&
            !isAzureDevOpsTokenSet
            !isAzureDevOpsTokenSet &&
            !isForgejoTokenSet
          }
        >
          {t(I18nKey.GIT$DISCONNECT_TOKENS)}
@@ -3,6 +3,7 @@ export const ProviderOptions = {
  gitlab: "gitlab",
  bitbucket: "bitbucket",
  azure_devops: "azure_devops",
  forgejo: "forgejo",
  enterprise_sso: "enterprise_sso",
} as const;

@@ -215,6 +215,10 @@ export const getGitProviderBaseUrl = (gitProvider: Provider): string => {
      return "https://bitbucket.org";
    case "azure_devops":
      return "https://dev.azure.com";
    case "forgejo":
      // Default UI links to Codeberg unless a custom host is available in settings
      // Note: UI link builders don't currently receive host; consider plumbing settings if needed
      return "https://codeberg.org";
    default:
      return "";
  }

@@ -229,6 +233,7 @@ export const getProviderName = (gitProvider: Provider) => {
  if (gitProvider === "gitlab") return "GitLab";
  if (gitProvider === "bitbucket") return "Bitbucket";
  if (gitProvider === "azure_devops") return "Azure DevOps";
  if (gitProvider === "forgejo") return "Forgejo";
  return "GitHub";
};

@@ -269,6 +274,8 @@ export const constructPullRequestUrl = (
  switch (provider) {
    case "github":
      return `${baseUrl}/${repositoryName}/pull/${prNumber}`;
    case "forgejo":
      return `${baseUrl}/${repositoryName}/pull/${prNumber}`;
    case "gitlab":
      return `${baseUrl}/${repositoryName}/-/merge_requests/${prNumber}`;
    case "bitbucket":

@@ -312,6 +319,8 @@ export const constructMicroagentUrl = (
  switch (gitProvider) {
    case "github":
      return `${baseUrl}/${repositoryName}/blob/main/${microagentPath}`;
    case "forgejo":
      return `${baseUrl}/${repositoryName}/src/branch/main/${microagentPath}`;
    case "gitlab":
      return `${baseUrl}/${repositoryName}/-/blob/main/${microagentPath}`;
    case "bitbucket":

@@ -390,6 +399,8 @@ export const constructBranchUrl = (
  switch (provider) {
    case "github":
      return `${baseUrl}/${repositoryName}/tree/${branchName}`;
    case "forgejo":
      return `${baseUrl}/${repositoryName}/src/branch/${branchName}`;
    case "gitlab":
      return `${baseUrl}/${repositoryName}/-/tree/${branchName}`;
    case "bitbucket":
@@ -109,6 +109,30 @@ def get_provider_tokens():
        bitbucket_token = SecretStr(os.environ['BITBUCKET_TOKEN'])
        provider_tokens[ProviderType.BITBUCKET] = ProviderToken(token=bitbucket_token)

    # Forgejo support (e.g., Codeberg or self-hosted Forgejo)
    if 'FORGEJO_TOKEN' in os.environ:
        forgejo_token = SecretStr(os.environ['FORGEJO_TOKEN'])
        # If a base URL is provided, extract the domain to use as host override
        forgejo_base_url = os.environ.get('FORGEJO_BASE_URL', '').strip()
        host: str | None = None
        if forgejo_base_url:
            # Normalize by stripping protocol and any path (e.g., /api/v1)
            url = forgejo_base_url
            if url.startswith(('http://', 'https://')):
                try:
                    from urllib.parse import urlparse

                    parsed = urlparse(url)
                    host = parsed.netloc or None
                except Exception:
                    pass
            if host is None:
                host = url.replace('https://', '').replace('http://', '')
                host = host.split('/')[0].strip('/') if host else None
        provider_tokens[ProviderType.FORGEJO] = ProviderToken(
            token=forgejo_token, host=host
        )

    # Wrap provider tokens in Secrets if any tokens were found
    secret_store = (
        Secrets(provider_tokens=provider_tokens) if provider_tokens else None  # type: ignore[arg-type]
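For reference, here is a standalone sketch of the host normalization this hunk performs on FORGEJO_BASE_URL. The helper name and the example values are illustrative only, not part of the diff:

from urllib.parse import urlparse


def forgejo_host_from_base_url(base_url: str) -> str | None:
    """Illustrative mirror of the normalization above (hypothetical helper)."""
    url = base_url.strip()
    if not url:
        return None
    if url.startswith(('http://', 'https://')):
        netloc = urlparse(url).netloc
        if netloc:
            return netloc
    # Fallback: strip protocol and any path such as /api/v1
    host = url.replace('https://', '').replace('http://', '')
    return host.split('/')[0].strip('/') or None


# e.g. 'https://git.example.com/api/v1' -> 'git.example.com'
# e.g. 'codeberg.org'                   -> 'codeberg.org'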
0    openhands/integrations/forgejo/__init__.py    Normal file
56   openhands/integrations/forgejo/forgejo_service.py    Normal file
@@ -0,0 +1,56 @@
from __future__ import annotations

import os

from pydantic import SecretStr

from openhands.integrations.forgejo.service import (
    ForgejoBranchesMixin,
    ForgejoFeaturesMixin,
    ForgejoMixinBase,
    ForgejoPRsMixin,
    ForgejoReposMixin,
    ForgejoResolverMixin,
)
from openhands.integrations.service_types import GitService
from openhands.utils.import_utils import get_impl


class ForgejoService(
    ForgejoBranchesMixin,
    ForgejoFeaturesMixin,
    ForgejoPRsMixin,
    ForgejoReposMixin,
    ForgejoResolverMixin,
    ForgejoMixinBase,
    GitService,
):
    """Assembled Forgejo service combining mixins by feature area."""

    def __init__(
        self,
        user_id: str | None = None,
        external_auth_id: str | None = None,
        external_auth_token: SecretStr | None = None,
        token: SecretStr | None = None,
        external_token_manager: bool = False,
        base_domain: str | None = None,
        base_url: str | None = None,
    ) -> None:
        ForgejoMixinBase.__init__(
            self,
            user_id=user_id,
            external_auth_id=external_auth_id,
            external_auth_token=external_auth_token,
            token=token,
            external_token_manager=external_token_manager,
            base_domain=base_domain,
            base_url=base_url,
        )


forgejo_service_cls = os.environ.get(
    'OPENHANDS_FORGEJO_SERVICE_CLS',
    'openhands.integrations.forgejo.forgejo_service.ForgejoService',
)
ForgejoServiceImpl = get_impl(ForgejoService, forgejo_service_cls)
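A minimal usage sketch of the assembled service; the token value and repository name below are placeholders, while the constructor arguments and methods are the ones defined in this PR:

import asyncio

from pydantic import SecretStr

from openhands.integrations.forgejo.forgejo_service import ForgejoServiceImpl


async def main() -> None:
    # Placeholder credentials; base_domain can point at any Forgejo instance.
    service = ForgejoServiceImpl(
        token=SecretStr('your-forgejo-token'),
        base_domain='codeberg.org',
    )
    user = await service.get_user()
    page = await service.get_paginated_branches('some-owner/some-repo', page=1, per_page=30)
    print(user.login, [branch.name for branch in page.branches])


asyncio.run(main())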
15   openhands/integrations/forgejo/service/__init__.py    Normal file
@@ -0,0 +1,15 @@
from .base import ForgejoMixinBase
from .branches import ForgejoBranchesMixin
from .features import ForgejoFeaturesMixin
from .prs import ForgejoPRsMixin
from .repos import ForgejoReposMixin
from .resolver import ForgejoResolverMixin

__all__ = [
    'ForgejoMixinBase',
    'ForgejoBranchesMixin',
    'ForgejoFeaturesMixin',
    'ForgejoPRsMixin',
    'ForgejoReposMixin',
    'ForgejoResolverMixin',
]
219  openhands/integrations/forgejo/service/base.py    Normal file
@@ -0,0 +1,219 @@
from __future__ import annotations

import os
from typing import Any
from urllib.parse import urlparse

import httpx
from pydantic import SecretStr

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.protocols.http_client import HTTPClient
from openhands.integrations.service_types import (
    BaseGitService,
    OwnerType,
    ProviderType,
    Repository,
    RequestMethod,
    UnknownException,
    User,
)
from openhands.utils.http_session import httpx_verify_option


class ForgejoMixinBase(BaseGitService, HTTPClient):
    """Common functionality shared by Forgejo service mixins."""

    DEFAULT_BASE_URL = 'https://codeberg.org/api/v1'
    DEFAULT_DOMAIN = 'codeberg.org'

    token: SecretStr = SecretStr('')
    refresh = False

    def __init__(
        self,
        user_id: str | None = None,
        external_auth_id: str | None = None,
        external_auth_token: SecretStr | None = None,
        token: SecretStr | None = None,
        external_token_manager: bool = False,
        base_domain: str | None = None,
        base_url: str | None = None,
    ) -> None:
        self.user_id = user_id
        self.external_auth_id = external_auth_id
        self.external_auth_token = external_auth_token
        self.external_token_manager = external_token_manager

        if token:
            self.token = token
        else:
            env_token = os.environ.get('FORGEJO_TOKEN')
            if env_token:
                self.token = SecretStr(env_token)

        env_base_url = os.environ.get('FORGEJO_BASE_URL')
        self.BASE_URL = self._resolve_base_url(base_url, base_domain, env_base_url)
        self.base_url = self.BASE_URL  # Backwards compatibility for existing usage
        parsed = urlparse(self.BASE_URL)
        self.base_domain = parsed.netloc or self.DEFAULT_DOMAIN
        self.web_base_url = f'https://{self.base_domain}'.rstrip('/')

    @property
    def provider(self) -> str:
        return ProviderType.FORGEJO.value

    async def get_latest_token(self) -> SecretStr | None:
        return self.token

    async def _get_headers(self) -> dict[str, Any]:
        if not self.token:
            latest_token = await self.get_latest_token()
            if latest_token:
                self.token = latest_token

        return {
            'Authorization': f'token {self.token.get_secret_value() if self.token else ""}',
            'Accept': 'application/json',
        }

    async def _make_request(
        self,
        url: str,
        params: dict | None = None,
        method: RequestMethod = RequestMethod.GET,
    ) -> tuple[Any, dict]:
        try:
            async with httpx.AsyncClient(verify=httpx_verify_option()) as client:
                headers = await self._get_headers()
                response = await self.execute_request(
                    client=client,
                    url=url,
                    headers=headers,
                    params=params,
                    method=method,
                )

                if self.refresh and self._has_token_expired(response.status_code):
                    await self.get_latest_token()
                    headers = await self._get_headers()
                    response = await self.execute_request(
                        client=client,
                        url=url,
                        headers=headers,
                        params=params,
                        method=method,
                    )

                response.raise_for_status()
                headers_out: dict[str, str] = {}
                for header in ('Link', 'X-Total-Count', 'X-Total'):
                    if header in response.headers:
                        headers_out[header] = response.headers[header]

                content_type = response.headers.get('Content-Type', '')
                if 'application/json' in content_type:
                    return response.json(), headers_out
                return response.text, headers_out

        except httpx.HTTPStatusError as err:
            raise self.handle_http_status_error(err)
        except httpx.HTTPError as err:
            raise self.handle_http_error(err)

    def _resolve_base_url(
        self,
        explicit_base_url: str | None,
        base_domain: str | None,
        env_base_url: str | None,
    ) -> str:
        for candidate in (
            explicit_base_url,
            base_domain,
            env_base_url,
            self.DEFAULT_BASE_URL,
        ):
            if not candidate:
                continue

            normalized = candidate.strip().rstrip('/')
            if not normalized:
                continue

            if normalized.startswith(('http://', 'https://')):
                url = normalized
            else:
                url = f'https://{normalized}'

            if '/api/' in url:
                return url

            return f'{url}/api/v1'

        return self.DEFAULT_BASE_URL

    async def get_user(self) -> User:  # type: ignore[override]
        url = f'{self.BASE_URL}/user'
        response, _ = await self._make_request(url)

        return User(
            id=str(response.get('id', '')),
            login=response.get('username', ''),
            avatar_url=response.get('avatar_url', ''),
            name=response.get('full_name'),
            email=response.get('email'),
            company=response.get('organization'),
        )

    def _parse_repository(
        self, repo: dict, link_header: str | None = None
    ) -> Repository:
        owner = repo.get('owner') or {}
        owner_type = (
            OwnerType.ORGANIZATION
            if (owner.get('type') or '').lower() == 'organization'
            else OwnerType.USER
        )

        return Repository(
            id=str(repo.get('id', '')),
            full_name=repo.get('full_name', ''),
            stargazers_count=repo.get('stars_count'),
            git_provider=ProviderType.FORGEJO,
            is_public=not repo.get('private', False),
            link_header=link_header,
            pushed_at=repo.get('updated_at') or repo.get('pushed_at'),
            owner_type=owner_type,
            main_branch=repo.get('default_branch'),
        )

    def _split_repo(self, repository: str) -> tuple[str, str]:
        repo_path = repository.strip()
        if repo_path.startswith(('http://', 'https://')):
            parsed = urlparse(repo_path)
            repo_path = parsed.path.lstrip('/')

        parts = [part for part in repo_path.split('/') if part]
        if len(parts) < 2:
            raise ValueError(f'Invalid repository format: {repository}')

        return parts[0], parts[1]

    def _build_repo_api_url(self, owner: str, repo: str, *segments: str) -> str:
        base = f'{self.BASE_URL}/repos/{owner}/{repo}'
        if segments:
            base = f'{base}/{"/".join(segments)}'
        return base

    def _map_sort(self, sort: str) -> str:
        sort_map = {
            'pushed': 'updated',
            'updated': 'updated',
            'created': 'created',
            'full_name': 'name',
        }
        return sort_map.get(sort, 'updated')

    def handle_http_error(self, e: httpx.HTTPError) -> UnknownException:  # type: ignore[override]
        logger.warning(f'HTTP error on {self.provider} API: {type(e).__name__} : {e}')
        return UnknownException(f'HTTP error {type(e).__name__} : {e}')
74   openhands/integrations/forgejo/service/branches.py    Normal file
@@ -0,0 +1,74 @@
from __future__ import annotations

from openhands.integrations.forgejo.service.base import ForgejoMixinBase
from openhands.integrations.service_types import Branch, PaginatedBranchesResponse


class ForgejoBranchesMixin(ForgejoMixinBase):
    """Branch-related operations for Forgejo."""

    async def get_branches(self, repository: str) -> list[Branch]:  # type: ignore[override]
        branches: list[Branch] = []
        page = 1
        per_page = 100

        while True:
            paginated = await self.get_paginated_branches(repository, page, per_page)
            branches.extend(paginated.branches)
            if not paginated.has_next_page:
                break
            page += 1

        return branches

    async def get_paginated_branches(
        self, repository: str, page: int = 1, per_page: int = 30
    ) -> PaginatedBranchesResponse:  # type: ignore[override]
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'branches')
        params = {
            'page': str(page),
            'limit': str(per_page),
        }

        response, headers = await self._make_request(url, params)
        branch_items = response if isinstance(response, list) else []

        branches: list[Branch] = []
        for branch in branch_items:
            commit_info = branch.get('commit') or {}
            commit_sha = (
                commit_info.get('id')
                or commit_info.get('sha')
                or commit_info.get('commit', {}).get('sha')
            )
            branches.append(
                Branch(
                    name=branch.get('name', ''),
                    commit_sha=commit_sha or '',
                    protected=branch.get('protected', False),
                    last_push_date=None,
                )
            )

        link_header = headers.get('Link', '')
        total_count_header = headers.get('X-Total-Count') or headers.get('X-Total')
        total_count = int(total_count_header) if total_count_header else None
        has_next_page = 'rel="next"' in link_header

        return PaginatedBranchesResponse(
            branches=branches,
            has_next_page=has_next_page,
            current_page=page,
            per_page=per_page,
            total_count=total_count,
        )

    async def search_branches(
        self, repository: str, query: str, per_page: int = 30
    ) -> list[Branch]:  # type: ignore[override]
        all_branches = await self.get_branches(repository)
        lowered = query.lower()
        return [branch for branch in all_branches if lowered in branch.name.lower()][
            :per_page
        ]
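Pagination here relies on Forgejo's Link response header. A small sketch of the same rel="next" check, with an illustrative header value that is not taken from the diff:

def has_next_page(link_header: str | None) -> bool:
    """Illustrative: the same substring check used by get_paginated_branches above."""
    return 'rel="next"' in (link_header or '')


# Example of the kind of Link header a Forgejo/Gitea-style API can return (illustrative value):
example = '<https://codeberg.org/api/v1/repos/owner/repo/branches?page=2&limit=30>; rel="next"'
assert has_next_page(example)
assert not has_next_page(None)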
123  openhands/integrations/forgejo/service/features.py    Normal file
@@ -0,0 +1,123 @@
from __future__ import annotations

import base64
from typing import Any

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
from openhands.integrations.service_types import (
    MicroagentContentResponse,
    MicroagentResponse,
    ProviderType,
    ResourceNotFoundError,
    SuggestedTask,
)


class ForgejoFeaturesMixin(ForgejoMixinBase):
    """Microagent and feature helpers for Forgejo."""

    async def _get_cursorrules_url(self, repository: str) -> str:
        owner, repo = self._split_repo(repository)
        return self._build_repo_api_url(owner, repo, 'contents', '.cursorrules')

    async def _get_microagents_directory_url(
        self, repository: str, microagents_path: str
    ) -> str:
        owner, repo = self._split_repo(repository)
        normalized_path = microagents_path.strip('/')
        return self._build_repo_api_url(owner, repo, 'contents', normalized_path)

    def _get_microagents_directory_params(self, microagents_path: str) -> dict | None:
        return None

    def _is_valid_microagent_file(self, item: dict[str, Any] | None) -> bool:
        if not isinstance(item, dict):
            return False
        if item.get('type') != 'file':
            return False
        name = item.get('name', '')
        return isinstance(name, str) and (
            name.endswith('.md') or name.endswith('.cursorrules')
        )

    def _get_file_name_from_item(self, item: dict[str, Any] | None) -> str:
        if not isinstance(item, dict):
            return ''
        name = item.get('name')
        return name if isinstance(name, str) else ''

    def _get_file_path_from_item(
        self, item: dict[str, Any] | None, microagents_path: str
    ) -> str:
        file_name = self._get_file_name_from_item(item)
        if not microagents_path:
            return file_name
        return f'{microagents_path.strip("/")}/{file_name}'

    async def get_microagents(self, repository: str) -> list[MicroagentResponse]:  # type: ignore[override]
        microagents_path = self._determine_microagents_path(repository)
        microagents: list[MicroagentResponse] = []

        try:
            directory_url = await self._get_microagents_directory_url(
                repository, microagents_path
            )
            items, _ = await self._make_request(directory_url)
        except ResourceNotFoundError:
            items = []
        except Exception as exc:
            # Fail gracefully if the directory cannot be inspected
            self._log_microagent_warning(repository, str(exc))
            items = []

        if isinstance(items, list):
            for item in items:
                if self._is_valid_microagent_file(item):
                    file_name = self._get_file_name_from_item(item)
                    file_path = self._get_file_path_from_item(item, microagents_path)
                    microagents.append(
                        self._create_microagent_response(file_name, file_path)
                    )

        cursorrules = await self._check_cursorrules_file(repository)
        if cursorrules:
            microagents.append(cursorrules)

        return microagents

    async def get_microagent_content(
        self, repository: str, file_path: str
    ) -> MicroagentContentResponse:  # type: ignore[override]
        owner, repo = self._split_repo(repository)
        normalized_path = file_path.lstrip('/')
        url = self._build_repo_api_url(owner, repo, 'contents', normalized_path)

        response, _ = await self._make_request(url)
        content = response.get('content') or ''
        encoding = (response.get('encoding') or 'base64').lower()

        if encoding == 'base64':
            try:
                decoded = base64.b64decode(content).decode('utf-8')
            except Exception:
                decoded = ''
        else:
            decoded = content

        try:
            return self._parse_microagent_content(decoded, file_path)
        except Exception:
            return MicroagentContentResponse(
                content=decoded,
                path=file_path,
                triggers=[],
                git_provider=ProviderType.FORGEJO.value,
            )

    async def get_suggested_tasks(self) -> list[SuggestedTask]:  # type: ignore[override]
        # Suggested tasks are not yet implemented for Forgejo.
        return []

    def _log_microagent_warning(self, repository: str, message: str) -> None:
        logger.debug(f'Forgejo microagent scan warning for {repository}: {message}')
84   openhands/integrations/forgejo/service/prs.py    Normal file
@@ -0,0 +1,84 @@
from __future__ import annotations

from typing import Any

from openhands.core.logger import openhands_logger as logger
from openhands.integrations.forgejo.service.base import ForgejoMixinBase
from openhands.integrations.service_types import RequestMethod, UnknownException


class ForgejoPRsMixin(ForgejoMixinBase):
    """Pull request helpers for Forgejo."""

    async def create_pull_request(self, data: dict[str, Any] | None = None) -> dict:
        payload: dict[str, Any] = dict(data or {})

        repository = payload.pop('repository', None)
        owner = payload.pop('owner', None)
        repo_name = payload.pop('repo', None)

        if repository and isinstance(repository, str):
            owner, repo_name = self._split_repo(repository)
        else:
            owner = str(owner or self.user_id or '').strip()
            repo_name = str(repo_name or '').strip()

        if not owner or not repo_name:
            raise ValueError(
                'Repository information is required to create a pull request'
            )

        url = self._build_repo_api_url(owner, repo_name, 'pulls')
        response, _ = await self._make_request(
            url,
            payload,
            method=RequestMethod.POST,
        )

        if not isinstance(response, dict):
            raise UnknownException('Unexpected response creating Forgejo pull request')

        if 'number' not in response and 'index' in response:
            response['number'] = response['index']

        if 'html_url' not in response and 'url' in response:
            response['html_url'] = response['url']

        return response

    async def request_reviewers(
        self, repository: str, pr_number: int, reviewers: list[str]
    ) -> None:
        if not reviewers:
            return

        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(
            owner, repo, 'pulls', str(pr_number), 'requested_reviewers'
        )

        try:
            await self._make_request(
                url,
                {'reviewers': reviewers},
                method=RequestMethod.POST,
            )
        except Exception as exc:  # pragma: no cover - log and continue
            logger.warning(
                'Failed to request Forgejo reviewers %s for %s/%s PR #%s: %s',
                reviewers,
                owner,
                repo,
                pr_number,
                exc,
            )

    async def get_pr_details(self, repository: str, pr_number: int) -> dict:  # type: ignore[override]
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number))
        response, _ = await self._make_request(url)
        return response

    async def is_pr_open(self, repository: str, pr_number: int) -> bool:  # type: ignore[override]
        pr_details = await self.get_pr_details(repository, pr_number)
        return (pr_details.get('state') or '').lower() == 'open'
109  openhands/integrations/forgejo/service/repos.py    Normal file
@@ -0,0 +1,109 @@
from __future__ import annotations

from openhands.integrations.forgejo.service.base import ForgejoMixinBase
from openhands.integrations.service_types import Repository
from openhands.server.types import AppMode


class ForgejoReposMixin(ForgejoMixinBase):
    """Repository operations for Forgejo."""

    async def search_repositories(
        self,
        query: str,
        per_page: int,
        sort: str,
        order: str,
        public: bool,
        app_mode: AppMode,
    ) -> list[Repository]:  # type: ignore[override]
        url = f'{self.BASE_URL}/repos/search'
        params = {
            'q': query,
            'limit': per_page,
            'sort': sort,
            'order': order,
            'mode': 'source',
        }

        response, _ = await self._make_request(url, params)
        repos = response.get('data', []) if isinstance(response, dict) else []
        if public:
            repos = [repo for repo in repos if not repo.get('private', False)]
        return [self._parse_repository(repo) for repo in repos]

    async def get_all_repositories(
        self, sort: str, app_mode: AppMode
    ) -> list[Repository]:  # type: ignore[override]
        max_repos = 1000
        per_page = 100
        collected: list[dict] = []
        page = 1
        last_link_header: str | None = None

        url = f'{self.BASE_URL}/user/repos'
        forgejo_sort = self._map_sort(sort)

        while len(collected) < max_repos:
            params = {
                'page': str(page),
                'limit': str(per_page),
                'sort': forgejo_sort,
            }
            response, headers = await self._make_request(url, params)
            last_link_header = headers.get('Link')

            page_repos = response if isinstance(response, list) else []
            if not page_repos:
                break

            collected.extend(page_repos)
            if 'rel="next"' not in (last_link_header or ''):
                break

            page += 1

        collected = collected[:max_repos]
        return [
            self._parse_repository(repo, link_header=last_link_header)
            for repo in collected
        ]

    async def get_paginated_repos(
        self,
        page: int,
        per_page: int,
        sort: str,
        installation_id: str | None,
        query: str | None = None,
    ) -> list[Repository]:  # type: ignore[override]
        _ = installation_id
        url = f'{self.BASE_URL}/user/repos'
        params = {
            'page': str(page),
            'limit': str(per_page),
            'sort': self._map_sort(sort),
        }

        response, headers = await self._make_request(url, params)
        repos = response if isinstance(response, list) else []

        if query:
            lowered = query.lower()
            repos = [
                repo
                for repo in repos
                if lowered in (repo.get('full_name') or '').lower()
            ]

        link_header = headers.get('Link')
        return [self._parse_repository(repo, link_header=link_header) for repo in repos]

    async def get_repository_details_from_repo_name(
        self, repository: str
    ) -> Repository:  # type: ignore[override]
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo)
        response, headers = await self._make_request(url)
        link_header = headers.get('Link')
        return self._parse_repository(response, link_header=link_header)
137  openhands/integrations/forgejo/service/resolver.py    Normal file
@@ -0,0 +1,137 @@
from __future__ import annotations

from collections import defaultdict
from datetime import datetime
from typing import cast

from openhands.integrations.forgejo.service.base import ForgejoMixinBase
from openhands.integrations.service_types import Comment
from openhands.resolver.interfaces.issue import ReviewThread


class ForgejoResolverMixin(ForgejoMixinBase):
    """Lightweight helpers used by resolver flows for Forgejo."""

    async def get_issue_title_and_body(
        self, repository: str, issue_number: int
    ) -> tuple[str, str]:
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'issues', str(issue_number))
        response, _ = await self._make_request(url)
        title = response.get('title') or ''
        body = response.get('body') or response.get('content') or ''
        return title, body

    async def get_issue_comments(
        self,
        repository: str,
        issue_number: int,
        max_comments: int = 20,
    ) -> list[Comment]:
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(
            owner, repo, 'issues', str(issue_number), 'comments'
        )
        per_page = min(max_comments, 50)
        params = {
            'page': '1',
            'limit': str(per_page),
            'order': 'desc',
        }

        response, _ = await self._make_request(url, params)
        raw_comments = response if isinstance(response, list) else []

        comments: list[Comment] = []
        for payload in raw_comments:
            comment = self._to_comment(payload)
            if comment is not None:
                comments.append(comment)

        comments.sort(key=lambda c: c.created_at)
        return comments[-max_comments:]

    async def get_pr_comments(
        self,
        repository: str,
        pr_number: int,
        max_comments: int = 50,
    ) -> list[Comment]:
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number), 'comments')
        per_page = min(max_comments, 50)
        params = {
            'page': '1',
            'limit': str(per_page),
            'order': 'desc',
        }

        response, _ = await self._make_request(url, params)
        raw_comments = response if isinstance(response, list) else []

        comments: list[Comment] = []
        for payload in raw_comments:
            comment = self._to_comment(payload)
            if comment is not None:
                comments.append(comment)

        comments.sort(key=lambda c: c.created_at)
        return comments[-max_comments:]

    async def get_pr_review_threads(
        self,
        repository: str,
        pr_number: int,
        max_threads: int = 10,
    ) -> list[ReviewThread]:
        owner, repo = self._split_repo(repository)
        url = self._build_repo_api_url(owner, repo, 'pulls', str(pr_number), 'comments')
        params = {'page': '1', 'limit': '100', 'order': 'asc'}

        response, _ = await self._make_request(url, params)
        raw_comments = response if isinstance(response, list) else []

        grouped: dict[str, list[str]] = defaultdict(list)
        files: dict[str, set[str]] = defaultdict(set)

        for payload in raw_comments:
            if not isinstance(payload, dict):
                continue
            path = cast(str, payload.get('path') or 'general')
            body = cast(str, payload.get('body') or '')
            grouped[path].append(body)
            if payload.get('path'):
                files[path].add(cast(str, payload['path']))

        threads: list[ReviewThread] = []
        for path, messages in grouped.items():
            comment_text = '\n---\n'.join(messages)
            file_list = sorted(files.get(path, {path}))
            threads.append(ReviewThread(comment=comment_text, files=file_list))

        return threads[:max_threads]

    def _to_comment(self, payload: dict | None) -> Comment | None:
        if not isinstance(payload, dict):
            return None
        body = payload.get('body') or ''
        author = (payload.get('user') or {}).get('login') or 'unknown'
        created_at = self._parse_datetime(payload.get('created_at'))
        updated_at = self._parse_datetime(payload.get('updated_at'))

        return Comment(
            id=str(payload.get('id', 'unknown')),
            body=body,
            author=author,
            created_at=created_at,
            updated_at=updated_at,
            system=payload.get('void', False),
        )

    def _parse_datetime(self, value: str | None) -> datetime:
        if not value:
            return datetime.fromtimestamp(0)
        try:
            return datetime.fromisoformat(value.replace('Z', '+00:00'))
        except ValueError:
            return datetime.fromtimestamp(0)
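Forgejo returns ISO-8601 timestamps; a quick illustration of why _parse_datetime rewrites the trailing Z (older Python versions of datetime.fromisoformat do not accept a bare Z suffix). The sample value below is ours, not from the diff:

from datetime import datetime

# Same conversion as _parse_datetime above: make the 'Z' suffix parseable.
value = '2024-05-01T12:30:00Z'
parsed = datetime.fromisoformat(value.replace('Z', '+00:00'))
print(parsed.isoformat())  # 2024-05-01T12:30:00+00:00

# Missing or malformed values fall back to the Unix epoch.
fallback = datetime.fromtimestamp(0)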
@@ -22,6 +22,7 @@ from openhands.integrations.azure_devops.azure_devops_service import (
    AzureDevOpsServiceImpl,
)
from openhands.integrations.bitbucket.bitbucket_service import BitBucketServiceImpl
from openhands.integrations.forgejo.forgejo_service import ForgejoServiceImpl
from openhands.integrations.github.github_service import GithubServiceImpl
from openhands.integrations.gitlab.gitlab_service import GitLabServiceImpl
from openhands.integrations.service_types import (

@@ -105,6 +106,7 @@ class ProviderHandler:
        ProviderType.GITHUB: 'github.com',
        ProviderType.GITLAB: 'gitlab.com',
        ProviderType.BITBUCKET: 'bitbucket.org',
        ProviderType.FORGEJO: 'codeberg.org',
        ProviderType.AZURE_DEVOPS: 'dev.azure.com',
    }

@@ -126,6 +128,7 @@ class ProviderHandler:
        ProviderType.GITHUB: GithubServiceImpl,
        ProviderType.GITLAB: GitLabServiceImpl,
        ProviderType.BITBUCKET: BitBucketServiceImpl,
        ProviderType.FORGEJO: ForgejoServiceImpl,
        ProviderType.AZURE_DEVOPS: AzureDevOpsServiceImpl,
    }

@@ -672,6 +675,14 @@ class ProviderHandler:
        if provider != ProviderType.AZURE_DEVOPS:
            domain = self.provider_tokens[provider].host or domain

        # Normalize domain to prevent double protocols or path segments
        if domain:
            domain = domain.strip()
            domain = domain.replace('https://', '').replace('http://', '')
            # Remove any trailing path like /api/v3 or /api/v4
            if '/' in domain:
                domain = domain.split('/')[0]

        # Try to use token if available, otherwise use public URL
        if self.provider_tokens and provider in self.provider_tokens:
            git_token = self.provider_tokens[provider].token

@@ -747,7 +758,7 @@ class ProviderHandler:
                        f'https://user:***@{clean_domain}/{repo_name}.git'
                    )
                else:
                    # GitHub
                    # GitHub, Forgejo
                    remote_url = f'https://{token_value}@{domain}/{repo_name}.git'
            else:
                remote_url = f'https://{domain}/{repo_name}.git'
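Taken together, the domain normalization above and the remote-URL template mean a Forgejo host configured with a full API base still yields a clean clone URL. A hedged sketch of that composition; the helper name and example values are ours:

def forgejo_remote_url(host_setting: str, repo_name: str, token: str | None = None) -> str:
    """Illustrative composition of the normalization and URL template above."""
    domain = host_setting.strip().replace('https://', '').replace('http://', '')
    if '/' in domain:
        domain = domain.split('/')[0]  # drop paths such as /api/v1
    if token:
        return f'https://{token}@{domain}/{repo_name}.git'
    return f'https://{domain}/{repo_name}.git'


# e.g. forgejo_remote_url('https://codeberg.org/api/v1', 'owner/repo')
#      -> 'https://codeberg.org/owner/repo.git'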
@@ -21,6 +21,7 @@ class ProviderType(Enum):
    GITHUB = 'github'
    GITLAB = 'gitlab'
    BITBUCKET = 'bitbucket'
    FORGEJO = 'forgejo'
    AZURE_DEVOPS = 'azure_devops'
    ENTERPRISE_SSO = 'enterprise_sso'
@@ -5,6 +5,7 @@ from openhands.integrations.azure_devops.azure_devops_service import (
    AzureDevOpsServiceImpl as AzureDevOpsService,
)
from openhands.integrations.bitbucket.bitbucket_service import BitBucketService
from openhands.integrations.forgejo.forgejo_service import ForgejoService
from openhands.integrations.github.github_service import GitHubService
from openhands.integrations.gitlab.gitlab_service import GitLabService
from openhands.integrations.provider import ProviderType

@@ -48,6 +49,17 @@ async def validate_provider_token(
    except Exception as e:
        gitlab_error = e

    # Try Forgejo if a base_domain was provided (custom instances may not contain
    # the substring 'forgejo' or 'codeberg')
    forgejo_error = None
    if base_domain:
        try:
            forgejo_service = ForgejoService(token=token, base_domain=base_domain)
            await forgejo_service.get_user()
            return ProviderType.FORGEJO
        except Exception as e:
            forgejo_error = e

    # Try Bitbucket next
    bitbucket_error = None
    try:

@@ -67,7 +79,7 @@ async def validate_provider_token(
        azure_devops_error = e

    logger.debug(
        f'Failed to validate token: {github_error} \n {gitlab_error} \n {bitbucket_error} \n {azure_devops_error}'
        f'Failed to validate token: {github_error} \n {gitlab_error} \n {forgejo_error} \n {bitbucket_error} \n {azure_devops_error}'
    )

    return None
480  openhands/resolver/interfaces/forgejo.py    Normal file
@@ -0,0 +1,480 @@
from __future__ import annotations

from typing import Any
from urllib.parse import quote

import httpx

from openhands.core.logger import openhands_logger as logger
from openhands.resolver.interfaces.issue import (
    Issue,
    IssueHandlerInterface,
    ReviewThread,
)
from openhands.resolver.utils import extract_issue_references


class ForgejoIssueHandler(IssueHandlerInterface):
    """Issue handler implementation for Forgejo-based providers (e.g. Codeberg)."""

    API_PREFIX = '/api/v1'

    def __init__(
        self,
        owner: str,
        repo: str,
        token: str,
        username: str | None = None,
        base_domain: str = 'codeberg.org',
    ):
        self.owner = owner
        self.repo = repo
        self.token = token
        self.username = username
        self.base_domain = base_domain
        self.base_url = self.get_base_url()
        self.download_url = self.get_download_url()
        self.clone_url = self.get_clone_url()
        self.headers = self.get_headers()

    def _api_root(self) -> str:
        return f'https://{self.base_domain}{self.API_PREFIX}'

    @staticmethod
    def _to_int(value: Any) -> int:
        try:
            return int(value)
        except (TypeError, ValueError):
            return 0

    def set_owner(self, owner: str) -> None:
        self.owner = owner
        self.base_url = self.get_base_url()
        self.download_url = self.get_download_url()

    def get_headers(self) -> dict[str, str]:
        return {
            'Authorization': f'token {self.token}',
            'Accept': 'application/json',
        }

    def get_base_url(self) -> str:
        return f'{self._api_root()}/repos/{self.owner}/{self.repo}'

    def get_authorize_url(self) -> str:
        credential = (
            f'{self.username}:{self.token}'
            if self.username
            else f'x-auth-token:{self.token}'
        )
        return f'https://{credential}@{self.base_domain}/'

    def get_branch_url(self, branch_name: str) -> str:
        escaped_branch = quote(branch_name, safe='')
        return f'{self.get_base_url()}/branches/{escaped_branch}'

    def get_download_url(self) -> str:
        return f'{self.get_base_url()}/issues'

    def get_clone_url(self) -> str:
        credential = (
            f'{self.username}:{self.token}'
            if self.username
            else f'x-access-token:{self.token}'
        )
        return f'https://{credential}@{self.base_domain}/{self.owner}/{self.repo}.git'

    def get_graphql_url(self) -> str:
        # Forgejo does not expose a GraphQL endpoint.
        return ''

    def get_compare_url(self, branch_name: str) -> str:
        return (
            f'https://{self.base_domain}/{self.owner}/{self.repo}/compare/{branch_name}'
        )

    def download_issues(self) -> list[Any]:
        page = 1
        all_issues: list[Any] = []

        while True:
            params = {'state': 'open', 'limit': '50', 'page': str(page)}
            response = httpx.get(self.download_url, headers=self.headers, params=params)
            response.raise_for_status()
            issues = response.json()

            if not issues:
                break

            if not isinstance(issues, list) or any(
                not isinstance(issue, dict) for issue in issues
            ):
                raise ValueError(
                    'Expected list of dictionaries from Forgejo issues API.'
                )

            all_issues.extend(issues)
            page += 1

        return all_issues

    def get_issue_comments(
        self, issue_number: int, comment_id: int | None = None
    ) -> list[str] | None:
        url = f'{self.get_download_url()}/{issue_number}/comments'
        page = 1
        params = {'limit': '50', 'page': str(page)}
        all_comments: list[str] = []

        while True:
            response = httpx.get(url, headers=self.headers, params=params)
            response.raise_for_status()
            comments = response.json()

            if not comments:
                break

            if comment_id is not None:
                matching_comment = next(
                    (
                        comment['body']
                        for comment in comments
                        if self._to_int(comment.get('id')) == comment_id
                    ),
                    None,
                )
                if matching_comment:
                    return [matching_comment]
            else:
                all_comments.extend(
                    comment['body'] for comment in comments if comment.get('body')
                )

            page += 1
            params = {'limit': '50', 'page': str(page)}

        return all_comments if all_comments else None

    def get_pull_url(self, pr_number: int) -> str:
        return f'https://{self.base_domain}/{self.owner}/{self.repo}/pulls/{pr_number}'

    def get_branch_name(self, base_branch_name: str) -> str:
        branch_name = base_branch_name
        attempt = 1
        while self.branch_exists(branch_name):
            attempt += 1
            branch_name = f'{base_branch_name}-try{attempt}'
        return branch_name

    def get_default_branch_name(self) -> str:
        response = httpx.get(self.get_base_url(), headers=self.headers)
        response.raise_for_status()
        data = response.json()
        return str(data.get('default_branch'))

    def branch_exists(self, branch_name: str) -> bool:
        response = httpx.get(self.get_branch_url(branch_name), headers=self.headers)
        exists = response.status_code == 200
        logger.info(f'Branch {branch_name} exists: {exists}')
        return exists

    def reply_to_comment(self, pr_number: int, comment_id: str, reply: str) -> None:
        # Forgejo does not support threaded replies via API; add a regular comment referencing the original ID.
        message = f'OpenHands reply to comment {comment_id}\n\n{reply}'
        self.send_comment_msg(pr_number, message)

    def create_pull_request(self, data: dict[str, Any] | None = None) -> dict[str, Any]:
        payload = data or {}
        response = httpx.post(
            f'{self.get_base_url()}/pulls', headers=self.headers, json=payload
        )
        if response.status_code == 403:
            raise RuntimeError(
                'Failed to create pull request due to missing permissions. '
                'Ensure the token has write access to the repository.'
            )
        response.raise_for_status()
        pr_data = response.json()
        pr_data.setdefault('number', pr_data.get('index'))
        if 'html_url' not in pr_data and 'url' in pr_data:
            pr_data['html_url'] = pr_data['url']
        return dict(pr_data)

    def request_reviewers(self, reviewer: str, pr_number: int) -> None:
        url = f'{self.get_base_url()}/pulls/{pr_number}/requested_reviewers'
        response = httpx.post(
            url,
            headers=self.headers,
|
||||
json={'reviewers': [reviewer]},
|
||||
)
|
||||
if response.status_code not in (200, 201, 204):
|
||||
logger.warning(
|
||||
f'Failed to request review from {reviewer}: {response.status_code} {response.text}'
|
||||
)
|
||||
|
||||
def send_comment_msg(self, issue_number: int, msg: str) -> None:
|
||||
comment_url = f'{self.get_download_url()}/{issue_number}/comments'
|
||||
response = httpx.post(
|
||||
comment_url,
|
||||
headers=self.headers,
|
||||
json={'body': msg},
|
||||
)
|
||||
if response.status_code not in (200, 201):
|
||||
logger.error(
|
||||
f'Failed to post comment: {response.status_code} {response.text}'
|
||||
)
|
||||
|
||||
def get_context_from_external_issues_references(
|
||||
self,
|
||||
closing_issues: list[str],
|
||||
closing_issue_numbers: list[int],
|
||||
issue_body: str,
|
||||
review_comments: list[str] | None,
|
||||
review_threads: list[ReviewThread],
|
||||
thread_comments: list[str] | None,
|
||||
) -> list[str]:
|
||||
new_references: list[int] = []
|
||||
|
||||
if issue_body:
|
||||
new_references.extend(extract_issue_references(issue_body))
|
||||
|
||||
if review_comments:
|
||||
for comment in review_comments:
|
||||
new_references.extend(extract_issue_references(comment))
|
||||
|
||||
if review_threads:
|
||||
for thread in review_threads:
|
||||
new_references.extend(extract_issue_references(thread.comment))
|
||||
|
||||
if thread_comments:
|
||||
for thread_comment in thread_comments:
|
||||
new_references.extend(extract_issue_references(thread_comment))
|
||||
|
||||
unique_ids = set(new_references).difference(closing_issue_numbers)
|
||||
|
||||
for issue_number in unique_ids:
|
||||
try:
|
||||
response = httpx.get(
|
||||
f'{self.get_download_url()}/{issue_number}',
|
||||
headers=self.headers,
|
||||
)
|
||||
response.raise_for_status()
|
||||
issue_data = response.json()
|
||||
body = issue_data.get('body', '')
|
||||
if body:
|
||||
closing_issues.append(body)
|
||||
except httpx.HTTPError as exc:
|
||||
logger.warning(f'Failed to fetch issue {issue_number}: {exc}')
|
||||
|
||||
return closing_issues
|
||||
|
||||
def get_pull_url_for_issue(self, issue_number: int) -> str:
|
||||
return (
|
||||
f'https://{self.base_domain}/{self.owner}/{self.repo}/issues/{issue_number}'
|
||||
)
|
||||
|
||||
def get_converted_issues(
|
||||
self, issue_numbers: list[int] | None = None, comment_id: int | None = None
|
||||
) -> list[Issue]:
|
||||
if not issue_numbers:
|
||||
raise ValueError('Unspecified issue numbers')
|
||||
|
||||
all_issues = self.download_issues()
|
||||
logger.info(f'Limiting resolving to issues {issue_numbers}.')
|
||||
filtered = [
|
||||
issue
|
||||
for issue in all_issues
|
||||
if self._to_int(issue.get('number') or issue.get('index')) in issue_numbers
|
||||
]
|
||||
|
||||
converted: list[Issue] = []
|
||||
for issue in filtered:
|
||||
if any(issue.get(key) is None for key in ['number', 'title']):
|
||||
logger.warning(
|
||||
f'Skipping issue {issue} as it is missing number or title.'
|
||||
)
|
||||
continue
|
||||
|
||||
issue_number = self._to_int(issue.get('number') or issue.get('index'))
|
||||
body = issue.get('body') or ''
|
||||
thread_comments = self.get_issue_comments(issue_number, comment_id)
|
||||
|
||||
issue_details = Issue(
|
||||
owner=self.owner,
|
||||
repo=self.repo,
|
||||
number=issue_number,
|
||||
title=issue['title'],
|
||||
body=body,
|
||||
thread_comments=thread_comments,
|
||||
review_comments=None,
|
||||
review_threads=None,
|
||||
)
|
||||
converted.append(issue_details)
|
||||
|
||||
return converted
|
||||
|
||||
|
||||
class ForgejoPRHandler(ForgejoIssueHandler):
|
||||
def __init__(
|
||||
self,
|
||||
owner: str,
|
||||
repo: str,
|
||||
token: str,
|
||||
username: str | None = None,
|
||||
base_domain: str = 'codeberg.org',
|
||||
):
|
||||
super().__init__(owner, repo, token, username, base_domain)
|
||||
self.download_url = f'{self.get_base_url()}/pulls'
|
||||
|
||||
def download_pr_metadata(
|
||||
self, pull_number: int, comment_id: int | None = None
|
||||
) -> tuple[list[str], list[int], list[str] | None, list[ReviewThread], list[str]]:
|
||||
closing_issues: list[str] = []
|
||||
closing_issue_numbers: list[int] = []
|
||||
|
||||
try:
|
||||
response = httpx.get(
|
||||
f'{self.get_base_url()}/pulls/{pull_number}', headers=self.headers
|
||||
)
|
||||
response.raise_for_status()
|
||||
pr_data = response.json()
|
||||
body = pr_data.get('body') or ''
|
||||
closing_refs = extract_issue_references(body)
|
||||
closing_issue_numbers.extend(closing_refs)
|
||||
if body:
|
||||
closing_issues.append(body)
|
||||
except httpx.HTTPError as exc:
|
||||
logger.warning(f'Failed to fetch PR metadata for {pull_number}: {exc}')
|
||||
|
||||
review_comments = self.get_pr_comments(pull_number, comment_id)
|
||||
review_threads: list[ReviewThread] = []
|
||||
thread_ids: list[str] = []
|
||||
|
||||
return (
|
||||
closing_issues,
|
||||
closing_issue_numbers,
|
||||
review_comments,
|
||||
review_threads,
|
||||
thread_ids,
|
||||
)
|
||||
|
||||
def get_pr_comments(
|
||||
self, pr_number: int, comment_id: int | None = None
|
||||
) -> list[str] | None:
|
||||
url = f'{self.get_base_url()}/pulls/{pr_number}/comments'
|
||||
page = 1
|
||||
params = {'limit': '50', 'page': str(page)}
|
||||
collected: list[str] = []
|
||||
|
||||
while True:
|
||||
response = httpx.get(url, headers=self.headers, params=params)
|
||||
response.raise_for_status()
|
||||
comments = response.json()
|
||||
|
||||
if not comments:
|
||||
break
|
||||
|
||||
filtered = [
|
||||
comment for comment in comments if not comment.get('is_system', False)
|
||||
]
|
||||
|
||||
if comment_id is not None:
|
||||
matching = next(
|
||||
(
|
||||
comment['body']
|
||||
for comment in filtered
|
||||
if self._to_int(comment.get('id')) == comment_id
|
||||
),
|
||||
None,
|
||||
)
|
||||
if matching:
|
||||
return [matching]
|
||||
else:
|
||||
collected.extend(
|
||||
comment['body'] for comment in filtered if comment.get('body')
|
||||
)
|
||||
|
||||
page += 1
|
||||
params = {'limit': '50', 'page': str(page)}
|
||||
|
||||
return collected if collected else None
|
||||
|
||||
def get_context_from_external_issues_references(
|
||||
self,
|
||||
closing_issues: list[str],
|
||||
closing_issue_numbers: list[int],
|
||||
issue_body: str,
|
||||
review_comments: list[str] | None,
|
||||
review_threads: list[ReviewThread],
|
||||
thread_comments: list[str] | None,
|
||||
) -> list[str]:
|
||||
return super().get_context_from_external_issues_references(
|
||||
closing_issues,
|
||||
closing_issue_numbers,
|
||||
issue_body,
|
||||
review_comments,
|
||||
review_threads,
|
||||
thread_comments,
|
||||
)
|
||||
|
||||
def get_converted_issues(
|
||||
self, issue_numbers: list[int] | None = None, comment_id: int | None = None
|
||||
) -> list[Issue]:
|
||||
if not issue_numbers:
|
||||
raise ValueError('Unspecified issue numbers')
|
||||
|
||||
response = httpx.get(self.download_url, headers=self.headers)
|
||||
response.raise_for_status()
|
||||
all_prs = response.json()
|
||||
|
||||
logger.info(f'Limiting resolving to PRs {issue_numbers}.')
|
||||
filtered = [
|
||||
pr
|
||||
for pr in all_prs
|
||||
if self._to_int(pr.get('number') or pr.get('index')) in issue_numbers
|
||||
]
|
||||
|
||||
converted: list[Issue] = []
|
||||
for pr in filtered:
|
||||
if any(pr.get(key) is None for key in ['number', 'title']):
|
||||
logger.warning(f'Skipping PR {pr} as it is missing number or title.')
|
||||
continue
|
||||
|
||||
body = pr.get('body') or ''
|
||||
pr_number = self._to_int(pr.get('number') or pr.get('index', 0))
|
||||
(
|
||||
closing_issues,
|
||||
closing_issue_numbers,
|
||||
review_comments,
|
||||
review_threads,
|
||||
thread_ids,
|
||||
) = self.download_pr_metadata(pr_number, comment_id)
|
||||
head_branch = (pr.get('head') or {}).get('ref')
|
||||
thread_comments = self.get_pr_comments(pr_number, comment_id)
|
||||
|
||||
closing_issues = self.get_context_from_external_issues_references(
|
||||
closing_issues,
|
||||
closing_issue_numbers,
|
||||
body,
|
||||
review_comments,
|
||||
review_threads,
|
||||
thread_comments,
|
||||
)
|
||||
|
||||
issue_details = Issue(
|
||||
owner=self.owner,
|
||||
repo=self.repo,
|
||||
number=pr_number,
|
||||
title=pr['title'],
|
||||
body=body,
|
||||
closing_issues=closing_issues,
|
||||
review_comments=review_comments,
|
||||
review_threads=review_threads,
|
||||
thread_ids=thread_ids,
|
||||
head_branch=head_branch,
|
||||
thread_comments=thread_comments,
|
||||
)
|
||||
|
||||
converted.append(issue_details)
|
||||
|
||||
return converted
|
||||
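A minimal usage sketch of the handler above, not part of the diff; the owner, repo, token, and issue number are placeholders, and the call only exercises methods defined in forgejo.py:

# Illustrative only: 'example/repo', the token, and issue number 7 are made up.
from openhands.resolver.interfaces.forgejo import ForgejoIssueHandler

handler = ForgejoIssueHandler(
    owner='example',
    repo='repo',
    token='forgejo-token',        # hypothetical personal access token
    base_domain='codeberg.org',   # default Forgejo-compatible host
)
issues = handler.get_converted_issues(issue_numbers=[7])
print(issues[0].title)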
@@ -5,6 +5,10 @@ from openhands.resolver.interfaces.bitbucket import (
    BitbucketIssueHandler,
    BitbucketPRHandler,
)
from openhands.resolver.interfaces.forgejo import (
    ForgejoIssueHandler,
    ForgejoPRHandler,
)
from openhands.resolver.interfaces.github import GithubIssueHandler, GithubPRHandler
from openhands.resolver.interfaces.gitlab import GitlabIssueHandler, GitlabPRHandler
from openhands.resolver.interfaces.issue_definitions import (
@@ -69,6 +73,17 @@ class IssueHandlerFactory:
                ),
                self.llm_config,
            )
        elif self.platform == ProviderType.FORGEJO:
            return ServiceContextIssue(
                ForgejoIssueHandler(
                    self.owner,
                    self.repo,
                    self.token,
                    self.username,
                    self.base_domain,
                ),
                self.llm_config,
            )
        elif self.platform == ProviderType.AZURE_DEVOPS:
            # Parse owner as organization/project
            parts = self.owner.split('/')
@@ -125,6 +140,17 @@ class IssueHandlerFactory:
                ),
                self.llm_config,
            )
        elif self.platform == ProviderType.FORGEJO:
            return ServiceContextPR(
                ForgejoPRHandler(
                    self.owner,
                    self.repo,
                    self.token,
                    self.username,
                    self.base_domain,
                ),
                self.llm_config,
            )
        elif self.platform == ProviderType.AZURE_DEVOPS:
            # Parse owner as organization/project
            parts = self.owner.split('/')
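A hedged sketch of how the new FORGEJO branches of the factory get exercised, not part of the diff; the import path for IssueHandlerFactory is intentionally omitted, and all argument values are placeholders mirroring the unit tests further down:

# Sketch only: assumes IssueHandlerFactory, ProviderType, and an LLMConfig instance
# are already in scope, wired as in tests/unit below; literal values are placeholders.
factory = IssueHandlerFactory(
    owner='example',
    repo='repo',
    token='forgejo-token',
    username='someone',
    base_domain='codeberg.org',
    llm_config=llm_config,
    platform=ProviderType.FORGEJO,
    issue_type='pr',
)
handler = factory.create()  # expected: ServiceContextPR wrapping a ForgejoPRHandler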
@@ -54,6 +54,7 @@ class IssueResolver:

    def __init__(self, args: Namespace) -> None:
        """Initialize the IssueResolver with the given parameters.

        Params initialized:
            owner: Owner of the repo.
            repo: Repository name.
@@ -82,6 +83,7 @@ class IssueResolver:
            or os.getenv('GITLAB_TOKEN')
            or os.getenv('BITBUCKET_TOKEN')
            or os.getenv('AZURE_DEVOPS_TOKEN')
            or os.getenv('FORGEJO_TOKEN')
        )
        username = args.username if args.username else os.getenv('GIT_USERNAME')
        if not username:
@@ -13,6 +13,7 @@ from openhands.integrations.service_types import ProviderType
from openhands.llm.llm import LLM
from openhands.resolver.interfaces.azure_devops import AzureDevOpsIssueHandler
from openhands.resolver.interfaces.bitbucket import BitbucketIssueHandler
from openhands.resolver.interfaces.forgejo import ForgejoIssueHandler
from openhands.resolver.interfaces.github import GithubIssueHandler
from openhands.resolver.interfaces.gitlab import GitlabIssueHandler
from openhands.resolver.interfaces.issue import Issue
@@ -26,6 +27,10 @@ from openhands.resolver.utils import identify_token
from openhands.utils.async_utils import GENERAL_TIMEOUT, call_async_from_sync
from openhands.utils.environment import get_effective_llm_base_url

PR_SIGNATURE = (
    'Automatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌'
)


def apply_patch(repo_dir: str, patch: str) -> None:
    """Apply a patch to a repository.
@@ -248,7 +253,7 @@ def send_pull_request(
    git_user_name: str = 'openhands',
    git_user_email: str = 'openhands@all-hands.dev',
) -> str:
    """Send a pull request to a GitHub, GitLab, Bitbucket, or Azure DevOps repository.
    """Send a pull request to a GitHub, GitLab, Bitbucket, Forgejo, or Azure DevOps repository.

    Args:
        issue: The issue to send the pull request for
@@ -262,21 +267,22 @@ def send_pull_request(
        target_branch: The target branch to create the pull request against (defaults to repository default branch)
        reviewer: The username of the reviewer to assign
        pr_title: Custom title for the pull request (optional)
        base_domain: The base domain for the git server (defaults to "github.com" for GitHub, "gitlab.com" for GitLab, "bitbucket.org" for Bitbucket, and "dev.azure.com" for Azure DevOps)
        base_domain: The base domain for the git server (defaults to "github.com" for GitHub, "gitlab.com" for GitLab, "bitbucket.org" for Bitbucket, "codeberg.org" for Forgejo, and "dev.azure.com" for Azure DevOps)
        git_user_name: Git username to configure when creating commits
        git_user_email: Git email to configure when creating commits
    """
    if pr_type not in ['branch', 'draft', 'ready']:
        raise ValueError(f'Invalid pr_type: {pr_type}')

    # Determine default base_domain based on platform
    if base_domain is None:
        if platform == ProviderType.GITHUB:
            base_domain = 'github.com'
        elif platform == ProviderType.GITLAB:
            base_domain = 'gitlab.com'
        elif platform == ProviderType.AZURE_DEVOPS:
            base_domain = 'dev.azure.com'
        else:  # platform == ProviderType.BITBUCKET
            base_domain = 'bitbucket.org'
        base_domain = {
            ProviderType.GITHUB: 'github.com',
            ProviderType.GITLAB: 'gitlab.com',
            ProviderType.BITBUCKET: 'bitbucket.org',
            ProviderType.FORGEJO: 'codeberg.org',
            ProviderType.AZURE_DEVOPS: 'dev.azure.com',
        }.get(platform, 'github.com')

    # Create the appropriate handler based on platform
    handler = None
@@ -297,6 +303,11 @@ def send_pull_request(
            ),
            None,
        )
    elif platform == ProviderType.FORGEJO:
        handler = ServiceContextIssue(
            ForgejoIssueHandler(issue.owner, issue.repo, token, username, base_domain),
            None,
        )
    elif platform == ProviderType.AZURE_DEVOPS:
        # For Azure DevOps, owner is "organization/project"
        organization, project = issue.owner.split('/')
@@ -360,11 +371,11 @@ def send_pull_request(
    pr_body = f'This pull request fixes #{issue.number}.'
    if additional_message:
        pr_body += f'\n\n{additional_message}'
    pr_body += '\n\nAutomatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌'
    pr_body += f'\n\n{PR_SIGNATURE}'

    # For cross repo pull request, we need to send head parameter like fork_owner:branch as per git documentation here : https://docs.github.com/en/rest/pulls/pulls?apiVersion=2022-11-28#create-a-pull-request
    # head parameter usage : The name of the branch where your changes are implemented. For cross-repository pull requests in the same network, namespace head with a user like this: username:branch.
    if fork_owner and platform == ProviderType.GITHUB:
    if fork_owner and platform in (ProviderType.GITHUB, ProviderType.FORGEJO):
        head_branch = f'{fork_owner}:{branch_name}'
    else:
        head_branch = branch_name
@@ -374,17 +385,40 @@ def send_pull_request(
        url = handler.get_compare_url(branch_name)
    else:
        # Prepare the PR for the GitHub API
        data = {
            'title': final_pr_title,
            ('body' if platform == ProviderType.GITHUB else 'description'): pr_body,
            (
                'head' if platform == ProviderType.GITHUB else 'source_branch'
            ): head_branch,
            (
                'base' if platform == ProviderType.GITHUB else 'target_branch'
            ): base_branch,
            'draft': pr_type == 'draft',
        }
        if platform == ProviderType.GITHUB:
            data = {
                'title': final_pr_title,
                'body': pr_body,
                'head': head_branch,
                'base': base_branch,
                'draft': pr_type == 'draft',
            }
        elif platform == ProviderType.GITLAB:
            data = {
                'title': final_pr_title,
                'description': pr_body,
                'source_branch': head_branch,
                'target_branch': base_branch,
                'draft': pr_type == 'draft',
            }
        elif platform == ProviderType.BITBUCKET:
            data = {
                'title': final_pr_title,
                'description': pr_body,
                'source_branch': head_branch,
                'target_branch': base_branch,
                'draft': pr_type == 'draft',
            }
        elif platform == ProviderType.FORGEJO:
            data = {
                'title': final_pr_title,
                'body': pr_body,
                'head': head_branch,
                'base': base_branch,
                'draft': pr_type == 'draft',
            }
        else:
            raise ValueError(f'Unsupported platform for PR creation: {platform}')

        pr_data = handler.create_pull_request(data)
        url = pr_data['html_url']
@@ -429,13 +463,13 @@ def update_existing_pull_request(

    # Determine default base_domain based on platform
    if base_domain is None:
        base_domain = (
            'github.com'
            if platform == ProviderType.GITHUB
            else 'gitlab.com'
            if platform == ProviderType.GITLAB
            else 'dev.azure.com'
        )
        base_domain = {
            ProviderType.GITHUB: 'github.com',
            ProviderType.GITLAB: 'gitlab.com',
            ProviderType.AZURE_DEVOPS: 'dev.azure.com',
            ProviderType.BITBUCKET: 'bitbucket.org',
            ProviderType.FORGEJO: 'codeberg.org',
        }.get(platform, 'github.com')

    handler = None
    if platform == ProviderType.GITHUB:
@@ -443,6 +477,11 @@ def update_existing_pull_request(
            GithubIssueHandler(issue.owner, issue.repo, token, username, base_domain),
            llm_config,
        )
    elif platform == ProviderType.GITLAB:
        handler = ServiceContextIssue(
            GitlabIssueHandler(issue.owner, issue.repo, token, username, base_domain),
            llm_config,
        )
    elif platform == ProviderType.AZURE_DEVOPS:
        # For Azure DevOps, owner is "organization/project"
        organization, project = issue.owner.split('/')
@@ -450,11 +489,20 @@ def update_existing_pull_request(
            AzureDevOpsIssueHandler(token, organization, project, issue.repo),
            llm_config,
        )
    else:  # platform == ProviderType.GITLAB
    elif platform == ProviderType.BITBUCKET:
        handler = ServiceContextIssue(
            GitlabIssueHandler(issue.owner, issue.repo, token, username, base_domain),
            BitbucketIssueHandler(
                issue.owner, issue.repo, token, username, base_domain
            ),
            llm_config,
        )
    elif platform == ProviderType.FORGEJO:
        handler = ServiceContextIssue(
            ForgejoIssueHandler(issue.owner, issue.repo, token, username, base_domain),
            llm_config,
        )
    else:
        raise ValueError(f'Unsupported platform: {platform}')

    branch_name = issue.head_branch

@@ -503,7 +551,10 @@ def update_existing_pull_request(
        comment_message = response.choices[0].message.content.strip()

    except (json.JSONDecodeError, TypeError):
        comment_message = f'A new OpenHands update is available, but failed to parse or summarize the changes:\n{additional_message}'
        comment_message = (
            'A new OpenHands update is available, but failed to parse or summarize '
            f'the changes:\n{additional_message}'
        )

    # Post a comment on the PR
    if comment_message:
@@ -727,10 +778,11 @@ def main() -> None:
        or os.getenv('GITHUB_TOKEN')
        or os.getenv('GITLAB_TOKEN')
        or os.getenv('AZURE_DEVOPS_TOKEN')
        or os.getenv('FORGEJO_TOKEN')
    )
    if not token:
        raise ValueError(
            'token is not set, set via --token or GITHUB_TOKEN, GITLAB_TOKEN, or AZURE_DEVOPS_TOKEN environment variable.'
            'token is not set, set via --token or GITHUB_TOKEN, GITLAB_TOKEN, AZURE_DEVOPS_TOKEN, or FORGEJO_TOKEN environment variable.'
        )
    username = my_args.username if my_args.username else os.getenv('GIT_USERNAME')
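A hedged sketch of what the Forgejo branch above ends up sending, not part of the diff; the literal values are placeholders taken from the Forgejo test expectations further down:

# Illustrative only; values mirror the test case below, not real data.
from openhands.integrations.service_types import ProviderType
from openhands.resolver.send_pull_request import PR_SIGNATURE

base_domain = {
    ProviderType.GITHUB: 'github.com',
    ProviderType.GITLAB: 'gitlab.com',
    ProviderType.BITBUCKET: 'bitbucket.org',
    ProviderType.FORGEJO: 'codeberg.org',
    ProviderType.AZURE_DEVOPS: 'dev.azure.com',
}.get(ProviderType.FORGEJO, 'github.com')  # -> 'codeberg.org'

data = {
    'title': 'Fix the Forgejo PR flow',
    'body': f'This pull request fixes #7.\n\n{PR_SIGNATURE}',
    'head': 'openhands-fix-issue-7',  # becomes 'fork_owner:branch' when a fork is used
    'base': 'main',
    'draft': False,
}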
@@ -1,5 +1,5 @@
from types import MappingProxyType
from typing import cast
from typing import Annotated, cast

from fastapi import APIRouter, Depends, Query, status
from fastapi.responses import JSONResponse
@@ -67,7 +67,7 @@ async def get_user_installations(
@app.get('/repositories', response_model=list[Repository])
async def get_user_repositories(
    sort: str = 'pushed',
    selected_provider: ProviderType | None = None,
    selected_provider: Annotated[ProviderType | None, Query()] = None,
    page: int | None = None,
    per_page: int | None = None,
    installation_id: str | None = None,
@@ -137,7 +137,7 @@ async def search_repositories(
    per_page: int = 5,
    sort: str = 'stars',
    order: str = 'desc',
    selected_provider: ProviderType | None = None,
    selected_provider: Annotated[ProviderType | None, Query()] = None,
    provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens),
    access_token: SecretStr | None = Depends(get_access_token),
    user_id: str | None = Depends(get_user_id),
@@ -171,7 +171,7 @@ async def search_branches(
    repository: str,
    query: str,
    per_page: int = 30,
    selected_provider: ProviderType | None = None,
    selected_provider: Annotated[ProviderType | None, Query()] = None,
    provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens),
    access_token: SecretStr | None = Depends(get_access_token),
    user_id: str | None = Depends(get_user_id),
@@ -243,6 +243,7 @@ async def get_repository_branches(
    repository: str,
    page: int = 1,
    per_page: int = 30,
    selected_provider: Annotated[ProviderType | None, Query()] = None,
    provider_tokens: PROVIDER_TOKEN_TYPE | None = Depends(get_provider_tokens),
    access_token: SecretStr | None = Depends(get_access_token),
    user_id: str | None = Depends(get_user_id),
@@ -253,6 +254,7 @@ async def get_repository_branches(
        repository: The repository name in the format 'owner/repo'
        page: Page number for pagination (default: 1)
        per_page: Number of branches per page (default: 30)
        selected_provider: Optional provider hint to avoid trying other providers

    Returns:
        A paginated response with branches for the repository
@@ -263,7 +265,10 @@ async def get_repository_branches(
    )
    try:
        branches_response: PaginatedBranchesResponse = await client.get_branches(
            repository, page=page, per_page=per_page
            repository,
            specified_provider=selected_provider,
            page=page,
            per_page=per_page,
        )
        return branches_response
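A minimal standalone sketch of the Annotated[..., Query()] pattern adopted above, so the optional enum is parsed from the query string rather than treated as a body field; the route and enum here are simplified stand-ins, not the real OpenHands types:

# Sketch assuming Python 3.10+ and a recent FastAPI; Provider is a stand-in for ProviderType.
from enum import Enum
from typing import Annotated

from fastapi import FastAPI, Query

app = FastAPI()


class Provider(str, Enum):
    github = 'github'
    forgejo = 'forgejo'


@app.get('/branches')
async def branches(
    repository: str,
    selected_provider: Annotated[Provider | None, Query()] = None,
):
    # e.g. GET /branches?repository=owner/repo&selected_provider=forgejo
    return {'repository': repository, 'provider': selected_provider}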
@@ -15,7 +15,7 @@ from openhands.integrations.utils import validate_provider_token
from openhands.resolver.interfaces.bitbucket import BitbucketIssueHandler
from openhands.resolver.interfaces.issue import Issue
from openhands.resolver.interfaces.issue_definitions import ServiceContextIssue
from openhands.resolver.send_pull_request import send_pull_request
from openhands.resolver.send_pull_request import PR_SIGNATURE, send_pull_request
from openhands.runtime.base import Runtime
from openhands.server.routes.secrets import check_provider_tokens
from openhands.server.settings import POSTProviderModel
@@ -219,7 +219,7 @@ def test_send_pull_request_bitbucket(
    mock_service_context.assert_called_once()

    # Verify create_pull_request was called with the correct data
    expected_body = 'This pull request fixes #123.\n\nAutomatic fix generated by [OpenHands](https://github.com/OpenHands/OpenHands/) 🙌'
    expected_body = f'This pull request fixes #123.\n\n{PR_SIGNATURE}'
    mock_service.create_pull_request.assert_called_once_with(
        {
            'title': 'Test PR',
@@ -353,8 +353,9 @@ class TestBitbucketProviderDomain(unittest.TestCase):
# Provider Token Validation Tests
@pytest.mark.asyncio
async def test_validate_provider_token_with_bitbucket_token():
    """Test that validate_provider_token correctly identifies a Bitbucket token
    and doesn't try to validate it as GitHub or GitLab.
    """Test that validate_provider_token correctly identifies a Bitbucket token.

    Ensures GitHub and GitLab validators are not invoked.
    """
    # Mock the service classes to avoid actual API calls
    with (
@@ -392,9 +393,7 @@ async def test_validate_provider_token_with_bitbucket_token():

@pytest.mark.asyncio
async def test_check_provider_tokens_with_only_bitbucket():
    """Test that check_provider_tokens doesn't try to validate GitHub or GitLab tokens
    when only a Bitbucket token is provided.
    """
    """Test that check_provider_tokens ignores GitHub/GitLab tokens when only Bitbucket is provided."""
    # Create a mock validate_provider_token function
    mock_validate = AsyncMock()
    mock_validate.return_value = ProviderType.BITBUCKET
@@ -0,0 +1,78 @@
"""Tests for Forgejo integration with send_pull_request."""

from unittest.mock import MagicMock, patch

from openhands.integrations.service_types import ProviderType as ServiceProviderType
from openhands.resolver.interfaces.issue import Issue
from openhands.resolver.send_pull_request import PR_SIGNATURE, send_pull_request


@patch('openhands.resolver.send_pull_request.ServiceContextIssue')
@patch('openhands.resolver.send_pull_request.ForgejoIssueHandler')
@patch('subprocess.run')
def test_send_pull_request_forgejo(
    mock_run, mock_forgejo_handler, mock_service_context
):
    """Ensure we can build and submit a Forgejo pull request."""
    mock_run.return_value = MagicMock(returncode=0)

    handler_instance = MagicMock()
    mock_forgejo_handler.return_value = handler_instance

    service_context_instance = MagicMock()
    service_context_instance.get_branch_name.return_value = 'openhands-fix-issue-7'
    service_context_instance.branch_exists.return_value = True
    service_context_instance.get_default_branch_name.return_value = 'main'
    service_context_instance.get_clone_url.return_value = (
        'https://codeberg.org/example/repo.git'
    )
    service_context_instance.create_pull_request.return_value = {
        'html_url': 'https://codeberg.org/example/repo/pulls/42',
        'number': 42,
    }
    service_context_instance._strategy = MagicMock()
    mock_service_context.return_value = service_context_instance

    issue = Issue(
        number=7,
        title='Fix the Forgejo PR flow',
        owner='example',
        repo='repo',
        body='Details about the fix',
        created_at='2024-01-01T00:00:00Z',
        updated_at='2024-01-01T00:00:00Z',
        closed_at=None,
        head_branch='feature-branch',
        thread_ids=None,
    )

    result = send_pull_request(
        issue=issue,
        token='forgejo-token',
        username=None,
        platform=ServiceProviderType.FORGEJO,
        patch_dir='/tmp',
        pr_type='ready',
        pr_title='Fix the Forgejo PR flow',
        target_branch='main',
    )

    assert result == 'https://codeberg.org/example/repo/pulls/42'

    mock_forgejo_handler.assert_called_once_with(
        'example', 'repo', 'forgejo-token', None, 'codeberg.org'
    )
    mock_service_context.assert_called_once_with(handler_instance, None)

    expected_payload = {
        'title': 'Fix the Forgejo PR flow',
        'body': f'This pull request fixes #7.\n\n{PR_SIGNATURE}',
        'head': 'openhands-fix-issue-7',
        'base': 'main',
        'draft': False,
    }
    service_context_instance.create_pull_request.assert_called_once_with(
        expected_payload
    )

    mock_run.assert_called()
@@ -4,6 +4,10 @@ from pydantic import SecretStr
from openhands.core.config import LLMConfig
from openhands.integrations.provider import ProviderType
from openhands.resolver.interfaces.azure_devops import AzureDevOpsIssueHandler
from openhands.resolver.interfaces.forgejo import (
    ForgejoIssueHandler,
    ForgejoPRHandler,
)
from openhands.resolver.interfaces.github import GithubIssueHandler, GithubPRHandler
from openhands.resolver.interfaces.gitlab import GitlabIssueHandler, GitlabPRHandler
from openhands.resolver.interfaces.issue_definitions import (
@@ -28,7 +32,6 @@ def factory_params(llm_config):
        'repo': 'test-repo',
        'token': 'test-token',
        'username': 'test-user',
        'base_domain': 'github.com',
        'llm_config': llm_config,
    }

@@ -46,24 +49,76 @@ def azure_factory_params(llm_config):


test_cases = [
    # platform, issue_type, expected_context_type, expected_handler_type, use_azure_params
    (ProviderType.GITHUB, 'issue', ServiceContextIssue, GithubIssueHandler, False),
    (ProviderType.GITHUB, 'pr', ServiceContextPR, GithubPRHandler, False),
    (ProviderType.GITLAB, 'issue', ServiceContextIssue, GitlabIssueHandler, False),
    (ProviderType.GITLAB, 'pr', ServiceContextPR, GitlabPRHandler, False),
    # platform, issue_type, base_domain, expected_context_type, expected_handler_type, use_azure_params
    (
        ProviderType.GITHUB,
        'issue',
        'github.com',
        ServiceContextIssue,
        GithubIssueHandler,
        False,
    ),
    (
        ProviderType.GITHUB,
        'pr',
        'github.com',
        ServiceContextPR,
        GithubPRHandler,
        False,
    ),
    (
        ProviderType.GITLAB,
        'issue',
        'gitlab.com',
        ServiceContextIssue,
        GitlabIssueHandler,
        False,
    ),
    (
        ProviderType.GITLAB,
        'pr',
        'gitlab.com',
        ServiceContextPR,
        GitlabPRHandler,
        False,
    ),
    (
        ProviderType.FORGEJO,
        'issue',
        'codeberg.org',
        ServiceContextIssue,
        ForgejoIssueHandler,
        False,
    ),
    (
        ProviderType.FORGEJO,
        'pr',
        'codeberg.org',
        ServiceContextPR,
        ForgejoPRHandler,
        False,
    ),
    (
        ProviderType.AZURE_DEVOPS,
        'issue',
        'dev.azure.com',
        ServiceContextIssue,
        AzureDevOpsIssueHandler,
        True,
    ),
    (ProviderType.AZURE_DEVOPS, 'pr', ServiceContextPR, AzureDevOpsIssueHandler, True),
    (
        ProviderType.AZURE_DEVOPS,
        'pr',
        'dev.azure.com',
        ServiceContextPR,
        AzureDevOpsIssueHandler,
        True,
    ),
]


@pytest.mark.parametrize(
    'platform,issue_type,expected_context_type,expected_handler_type,use_azure_params',
    'platform,issue_type,base_domain,expected_context_type,expected_handler_type,use_azure_params',
    test_cases,
)
def test_handler_creation(
@@ -71,11 +126,16 @@ def test_handler_creation(
    azure_factory_params,
    platform: ProviderType,
    issue_type: str,
    base_domain: str,
    expected_context_type: type,
    expected_handler_type: type,
    use_azure_params: bool,
):
    params = azure_factory_params if use_azure_params else factory_params
    params = (
        azure_factory_params
        if use_azure_params
        else {**factory_params, 'base_domain': base_domain}
    )
    factory = IssueHandlerFactory(**params, platform=platform, issue_type=issue_type)

    handler = factory.create()
@@ -86,7 +146,10 @@ def test_handler_creation(

def test_invalid_issue_type(factory_params):
    factory = IssueHandlerFactory(
        **factory_params, platform=ProviderType.GITHUB, issue_type='invalid'
        **factory_params,
        platform=ProviderType.GITHUB,
        issue_type='invalid',
        base_domain='github.com',
    )

    with pytest.raises(ValueError, match='Invalid issue type: invalid'):
273
tests/unit/test_forgejo_service.py
Normal file
@@ -0,0 +1,273 @@
from unittest.mock import AsyncMock, MagicMock, patch

import httpx
import pytest
from pydantic import SecretStr

from openhands.integrations.forgejo.forgejo_service import ForgejoService
from openhands.integrations.service_types import (
    ProviderType,
    Repository,
    RequestMethod,
    User,
)
from openhands.server.types import AppMode


@pytest.fixture
def forgejo_service():
    return ForgejoService(token=SecretStr('test_token'))


@pytest.mark.asyncio
async def test_get_user(forgejo_service):
    # Mock response data
    mock_user_data = {
        'id': 1,
        'username': 'test_user',
        'avatar_url': 'https://codeberg.org/avatar/test_user',
        'full_name': 'Test User',
        'email': 'test@example.com',
        'organization': 'Test Org',
    }

    # Mock the _make_request method
    forgejo_service._make_request = AsyncMock(return_value=(mock_user_data, {}))

    # Call the method
    user = await forgejo_service.get_user()

    # Verify the result
    assert isinstance(user, User)
    assert user.id == '1'
    assert user.login == 'test_user'
    assert user.avatar_url == 'https://codeberg.org/avatar/test_user'
    assert user.name == 'Test User'
    assert user.email == 'test@example.com'
    assert user.company == 'Test Org'

    # Verify the _make_request call
    forgejo_service._make_request.assert_called_once_with(
        f'{forgejo_service.BASE_URL}/user'
    )


@pytest.mark.asyncio
async def test_search_repositories(forgejo_service):
    # Mock response data
    mock_repos_data = {
        'data': [
            {
                'id': 1,
                'full_name': 'test_user/repo1',
                'stars_count': 10,
            },
            {
                'id': 2,
                'full_name': 'test_user/repo2',
                'stars_count': 20,
            },
        ]
    }

    # Mock the _make_request method
    forgejo_service._make_request = AsyncMock(return_value=(mock_repos_data, {}))

    # Call the method
    repos = await forgejo_service.search_repositories(
        'test', 10, 'updated', 'desc', public=False, app_mode=AppMode.OSS
    )

    # Verify the result
    assert len(repos) == 2
    assert all(isinstance(repo, Repository) for repo in repos)
    assert repos[0].id == '1'
    assert repos[0].full_name == 'test_user/repo1'
    assert repos[0].stargazers_count == 10
    assert repos[0].git_provider == ProviderType.FORGEJO
    assert repos[1].id == '2'
    assert repos[1].full_name == 'test_user/repo2'
    assert repos[1].stargazers_count == 20
    assert repos[1].git_provider == ProviderType.FORGEJO

    # Verify the _make_request call
    forgejo_service._make_request.assert_called_once_with(
        f'{forgejo_service.BASE_URL}/repos/search',
        {
            'q': 'test',
            'limit': 10,
            'sort': 'updated',
            'order': 'desc',
            'mode': 'source',
        },
    )


@pytest.mark.asyncio
async def test_get_all_repositories(forgejo_service):
    # Mock response data for first page
    mock_repos_data_page1 = [
        {
            'id': 1,
            'full_name': 'test_user/repo1',
            'stars_count': 10,
        },
        {
            'id': 2,
            'full_name': 'test_user/repo2',
            'stars_count': 20,
        },
    ]

    # Mock response data for second page
    mock_repos_data_page2 = [
        {
            'id': 3,
            'full_name': 'test_user/repo3',
            'stars_count': 30,
        },
    ]

    # Mock the _make_request method to return different data for different pages
    forgejo_service._make_request = AsyncMock()
    forgejo_service._make_request.side_effect = [
        (
            mock_repos_data_page1,
            {'Link': '<https://codeberg.org/api/v1/user/repos?page=2>; rel="next"'},
        ),
        (mock_repos_data_page2, {'Link': ''}),
    ]

    # Call the method
    repos = await forgejo_service.get_all_repositories('updated', AppMode.OSS)

    # Verify the result
    assert len(repos) == 3
    assert all(isinstance(repo, Repository) for repo in repos)
    assert repos[0].id == '1'
    assert repos[0].full_name == 'test_user/repo1'
    assert repos[0].stargazers_count == 10
    assert repos[0].git_provider == ProviderType.FORGEJO
    assert repos[1].id == '2'
    assert repos[1].full_name == 'test_user/repo2'
    assert repos[1].stargazers_count == 20
    assert repos[1].git_provider == ProviderType.FORGEJO
    assert repos[2].id == '3'
    assert repos[2].full_name == 'test_user/repo3'
    assert repos[2].stargazers_count == 30
    assert repos[2].git_provider == ProviderType.FORGEJO

    # Verify the _make_request calls
    assert forgejo_service._make_request.call_count == 2
    forgejo_service._make_request.assert_any_call(
        f'{forgejo_service.BASE_URL}/user/repos',
        {'page': '1', 'limit': '100', 'sort': 'updated'},
    )
    forgejo_service._make_request.assert_any_call(
        f'{forgejo_service.BASE_URL}/user/repos',
        {'page': '2', 'limit': '100', 'sort': 'updated'},
    )


@pytest.mark.asyncio
async def test_make_request_success(forgejo_service):
    # Mock httpx.AsyncClient
    mock_client = AsyncMock()
    mock_response = MagicMock()
    mock_response.status_code = 200
    mock_response.raise_for_status = MagicMock()
    mock_response.json.return_value = {'key': 'value'}
    mock_response.headers = {'Link': 'next_link', 'Content-Type': 'application/json'}
    mock_client.__aenter__.return_value.get.return_value = mock_response

    # Patch httpx.AsyncClient
    with patch('httpx.AsyncClient', return_value=mock_client):
        # Call the method
        result, headers = await forgejo_service._make_request(
            'https://test.url', {'param': 'value'}
        )

        # Verify the result
        assert result == {'key': 'value'}
        assert headers == {'Link': 'next_link'}
        mock_response.raise_for_status.assert_called_once()


@pytest.mark.asyncio
async def test_make_request_auth_error(forgejo_service):
    # Mock httpx.AsyncClient
    mock_client = AsyncMock()
    mock_response = MagicMock()
    mock_response.status_code = 401
    mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
        '401 Unauthorized', request=MagicMock(), response=mock_response
    )
    mock_client.__aenter__.return_value.get.return_value = mock_response

    # Patch httpx.AsyncClient
    with patch('httpx.AsyncClient', return_value=mock_client):
        # Call the method and expect an exception
        with pytest.raises(Exception) as excinfo:
            await forgejo_service._make_request('https://test.url', {'param': 'value'})

        # Verify the exception
        assert 'Invalid forgejo token' in str(excinfo.value)


@pytest.mark.asyncio
async def test_make_request_other_error(forgejo_service):
    # Mock httpx.AsyncClient
    mock_client = AsyncMock()
    mock_response = MagicMock()
    mock_response.status_code = 500
    mock_response.raise_for_status.side_effect = httpx.HTTPStatusError(
        '500 Server Error', request=MagicMock(), response=mock_response
    )
    mock_client.__aenter__.return_value.get.return_value = mock_response

    # Patch httpx.AsyncClient
    with patch('httpx.AsyncClient', return_value=mock_client):
        # Call the method and expect an exception
        with pytest.raises(Exception) as excinfo:
            await forgejo_service._make_request('https://test.url', {'param': 'value'})

        # Verify the exception
        assert 'Unknown error' in str(excinfo.value)


@pytest.mark.asyncio
async def test_create_pull_request(forgejo_service):
    mock_response = {'index': 42, 'html_url': 'https://example/pr/42'}
    forgejo_service._make_request = AsyncMock(return_value=(mock_response, {}))

    data = {'owner': 'org', 'repo': 'project', 'title': 'Add feature'}
    result = await forgejo_service.create_pull_request(data.copy())

    assert result['number'] == 42
    forgejo_service._make_request.assert_awaited_once_with(
        f'{forgejo_service.BASE_URL}/repos/org/project/pulls',
        {'title': 'Add feature'},
        method=RequestMethod.POST,
    )


@pytest.mark.asyncio
async def test_request_reviewers(forgejo_service):
    forgejo_service._make_request = AsyncMock(return_value=({}, {}))

    await forgejo_service.request_reviewers('org/project', 5, ['alice'])

    forgejo_service._make_request.assert_awaited_once_with(
        f'{forgejo_service.BASE_URL}/repos/org/project/pulls/5/requested_reviewers',
        {'reviewers': ['alice']},
        method=RequestMethod.POST,
    )


@pytest.mark.asyncio
async def test_request_reviewers_empty_list(forgejo_service):
    forgejo_service._make_request = AsyncMock()

    await forgejo_service.request_reviewers('org/project', 5, [])

    forgejo_service._make_request.assert_not_called()
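The pagination test above drives ForgejoService through the Link response header. A rough standalone sketch of that kind of loop, for illustration only; it is not the actual ForgejoService implementation, and the URL and token are placeholders:

# Illustrative pagination loop over a Forgejo-style API using the Link header.
import httpx


def fetch_all(url: str, token: str) -> list[dict]:
    headers = {'Authorization': f'token {token}', 'Accept': 'application/json'}
    results: list[dict] = []
    page = 1
    while True:
        resp = httpx.get(
            url, headers=headers, params={'page': str(page), 'limit': '100'}
        )
        resp.raise_for_status()
        results.extend(resp.json())
        # Stop once the Link header no longer advertises a next page.
        if 'rel="next"' not in resp.headers.get('Link', ''):
            break
        page += 1
    return results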