
Git service: attach each service to an allauth provider #11995


Merged: 12 commits merged on Feb 14, 2025
4 changes: 2 additions & 2 deletions readthedocs/api/v2/views/model_views.py
@@ -420,7 +420,7 @@ def get_queryset(self):
self.model.objects.api_v2(self.request.user)
.filter(
remote_organization_relations__account__provider__in=[
service.adapter.provider_id for service in registry
service.allauth_provider.id for service in registry
]
)
.distinct()
@@ -466,7 +466,7 @@ def get_queryset(self):

query = query.filter(
remote_repository_relations__account__provider__in=[
service.adapter.provider_id for service in registry
service.allauth_provider.id for service in registry
],
).distinct()

110 changes: 33 additions & 77 deletions readthedocs/builds/tasks.py
@@ -30,12 +30,10 @@
)
from readthedocs.builds.models import Build, Version
from readthedocs.builds.utils import memcache_lock
from readthedocs.core.permissions import AdminPermission
from readthedocs.core.utils import send_email, trigger_build
from readthedocs.integrations.models import HttpExchange
from readthedocs.notifications.models import Notification
from readthedocs.oauth.notifications import MESSAGE_OAUTH_BUILD_STATUS_FAILURE
from readthedocs.projects.constants import GITHUB_BRAND, GITLAB_BRAND
from readthedocs.projects.models import Project, WebHookEvent
from readthedocs.storage import build_commands_storage
from readthedocs.worker import app
@@ -385,24 +383,16 @@ def sync_versions_task(project_pk, tags_data, branches_data, **kwargs):
@app.task(max_retries=3, default_retry_delay=60, queue="web")
def send_build_status(build_pk, commit, status):
"""
Send Build Status to Git Status API for project external versions.

It tries using these services' account in order:

1. user's account that imported the project
2. each user's account from the project's maintainers
Send build status to GitHub/GitLab for a given build/commit.

:param build_pk: Build primary key
:param commit: commit sha of the pull/merge request
:param status: build status failed, pending, or success to be sent.
"""
# TODO: Send build status for Bitbucket.
build = Build.objects.filter(pk=build_pk).first()
if not build:
return

provider_name = build.project.git_provider_name

log.bind(
build_id=build.pk,
project_slug=build.project.slug,
@@ -412,76 +402,42 @@ def send_build_status(build_pk, commit, status):

log.debug("Sending build status.")

if provider_name in [GITHUB_BRAND, GITLAB_BRAND]:
# get the service class for the project e.g: GitHubService.
service_class = build.project.git_service_class()
users = AdminPermission.admins(build.project)

if build.project.remote_repository:
remote_repository = build.project.remote_repository
remote_repository_relations = (
remote_repository.remote_repository_relations.filter(
account__isnull=False,
# Use ``user_in=`` instead of ``user__projects=`` here
# because User's are not related to Project's directly in
# Read the Docs for Business
user__in=AdminPermission.members(build.project),
)
.select_related("account", "user")
.only("user", "account")
)
# Get the service class for the project, e.g. GitHubService.
# We fall back to guessing the service from the repo;
# in the future we should only consider projects that have a remote repository.
service_class = build.project.get_git_service_class(fallback_to_clone_url=True)
if not service_class:
log.info("Project isn't connected to a Git service, not sending build status.")
return False

# Try using any of the users' maintainer accounts
# Try to loop through all remote repository relations for the projects users
for relation in remote_repository_relations:
service = service_class(relation.user, relation.account)
# Send status report using the API.
success = service.send_build_status(
build,
commit,
status,
)
if not service_class.supports_build_status:
log.info("Git service doesn't support build status.")
return False

if success:
log.debug(
"Build status report sent correctly.",
user_username=relation.user.username,
)
return True
else:
log.warning("Project does not have a RemoteRepository.")
# Try to send build status for projects with no RemoteRepository
for user in users:
services = service_class.for_user(user)
# Try to loop through services for users all social accounts
# to send successful build status
for service in services:
success = service.send_build_status(
build,
commit,
status,
)
if success:
log.debug(
"Build status report sent correctly using an user account.",
user_username=user.username,
)
return True

# NOTE: this notifications was attached to every user.
# Now, I'm attaching it to the project itself since it's a problem at project level.
Notification.objects.add(
message_id=MESSAGE_OAUTH_BUILD_STATUS_FAILURE,
attached_to=build.project,
format_values={
"provider_name": provider_name,
"url_connect_account": reverse("socialaccount_connections"),
},
dismissable=True,
for service in service_class.for_project(build.project):
success = service.send_build_status(
build,
commit,
status,
)
if success:
log.debug("Build status report sent correctly.")
return True

Notification.objects.add(
message_id=MESSAGE_OAUTH_BUILD_STATUS_FAILURE,
attached_to=build.project,
format_values={
"provider_name": service_class.allauth_provider.name,
"url_connect_account": reverse("socialaccount_connections"),
},
dismissable=True,
)

log.info("No social account or repository permission available.")
return False
log.info(
"No social account or repository permission available, no build status sent."
)
return False
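
A minimal usage sketch (not part of this diff) of how the task above might be enqueued from webhook handling code for an external (pull/merge request) version build. The import path and name of the status constant are assumptions about the Read the Docs codebase layout.

# Hypothetical call site; ``build`` is a Build for an external version.
from readthedocs.builds.constants import BUILD_STATUS_PENDING  # assumed location

send_build_status.delay(
    build_pk=build.pk,
    commit=build.commit,
    status=BUILD_STATUS_PENDING,
)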


@app.task(queue="web")
71 changes: 71 additions & 0 deletions readthedocs/oauth/clients.py
@@ -0,0 +1,71 @@
from datetime import datetime

import structlog
from django.utils import timezone
from requests_oauthlib import OAuth2Session

log = structlog.get_logger(__name__)


def _get_token_updater(token):
"""
Update token given data from OAuth response.

Expect the following response into the closure::
Review comment (Member): Not sure what this is saying -- do you mean that is the expected value of the token arg?

Reply (Member Author): Nope, this is likely the response you get when the request for a new token is made.


{
u'token_type': u'bearer',
u'scopes': u'webhook repository team account',
u'refresh_token': u'...',
u'access_token': u'...',
u'expires_in': 3600,
u'expires_at': 1449218652.558185
}
"""

def _updater(data):
token.token = data["access_token"]
token.token_secret = data.get("refresh_token", "")
token.expires_at = timezone.make_aware(
datetime.fromtimestamp(data["expires_at"]),
)
token.save()
log.info("Updated token.", token_id=token.pk)

return _updater


def get_oauth2_client(account):
"""Get an OAuth2 client for the given social account."""
token = account.socialtoken_set.first()
if token is None:
return None

token_config = {
"access_token": token.token,
"token_type": "bearer",
}
if token.expires_at is not None:
token_expires = (token.expires_at - timezone.now()).total_seconds()
token_config.update(
{
"refresh_token": token.token_secret,
"expires_in": token_expires,
Review comment (Member): I'm a little surprised we're setting this internally -- is it so we can update it if needed?

Reply (Member Author): Yeah, allauth doesn't provide an OAuth2 client, so we use requests_oauthlib, and we need to glue the token from allauth into the other lib. Most of this code was just moved.

}
)

provider = account.get_provider()
social_app = provider.app
oauth2_adapter = provider.get_oauth2_adapter(request=provider.request)

session = OAuth2Session(
client_id=social_app.client_id,
token=token_config,
auto_refresh_kwargs={
"client_id": social_app.client_id,
"client_secret": social_app.secret,
},
auto_refresh_url=oauth2_adapter.access_token_url,
token_updater=_get_token_updater(token),
)
return session
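
A minimal usage sketch (not part of this diff) tying the two pieces above together: get_oauth2_client() returns a requests_oauthlib session wired to the allauth token, and when the access token expires, requests_oauthlib refreshes it against access_token_url and passes the refresh response (the dict shown in the docstring) to the token_updater closure, which persists it back to the SocialToken. The account lookup below is an assumption for illustration.

from allauth.socialaccount.models import SocialAccount
from readthedocs.oauth.clients import get_oauth2_client

# Hypothetical: any connected GitHub account for a given user.
account = SocialAccount.objects.filter(user=user, provider="github").first()
session = get_oauth2_client(account)
if session is not None:
    # Plain requests API; a token refresh, if needed, happens transparently
    # and is saved via the token_updater closure.
    response = session.get("https://api.github.com/user")
    response.raise_for_status()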
17 changes: 16 additions & 1 deletion readthedocs/oauth/models.py
@@ -1,5 +1,5 @@
"""OAuth service models."""

import structlog
from allauth.socialaccount.models import SocialAccount
from django.contrib.auth.models import User
from django.core.validators import URLValidator
@@ -14,6 +14,8 @@
from .constants import VCS_PROVIDER_CHOICES
from .querysets import RemoteOrganizationQuerySet, RemoteRepositoryQuerySet

log = structlog.get_logger(__name__)


class RemoteOrganization(TimeStampedModel):

@@ -224,6 +226,19 @@ def get_remote_repository_relation(self, user, social_account):
)
return remote_repository_relation

def get_service_class(self):
from readthedocs.oauth.services import registry

for service_cls in registry:
if service_cls.vcs_provider_slug == self.vcs_provider:
return service_cls

# NOTE: this should never happen, but we log it just in case
log.exception(
"Service not found for the VCS provider", vcs_provider=self.vcs_provider
)
return None
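
A short hedged sketch (not part of this diff) of how the new helper above can be used: it resolves the concrete service class for a RemoteRepository by matching vcs_provider against the registry. service_cls.for_project() is the lookup used in the updated send_build_status task; the sync() call is an assumption about the service API.

# Hypothetical caller; ``project`` has a linked RemoteRepository.
service_cls = project.remote_repository.get_service_class()
if service_cls is not None:
    for service in service_cls.for_project(project):
        # e.g. refresh remote data for this project (method name assumed).
        service.sync()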


class RemoteRepositoryRelation(TimeStampedModel):
remote_repository = models.ForeignKey(