Skip to content

Commit

Permalink
Refactor to allow spackbot to rebuild everything (spack#67)
Browse files Browse the repository at this point in the history
Support spackbot rebuilding all specs from source with `spackbot rebuild everything`.
  • Loading branch information
scottwittenburg authored Oct 14, 2022
1 parent 9cf6873 commit 7accb04
Show file tree
Hide file tree
Showing 10 changed files with 220 additions and 75 deletions.
3 changes: 3 additions & 0 deletions .env-dummy
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,9 @@ TASK_QUEUE_NAME=devtasks
# Optionally configure time before jobs are killed and marked failed (in seconds, default 180s)
WORKER_JOB_TIMEOUT=21600

# For testing, don't send gitlab api requests to the production gitlab mirror
GITLAB_SPACK_PROJECT_URL=https://gitlab.spack.io/api/v4/projects/23

# Debug level (one of: "DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL")
SPACKBOT_LOG_LEVEL=WARNING

Expand Down
1 change: 1 addition & 0 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
aiohttp
boto3
gidgethub
python_dotenv
rq
Expand Down
5 changes: 3 additions & 2 deletions spackbot/comments.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,7 @@ def get_style_message(output):
"""


def get_style_error_message(e_type, e_value, tb):
def format_error_message(msg, e_type, e_value, tb):
"""
Given job failure details, format an error message to post. The
parameters e_type, e_value, and tb (for traceback) should be the same as
Expand All @@ -75,7 +75,7 @@ def get_style_error_message(e_type, e_value, tb):
buffer.close()

return f"""
I encountered an error attempting to format style.
{msg}
<details>
<summary><b>Details</b></summary>
Expand All @@ -94,6 +94,7 @@ def get_style_error_message(e_type, e_value, tb):
- `{helpers.botname} hello`: say hello and get a friendly response back!
- `{helpers.botname} help` or `{helpers.botname} commands`: see this message
- `{helpers.botname} run pipeline` or `{helpers.botname} re-run pipeline`: to request a new run of the GitLab CI pipeline
- `{helpers.botname} rebuild everything`: to run a pipeline rebuilding all specs from source.
- `{helpers.botname} fix style` if you have write and would like me to run `spack style --fix` for you.
- `{helpers.botname} maintainers` or `{helpers.botname} request review`: to look for and assign reviewers for the pull request.
Expand Down
2 changes: 1 addition & 1 deletion spackbot/handlers/__init__.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
from .pipelines import run_pipeline # noqa
from .pipelines import run_pipeline, run_pipeline_rebuild_all # noqa
from .labels import add_labels # noqa
from .reviewers import add_reviewers, add_issue_maintainers # noqa
from .reviewers import add_reviewers # noqa
Expand Down
99 changes: 42 additions & 57 deletions spackbot/handlers/pipelines.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,69 +3,54 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os
import urllib.parse
import spackbot.helpers as helpers

import aiohttp
from spackbot.workers import (
run_pipeline_task,
report_rebuild_failure,
work_queue,
WORKER_JOB_TIMEOUT,
)

logger = helpers.get_logger(__name__)

# We can only make the request with a GITLAB TOKEN
GITLAB_TOKEN = os.environ.get("GITLAB_TOKEN")


async def run_pipeline(event, gh):
async def run_pipeline_rebuild_all(event, gh, **kwargs):
    """
    Enqueue a background job that triggers a CI pipeline forcing every
    spec to be rebuilt from source.

    Parameters:
        event: the GitHub webhook event for the triggering PR comment
        gh: the gidgethub GitHub API client (unused here; kept for the
            common handler signature)
        kwargs: must contain ``token``, forwarded to the worker task
    """
    # The worker task and the failure callback use this metadata to post
    # status back on the pull request's comment thread.
    metadata = {
        "post_comments_url": event.data["issue"]["comments_url"],
        "rebuild_everything": True,
        "token": kwargs["token"],
    }

    queue = work_queue.get_queue()
    job = queue.enqueue(
        run_pipeline_task,
        event,
        job_timeout=WORKER_JOB_TIMEOUT,
        meta=metadata,
        on_failure=report_rebuild_failure,
    )
    logger.info(f"Rebuild everything job enqueued: {job.id}")


async def run_pipeline(event, gh, **kwargs):
"""
Make a request to re-run a pipeline.
"""
# Early exit if not authenticated
if not GITLAB_TOKEN:
return "I'm not able to re-run the pipeline now because I don't have authentication."

# Get the pull request number
pr_url = event.data["issue"]["pull_request"]["url"]
number = pr_url.split("/")[-1]

# We need the pull request branch
pr = await gh.getitem(pr_url)

# Get the sender of the PR - do they have write?
sender = event.data["sender"]["login"]
repository = event.data["repository"]
collaborators_url = repository["collaborators_url"]
author = pr["user"]["login"]

# If it's the PR author, we allow it
if author == sender:
logger.info(f"Author {author} is requesting a pipeline run.")

# If they don't have write, we don't allow the command
elif not await helpers.found(
gh.getitem(collaborators_url, {"collaborator": sender})
):
logger.info(f"Not found: {sender}")
return f"Sorry {sender}, I cannot do that for you. Only users with write can make this request!"

# We need the branch name plus number to assemble the GitLab CI
branch = pr["head"]["ref"]
branch = f"pr{number}_{branch}"
branch = urllib.parse.quote_plus(branch)

url = f"{helpers.gitlab_spack_project_url}/pipeline?ref={branch}"
headers = {"PRIVATE-TOKEN": GITLAB_TOKEN}

logger.info(f"{sender} triggering pipeline, url = {url}")

# Don't provide GitHub credentials to GitLab!
async with aiohttp.ClientSession() as session:
async with session.post(url, headers=headers) as response:
result = await response.json()

if "detailed_status" in result and "details_path" in result["detailed_status"]:
url = f"{helpers.spack_gitlab_url}/{result['detailed_status']['details_path']}"
return f"I've started that [pipeline]({url}) for you!"

logger.info(f"Problem triggering pipeline on {branch}")
logger.info(result)
return "I had a problem triggering the pipeline."
job_metadata = {
"post_comments_url": event.data["issue"]["comments_url"],
"token": kwargs["token"],
}

task_q = work_queue.get_queue()
scheduled_job = task_q.enqueue(
run_pipeline_task,
event,
job_timeout=WORKER_JOB_TIMEOUT,
meta=job_metadata,
on_failure=report_rebuild_failure,
)
logger.info(f"Run pipeline job enqueued: {scheduled_job.id}")
15 changes: 8 additions & 7 deletions spackbot/handlers/style.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,15 +3,16 @@
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)

import os

import spackbot.comments as comments
import spackbot.helpers as helpers

from spackbot.workers import fix_style_task, report_style_failure, work_queue
from spackbot.workers import (
fix_style_task,
report_style_failure,
work_queue,
WORKER_JOB_TIMEOUT,
)

# If we don't provide a timeout, the default in RQ is 180 seconds
WORKER_JOB_TIMEOUT = int(os.environ.get("WORKER_JOB_TIMEOUT", "21600"))

logger = helpers.get_logger(__name__)

Expand All @@ -23,8 +24,8 @@ async def style_comment(event, gh):
# If we get here, we have a style failure
# Find the pull request that is matched to the repository. It looks like
# checks are shared across different repos (e.g., a fork and upstream)
repository = event.data["repository"]["full_name"]
for pr in event.data["check_run"]["pull_requests"]:
repository = event.data["repository"]["full_name"] # "spack-test/spack"
for pr in event.data["check_run"]["pull_requests"]: # []
if repository in pr["url"]:

number = pr["url"].split("/")[-1]
Expand Down
17 changes: 16 additions & 1 deletion spackbot/helpers.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,13 +27,18 @@
spack_upstream = "[email protected]:spack/spack"

# Spack has project ID 2
gitlab_spack_project_url = "https://gitlab.spack.io/api/v4/projects/2"
gitlab_spack_project_url = os.environ.get(
"GITLAB_SPACK_PROJECT_URL", "https://gitlab.spack.io/api/v4/projects/2"
)

package_path = r"^var/spack/repos/builtin/packages/(\w[\w-]*)/package.py$"

# Bot name can be modified in the environment
botname = os.environ.get("SPACKBOT_NAME", "@spackbot")

# Bucket where pr binary mirrors live
pr_mirror_bucket = "spack-binaries-prs"

# Aliases for spackbot so spackbot doesn't respond to himself
aliases = ["spack-bot", "spackbot", "spack-bot-develop", botname]
alias_regex = "(%s)" % "|".join(aliases)
Expand Down Expand Up @@ -167,6 +172,16 @@ async def found(coroutine):
raise


async def post(url, headers):
    """
    Convenience method to create a new session and make a one-off
    post request, given a url and headers to include in the request.
    """
    # Session and request are managed in a single `async with` so both
    # are closed as soon as the JSON body has been read.
    async with aiohttp.ClientSession() as session, session.post(
        url, headers=headers
    ) as response:
        return await response.json()


def synchronous_http_request(url, data=None, token=None):
"""
Makes synchronous http request to the provided url, using the token for
Expand Down
7 changes: 6 additions & 1 deletion spackbot/routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -112,7 +112,12 @@ async def add_comments(event, gh, *args, session, **kwargs):
# @spackbot run pipeline | @spackbot re-run pipeline
elif re.search(f"{helpers.botname} (re-?)?run pipeline", comment, re.IGNORECASE):
logger.info("Responding to request to re-run pipeline...")
message = await handlers.run_pipeline(event, gh)
await handlers.run_pipeline(event, gh, **kwargs)

# @spackbot rebuild everything
elif re.search(f"{helpers.botname} rebuild everything", comment, re.IGNORECASE):
logger.info("Responding to request to rebuild everthing...")
await handlers.run_pipeline_rebuild_all(event, gh, **kwargs)

if message:
await gh.post(event.data["issue"]["comments_url"], {}, data={"body": message})
Expand Down
Loading

0 comments on commit 7accb04

Please sign in to comment.