forked from spack/spackbot
Changes migrated from PR spack#45, using the new workers and a slightly more general name for the long-running process queue. TODO: add a pruning task that is occasionally inserted after a copy task but before the reindex (see the sketch after the diff below).
Commit b810f0e, 1 parent (7accb04). 4 changed files with 241 additions and 3 deletions.
@@ -0,0 +1,50 @@
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)


import spackbot.helpers as helpers
from spackbot.helpers import pr_expected_base, pr_mirror_base_url
from spackbot.workers import copy_pr_binaries, update_mirror_index, work_queue

# If we don't provide a timeout, the default in RQ is 180 seconds
WORKER_JOB_TIMEOUT = 6 * 60 * 60

logger = helpers.getLogger(__name__)


async def graduate_pr_binaries(event, gh):
    payload = event.data

    base_branch = payload["pull_request"]["base"]["ref"]
    is_merged = payload["pull_request"]["merged"]

    if is_merged and base_branch == pr_expected_base:
        pr_number = payload["number"]
        pr_branch = payload["pull_request"]["head"]["ref"]

        shared_mirror_url = f"{pr_mirror_base_url}/shared_pr_mirror"

        logger.info(
            f"PR {pr_number}/{pr_branch} merged to develop, graduating binaries"
        )

        ltask_q = work_queue.get_lqueue()
        copy_job = ltask_q.enqueue(
            copy_pr_binaries,
            pr_number,
            pr_branch,
            shared_mirror_url,
            job_timeout=WORKER_JOB_TIMEOUT,
        )
        logger.info(f"Copy job queued: {copy_job.id}")

        # If the index job queue has a job queued already, there is no need to
        # schedule another one
        update_job = ltask_q.enqueue(
            update_mirror_index,
            shared_mirror_url,
            job_timeout=WORKER_JOB_TIMEOUT,
        )
        logger.info(f"Reindex job queued: {update_job.id}")
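For the TODO in the commit message, one way to occasionally slot a pruning task between the copy and the reindex is RQ's depends_on job chaining, which keeps a job deferred until its dependency finishes. The sketch below reuses the variables from the handler above; prune_shared_mirror and PRUNE_PROBABILITY are hypothetical names, not existing spackbot code, and this is only one possible approach.

# Hedged sketch for the TODO: occasionally insert a pruning job between the
# copy job and the reindex job, chaining them with RQ's depends_on keyword.
import random

PRUNE_PROBABILITY = 0.1  # assumed knob: prune roughly 1 in 10 graduations

copy_job = ltask_q.enqueue(
    copy_pr_binaries,
    pr_number,
    pr_branch,
    shared_mirror_url,
    job_timeout=WORKER_JOB_TIMEOUT,
)

reindex_depends_on = copy_job
if random.random() < PRUNE_PROBABILITY:
    prune_job = ltask_q.enqueue(
        prune_shared_mirror,       # hypothetical pruning worker
        shared_mirror_url,
        depends_on=copy_job,       # run only after the copy finishes
        job_timeout=WORKER_JOB_TIMEOUT,
    )
    reindex_depends_on = prune_job

update_job = ltask_q.enqueue(
    update_mirror_index,
    shared_mirror_url,
    depends_on=reindex_depends_on,  # reindex after copy (and prune, if any)
    job_timeout=WORKER_JOB_TIMEOUT,
)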