Remove by build id #3722 (fedora-copr/copr, pull request by dkliban)

Merged · 1 commit · May 16, 2025

15 changes: 15 additions & 0 deletions backend/copr_backend/pulp.py
@@ -188,6 +188,21 @@ def delete_content(self, repository, artifacts):
         data = {"remove_content_units": artifacts}
         return requests.post(url, json=data, **self.request_params)
 
+    def get_content(self, build_ids):
+        """
+        Get a list of PRNs for RPMs with provided build ids
+        https://pulpproject.org/pulp_rpm/restapi/#tag/Content:-Packages/operation/content_rpm_packages_list
+        """
+        query = ""
+        for build_id in build_ids:
+            if query:
+                query += " OR "
+            query += f"pulp_label_select=\"build_id={build_id}\""
+        url = self.url("api/v3/content/rpm/packages/?")
+        # Setting the limit to 1000, but in the future we should use pagination
+        url += urlencode({"q": query, "fields": "prn", "offset": 0, "limit": 1000})
+        return requests.get(url, **self.request_params)
+
     def delete_repository(self, repository):
         """
         Delete an RPM repository
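
Note: the new get_content() builds one pulp_label_select clause per build id and ORs them together before urlencoding. A minimal sketch of the resulting query, standalone and outside the client class (the build ids below are made-up values, not from this PR):

    from urllib.parse import urlencode

    # Hypothetical build ids, used only to illustrate the query string.
    build_ids = [111111, 222222]

    query = ""
    for build_id in build_ids:
        if query:
            query += " OR "
        query += f"pulp_label_select=\"build_id={build_id}\""

    # query is now:
    #   pulp_label_select="build_id=111111" OR pulp_label_select="build_id=222222"

    # get_content() appends the urlencoded parameters to api/v3/content/rpm/packages/?
    # and caps the page size at 1000 until pagination is implemented.
    params = urlencode({"q": query, "fields": "prn", "offset": 0, "limit": 1000})
    print("api/v3/content/rpm/packages/?" + params)
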
46 changes: 11 additions & 35 deletions backend/copr_backend/storage.py
@@ -4,7 +4,6 @@
 
 from concurrent.futures import ThreadPoolExecutor, as_completed
 import os
-import json
 import shutil
 from urllib.parse import urlparse
 import requests
@@ -237,7 +236,6 @@ def upload_rpm(self, repository, path, labels):
         return response
 
     def upload_build_results(self, chroot, results_dir, target_dir_name, max_workers=1, build_id=None):
-        resources = []
         futures = {}
         with ThreadPoolExecutor(max_workers=max_workers) as executor:
             for root, _, files in os.walk(results_dir):
@@ -265,7 +263,6 @@ def upload_build_results(self, chroot, results_dir, target_dir_name, max_workers=1, build_id=None):
                 response = future.result()
                 created = response.json().get("created_resources")
                 if created:
-                    resources.extend(created)
                     self.log.info("Uploaded to Pulp: %s", filepath)
                 else:
                     failed_tasks.append(response.json().get("pulp_href"))
@@ -278,12 +275,6 @@ def upload_build_results(self, chroot, results_dir, target_dir_name, max_workers=1, build_id=None):
         if exceptions:
             raise CoprBackendError(f"Exceptions encountered: {exceptions}")
 
-        data = {"resources": resources}
-        path = os.path.join(results_dir, "pulp.json")
-        with open(path, "w+", encoding="utf8") as fp:
-            json.dump(data, fp)
-        self.log.info("Pulp resources: %s", resources)
-
     def publish_repository(self, chroot, **kwargs):
         repository = self._get_repository(chroot)
         response = self.client.create_publication(repository)
@@ -336,7 +327,7 @@ def delete_project(self, dirname):
     def delete_builds(self, dirname, chroot_builddirs, build_ids):
         # pylint: disable=too-many-locals
         result = True
-        for chroot, subdirs in chroot_builddirs.items():
+        for chroot in chroot_builddirs.keys():
             # We don't upload results of source builds to Pulp
             if chroot == "srpm-builds":
                 continue
@@ -347,32 +338,17 @@ def delete_builds(self, dirname, chroot_builddirs, build_ids):
                 self.log.error("%s chroot path doesn't exist", chroot_path)
                 result = False
                 continue
 
             repository = self._get_repository(chroot)
-            for subdir in subdirs:
-                # It is currently not possible to set labels for Pulp content.
-                # https://github.com/pulp/pulpcore/issues/3338
-                # Until it is implemented, we need read all Pulp resources that
-                # a copr build created from our `pulp.json` in the resultdir.
-                path = os.path.join(chroot_path, subdir, "pulp.json")
-                with open(path, "r", encoding="utf8") as fp:
-                    pulp = json.load(fp)
-
-                for resource in pulp["resources"]:
-                    is_package = resource.split("/api/v3/")[1].startswith(
-                        "content/rpm/packages")
-                    if not is_package:
-                        self.log.info("Not deleting %s", resource)
-                        continue
-
-                    # TODO We can make performance improvements here by deleting
-                    # all content at once
-                    response = self.client.delete_content(repository, [resource])
-                    if response.ok:
-                        self.log.info("Successfully deleted Pulp content %s", resource)
-                    else:
-                        result = False
-                        self.log.info("Failed to delete Pulp content %s", resource)
+            # Find the RPMs by list of build ids
+            content_response = self.client.get_content(build_ids)
+            list_of_prns = [package["prn"] for package in content_response.json()["results"]]
+
+            response = self.client.delete_content(repository, list_of_prns)
+            if response.ok:
+                self.log.info("Successfully deleted Pulp content %s", list_of_prns)
+            else:
+                result = False
+                self.log.info("Failed to delete Pulp content %s", list_of_prns)
 
             published = self.publish_repository(chroot)
             if not published:
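
Note: taken together, delete_builds() drops the per-subdir pulp.json bookkeeping: it asks Pulp for the PRNs of the RPMs labeled with the given build ids and removes them from the chroot repository in one call. A rough standalone sketch of that flow, assuming a client object exposing the get_content() and delete_content() methods from this PR (the helper name itself is hypothetical):

    def remove_builds_from_repository(client, repository, build_ids, log):
        """Hypothetical helper mirroring the new delete_builds() flow:
        query Pulp for RPM PRNs by build_id label, then remove them
        from the repository in a single call."""
        content_response = client.get_content(build_ids)
        prns = [package["prn"] for package in content_response.json()["results"]]

        response = client.delete_content(repository, prns)
        if response.ok:
            log.info("Successfully deleted Pulp content %s", prns)
            return True
        log.info("Failed to delete Pulp content %s", prns)
        return False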