Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions .tekton/devfile-sample-python-basic-fd5a-pull-request.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,8 @@ spec:
value: '{{revision}}'
- name: hermetic
value: "true"
- name: prefetch-input
value: '{"type": "pip", "path": "."}'
pipelineSpec:
finally:
- name: show-sbom
Expand Down
2 changes: 2 additions & 0 deletions .tekton/devfile-sample-python-basic-fd5a-push.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,8 @@ spec:
value: '{{revision}}'
- name: hermetic
value: "true"
- name: prefetch-input
value: '{"type": "pip", "path": "."}'
pipelineSpec:
finally:
- name: show-sbom
Expand Down
21 changes: 21 additions & 0 deletions Dockerfile-build
Original file line number Diff line number Diff line change
@@ -0,0 +1,21 @@
FROM registry.access.redhat.com/ubi9/python-39:1-117.1684741281

# Set the working directory in the container
WORKDIR /projects

# Copy the sources in as root, then hand ownership to the image's
# default non-root user (uid 1001) before switching back to it.
USER 0
ADD . .
RUN chown -R 1001:0 ./
USER 1001

# pip-tools provides pip-compile, used below to pin dependencies with hashes.
RUN pip install pip-tools

# Pin the runtime dependencies from pyproject.toml into requirements.txt.
RUN pip-compile pyproject.toml --generate-hashes

RUN cat requirements.txt

# Derive the build-time dependencies needed to build the pinned packages.
RUN ./pip_find_builddeps.py requirements.txt -o requirements-build.in

# Pin the build dependencies too; --allow-unsafe keeps packages such as
# setuptools in the output (see the "unsafe" section of requirements-build.txt).
RUN pip-compile requirements-build.in --allow-unsafe --generate-hashes

RUN cat requirements-build.txt
6 changes: 6 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,12 @@ Before you begin creating an application with this `devfile` code sample, it's h
3. The `devfile.yaml` [`kubernetes-deploy` component](https://github.com/devfile-samples/devfile-sample-python-basic/blob/main/devfile.yaml#L31-L43) points to a `deploy.yaml` file that contains instructions for deploying the built container image.
4. The `devfile.yaml` [`deploy` command](https://github.com/devfile-samples/devfile-sample-python-basic/blob/main/devfile.yaml#L51-L59) completes the [outerloop](https://devfile.io/docs/2.2.0/innerloop-vs-outerloop) deployment phase by pointing to the `image-build` and `kubernetes-deploy` components to create your application.

# Customizations

1. Enabled hermetic builds in `./.tekton/*.yaml`
2. Generated the prefetch `requirements.txt` and `requirements-build.txt` files using `Dockerfile-build`
3. Enabled the `prefetch-input` parameter in `./.tekton/*.yaml`

### Additional resources
* For more information about Python, see [Python](https://www.python.org/).
* For more information about devfiles, see [Devfile.io](https://devfile.io/).
Expand Down
236 changes: 236 additions & 0 deletions pip_find_builddeps.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,236 @@
#!/usr/bin/env python3
import argparse
import datetime
import logging
import re
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path

# Basename of the invoked script; used in log output and generated file headers.
SCRIPT_NAME = Path(sys.argv[0]).name

DESCRIPTION = """\
Find build dependencies for all your runtime dependencies. The input to this
script must be a requirements.txt file containing all the *recursive* runtime
dependencies. You can use pip-compile to generate such a file. The output is an
intermediate file that must first go through pip-compile before being used in
a Cachito request.
"""

# Simple "LEVEL: message" format; this module logs at INFO and above.
logging.basicConfig(format="%(levelname)s: %(message)s")

log = logging.getLogger(__name__)
log.setLevel(logging.INFO)


class FindBuilddepsError(Exception):
    """Failed to find build dependencies (e.g. the pip download step failed)."""


def _pip_download(requirements_files, output_file, tmpdir, no_cache, allow_binary):
    """Download all requirements with pip, teeing pip's output to a file.

    Runs ``pip download`` for every file in *requirements_files*, storing the
    downloaded archives in *tmpdir* and pip's combined stdout/stderr in
    *output_file*.  Raises ``subprocess.CalledProcessError`` on pip failure.
    """
    cmd = ["pip", "download", "-d", tmpdir]
    if not allow_binary:
        # Force building from source so pip logs every build dependency
        # it collects, instead of just grabbing a pre-built wheel.
        cmd += ["--no-binary", ":all:"]
    cmd += ["--use-pep517", "--verbose"]
    if no_cache:
        cmd.append("--no-cache-dir")
    for req_file in requirements_files:
        cmd += ["-r", req_file]

    with open(output_file, "w") as pip_log:
        subprocess.run(cmd, stdout=pip_log, stderr=pip_log, check=True)


def _filter_builddeps(pip_download_output_file):
"""Find builddeps in output of pip download."""
# Requirement is a sequence of non-whitespace, non-';' characters
# Example: package, package==1.0, package[extra]==1.0
requirement_re = r"[^\s;]+"
# Leading whitespace => requirement is a build dependency
# (because all recursive runtime dependencies were present in input files)
builddep_re = re.compile(rf"^\s+Collecting ({requirement_re})")

with open(pip_download_output_file) as f:
matches = (builddep_re.match(line) for line in f)
builddeps = set(match.group(1) for match in matches if match)

return sorted(builddeps)


def find_builddeps(
    requirements_files, no_cache=False, ignore_errors=False, allow_binary=False
):
    """
    Find build dependencies for packages in requirements files.

    :param requirements_files: list of requirements file paths
    :param no_cache: do not use pip cache when downloading packages
    :param ignore_errors: generate partial output even if pip download fails
    :param allow_binary: do not build (and thus do not report builddeps for)
        packages that have a usable wheel for the current platform
    :return: tuple of (sorted list of build dependencies, bool whether output
        is partial because pip download failed)
    :raises FindBuilddepsError: if pip download fails and ignore_errors is False
    """
    tmpdir = tempfile.mkdtemp(prefix=f"{SCRIPT_NAME}-")
    pip_output_file = Path(tmpdir) / "pip-download-output.txt"
    is_partial = False

    try:
        log.info("Running pip download, this may take a while")
        _pip_download(
            requirements_files, pip_output_file, tmpdir, no_cache, allow_binary
        )
    except subprocess.CalledProcessError as e:
        msg = f"Pip download failed, see {pip_output_file} for more info"
        if ignore_errors:
            log.error(msg)
            log.warning("Ignoring error...")
            is_partial = True
        else:
            # Chain the original error so pip's exit status isn't lost.
            raise FindBuilddepsError(msg) from e

    log.info("Looking for build dependencies in the output of pip download")
    builddeps = _filter_builddeps(pip_output_file)

    # Keep tmpdir on failure so the user can inspect pip_output_file
    # (the error/log messages above point at it).
    if not is_partial:
        shutil.rmtree(tmpdir)

    return builddeps, is_partial


def generate_file_content(builddeps, is_partial):
    """
    Generate content to write to output file.

    :param builddeps: list of build dependencies to include in file
    :param is_partial: indicates that list of build dependencies may be partial
    :return: file content
    """
    # Header timestamp formatted as: Month Day Year HH:MM:SS
    timestamp = datetime.datetime.now().strftime("%b %d %Y %H:%M:%S")

    lines = [f"# Generated by {SCRIPT_NAME} on {timestamp}"]
    lines.extend(builddeps if builddeps else ["# <no build dependencies found>"])
    if is_partial:
        lines.append("# <pip download failed, output may be incomplete!>")

    return "\n".join(lines)


def _parse_requirements_file(builddeps_file):
"""Find deps requirements-build.in file."""
try:
with open(builddeps_file) as f:
# ignore line comments or comments added after dependency is declared
requirement_re = re.compile(r"^([^\s#;]+)")
matches = (requirement_re.match(line) for line in f)
return set(match.group(1) for match in matches if match)
except FileNotFoundError:
# it's ok if the file doens't exist.
return set()


def _sanity_check_args(ap, args):
if args.only_write_on_update and not args.output_file:
ap.error("--only-write-on-update requires an output-file (-o/--output-file).")


def main():
    """Parse CLI arguments, find build dependencies, and write/print the result."""
    ap = argparse.ArgumentParser(description=DESCRIPTION)
    ap.add_argument("requirements_files", metavar="REQUIREMENTS_FILE", nargs="+")
    ap.add_argument(
        "-o", "--output-file", metavar="FILE", help="write output to this file"
    )
    ap.add_argument(
        "-a",
        "--append",
        action="store_true",
        help="append to output file instead of overwriting",
    )
    ap.add_argument(
        "--no-cache",
        action="store_true",
        help="do not use pip cache when downloading packages",
    )
    ap.add_argument(
        "--ignore-errors",
        action="store_true",
        help="generate partial output even if pip download fails",
    )
    ap.add_argument(
        "--only-write-on-update",
        action="store_true",
        help=(
            "only write output file if dependencies will be modified - or new "
            "dependencies will be added if used in conjunction with -a/--append."
        ),
    )
    ap.add_argument(
        "--allow-binary",
        action="store_true",
        help=(
            "do not find build dependencies for packages with wheels "
            "available for the current platform"
        ),
    )

    args = ap.parse_args()
    # Fails fast (SystemExit) on invalid option combinations.
    _sanity_check_args(ap, args)

    log.info(
        "Please make sure the input files meet the requirements of this script (see --help)"
    )

    builddeps, is_partial = find_builddeps(
        args.requirements_files,
        no_cache=args.no_cache,
        ignore_errors=args.ignore_errors,
        allow_binary=args.allow_binary,
    )

    # With --only-write-on-update, compare against the existing output file
    # and return early (writing nothing) when nothing would change.
    if args.only_write_on_update:
        original_builddeps = _parse_requirements_file(args.output_file)
        if args.append:
            # append only new dependencies
            builddeps = sorted(set(builddeps) - original_builddeps)
        # Skip when there is nothing to write, or the computed set exactly
        # matches what the output file already contains.
        if not builddeps or set(builddeps) == original_builddeps:
            log.info("No new build dependencies found.")
            return

    file_content = generate_file_content(builddeps, is_partial)

    log.info("Make sure to pip-compile the output before submitting a Cachito request")
    if is_partial:
        log.warning("Pip download failed, output may be incomplete!")

    # Write to the output file if one was given (append vs. overwrite per
    # -a/--append), otherwise print to stdout.
    if args.output_file:
        mode = "a" if args.append else "w"
        with open(args.output_file, mode) as f:
            print(file_content, file=f)
    else:
        print(file_content)


if __name__ == "__main__":
    try:
        main()
    except FindBuilddepsError as e:
        log.error("%s", e)
        # Use sys.exit rather than the site-provided exit() builtin, which
        # is not guaranteed to exist (e.g. when run with `python -S`).
        sys.exit(1)
6 changes: 6 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,6 @@
[project]
name = "devfile-sample-python-basic"
version = "0.1.0"
dependencies = [
"Flask==2.3.3",
]
8 changes: 8 additions & 0 deletions requirements-build.in
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
# Generated by pip_find_builddeps.py on Apr 09 2024 12:17:44
flit_core<4
packaging>=20
setuptools>=40.8.0
setuptools>=56
setuptools_scm[toml]>=3.4.1
typing-extensions
wheel
44 changes: 44 additions & 0 deletions requirements-build.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
#
# This file is autogenerated by pip-compile with Python 3.9
# by the following command:
#
# pip-compile --allow-unsafe --generate-hashes requirements-build.in
#
flit-core==3.9.0 \
--hash=sha256:72ad266176c4a3fcfab5f2930d76896059851240570ce9a98733b658cb786eba \
--hash=sha256:7aada352fb0c7f5538c4fafeddf314d3a6a92ee8e2b1de70482329e42de70301
# via -r requirements-build.in
packaging==24.0 \
--hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \
--hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9
# via
# -r requirements-build.in
# setuptools-scm
setuptools-scm==8.0.4 \
--hash=sha256:b47844cd2a84b83b3187a5782c71128c28b4c94cad8bfb871da2784a5cb54c4f \
--hash=sha256:b5f43ff6800669595193fd09891564ee9d1d7dcb196cab4b2506d53a2e1c95c7
# via -r requirements-build.in
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
# via
# -r requirements-build.in
# setuptools-scm
typing-extensions==4.11.0 \
--hash=sha256:83f085bd5ca59c80295fc2a82ab5dac679cbe02b9f33f7d83af68e241bea51b0 \
--hash=sha256:c1f94d72897edaf4ce775bb7558d5b79d8126906a14ea5ed1635921406c0387a
# via
# -r requirements-build.in
# setuptools-scm
wheel==0.43.0 \
--hash=sha256:465ef92c69fa5c5da2d1cf8ac40559a8c940886afcef87dcf14b9470862f1d85 \
--hash=sha256:55c570405f142630c6b9f72fe09d9b67cf1477fcf543ae5b8dcb1f5b7377da81
# via -r requirements-build.in

# The following packages are considered to be unsafe in a requirements file:
setuptools==69.2.0 \
--hash=sha256:0ff4183f8f42cd8fa3acea16c45205521a4ef28f73c6391d8a25e92893134f2e \
--hash=sha256:c21c49fb1042386df081cb5d86759792ab89efca84cf114889191cd09aacc80c
# via
# -r requirements-build.in
# setuptools-scm
Loading