Skip to content

Commit

Permalink
feat(python): produce code coverage from a unit test (#404)
Browse files Browse the repository at this point in the history
* feat(python): produce code coverage from a unit test

* accept golden

* Update oci_python_image/hello_world/integration_test.py

Co-authored-by: aspect-workflows[bot] <143031405+aspect-workflows[bot]@users.noreply.github.com>

* Update MODULE.bazel

* chore: exclude speller which is failing to build with coverage instrumentation

* Update MODULE.bazel

---------

Co-authored-by: aspect-workflows[bot] <143031405+aspect-workflows[bot]@users.noreply.github.com>
  • Loading branch information
alexeagle and aspect-workflows[bot] authored Jan 29, 2025
1 parent d659bbd commit e5d4fa6
Show file tree
Hide file tree
Showing 8 changed files with 223 additions and 122 deletions.
6 changes: 5 additions & 1 deletion .aspect/workflows/config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,9 +5,13 @@ tasks:
- lint:
- format:
- test:
targets:
- //...
- -//speller/...
bazel:
flags:
- --test_tag_filters=-skip-on-aspect-workflows

coverage:
codecov_upload: true
notifications:
github: {}
4 changes: 4 additions & 0 deletions .circleci/config.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
# GENERATED FILE - DO NOT EDIT!
# Update with: bazel run //.circleci:write_merged_config
version: 2.1
orbs:
codecov: codecov/[email protected]
workflows:
aspect-workflows:
jobs:
Expand Down Expand Up @@ -156,6 +158,8 @@ jobs:
no_output_timeout: 180m
- store_test_results:
path: /workflows/testlogs
- codecov/upload:
file: bazel-out/_coverage/_coverage_report.dat
- store_artifacts:
path: /workflows/testlogs
- store_artifacts:
Expand Down
2 changes: 1 addition & 1 deletion MODULE.bazel
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@ bazel_dep(name = "apple_support", version = "1.15.1")
bazel_dep(name = "aspect_bazel_lib", version = "2.10.0")
bazel_dep(name = "aspect_rules_js", version = "2.0.0")
bazel_dep(name = "aspect_rules_lint", version = "1.0.0-rc10")
bazel_dep(name = "aspect_rules_py", version = "1.0.0")
bazel_dep(name = "aspect_rules_py", version = "1.3.1")
bazel_dep(name = "aspect_rules_swc", version = "2.0.0")
bazel_dep(name = "aspect_rules_ts", version = "3.0.0")
bazel_dep(name = "bazel_skylib", version = "1.7.1")
Expand Down
28 changes: 25 additions & 3 deletions oci_python_image/hello_world/BUILD.bazel
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
load("@aspect_bazel_lib//lib:transitions.bzl", "platform_transition_filegroup")
load("@aspect_rules_py//py:defs.bzl", "py_binary", "py_image_layer", "py_library")
load("@aspect_rules_py//py:defs.bzl", "py_binary", "py_image_layer", "py_library", "py_pytest_main")
load("@container_structure_test//:defs.bzl", "container_structure_test")
load("@pip//:requirements.bzl", "requirement")
load("@rules_oci//oci:defs.bzl", "oci_image", "oci_load")
Expand Down Expand Up @@ -97,13 +97,13 @@ container_structure_test(

py_test(
name = "test_container",
srcs = ["app_test.py"],
srcs = ["integration_test.py"],
# NB: this depends on the image rather than the tarball, to avoid the test needing to wait for
# an action that reads all bytes of the layers and writes all those bytes again.
# However, in order for the image to be loaded into the Docker daemon from files on disk,
# the test Setup has to do some sophisticated work to load each layer.
data = [":platform_image"],
main = "app_test.py",
main = "integration_test.py",
tags = [
"requires-docker",
# TODO(sahin/derek?): this test passes on GitHub Actions runners but fails on AW
Expand All @@ -113,3 +113,25 @@ py_test(
requirement("testcontainers"),
],
)

# Generates a pytest entrypoint (":__test__" -> __test__.py) used as the
# `main` of the unit test below. The `coverage` dep suggests this entrypoint
# is what wires unit-test runs into coverage collection — matches the commit
# intent "produce code coverage from a unit test".
py_pytest_main(
    name = "__test__",
    deps = [
        requirement("coverage"),
        requirement("pytest"),
    ],
)

# Small unit test driven by the generated pytest entrypoint; exercises the
# library directly (no container image required), unlike test_container above.
py_test(
    name = "unit_test",
    size = "small",
    srcs = [
        "app_test.py",
        ":__test__",
    ],
    # Run via the pytest entrypoint generated by py_pytest_main.
    main = ":__test__.py",
    deps = [
        ":__test__",
        ":hello_world_lib",
    ],
)
121 changes: 4 additions & 117 deletions oci_python_image/hello_world/app_test.py
Original file line number Diff line number Diff line change
@@ -1,119 +1,6 @@
# Loads individual layers from disk by reading their blob locations from
# the OCI format JSON descriptor file.
# TODO: upstream some of this to the testcontainers library to simplify similar code for users.
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_for_logs
import docker
import tarfile
import json
import os
import tempfile
import io
from hello_world.app import Cow


def add_json_file(tar, name, contents):
    """Serialize *contents* as JSON and append it to the open *tar* under *name*."""
    payload = json.dumps(contents).encode("utf-8")
    entry = tarfile.TarInfo(name=name)
    entry.size = len(payload)
    tar.addfile(entry, fileobj=io.BytesIO(payload))


def add_file(tar, name, fileobj):
    """Append the bytes of an open, real file *fileobj* to *tar* under *name*.

    Takes ownership of *fileobj* and closes it after writing. The size is
    taken from fstat, so *fileobj* must be backed by a file descriptor.
    """
    size = os.fstat(fileobj.fileno()).st_size
    entry = tarfile.TarInfo(name=name)
    entry.size = size
    tar.addfile(entry, fileobj=fileobj)
    fileobj.close()


def get_blob_path(image, digest):
    """Map an OCI digest like ``sha256:abcd`` to its path in the image layout."""
    algorithm_and_hash = digest.replace(":", "/")
    return f"{image}/blobs/{algorithm_and_hash}"


def open_blob(image, digest):
    """Open the blob for *digest* inside the OCI layout at *image*, binary mode."""
    return open(get_blob_path(image, digest), "rb")


def OCIImageContainer(image):
    """Load an OCI image layout directory into the local Docker daemon and
    return a testcontainers ``DockerContainer`` for the loaded image.

    ``docker load`` expects the legacy "docker save" tar format, so this
    first probes which layers the daemon already has, then assembles one
    tarball containing the image config plus only the missing layer blobs.
    """
    with open("%s/index.json" % image) as indexp:
        indexjson = json.load(indexp)

    # The index references the (first) manifest, which references the config
    # blob and the ordered list of layer blobs.
    with open_blob(image, indexjson["manifests"][0]["digest"]) as manifestp:
        manifest = json.load(manifestp)

    with open_blob(image, manifest["config"]["digest"]) as configp:
        config = json.load(configp)

    client = docker.from_env()

    # Probe and layer loading phase
    layers = manifest["layers"]
    needed = []

    # Probing phase: attempt to load each layer by itself with a synthetic
    # single-layer manifest; a load failure is taken to mean the daemon does
    # not have that layer cached, so it must be shipped below.
    for i, layer in enumerate(layers):
        tmp = tempfile.NamedTemporaryFile(suffix=".tar")
        tar = tarfile.open(fileobj=tmp, mode="w")
        add_json_file(
            tar,
            name="manifest.json",
            contents=[
                {
                    "Config": "config.json",
                    "RepoTags": [],
                    "Layers": [layer["digest"]],
                }
            ],
        )
        add_json_file(
            tar,
            name="config.json",
            contents={
                "rootfs": {
                    "type": "layers",
                    # diff_ids are ordered the same way as manifest layers.
                    "diff_ids": [config["rootfs"]["diff_ids"][i]],
                }
            },
        )

        tar.close()

        try:
            client.images.load(
                open(tmp.name, "rb"),
            )
        except docker.errors.ImageLoadError:
            # BUGFIX: previously bound `as e` without ever using it.
            needed.append(layer["digest"])

    # Loading phase: one tarball with the real config and only the layers
    # the daemon reported missing.
    tmp = tempfile.NamedTemporaryFile(suffix=".tar")
    tar = tarfile.open(fileobj=tmp, mode="w")
    add_json_file(
        tar,
        name="manifest.json",
        contents=[
            {
                "Config": "config.json",
                "RepoTags": [],
                "Layers": list(map(lambda x: x["digest"], manifest["layers"])),
            }
        ],
    )
    add_file(
        tar, name="config.json", fileobj=open_blob(image, manifest["config"]["digest"])
    )
    for layer in needed:
        add_file(tar, name=layer, fileobj=open_blob(image, layer))

    tar.close()
    r = client.images.load(open(tmp.name, "rb"))
    return DockerContainer(r[0].id)


def test_wait_for_hello():
    """Integration test: the image boots under Docker and emits its greeting."""
    print("Starting container")
    with OCIImageContainer("oci_python_image/hello_world/image") as container:
        wait_for_logs(container, "hello py_image_layer!")


# BUGFIX: the call was previously unconditional at module level, so importing
# this file (e.g. pytest collection) ran the Docker test a second time.
# Guarding keeps direct execution (py_test `main`) working without the
# import-time side effect.
if __name__ == "__main__":
    test_wait_for_hello()
def test_moo():
    """Unit smoke test: constructing a Cow and greeting must not raise."""
    cow = Cow("John")
    cow.say_hello()
119 changes: 119 additions & 0 deletions oci_python_image/hello_world/integration_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,119 @@
# Loads individual layers from disk by reading their blob locations from
# the OCI format JSON descriptor file.
# TODO: upstream some of this to the testcontainers library to simplify similar code for users.
from testcontainers.core.container import DockerContainer
from testcontainers.core.waiting_utils import wait_for_logs
import docker
import tarfile
import json
import os
import tempfile
import io


def add_json_file(tar, name, contents):
    """JSON-encode *contents* and write it into the open *tar* as member *name*."""
    encoded = json.dumps(contents).encode("utf-8")
    member = tarfile.TarInfo(name=name)
    member.size = len(encoded)
    tar.addfile(member, fileobj=io.BytesIO(encoded))


def add_file(tar, name, fileobj):
    """Copy an open, fd-backed *fileobj* into *tar* as member *name*.

    Consumes and closes *fileobj*; its size comes from fstat, so it must
    wrap a real file descriptor (not an in-memory stream).
    """
    byte_count = os.fstat(fileobj.fileno()).st_size
    member = tarfile.TarInfo(name=name)
    member.size = byte_count
    tar.addfile(member, fileobj=fileobj)
    fileobj.close()


def get_blob_path(image, digest):
    """Translate an OCI digest (``algo:hash``) into its blob path on disk."""
    return f'{image}/blobs/{digest.replace(":", "/")}'


def open_blob(image, digest):
    """Return a binary read handle on the blob for *digest* under *image*."""
    path = get_blob_path(image, digest)
    return open(path, "rb")


def OCIImageContainer(image):
    """Load an OCI image layout directory into the local Docker daemon and
    return a testcontainers ``DockerContainer`` for the loaded image.

    ``docker load`` expects the legacy "docker save" tar format, so this
    probes which layers the daemon already has, then assembles one tarball
    containing the image config plus only the missing layer blobs.
    """
    with open("%s/index.json" % image) as indexp:
        indexjson = json.load(indexp)

    # The index references the (first) manifest, which references the config
    # blob and the ordered list of layer blobs.
    with open_blob(image, indexjson["manifests"][0]["digest"]) as manifestp:
        manifest = json.load(manifestp)

    with open_blob(image, manifest["config"]["digest"]) as configp:
        config = json.load(configp)

    client = docker.from_env()

    # Probe and layer loading phase
    layers = manifest["layers"]
    needed = []

    # Probing phase: attempt to load each layer by itself with a synthetic
    # single-layer manifest; a load failure is taken to mean the daemon does
    # not have that layer cached, so it must be shipped below.
    for i, layer in enumerate(layers):
        tmp = tempfile.NamedTemporaryFile(suffix=".tar")
        tar = tarfile.open(fileobj=tmp, mode="w")
        add_json_file(
            tar,
            name="manifest.json",
            contents=[
                {
                    "Config": "config.json",
                    "RepoTags": [],
                    "Layers": [layer["digest"]],
                }
            ],
        )
        add_json_file(
            tar,
            name="config.json",
            contents={
                "rootfs": {
                    "type": "layers",
                    # diff_ids are ordered the same way as manifest layers.
                    "diff_ids": [config["rootfs"]["diff_ids"][i]],
                }
            },
        )

        tar.close()

        try:
            # os.system("tar -tvf %s" % tmp.name)
            client.images.load(
                open(tmp.name, "rb"),
            )
        except docker.errors.ImageLoadError:
            # Daemon lacks this layer; include it in the final tarball.
            needed.append(layer["digest"])

    # Loading phase: one tarball with the real config and only the layers
    # the daemon reported missing.
    tmp = tempfile.NamedTemporaryFile(suffix=".tar")
    tar = tarfile.open(fileobj=tmp, mode="w")
    add_json_file(
        tar,
        name="manifest.json",
        contents=[
            {
                "Config": "config.json",
                "RepoTags": [],
                "Layers": list(map(lambda x: x["digest"], manifest["layers"])),
            }
        ],
    )
    add_file(
        tar, name="config.json", fileobj=open_blob(image, manifest["config"]["digest"])
    )
    for layer in needed:
        add_file(tar, name=layer, fileobj=open_blob(image, layer))

    tar.close()
    r = client.images.load(open(tmp.name, "rb"))
    return DockerContainer(r[0].id)


def test_wait_for_hello():
    """Integration test: the image boots under Docker and emits its greeting."""
    print("Starting container")
    with OCIImageContainer("oci_python_image/hello_world/image") as container:
        wait_for_logs(container, "hello py_image_layer!")


# BUGFIX: the call was previously unconditional at module level, so importing
# this file (e.g. pytest collection) ran the Docker test a second time.
# Guarding keeps direct execution (py_test `main`) working without the
# import-time side effect.
if __name__ == "__main__":
    test_wait_for_hello()
Loading

0 comments on commit e5d4fa6

Please sign in to comment.