Project import generated by Copybara.
GitOrigin-RevId: af8d9acc8ae00d6a5f1674924eaa976d0d293694
This commit is contained in:
parent
600f7ecce9
commit
2dac67662f
11 changed files with 373 additions and 51 deletions
|
|
@ -9,6 +9,15 @@ permissions:
|
|||
|
||||
jobs:
|
||||
publish:
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
include:
|
||||
- target: //k8s/container/coder-dev-base-image:push
|
||||
tag_prefix: "5-"
|
||||
- target: //experimental/users/acmcarther/temporal/git_workflow:push
|
||||
tag_prefix: "latest-"
|
||||
|
||||
runs-on: docker
|
||||
container:
|
||||
image: forgejo.csbx.dev/acmcarther/coder-dev-base-image:4
|
||||
|
|
@ -30,7 +39,7 @@ jobs:
|
|||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.YESOD_PACKAGE_TOKEN }}
|
||||
|
||||
- name: Publish Coder Dev Base Image
|
||||
- name: Publish Image
|
||||
env:
|
||||
BAZELISK_BASE_URL: "http://bin-cache-http.dev.svc.cluster.local/bazel"
|
||||
# rules_oci respects DOCKER_CONFIG or looks in ~/.docker/config.json
|
||||
|
|
@ -40,7 +49,7 @@ jobs:
|
|||
export DOCKER_CONFIG=$HOME/.docker
|
||||
|
||||
SHORT_SHA=$(git rev-parse --short HEAD)
|
||||
TAG="5-${SHORT_SHA}"
|
||||
TAG="${{ matrix.tag_prefix }}${SHORT_SHA}"
|
||||
|
||||
echo "Pushing image with tag: ${TAG}"
|
||||
bazel run --config=remote //k8s/container/coder-dev-base-image:push -- --tag ${TAG}
|
||||
echo "Pushing image: ${{ matrix.target }} with tag: ${TAG}"
|
||||
bazel run --config=remote ${{ matrix.target }} -- --tag ${TAG}
|
||||
|
|
|
|||
6
MODULE.bazel.lock
generated
6
MODULE.bazel.lock
generated
|
|
@ -461,7 +461,7 @@
|
|||
"bzlTransitiveDigest": "Fi9dJKKU3TihmUj0lWW/2dhHgNs3DPGXi7n1QwQO/jc=",
|
||||
"usagesDigest": "5hPUPNFGWtRV+9o5pQcmQtg06enPjgEj4aEs5DR840s=",
|
||||
"recordedFileInputs": {
|
||||
"@@//third_party/helm/chartfile.lock.json": "44774771aa21a2d913b43594b2a8383716d3183bc922888ba10659c28614c163"
|
||||
"@@//third_party/helm/chartfile.lock.json": "739ba72ac6610ada29d8916ce2f06af1ce01ec8e54457862aa47c62a044bfd72"
|
||||
},
|
||||
"recordedDirentsInputs": {},
|
||||
"envVariables": {},
|
||||
|
|
@ -496,8 +496,8 @@
|
|||
"helm_coderv2_coder": {
|
||||
"repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive",
|
||||
"attributes": {
|
||||
"url": "https://helm.coder.com/v2/coder_helm_2.25.2.tgz",
|
||||
"sha256": "0edd66b6c3dec72110f7998a0e1e71f14291f4be88127cd0fc92d63906cf9864",
|
||||
"url": "https://helm.coder.com/v2/coder_helm_2.29.1.tgz",
|
||||
"sha256": "9ea45a2b58e14431cff31f6febd2ed081e791847998ba78fb91951a03dd0c108",
|
||||
"strip_prefix": "coder",
|
||||
"build_file_content": "\nfilegroup(\n name = \"chart\",\n srcs = glob([\"**\"]),\n visibility = [\"//visibility:public\"],\n)\n"
|
||||
}
|
||||
|
|
|
|||
|
|
@ -20,6 +20,7 @@ jsonnet_library(
|
|||
jsonnet_library(
|
||||
name = "k",
|
||||
srcs = ["k.libsonnet"],
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"@github_com_jsonnet_libs_k8s_libsonnet_1_29//:lib",
|
||||
],
|
||||
|
|
|
|||
|
|
@ -156,7 +156,7 @@ local workspaceRootLength = std.length(workspaceRoot);
|
|||
##########################
|
||||
# Kube object definition #
|
||||
##########################
|
||||
Namespace(name): baseKube.Namespace(name),
|
||||
Namespace(name): baseKubeCompat._Object("v1", "Namespace", name),
|
||||
|
||||
StorageClass(name): baseKubeCompat._Object("storage.k8s.io/v1", "StorageClass", name) {
|
||||
},
|
||||
|
|
|
|||
|
|
@ -1 +1 @@
|
|||
import "external/+jsonnet_deps+github_com_jsonnet_libs_k8s_libsonnet_1_29/1.29/main.libsonnet";
|
||||
import "external/+jsonnet_deps+github_com_jsonnet_libs_k8s_libsonnet_1_29/1.29/main.libsonnet"
|
||||
6
third_party/helm/chartfile.lock.json
vendored
6
third_party/helm/chartfile.lock.json
vendored
|
|
@ -16,9 +16,9 @@
|
|||
"version": "0.1.24"
|
||||
},
|
||||
"coderv2/coder": {
|
||||
"digest": "0edd66b6c3dec72110f7998a0e1e71f14291f4be88127cd0fc92d63906cf9864",
|
||||
"url": "https://helm.coder.com/v2/coder_helm_2.25.2.tgz",
|
||||
"version": "2.25.2"
|
||||
"digest": "9ea45a2b58e14431cff31f6febd2ed081e791847998ba78fb91951a03dd0c108",
|
||||
"url": "https://helm.coder.com/v2/coder_helm_2.29.1.tgz",
|
||||
"version": "2.29.1"
|
||||
},
|
||||
"crossplane/crossplane": {
|
||||
"digest": "1059cc4b87167ba7e1b837a8e6bd787691bc9f84c5a29a7e91dbd0122086c682",
|
||||
|
|
|
|||
2
third_party/helm/chartfile.yaml
vendored
2
third_party/helm/chartfile.yaml
vendored
|
|
@ -52,7 +52,7 @@ requires:
|
|||
- chart: nvidia/gpu-operator
|
||||
version: v25.10.1
|
||||
- chart: coderv2/coder
|
||||
version: 2.25.2
|
||||
version: 2.29.1
|
||||
- chart: harbor/harbor
|
||||
version: 1.16.2
|
||||
- chart: hashicorp/consul
|
||||
|
|
|
|||
|
|
@ -1,15 +1,22 @@
|
|||
load("@pip_third_party//:requirements.bzl", "requirement")
|
||||
load("@rules_python//python:defs.bzl", "py_binary")
|
||||
load("@rules_python//python:defs.bzl", "py_binary", "py_library")
|
||||
|
||||
exports_files([
|
||||
"mypy.ini",
|
||||
"tanka_runner.sh",
|
||||
"helm_pull.py", # Exported for repo rule usage
|
||||
])
|
||||
|
||||
py_library(
|
||||
name = "helm_pull_lib",
|
||||
srcs = ["helm_pull.py"],
|
||||
)
|
||||
|
||||
py_binary(
|
||||
name = "helm_sync",
|
||||
srcs = ["helm_sync.py"],
|
||||
deps = [
|
||||
":helm_pull_lib",
|
||||
requirement("requests"),
|
||||
requirement("pyyaml"),
|
||||
],
|
||||
|
|
@ -45,5 +52,4 @@ py_binary(
|
|||
deps = [
|
||||
requirement("pyyaml"),
|
||||
],
|
||||
)
|
||||
|
||||
)
|
||||
|
|
@ -1,5 +1,66 @@
|
|||
load("@bazel_tools//tools/build_defs/repo:http.bzl", "http_archive")
|
||||
|
||||
def _helm_import_impl(ctx):
    """Repository rule implementation: fetch a Helm chart archive.

    OCI URLs (oci://...) are pulled via the //tools:helm_pull.py helper,
    because Bazel's downloader does not speak the OCI registry protocol.
    Plain HTTP(S) URLs fall back to Bazel's built-in download_and_extract.
    Either way the chart ends up extracted at the repository root with a
    BUILD.bazel generated from `build_file_content`.
    """
    if ctx.attr.url.startswith("oci://"):
        archive = "chart.tgz"

        # NOTE(review): this shells out to whatever `python3` is on PATH,
        # not the Bazel Python toolchain — toolchains are hard to reach
        # from a repository rule.
        helper = ctx.path(Label("//tools:helm_pull.py"))

        pull_cmd = [
            "python3",
            helper,
            "--url",
            ctx.attr.url,
            "--version",
            ctx.attr.version,
            "--output",
            archive,
        ]
        if ctx.attr.sha256:
            pull_cmd.extend(["--digest", ctx.attr.sha256])

        result = ctx.execute(pull_cmd)
        if result.return_code != 0:
            fail("Failed to pull OCI chart: \n%s\n%s" % (result.stdout, result.stderr))

        # Unpack into the repo root, then drop the tarball itself.
        ctx.extract(archive, stripPrefix = ctx.attr.strip_prefix)
        ctx.delete(archive)
    else:
        # HTTP fallback: checksum verification and extraction handled by Bazel.
        ctx.download_and_extract(
            url = ctx.attr.url,
            sha256 = ctx.attr.sha256,
            stripPrefix = ctx.attr.strip_prefix,
        )

    # Materialize the BUILD file so the chart contents are addressable.
    ctx.file("BUILD.bazel", ctx.attr.build_file_content)

helm_import = repository_rule(
    implementation = _helm_import_impl,
    attrs = {
        # Chart location: either an oci:// reference or a plain https URL.
        "url": attr.string(mandatory = True),
        # Chart version/tag; only consulted on the OCI path above.
        "version": attr.string(),
        # Expected sha256; for OCI pulls this is forwarded as --digest.
        "sha256": attr.string(),
        "strip_prefix": attr.string(),
        "build_file_content": attr.string(),
    },
)
|
||||
|
||||
def _helm_deps_impl(ctx):
|
||||
# 1. Read the lockfile
|
||||
lockfile_content = ctx.read(ctx.path(Label("//third_party/helm:chartfile.lock.json")))
|
||||
|
|
@ -11,12 +72,13 @@ def _helm_deps_impl(ctx):
|
|||
# repo_name will be "helm_grafana_grafana"
|
||||
repo_name = "helm_" + name.replace("/", "_").replace("-", "_")
|
||||
chart_name = name.split("/")[1]
|
||||
|
||||
http_archive(
|
||||
|
||||
helm_import(
|
||||
name = repo_name,
|
||||
url = info["url"],
|
||||
version = info.get("version", ""), # Version needed for OCI
|
||||
sha256 = info["digest"],
|
||||
strip_prefix = chart_name,
|
||||
strip_prefix = chart_name,
|
||||
build_file_content = """
|
||||
filegroup(
|
||||
name = "chart",
|
||||
|
|
|
|||
182
tools/helm_pull.py
Normal file
182
tools/helm_pull.py
Normal file
|
|
@ -0,0 +1,182 @@
|
|||
import argparse
|
||||
import json
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
import urllib.request
|
||||
import urllib.error
|
||||
import hashlib
|
||||
|
||||
|
||||
def get_token(registry, repository):
    """Obtain an anonymous pull token for *repository* from an OCI registry.

    Probes ``https://<registry>/v2/`` and, on the expected 401 challenge,
    parses the ``Www-Authenticate`` header for the token realm/service and
    requests a pull-scoped token.

    Returns the bearer token string, or ``None`` when the registry needs no
    auth (or the challenge cannot be parsed).
    """
    # 1. Probe the registry root to trigger the auth-discovery challenge.
    url = f"https://{registry}/v2/"
    print(f"Checking {url}...", file=sys.stderr)
    try:
        # Close the probe response instead of leaking the connection
        # (the original left it open).
        with urllib.request.urlopen(url):
            pass
        return None  # 200 OK: no auth needed.
    except urllib.error.HTTPError as e:
        if e.code != 401:
            # 401 is the expected auth-discovery response. Anything else is
            # logged, and we still attempt to parse a challenge header below
            # on the off chance the registry sent one.
            if e.code == 200:
                return None
            print(f"Initial check to {url} returned {e.code}: {e}", file=sys.stderr)

        auth_header = e.headers.get("Www-Authenticate")
        if not auth_header:
            # No challenge header: assume the registry is public.
            return None

        print(f"Auth header: {auth_header}", file=sys.stderr)

        realm_match = re.search(r'realm="([^"]+)"', auth_header)
        service_match = re.search(r'service="([^"]+)"', auth_header)

        if not realm_match:
            print("Could not find realm in Www-Authenticate", file=sys.stderr)
            return None

        realm = realm_match.group(1)
        service = service_match.group(1) if service_match else None

        token_url = f"{realm}?scope=repository:{repository}:pull"
        if service:
            token_url += f"&service={service}"

        print(f"Fetching token from {token_url}...", file=sys.stderr)
        req = urllib.request.Request(token_url)
        with urllib.request.urlopen(req) as r:
            data = json.loads(r.read())
            # Registries differ on the field name for the token.
            return data.get("token") or data.get("access_token")
|
||||
|
||||
|
||||
def get_manifest(registry, repository, reference, token):
    """Return the parsed OCI/Docker image manifest for *reference* (tag or digest)."""
    headers = {
        # Accept OCI and Docker manifests
        "Accept": (
            "application/vnd.oci.image.manifest.v1+json, "
            "application/vnd.docker.distribution.manifest.v2+json"
        ),
    }
    if token:
        headers["Authorization"] = f"Bearer {token}"

    request = urllib.request.Request(
        f"https://{registry}/v2/{repository}/manifests/{reference}",
        headers=headers,
    )
    with urllib.request.urlopen(request) as response:
        return json.loads(response.read())
|
||||
|
||||
|
||||
def download_blob(registry, repository, digest, token, output_path):
    """Stream the blob identified by *digest* into *output_path*."""
    request = urllib.request.Request(
        f"https://{registry}/v2/{repository}/blobs/{digest}"
    )
    if token:
        request.add_header("Authorization", f"Bearer {token}")

    # Copy in fixed-size chunks so large charts never sit fully in memory.
    with urllib.request.urlopen(request) as response, open(output_path, "wb") as out:
        for chunk in iter(lambda: response.read(8192), b""):
            out.write(chunk)
|
||||
|
||||
|
||||
def _find_chart_layer(manifest):
    """Return the first manifest layer with a Helm-chart media type, or None."""
    # Strictly the chart content type is the cncf one; some registries
    # (incorrectly) label the layer as a generic tarball.
    valid_media_types = [
        "application/vnd.cncf.helm.chart.content.v1.tar+gzip",
        "application/x-tar",
    ]
    for layer in manifest.get("layers", []):
        if layer.get("mediaType") in valid_media_types:
            return layer
    return None


def _verify_digest(path, expected):
    """Exit(1) unless the sha256 of *path* equals *expected* ('sha256:<hex>')."""
    sha256 = hashlib.sha256()
    with open(path, "rb") as f:
        # Hash in 64 KiB chunks to keep memory flat for large charts.
        for data in iter(lambda: f.read(65536), b""):
            sha256.update(data)
    calculated_digest = "sha256:" + sha256.hexdigest()
    if calculated_digest != expected:
        print(
            f"Error: Digest mismatch. Expected {expected}, got {calculated_digest}",
            file=sys.stderr,
        )
        sys.exit(1)
    print("Digest verified.", file=sys.stderr)


def main():
    """CLI entry point: pull a Helm chart tarball from an OCI registry.

    Parses --url/--version/--output (and optional --digest), resolves the
    manifest, downloads the chart layer, and verifies its sha256 when a
    digest is supplied. Exits non-zero on any failure.
    """
    parser = argparse.ArgumentParser(description="Pull Helm chart from OCI registry")
    parser.add_argument(
        "--url",
        required=True,
        help="OCI URL (e.g., oci://ghcr.io/stefanprodan/charts/podinfo)",
    )
    parser.add_argument("--version", required=True, help="Chart version/tag")
    parser.add_argument("--output", required=True, help="Output file path (.tgz)")
    parser.add_argument(
        "--digest", help="Expected SHA256 digest of the content (optional validation)"
    )

    args = parser.parse_args()

    if not args.url.startswith("oci://"):
        print("Error: URL must start with oci://", file=sys.stderr)
        sys.exit(1)

    # oci://ghcr.io/stefanprodan/charts/podinfo
    #   -> registry=ghcr.io, repository=stefanprodan/charts/podinfo
    path = args.url[len("oci://"):]
    if "/" not in path:
        print("Error: Invalid OCI URL format", file=sys.stderr)
        sys.exit(1)

    registry, repository = path.split("/", 1)

    try:
        token = get_token(registry, repository)
        manifest = get_manifest(registry, repository, args.version, token)

        chart_layer = _find_chart_layer(manifest)
        if not chart_layer:
            print("Error: No Helm chart layer found in manifest", file=sys.stderr)
            print(
                f"Layers: {[l.get('mediaType') for l in manifest.get('layers', [])]}",
                file=sys.stderr,
            )
            sys.exit(1)

        digest = chart_layer["digest"]
        print(f"Found layer digest: {digest}", file=sys.stderr)

        download_blob(registry, repository, digest, token, args.output)

        # Verify digest if provided. (sys.exit raises SystemExit, which the
        # broad Exception handler below deliberately does not catch.)
        if args.digest:
            _verify_digest(args.output, args.digest)

    except Exception as e:
        # Broad catch is deliberate: this is the CLI boundary — report the
        # traceback and exit non-zero rather than crash with one.
        print(f"Error: {e}", file=sys.stderr)
        import traceback

        traceback.print_exc(file=sys.stderr)
        sys.exit(1)


if __name__ == "__main__":
    main()
|
||||
|
|
@ -4,13 +4,16 @@ import requests
|
|||
import sys
|
||||
import os
|
||||
from urllib.parse import urljoin
|
||||
from tools import helm_pull
|
||||
|
||||
|
||||
def load_yaml(path):
|
||||
with open(path, 'r') as f:
|
||||
with open(path, "r") as f:
|
||||
return yaml.safe_load(f)
|
||||
|
||||
|
||||
def fetch_index(repo_url):
|
||||
index_url = urljoin(repo_url + '/', 'index.yaml')
|
||||
index_url = urljoin(repo_url + "/", "index.yaml")
|
||||
print(f"Fetching index from {index_url}...")
|
||||
try:
|
||||
r = requests.get(index_url)
|
||||
|
|
@ -20,84 +23,143 @@ def fetch_index(repo_url):
|
|||
print(f"Error fetching {index_url}: {e}")
|
||||
return None
|
||||
|
||||
|
||||
def resolve_oci(repo_url, chart_name, version):
    """Resolve an OCI-hosted chart to a lockfile entry.

    Appends *chart_name* to *repo_url* (e.g. ``oci://ghcr.io/stefanprodan/charts``
    + ``podinfo``) and reads the registry manifest for *version* to pin the
    chart-layer digest. Returns ``{"version", "url", "digest"}``.
    """
    base = repo_url[:-1] if repo_url.endswith("/") else repo_url
    full_url = f"{base}/{chart_name}"

    print(f"Resolving OCI chart {full_url}:{version}...")

    # Reuse helm_pull's registry client: strip the 6-char "oci://" scheme,
    # then split registry host from repository path.
    registry, repository = full_url[6:].split("/", 1)

    token = helm_pull.get_token(registry, repository)
    manifest = helm_pull.get_manifest(registry, repository, version, token)

    # The chart tarball is the layer carrying a Helm chart media type
    # (some registries mislabel it as a generic tar).
    acceptable = (
        "application/vnd.cncf.helm.chart.content.v1.tar+gzip",
        "application/x-tar",
    )
    chart_layer = next(
        (
            layer
            for layer in manifest.get("layers", [])
            if layer.get("mediaType") in acceptable
        ),
        None,
    )

    if chart_layer is None:
        raise Exception(
            f"No Helm chart layer found in manifest for {full_url}:{version}"
        )

    return {"version": version, "url": full_url, "digest": chart_layer["digest"]}
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) < 2:
|
||||
print("Usage: python helm_sync.py <path_to_chartfile.yaml> [output_lock_file]")
|
||||
sys.exit(1)
|
||||
|
||||
chartfile_path = sys.argv[1]
|
||||
lockfile_path = sys.argv[2] if len(sys.argv) > 2 else chartfile_path.replace('.yaml', '.lock.json')
|
||||
lockfile_path = (
|
||||
sys.argv[2]
|
||||
if len(sys.argv) > 2
|
||||
else chartfile_path.replace(".yaml", ".lock.json")
|
||||
)
|
||||
|
||||
print(f"Reading {chartfile_path}...")
|
||||
chartfile = load_yaml(chartfile_path)
|
||||
|
||||
repos = {r['name']: r['url'] for r in chartfile.get('repositories', [])}
|
||||
repos = {r["name"]: r["url"] for r in chartfile.get("repositories", [])}
|
||||
indices = {}
|
||||
|
||||
lock_data = {"charts": {}}
|
||||
|
||||
for req in chartfile.get('requires', []):
|
||||
chart_ref = req['chart']
|
||||
version = req['version']
|
||||
|
||||
if '/' not in chart_ref:
|
||||
for req in chartfile.get("requires", []):
|
||||
chart_ref = req["chart"]
|
||||
version = req["version"]
|
||||
|
||||
if "/" not in chart_ref:
|
||||
print(f"Invalid chart reference: {chart_ref}. Expected repo/name.")
|
||||
continue
|
||||
|
||||
repo_name, chart_name = chart_ref.split('/', 1)
|
||||
|
||||
|
||||
repo_name, chart_name = chart_ref.split("/", 1)
|
||||
|
||||
if repo_name not in repos:
|
||||
print(f"Repository '{repo_name}' not found for chart {chart_ref}")
|
||||
continue
|
||||
|
||||
|
||||
repo_url = repos[repo_name]
|
||||
|
||||
|
||||
if repo_url.startswith("oci://"):
|
||||
try:
|
||||
lock_data["charts"][chart_ref] = resolve_oci(
|
||||
repo_url, chart_name, version
|
||||
)
|
||||
print(f"Resolved {chart_ref} {version} (OCI)")
|
||||
except Exception as e:
|
||||
print(f"Error resolving OCI chart {chart_ref}: {e}")
|
||||
continue
|
||||
|
||||
if repo_name not in indices:
|
||||
indices[repo_name] = fetch_index(repo_url)
|
||||
|
||||
|
||||
index = indices[repo_name]
|
||||
if not index:
|
||||
print(f"Skipping {chart_ref} due to missing index.")
|
||||
continue
|
||||
|
||||
entries = index.get('entries', {}).get(chart_name, [])
|
||||
|
||||
|
||||
entries = index.get("entries", {}).get(chart_name, [])
|
||||
|
||||
# Find exact version
|
||||
matched_entry = None
|
||||
for entry in entries:
|
||||
if entry['version'] == version:
|
||||
if entry["version"] == version:
|
||||
matched_entry = entry
|
||||
break
|
||||
|
||||
|
||||
if not matched_entry:
|
||||
print(f"Version {version} not found for chart {chart_ref}")
|
||||
continue
|
||||
|
||||
|
||||
# Resolve URL
|
||||
urls = matched_entry.get('urls', [])
|
||||
urls = matched_entry.get("urls", [])
|
||||
if not urls:
|
||||
print(f"No URLs found for {chart_ref} version {version}")
|
||||
continue
|
||||
|
||||
|
||||
# URL can be relative or absolute
|
||||
chart_url = urls[0]
|
||||
if not chart_url.startswith('http'):
|
||||
chart_url = urljoin(repo_url + '/', chart_url)
|
||||
|
||||
digest = matched_entry.get('digest')
|
||||
|
||||
if not chart_url.startswith("http"):
|
||||
chart_url = urljoin(repo_url + "/", chart_url)
|
||||
|
||||
digest = matched_entry.get("digest")
|
||||
|
||||
print(f"Resolved {chart_ref} {version} -> {chart_url}")
|
||||
|
||||
|
||||
lock_data["charts"][chart_ref] = {
|
||||
"version": version,
|
||||
"url": chart_url,
|
||||
"digest": digest
|
||||
"digest": digest,
|
||||
}
|
||||
|
||||
print(f"Writing lockfile to {lockfile_path}...")
|
||||
with open(lockfile_path, 'w') as f:
|
||||
with open(lockfile_path, "w") as f:
|
||||
json.dump(lock_data, f, indent=2, sort_keys=True)
|
||||
f.write('\n')
|
||||
f.write("\n")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
|
|
|||
Loading…
Add table
Reference in a new issue