Skip to content

Commit 2b48f39

Browse files
committed
Change cmd-buildupload to add S3 metadata
- This change is needed due to brew/compliance improvements, where we need to keep more information about where the artifacts are archived; - Add --record-s3-metadata parameter in order to update meta.json with the bucket, prefix and url information used in the S3 upload. More information: coreos#2739 Signed-off-by: Renata Ravanelli <[email protected]>
1 parent 5b63eb7 commit 2b48f39

File tree

1 file changed

+37
-5
lines changed

1 file changed

+37
-5
lines changed

src/cmd-buildupload

Lines changed: 37 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -22,12 +22,16 @@ CACHE_MAX_AGE_ARTIFACT = 60 * 60 * 24 * 365
2222
# set metadata caching to 5m
2323
CACHE_MAX_AGE_METADATA = 60 * 5
2424
from cosalib.builds import Builds, BUILDFILES
25-
from cosalib.cmdlib import load_json, retry_stop, retry_boto_exception, retry_callback # noqa: E402
25+
from cosalib.cmdlib import load_json, retry_stop, retry_boto_exception, retry_callback, write_json # noqa: E402
26+
2627

2728

2829
def main():
    """Entry point: parse CLI args and dispatch to the requested action.

    With --record-s3-metadata we only rewrite meta.json with the S3
    location of a (prior) upload; otherwise we run the handler the
    chosen subparser registered via set_defaults(func=...).
    """
    args = parse_args()
    # Use getattr: only the 's3' subparser defines --record-s3-metadata,
    # so a future subcommand without it would otherwise raise
    # AttributeError here.
    if getattr(args, 'record_s3_metadata', False):
        update_meta_json(args)
    else:
        # Equivalent to cmd_upload_s3(args) today, but respects the
        # func= default set by parse_args() for consistency.
        args.func(args)
3135

3236

3337
def parse_args():
@@ -47,21 +51,25 @@ def parse_args():
4751
subparsers.required = True
4852

4953
s3 = subparsers.add_parser('s3', help='upload an image')
50-
s3.add_argument("url", metavar='<BUCKET>[/PREFIX]',
51-
help="Bucket and path prefix in which to upload")
5254
s3.add_argument("--acl", help="ACL for objects",
5355
action='store', default='private')
56+
s3.add_argument("--arch", help="Update meta.json for specific arch found in builds", nargs='+', default=['all'])
5457
s3.add_argument("--enable-gz-peel", help="Auto-peel .gz extensions "
5558
"and set Content-Disposition names", action='store_true')
5659
s3.add_argument("--endpoint-url", help="URL of S3-compatible server",
5760
action="store", metavar="URL")
61+
s3.add_argument("--record-s3-metadata", help="Update meta.json with S3 information", action='store_true')
62+
s3.add_argument("--s3-bucket", help="S3 bucket to upload", required=True)
63+
s3.add_argument("--s3-path", help="S3 path to upload", required=True)
64+
s3.add_argument("--s3-url", help="S3 redirector url")
5865
s3.set_defaults(func=cmd_upload_s3)
5966

6067
return parser.parse_args()
6168

6269

6370
def cmd_upload_s3(args):
64-
bucket, prefix = args.url.split('/', 1)
71+
bucket = args.s3_bucket
72+
prefix = args.s3_prefix
6573
builds = Builds()
6674
s3_client = boto3.client('s3', endpoint_url=args.endpoint_url)
6775
# This can't be an error for backcompat reasons, but let's print something
@@ -92,6 +100,30 @@ def cmd_upload_s3(args):
92100
subprocess.check_call(['cp-reflink', BUILDFILES['list'], BUILDFILES['sourcedata']])
93101

94102

103+
def update_meta_json(args):
    """Record S3 archive metadata into each build's meta.json.

    Writes an 's3' stanza (bucket, path, url) so meta.json fully
    describes where the artifacts were archived (brew/compliance
    requirement).

    Exits nonzero if --s3-url was not supplied or if meta.json cannot
    be written.
    """
    if not args.s3_url:
        # --s3-url is optional at the argparse level but mandatory for
        # recording metadata.
        print("S3 url is required to update meta.json")
        raise SystemExit(1)
    builds = Builds()
    # NOTE(review): assumes a --build argument defined by a parent
    # parser not visible in this diff — confirm against the full file.
    if args.build == 'latest':
        args.build = builds.get_latest()
    # '--arch all' (the default) expands to every architecture present
    # in the build.
    if args.arch[0] == 'all':
        args.arch = builds.get_build_arches(args.build)
    print(f"Updating meta.json for build: {args.build} and arch: {args.arch}")

    for arch in args.arch:
        meta = builds.get_build_meta(args.build, arch)
        builddir = builds.get_build_dir(args.build, arch)
        meta['s3'] = {
            'bucket': args.s3_bucket,
            'path': args.s3_path,
            'url': args.s3_url
        }
        try:
            write_json(os.path.join(builddir, 'meta.json'), meta)
        except OSError as e:
            # Fail loudly: the original bare `except` swallowed every
            # error and carried on, leaving meta.json silently stale.
            print(f"Error writing S3 metadata in meta.json: {e}")
            raise SystemExit(1)
95127
def s3_upload_build(s3_client, args, builddir, bucket, prefix):
96128
# In the case where we are doing builds for different architectures
97129
# it's likely not all builds for this arch are local. If the meta.json

0 commit comments

Comments
 (0)