@@ -22,12 +22,16 @@ CACHE_MAX_AGE_ARTIFACT = 60 * 60 * 24 * 365
 # set metadata caching to 5m
 CACHE_MAX_AGE_METADATA = 60 * 5
 from cosalib.builds import Builds, BUILDFILES
-from cosalib.cmdlib import load_json, retry_stop, retry_boto_exception, retry_callback  # noqa: E402
+from cosalib.cmdlib import load_json, retry_stop, retry_boto_exception, retry_callback, write_json  # noqa: E402
+
 
 
 def main():
     args = parse_args()
-    args.func(args)
+    if args.record_s3_metadata:
+        update_meta_json(args)
+    else:
+        cmd_upload_s3(args)
 
 
 def parse_args():
@@ -47,21 +51,25 @@ def parse_args():
     subparsers.required = True
 
     s3 = subparsers.add_parser('s3', help='upload an image')
-    s3.add_argument("url", metavar='<BUCKET>[/PREFIX]',
-                    help="Bucket and path prefix in which to upload")
     s3.add_argument("--acl", help="ACL for objects",
                     action='store', default='private')
+    s3.add_argument("--arch", help="Update meta.json for specific arch(es) found in builds", nargs='+', default=['all'])
     s3.add_argument("--enable-gz-peel", help="Auto-peel .gz extensions "
                     "and set Content-Disposition names", action='store_true')
     s3.add_argument("--endpoint-url", help="URL of S3-compatible server",
                     action="store", metavar="URL")
+    s3.add_argument("--record-s3-metadata", help="Update meta.json with S3 information", action='store_true')
+    s3.add_argument("--s3-bucket", help="S3 bucket to upload to", required=True)
+    s3.add_argument("--s3-path", help="S3 path prefix to upload under", required=True)
+    s3.add_argument("--s3-url", help="S3 redirector URL")
     s3.set_defaults(func=cmd_upload_s3)
 
     return parser.parse_args()
 
 
 def cmd_upload_s3(args):
-    bucket, prefix = args.url.split('/', 1)
+    bucket = args.s3_bucket
+    prefix = args.s3_path
     builds = Builds()
     s3_client = boto3.client('s3', endpoint_url=args.endpoint_url)
     # This can't be an error for backcompat reasons, but let's print something
@@ -92,6 +100,30 @@ def cmd_upload_s3(args):
     subprocess.check_call(['cp-reflink', BUILDFILES['list'], BUILDFILES['sourcedata']])
 
 
+def update_meta_json(args):
+    if not args.s3_url:
+        print("S3 URL is required to update meta.json")
+        exit(1)
+    builds = Builds()
+    if args.build == 'latest':
+        args.build = builds.get_latest()
+    if args.arch[0] == 'all':
+        args.arch = builds.get_build_arches(args.build)
+    print(f"Updating meta.json for build: {args.build} and arch: {args.arch}")
+
+    for arch in args.arch:
+        meta = builds.get_build_meta(args.build, arch)
+        d = builds.get_build_dir(args.build, arch)
+        meta['s3'] = {
+            'bucket': args.s3_bucket,
+            'path': args.s3_path,
+            'url': args.s3_url
+        }
+        try:
+            write_json(os.path.join(d, 'meta.json'), meta)
+        except Exception as e:
+            print(f"Error writing S3 metadata to meta.json: {e}")
+
 def s3_upload_build(s3_client, args, builddir, bucket, prefix):
     # In the case where we are doing builds for different architectures
     # it's likely not all builds for this arch are local. If the meta.json
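
For reference, a minimal standalone sketch of the 's3' stanza that the new --record-s3-metadata path writes into each build's meta.json. The bucket, path, and URL values below are hypothetical placeholders; the actual code takes them from --s3-bucket, --s3-path, and --s3-url, resolves the build directory with Builds().get_build_dir(), and writes through cosalib.cmdlib.write_json:

    import json
    import os

    # Hypothetical build directory; the script derives this from
    # Builds().get_build_dir(build, arch).
    build_dir = "builds/example-build-id/x86_64"
    meta_path = os.path.join(build_dir, "meta.json")

    # Load the existing build metadata.
    with open(meta_path) as f:
        meta = json.load(f)

    # Same shape as the 's3' entry written by update_meta_json().
    meta["s3"] = {
        "bucket": "example-bucket",            # from --s3-bucket
        "path": "prod/streams/stable",         # from --s3-path
        "url": "https://example.com/builds",   # from --s3-url
    }

    # Write the updated metadata back out.
    with open(meta_path, "w") as f:
        json.dump(meta, f, indent=4)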