AUDIENCE = "https://platform.finitestate.io/api/v1/graphql"
TOKEN_URL = "https://platform.finitestate.io/api/v1/auth/token"

+"""
+DEFAULT CHUNK SIZE: 1000 MiB
+"""
+DEFAULT_CHUNK_SIZE = 1024 ** 2 * 1000
+"""
+MAX CHUNK SIZE: 2 GiB
+"""
+MAX_CHUNK_SIZE = 1024 ** 2 * 2000
+"""
+MIN CHUNK SIZE: 5 MiB
+"""
+MIN_CHUNK_SIZE = 1024 ** 2 * 5
+


class UploadMethod(Enum):
    """
@@ -1094,15 +1107,15 @@ def download_sbom(token, organization_context, sbom_type="CYCLONEDX", sbom_subty
        raise Exception(f"Failed to download the file. Status code: {response.status_code}")


-def file_chunks(file_path, chunk_size=1024 * 1024 * 1024 * 5):
+def file_chunks(file_path, chunk_size=DEFAULT_CHUNK_SIZE):
    """
    Helper method to read a file in chunks.

    Args:
        file_path (str):
            Local path to the file to read.
        chunk_size (int, optional):
-            The size of the chunks to read. Defaults to 5 GB.
+            The size of the chunks to read. Defaults to DEFAULT_CHUNK_SIZE.

    Yields:
        bytes: The next chunk of the file.
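
The body of file_chunks falls outside this hunk. For orientation, a minimal sketch of what a generator with this signature typically looks like (an illustration, not necessarily this SDK's exact implementation):

def file_chunks(file_path, chunk_size=DEFAULT_CHUNK_SIZE):
    # Read in binary mode and yield fixed-size chunks;
    # the final chunk may be smaller than chunk_size.
    with open(file_path, "rb") as f:
        while True:
            chunk = f.read(chunk_size)
            if not chunk:
                break
            yield chunk
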
@@ -2011,10 +2024,11 @@ def update_finding_statuses(token, organization_context, user_id=None, finding_i
    return send_graphql_query(token, organization_context, mutation, variables)


-def upload_file_for_binary_analysis(token, organization_context, test_id=None, file_path=None,
-                                    chunk_size=1024 * 1024 * 1024 * 5, quick_scan=False):
+def upload_file_for_binary_analysis(
+    token, organization_context, test_id=None, file_path=None, chunk_size=DEFAULT_CHUNK_SIZE, quick_scan=False
+):
    """
-    Upload a file for Binary Analysis. Will automatically split the file into chunks and upload each chunk. Chunk size defaults to 5 GB.
+    Upload a file for Binary Analysis. Will automatically split the file into chunks and upload each chunk.
    NOTE: This is NOT for uploading third party scanner results. Use upload_test_results_file for that.

    Args:
@@ -2027,7 +2041,7 @@ def upload_file_for_binary_analysis(token, organization_context, test_id=None, f
        file_path (str, required):
            Local path to the file to upload.
        chunk_size (int, optional):
-            The size of the chunks to read. Defaults to 5 GB.
+            The size of the chunks to read. Defaults to 1000 MiB; minimum 5 MiB, maximum 2 GiB.
        quick_scan (bool, optional):
            If True, will perform a quick scan of the Binary. Defaults to False (Full Scan). For details, please see the API documentation.

@@ -2039,11 +2053,14 @@ def upload_file_for_binary_analysis(token, organization_context, test_id=None, f
        dict: The response from the GraphQL query, a completeMultipartUpload Object.
    """
    # To upload a file for Binary Analysis, you must use the generateMultiplePartUploadUrl mutation
-
    if not test_id:
        raise ValueError("Test Id is required")
    if not file_path:
        raise ValueError("File Path is required")
+    if chunk_size < MIN_CHUNK_SIZE:
+        raise ValueError(f"Chunk size must be at least {MIN_CHUNK_SIZE} bytes")
+    if chunk_size >= MAX_CHUNK_SIZE:
+        raise ValueError(f"Chunk size must be less than {MAX_CHUNK_SIZE} bytes")

    # Start Multi-part Upload
    graphql_query = '''
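
With these bounds enforced, any chunk_size in [MIN_CHUNK_SIZE, MAX_CHUNK_SIZE) is accepted and out-of-range values fail fast. A usage sketch (the test id and file path below are hypothetical):

chunk_size = 1024 ** 2 * 100  # 100 MiB, within bounds

result = upload_file_for_binary_analysis(
    token, organization_context, test_id="abc123",
    file_path="./firmware.bin", chunk_size=chunk_size,
)

# A call with chunk_size=1024 would now raise ValueError,
# since 1024 bytes is below the 5 MiB minimum.
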
@@ -2067,9 +2084,10 @@ def upload_file_for_binary_analysis(token, organization_context, test_id=None, f
    # if the file is greater than max chunk size (or 5 GB), split the file in chunks,
    # call generateUploadPartUrlV2 for each chunk of the file (even if it is a single part)
    # and upload the file to the returned upload URL
-    i = 1
+    i = 0
    part_data = []
    for chunk in file_chunks(file_path, chunk_size):
+        i = i + 1
        graphql_query = '''
        mutation GenerateUploadPartUrl($partNumber: Int!, $uploadId: ID!, $uploadKey: String!) {
            generateUploadPartUrlV2(partNumber: $partNumber, uploadId: $uploadId, uploadKey: $uploadKey) {
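
This hunk also fixes an off-by-one in the part counter: i previously started at 1 and was never advanced inside the loop, so every part carried the same number; it now starts at 0 and is incremented at the top of each iteration, numbering parts 1, 2, 3, ... as multipart uploads expect. A condensed sketch of the per-part flow this sets up (the response shape, the session variables, and the requests.put upload are assumptions based on the surrounding code, not verbatim SDK code):

import requests

i = 0
part_data = []
for chunk in file_chunks(file_path, chunk_size):
    i = i + 1  # part numbers are 1-indexed

    # Request a presigned URL for this part via the mutation above
    # (upload_id and upload_key are hypothetical names for the session values).
    response = send_graphql_query(token, organization_context, graphql_query,
                                  {"partNumber": i, "uploadId": upload_id, "uploadKey": upload_key})
    upload_url = response["data"]["generateUploadPartUrlV2"]["uploadUrl"]

    # Upload the raw bytes and keep the returned ETag for completeMultipartUpload.
    put_response = requests.put(upload_url, data=chunk)
    part_data.append({"ETag": put_response.headers["ETag"], "PartNumber": i})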