@@ -1985,6 +1985,33 @@ function isAbsoluteURL(url) {
   return /^([a-z][a-z\d+\-.]*:)?\/\//i.test(url);
 }
 
+/**
+ * Trim `n` slashes from the start or end of a `subject` string.
+ *
+ * @param {!string} subject String to trim slashes from.
+ * @param {boolean=} opt_end Whether to trim from the end of the string. If not passed, or passed as `false`, the
+ *   start of the string is inspected instead.
+ * @return {!string} String with slashes trimmed from either the start or end of the string.
+ */
+function trimSlashes(subject, opt_end) {
+  const originalLength = subject.length;
+  const getSubjectChar = () => subject.charAt(opt_end === true ? subject.length - 1 : 0);
+  const trimOne = () => (subject = opt_end === true ? subject.slice(0, subject.length - 1) : subject.slice(1));
+
+  let char = getSubjectChar();
+  let iterations = 0;
+
+  while (char === '/') {
+    iterations++;
+    trimOne();
+    char = getSubjectChar();
+    if (iterations > originalLength) {
+      break;  // shouldn't infinite-loop, but this protection guarantees we never look past the length of the string
+    }
+  }
+  return subject;
+}
+
 /**
  * Creates a new URL by combining the specified URLs
  *
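For orientation, a rough usage sketch of the new `trimSlashes` helper follows; the sample inputs are invented for illustration and are not taken from the axios test suite.

// expected behaviour of trimSlashes (illustrative sketch, not part of the diff)
console.assert(trimSlashes('///foo/bar') === 'foo/bar');        // leading slashes are trimmed by default
console.assert(trimSlashes('foo/bar///', true) === 'foo/bar');  // opt_end = true trims trailing slashes instead
console.assert(trimSlashes('foo/bar') === 'foo/bar');           // nothing to trim, subject returned unchanged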
@@ -1994,9 +2021,83 @@ function isAbsoluteURL(url) {
  * @returns {string} The combined URL
  */
 function combineURLs(baseURL, relativeURL) {
-  return relativeURL
-    ? baseURL.replace(/\/+$/, '') + '/' + relativeURL.replace(/^\/+/, '')
+  const combined = relativeURL
+    ? (
+      (baseURL.charAt(baseURL.length - 1) === '/' ? trimSlashes(baseURL, true) : baseURL)
+      + '/'
+      + (relativeURL.charAt(0) === '/' ? trimSlashes(relativeURL) : relativeURL)
+    )
     : baseURL;
+
+  // corner case: if one of the original URLs has multiple slashes which do not reside at the end (for the `baseURL`) or
+  // at the start (for the `relativeURL`), we sanitize them here. avoidance of regex is deliberate, in order to avoid
+  // polynomial runtime complexity.
+  //
+  // since the `baseURL` and `relativeURL` are guaranteed not to have such artifacts at the end, or beginning,
+  // respectively (by `trimSlashes`), we only need to do a quick check for the presence of a double-slash. if there is
+  // none present, we can bail and return the combined URL.
+  //
+  // See more: CWE-1333, CWE-400, CWE-730 (https://cwe.mitre.org/index.html)
+  //
+  // since Axios only supports a limited set of protocols on each platform, we can safely predict where the protocol
+  // specifier will be. Then, we can scan the inverse range for any double-slashes. If there is no protocol present (as
+  // is the case for relative URLs), we can simply scan the string.
+  //
+  // the full suite of supported protocol prefixes across all platforms is:
+  // `['http', 'https', 'file', 'blob', 'url', 'data']`
+  //
+  // these are all either three, four, or five characters long (in the lone case of `https`). we use these offsets to
+  // probe for the protocol string, without iterating, and then proceed as above.
+  const protocolMinimumOffset = 3 + 1;  // 3 character minimum + 1 to arrive at `:`
+  const protocolMaximumOffset = 5 + 1;  // 5 character maximum + 1 to arrive at `:`
+  const combinedLength = combined.length;
+  const offset = Math.min(combinedLength, (protocolMaximumOffset + 2));
+  let sub = combined;
+
+  /* eslint-disable */
+  let protocolPosition = -1;
+
+  // if the combined URL is shorter than the minimum, there is no protocol by definition, and both inputs were
+  // relative (and both very small). in that case `protocolPosition` simply stays at `-1` to indicate that no
+  // protocol separator was found.
+  if (!(combinedLength < protocolMinimumOffset)) {
+    // now that we know it's at least as long as the minimum, we can safely slice and check for the protocol tail. the
+    // length of the string can still be less than the maximum offset + 2, though, so we take the minimum of that and
+    // the length of the combined string to prevent overflows. at the same time, we assign the smaller search string to
+    // the subject, so that we don't have to slice it again in the next step.
+    protocolPosition = (sub = sub.slice(0, offset)).includes('://') ?
+      // we've found the protocol separator; compute the position just past it. because `sub` is a prefix of
+      // `combined` (sliced from index `0`), an index into `sub` is also an index into `combined`, so no extra offset
+      // applies. since `indexOf` returns the start of the `://` match, we add `3` to step over the separator itself.
+      // otherwise, we leave `-1` in place to indicate that no protocol was found (i.e. a relative base URL).
+      (sub.indexOf('://') + 3) : -1;
+  }
+
+
+  // use the above metric to calculate the minimum search space for double-slashes which need to be sanitized.
+  const doubleSlashSearch = protocolPosition === -1 ? combined : combined.slice(protocolPosition);
+
+  // check for double slashes in the target search space. if found, build the return value character by character,
+  // dropping repeated slashes as we go.
+  if (doubleSlashSearch.includes('//')) {
+    let previous = '';
+    let charIndex = 0;
+    let charsTotal = doubleSlashSearch.length;
+    let sanitized = '';
+
+    while (charIndex < charsTotal) {
+      const char = doubleSlashSearch.charAt(charIndex);
+      if (!(char === '/' && previous === '/')) {
+        sanitized += char;
+      }
+      previous = char;
+      charIndex++;
+    }
+
+    // finally, if we trimmed the protocol from the search space, we need to combine it again before we return.
+    return protocolPosition === -1 ? sanitized : combined.slice(0, protocolPosition) + sanitized;
+  }
+  return combined;
 }
 
 /**
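As a sanity check on the rewritten combiner, here is a hedged sketch of the behaviour it aims for once the double-slash sanitisation pass runs; the sample URLs are invented for illustration.

// expected behaviour of the rewritten combineURLs (illustrative sketch, not part of the diff)
console.assert(combineURLs('https://api.example.com/', '/v1/users') === 'https://api.example.com/v1/users');
console.assert(combineURLs('https://api.example.com', 'v1//users') === 'https://api.example.com/v1/users');  // interior '//' collapsed, protocol '//' preserved
console.assert(combineURLs('https://api.example.com/base', '') === 'https://api.example.com/base');          // falsy relativeURL returns baseURL as-is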
@@ -2518,20 +2619,20 @@ function dispatchFetch(config, resolve, reject) {
   const requestHeaders = AxiosHeaders$1.from(config.headers).normalize();
   let fullPath = buildFullPath(config.baseURL, config.url);
 
-  // safely parse into `URL`, or use existing/cached URL via config
+  // safely parse into `URL`, or use existing/cached URL via config. skip this step for relative URLs, which do not
+  // parse into `URL` objects because they do not encapsulate origin info. make sure to let protocol-relative URLs
+  // through, though, since they are considered absolute.
   let parsedUrl = config.parsedUrl;
-  if (!parsedUrl) {
+  if (!parsedUrl && !(fullPath.startsWith('/') && !fullPath.startsWith('//'))) {
     try {
       // we are unable to parse the URL if it (1) is a relative URL, or (2) is a malformed URL to begin with. to avoid
       // #1 causing an error, we can make an attempt here to use the current window origin as a relative base; this will
       // only work in browsers, though, so we need to be careful to check that we have an origin in the first place.
       if (platform.isStandardBrowserEnv) {
-        if (fullPath.startsWith('/')) {
-          // origin = `https://domain.com` (protocol + host + port if non-standard)
-          // fullPath = `/foo/bar` (relative path)
-          // `fullPath = https://domain.com/foo/bar`
-          fullPath = window.location.origin + fullPath;
-        }
+        // origin = `https://domain.com` (protocol + host + port if non-standard)
+        // a `fullPath` that still starts with `/` here can only be protocol-relative (root-relative paths are
+        // skipped above), so prefix the window origin for it as before; absolute URLs are left untouched.
+        fullPath = fullPath.startsWith('/') ? window.location.origin + fullPath : fullPath;
       }
       parsedUrl = new URL(fullPath);
     } catch (urlParseErr) {
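The new guard in the hunk above distinguishes root-relative paths, which `new URL(...)` cannot parse without a base, from protocol-relative and absolute URLs. A small sketch of the predicate, with invented sample paths:

// same predicate as `fullPath.startsWith('/') && !fullPath.startsWith('//')` above (illustrative sketch)
const isRootRelative = (p) => p.startsWith('/') && !p.startsWith('//');
console.assert(isRootRelative('/api/users') === true);                // root-relative: URL parsing is skipped
console.assert(isRootRelative('//cdn.example.com/lib.js') === false); // protocol-relative: treated as absolute
console.assert(isRootRelative('https://example.com/x') === false);    // absolute: parsed as before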
@@ -2602,11 +2703,11 @@ function dispatchFetch(config, resolve, reject) {
       headers.delete(contentLengthHeader)
     );
 
-    debugLog(`request: ${method} ${parsedUrl.toString()} (has body: ${!!body})`);
+    debugLog(`request: ${method} ${parsedUrl ? parsedUrl.toString() : fullPath} (has body: ${!!body})`);
     debugLog(`finalized headers`, Object.fromEntries(headers.entries()));
 
     // prep HTTP request
-    req = new FetchRequest(parsedUrl, {
+    req = new FetchRequest(parsedUrl || fullPath, {
       method,
       headers,
       body,
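With the fallback above, the request constructor receives either the parsed `URL` or the raw string path. A minimal sketch of that hand-off, assuming `FetchRequest` aliases the platform's fetch `Request` and using an invented path; in browsers a string target is resolved against the page's base URI by the `Request` constructor.

// illustrative sketch, not part of the diff
const parsedUrl = undefined;        // URL parsing was skipped for a root-relative path
const fullPath = '/api/users';
const req = new Request(parsedUrl || fullPath, { method: 'GET' });
// in a browser, req.url is the path resolved against the current origin, e.g. 'https://<origin>/api/users'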