-
Notifications
You must be signed in to change notification settings - Fork 304
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
[Backport 2.x] Add CI for Windows and MacOS platforms (#2190) #2205
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -8,14 +8,14 @@ env: | |
jobs: | ||
build: | ||
name: build | ||
runs-on: ubuntu-latest | ||
strategy: | ||
fail-fast: true | ||
fail-fast: false | ||
matrix: | ||
jdk: [11, 17] | ||
platform: ["ubuntu-latest", "windows-latest", "macos-latest"] | ||
runs-on: ${{ matrix.platform }} | ||
|
||
steps: | ||
|
||
- name: Set up JDK for build and test | ||
uses: actions/setup-java@v2 | ||
with: | ||
|
@@ -25,21 +25,14 @@ jobs: | |
- name: Checkout security | ||
uses: actions/checkout@v2 | ||
|
||
- name: Cache Gradle packages | ||
uses: actions/cache@v2 | ||
- name: Build and Test | ||
uses: gradle/gradle-build-action@v2 | ||
with: | ||
path: | | ||
~/.gradle/caches | ||
~/.gradle/wrapper | ||
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*', '**/gradle-wrapper.properties') }} | ||
restore-keys: | | ||
${{ runner.os }}-gradle- | ||
|
||
- name: Package | ||
run: ./gradlew clean build -Dbuild.snapshot=false -x test | ||
|
||
- name: Test | ||
run: OPENDISTRO_SECURITY_TEST_OPENSSL_OPT=true ./gradlew test -i | ||
arguments: | | ||
build test -Dbuild.snapshot=false | ||
-x spotlessCheck | ||
-x checkstyleMain | ||
-x checkstyleTest | ||
|
||
- name: Coverage | ||
uses: codecov/codecov-action@v1 | ||
|
@@ -50,13 +43,13 @@ jobs: | |
- uses: actions/upload-artifact@v3 | ||
if: always() | ||
with: | ||
name: ${{ matrix.jdk }}-reports | ||
name: ${{ matrix.platform }}-JDK${{ matrix.jdk }}-reports | ||
path: | | ||
./build/reports/ | ||
|
||
- name: check archive for debugging | ||
if: always() | ||
run: echo "Check the artifact ${{ matrix.jdk }}-reports.zip for detailed test results" | ||
run: echo "Check the artifact ${{ matrix.platform }}-JDK${{ matrix.jdk }}-reports for detailed test results" | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. Nice use of all the matrices 👍 |
||
|
||
backward-compatibility: | ||
runs-on: ubuntu-latest | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -46,9 +46,10 @@ | |
public final class SecurityUtils { | ||
|
||
protected final static Logger log = LogManager.getLogger(SecurityUtils.class); | ||
private static final Pattern ENV_PATTERN = Pattern.compile("\\$\\{env\\.([\\w]+)((\\:\\-)?[\\w]*)\\}"); | ||
private static final Pattern ENVBC_PATTERN = Pattern.compile("\\$\\{envbc\\.([\\w]+)((\\:\\-)?[\\w]*)\\}"); | ||
private static final Pattern ENVBASE64_PATTERN = Pattern.compile("\\$\\{envbase64\\.([\\w]+)((\\:\\-)?[\\w]*)\\}"); | ||
private static final String ENV_PATTERN_SUFFIX = "\\.([\\w=():\\-_]+?)(\\:\\-[\\w=():\\-_]*)?\\}"; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I know this was already here but maybe we could add a comment saying what this pattern means for readability? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. This is a backport PR, I'll create a separate small PR to update the comments around this file to make it easier to understand |
||
static final Pattern ENV_PATTERN = Pattern.compile("\\$\\{env" + ENV_PATTERN_SUFFIX); | ||
static final Pattern ENVBC_PATTERN = Pattern.compile("\\$\\{envbc" + ENV_PATTERN_SUFFIX); | ||
static final Pattern ENVBASE64_PATTERN = Pattern.compile("\\$\\{envbase64" + ENV_PATTERN_SUFFIX); | ||
public static Locale EN_Locale = forEN(); | ||
|
||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -34,6 +34,7 @@ | |
import org.apache.http.message.BasicHeader; | ||
import org.junit.Assert; | ||
import org.junit.Assume; | ||
import org.junit.Ignore; | ||
import org.junit.Test; | ||
|
||
import org.opensearch.action.admin.indices.alias.IndicesAliasesRequest; | ||
|
@@ -269,6 +270,7 @@ public void testSingle() throws Exception { | |
} | ||
|
||
@Test | ||
@Ignore // https://github.com/opensearch-project/security/issues/2194 | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. I think you removed this test in your next PR, haha. |
||
public void testSpecialUsernames() throws Exception { | ||
|
||
setup(); | ||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -366,7 +366,7 @@ public void testDlsWithMinDocCountZeroAggregations() throws Exception { | |
|
||
// Significant Text Aggregation is not impacted. | ||
// Non-admin user with setting "min_doc_count=0". Expected to only have access to buckets for dept_manager". | ||
String query3 = "{\"aggregations\":{\"significant_termX\":{\"significant_terms\":{\"field\":\"termX.keyword\",\"min_doc_count\":0}}}}"; | ||
String query3 = "{\"size\":100,\"aggregations\":{\"significant_termX\":{\"significant_terms\":{\"field\":\"termX.keyword\",\"min_doc_count\":0}}}}"; | ||
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more. What was the reason for swapping to a hard-coded implementation? There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Originally posted by @peternied in #2190 (comment) |
||
HttpResponse response5 = rh.executePostRequest("logs*/_search", query3, encodeBasicHeader("dept_manager", "password")); | ||
|
||
Assert.assertEquals(HttpStatus.SC_OK, response5.getStatusCode()); | ||
|
@@ -377,7 +377,7 @@ public void testDlsWithMinDocCountZeroAggregations() throws Exception { | |
Assert.assertFalse(response5.getBody(), response5.getBody().contains("\"termX\":\"E\"")); | ||
|
||
// Non-admin user without setting "min_doc_count". Expected to only have access to buckets for dept_manager". | ||
String query4 = "{\"aggregations\":{\"significant_termX\":{\"significant_terms\":{\"field\":\"termX.keyword\"}}}}"; | ||
String query4 = "{\"size\":100,\"aggregations\":{\"significant_termX\":{\"significant_terms\":{\"field\":\"termX.keyword\"}}}}"; | ||
|
||
HttpResponse response6 = rh.executePostRequest("logs*/_search", query4, encodeBasicHeader("dept_manager", "password")); | ||
|
||
|
@@ -410,7 +410,7 @@ public void testDlsWithMinDocCountZeroAggregations() throws Exception { | |
|
||
// Histogram Aggregation is not impacted. | ||
// Non-admin user with setting "min_doc_count=0". Expected to only have access to buckets for dept_manager". | ||
String query5 = "{\"aggs\":{\"amount\":{\"histogram\":{\"field\":\"amount\",\"interval\":1,\"min_doc_count\":0}}}}"; | ||
String query5 = "{\"size\":100,\"aggs\":{\"amount\":{\"histogram\":{\"field\":\"amount\",\"interval\":1,\"min_doc_count\":0}}}}"; | ||
|
||
HttpResponse response9 = rh.executePostRequest("logs*/_search", query5, encodeBasicHeader("dept_manager", "password")); | ||
|
||
|
@@ -422,7 +422,7 @@ public void testDlsWithMinDocCountZeroAggregations() throws Exception { | |
Assert.assertFalse(response9.getBody(), response9.getBody().contains("\"termX\":\"E\"")); | ||
|
||
// Non-admin user without setting "min_doc_count". Expected to only have access to buckets for dept_manager". | ||
String query6 = "{\"aggs\":{\"amount\":{\"histogram\":{\"field\":\"amount\",\"interval\":1}}}}"; | ||
String query6 = "{\"size\":100,\"aggs\":{\"amount\":{\"histogram\":{\"field\":\"amount\",\"interval\":1}}}}"; | ||
|
||
HttpResponse response10 = rh.executePostRequest("logs*/_search", query6, encodeBasicHeader("dept_manager", "password")); | ||
|
||
|
@@ -456,7 +456,7 @@ public void testDlsWithMinDocCountZeroAggregations() throws Exception { | |
|
||
// Date Histogram Aggregation is not impacted. | ||
// Non-admin user with setting "min_doc_count=0". Expected to only have access to buckets for dept_manager". | ||
String query7 = "{\"aggs\":{\"timestamp\":{\"date_histogram\":{\"field\":\"timestamp\",\"calendar_interval\":\"month\",\"min_doc_count\":0}}}}"; | ||
String query7 = "{\"size\":100,\"aggs\":{\"timestamp\":{\"date_histogram\":{\"field\":\"timestamp\",\"calendar_interval\":\"month\",\"min_doc_count\":0}}}}"; | ||
|
||
HttpResponse response13 = rh.executePostRequest("logs*/_search", query7, encodeBasicHeader("dept_manager", "password")); | ||
|
||
|
@@ -468,7 +468,7 @@ public void testDlsWithMinDocCountZeroAggregations() throws Exception { | |
Assert.assertFalse(response13.getBody(), response13.getBody().contains("\"termX\":\"E\"")); | ||
|
||
// Non-admin user without setting "min_doc_count". Expected to only have access to buckets for dept_manager". | ||
String query8 = "{\"aggs\":{\"timestamp\":{\"date_histogram\":{\"field\":\"timestamp\",\"calendar_interval\":\"month\"}}}}"; | ||
String query8 = "{\"size\":100,\"aggs\":{\"timestamp\":{\"date_histogram\":{\"field\":\"timestamp\",\"calendar_interval\":\"month\"}}}}"; | ||
|
||
HttpResponse response14 = rh.executePostRequest("logs*/_search", query8, encodeBasicHeader("dept_manager", "password")); | ||
|
||
|
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,73 @@ | ||
/* | ||
* SPDX-License-Identifier: Apache-2.0 | ||
* | ||
* The OpenSearch Contributors require contributions made to | ||
* this file be licensed under the Apache-2.0 license or a | ||
* compatible open source license. | ||
* | ||
* Modifications Copyright OpenSearch Contributors. See | ||
* GitHub history for details. | ||
*/ | ||
package org.opensearch.security.support; | ||
|
||
import java.util.Collection; | ||
import java.util.List; | ||
import java.util.function.Predicate; | ||
|
||
import org.junit.Test; | ||
|
||
import static org.hamcrest.MatcherAssert.assertThat; | ||
import static org.hamcrest.Matchers.equalTo; | ||
import static org.opensearch.security.support.SecurityUtils.ENVBASE64_PATTERN; | ||
import static org.opensearch.security.support.SecurityUtils.ENVBC_PATTERN; | ||
import static org.opensearch.security.support.SecurityUtils.ENV_PATTERN; | ||
|
||
public class SecurityUtilsTest { | ||
|
||
private final Collection<String> interestingEnvKeyNames = List.of( | ||
"=ExitCode", | ||
"=C:", | ||
"ProgramFiles(x86)", | ||
"INPUT_GRADLE-HOME-CACHE-CLEANUP", | ||
"MYENV", | ||
"MYENV:", | ||
"MYENV::" | ||
); | ||
private final Collection<String> namesFromThisRuntimeEnvironment = System.getenv().keySet(); | ||
|
||
@Test | ||
public void checkInterestingNamesForEnvPattern() { | ||
checkKeysWithPredicate(interestingEnvKeyNames, "env", ENV_PATTERN.asMatchPredicate()); | ||
} | ||
|
||
@Test | ||
public void checkRuntimeKeyNamesForEnvPattern() { | ||
checkKeysWithPredicate(namesFromThisRuntimeEnvironment, "env", ENV_PATTERN.asMatchPredicate()); | ||
} | ||
|
||
@Test | ||
public void checkInterestingNamesForEnvbcPattern() { | ||
checkKeysWithPredicate(interestingEnvKeyNames, "envbc", ENVBC_PATTERN.asMatchPredicate()); | ||
} | ||
|
||
@Test | ||
public void checkInterestingNamesForEnvBase64Pattern() { | ||
checkKeysWithPredicate(interestingEnvKeyNames, "envbase64", ENVBASE64_PATTERN.asMatchPredicate()); | ||
} | ||
|
||
private void checkKeysWithPredicate(Collection<String> keys, String predicateName, Predicate<String> predicate) { | ||
keys.forEach(envKeyName -> { | ||
final String prefixWithKeyName = "${" + predicateName + "." + envKeyName; | ||
|
||
final String baseKeyName = prefixWithKeyName + "}"; | ||
assertThat("Testing " + envKeyName + ", " + baseKeyName, | ||
predicate.test(baseKeyName), | ||
equalTo(true)); | ||
|
||
final String baseKeyNameWithDefault = prefixWithKeyName + ":-tTt}"; | ||
assertThat("Testing " + envKeyName + " with defaultValue, " + baseKeyNameWithDefault, | ||
predicate.test(baseKeyNameWithDefault), | ||
equalTo(true)); | ||
}); | ||
} | ||
} |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Why do we not want to fail-fast anymore? I believe this will now let the whole matrix operate even if one of the builds fails for a specific OS.
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
Fast failing helps free resources faster, but you build less confidence per build iteration. By forcing all builds to keep running, we are sure that all integration test runs are completed, and you can compare results across the different platforms. If we were optimizing for cost, we might stick with fast failures.
With the platform differences, bugs can be introduced into two platforms at once, impacting different tests. IMO it's more useful for troubleshooting to be able to see whether only one workflow was impacted (indicating a random failure) vs. many workflows (indicating a new bug was caught).