diff --git a/.drone.yml b/.drone.yml index e93dc2461..c19dcdc05 100644 --- a/.drone.yml +++ b/.drone.yml @@ -1,196 +1,142 @@ --- kind: pipeline name: default - -# Disable default clone -clone: - disable: true +type: docker steps: - # This clone step doesn't use "root" user - - name: clone - image: plugins/git:next - # Restore cache of downloaded dependencies - - name: restore cache + - name: restore-cache image: drillster/drone-volume-cache settings: restore: true mount: - .sbt - .ivy2 - - www/node_modules + - ui/node_modules + - ui/bower_components volumes: [{name: cache, path: /cache}] # Run project tests - - name: run tests and build stage + - name: run-tests image: thehiveproject/drone-scala-node commands: - - . ~/.nvm/nvm.sh - - sbt -Duser.home=$PWD test stage + - sbt -Duser.home=$PWD test:compile test # Build packages - - name: build packages + - name: build-packages image: thehiveproject/drone-scala-node settings: pgp_key: {from_secret: pgp_key} commands: - | + V=$(sbt -no-colors --error "print cortex/version" | tail -1) + if ( echo $V | grep -qi snapshot) + then + exit 1 + fi . ~/.nvm/nvm.sh [ -n "$PLUGIN_PGP_KEY" ] && gpg --batch --import - <<< $PLUGIN_PGP_KEY sbt -Duser.home=$PWD docker:stage debian:packageBin rpm:packageBin universal:packageBin + if ( echo $V | grep -qi rc ) + then + echo $( echo $V | sed -re 's/([0-9]+.[0-9]+.[0-9]+)-RC([0-9]+)-([0-9]+)/\1-RC\2,\1-RC\2-\3/' ) > .tags + else + echo $( echo $V | sed -re 's/([0-9]+).([0-9]+).([0-9]+)-([0-9]+)/\1,\1.\2,\1.\2.\3,\1.\2.\3-\4,latest/' ) > .tags + fi + echo $V > cortex-version.txt + mv target/rpm/RPMS/noarch/cortex*.rpm target/ + mv target/universal/cortex*.zip target/ when: event: [tag] # Save external libraries in cache - - name: save cache + - name: save-cache image: drillster/drone-volume-cache settings: rebuild: true + backend: "filesystem" mount: - .sbt - .ivy2 - - www/node_modules + - .cache + - ui/node_modules + - ui/bower_components volumes: [{name: cache, path: /cache}] + # Send packages using scp + - name: send packages + image: appleboy/drone-scp + settings: + host: {from_secret: package_host} + username: {from_secret: package_user} + key: {from_secret: package_key} + target: {from_secret: incoming_path} + source: + - target/cortex*.deb + - target/cortex*.rpm + - target/cortex*.zip + strip_components: 1 + when: + event: [tag] + + # Publish packages - name: publish packages - image: thehiveproject/drone-bintray + image: appleboy/drone-ssh settings: - user: {from_secret: bintray_user} - key: {from_secret: bintray_key} - subject: thehive-project - package: cortex + host: {from_secret: package_host} + user: {from_secret: package_user} + key: {from_secret: package_key} + publish_script: {from_secret: publish_script} commands: - - | - export PLUGIN_USER - export PLUGIN_KEY - export PLUGIN_SUBJECT - export PLUGIN_PACKAGE - export PLUGIN_VERSION=$(cut -d\" -f2 version.sbt) - echo "Publishing package version $PLUGIN_VERSION" - - if echo $PLUGIN_VERSION | grep -qvi -E \ - -e '^[0-9]+\.[0-9]+\.[0-9]+$' \ - -e '^[0-9]+\.[0-9]+\.[0-9]+-[0-9]+$' \ - -e '^[0-9]+\.[0-9]+\.[0-9]+-RC[0-9]+$'; then - echo The version $PLUGIN_VERSION has invalid format - exit 1 - fi - - CHANNEL=stable - if $(echo $PLUGIN_VERSION | grep -qi rc) - then - CHANNEL=beta - V=$(echo $PLUGIN_VERSION | sed -e 's/-\([rR][cC]\)/-0.1\1/') - DEB_FILE=target/cortex_$${V}_all.deb - RPM_FILE=target/rpm/RPMS/noarch/cortex-$${V}.noarch.rpm - else - DEB_FILE=target/cortex_$${PLUGIN_VERSION}_all.deb - 
RPM_FILE=target/rpm/RPMS/noarch/cortex-$${PLUGIN_VERSION}.noarch.rpm - fi - ZIP_FILE=target/universal/cortex-$${PLUGIN_VERSION}.zip - - upload \ - --file $DEB_FILE \ - --repo debian-beta \ - --extra-param deb_distribution=any \ - --extra-param deb_component=main \ - --extra-param deb_architecture=all - - [ $CHANNEL = stable ] && upload \ - --file $DEB_FILE \ - --repo debian-stable \ - --extra-param deb_distribution=any \ - --extra-param deb_component=main \ - --extra-param deb_architecture=all - - upload \ - --file $RPM_FILE \ - --repo rpm-beta - - [ $CHANNEL = stable ] && upload \ - --file $RPM_FILE \ - --repo rpm-stable - - upload \ - --file $ZIP_FILE \ - --repo binary - - LATEST_VERSION=latest - [ $CHANNEL = beta ] && LATEST_VERSION=latest-beta - - removeVersion \ - --repo binary \ - --version $LATEST_VERSION - - upload \ - --file $ZIP_FILE \ - --repo binary \ - --version $LATEST_VERSION \ - --dest-file cortex-$${LATEST_VERSION}.zip + - PLUGIN_SCRIPT="bash $PLUGIN_PUBLISH_SCRIPT cortex $(cat cortex-version.txt)" /bin/drone-ssh when: event: [tag] - # Publish docker image + # Publish docker image on Docker Hub - name: docker image: plugins/docker settings: context: target/docker/stage dockerfile: target/docker/stage/Dockerfile repo: thehiveproject/cortex - auto_tag: true username: {from_secret: docker_username} password: {from_secret: docker_password} when: event: [tag] - # Deploy binaries in integration environment - - name: copy binaries in integration environment - image: appleboy/drone-scp - settings: - host: {from_secret: deploy_beta_host} - username: {from_secret: deploy_username} - key: {from_secret: deploy_key} - target: ./cortex-builds/${DRONE_BUILD_NUMBER} - source: target/universal/stage - strip_components: 3 - when: - branch: [develop] - - - name: deploy binaries in integration environment - image: appleboy/drone-ssh - settings: - host: {from_secret: deploy_beta_host} - username: {from_secret: deploy_username} - key: {from_secret: deploy_key} - script: - - ./start cortex ${DRONE_BUILD_NUMBER} - when: - branch: [develop] - - # Deploy binaries in staging environment - - name: copy binaries in staging environment - image: appleboy/drone-scp + # Publish docker image on Harbor + - name: harbor + image: plugins/docker settings: - host: {from_secret: deploy_stable_host} - username: {from_secret: deploy_username} - key: {from_secret: deploy_key} - target: ./cortex-builds/${DRONE_BUILD_NUMBER} - source: target/universal/stage - strip_components: 3 + context: target/docker/stage + dockerfile: target/docker/stage/Dockerfile + registry: {from_secret: harbor_registry} + repo: {from_secret: harbor_repo} + username: {from_secret: harbor_username} + password: {from_secret: harbor_password} when: - branch: [master] + event: [tag] - - name: deploy binaries in staging environment - image: appleboy/drone-ssh + - name: send message + image: thehiveproject/drone_keybase settings: - host: {from_secret: deploy_stable_host} - username: {from_secret: deploy_username} - key: {from_secret: deploy_key} - script: - - ./start cortex ${DRONE_BUILD_NUMBER} + username: {from_secret: keybase_username} + paperkey: {from_secret: keybase_paperkey} + channel: {from_secret: keybase_channel} + commands: + - | + keybase oneshot -u "$PLUGIN_USERNAME" --paperkey "$PLUGIN_PAPERKEY" + URL="$DRONE_SYSTEM_PROTO://$DRONE_SYSTEM_HOST/$DRONE_REPO/$DRONE_BUILD_NUMBER" + if [ $DRONE_BUILD_STATUS = "success" ] + then + keybase chat send "$PLUGIN_CHANNEL" ":white_check_mark: $DRONE_REPO: build succeeded $URL" + else + keybase chat 
send "$PLUGIN_CHANNEL" ":x: $DRONE_REPO: build failed $URL" + fi when: - branch: [master] + status: + - success + - failure volumes: - name: cache diff --git a/.scalafmt.conf b/.scalafmt.conf index 4885c26f6..3843d59db 100644 --- a/.scalafmt.conf +++ b/.scalafmt.conf @@ -1,4 +1,5 @@ -version = "2.0.0-RC7" +version = 2.3.2 +project.git = true align = more # For pretty alignment. assumeStandardLibraryStripMargin = true style = defaultWithAlign @@ -6,9 +7,8 @@ maxColumn = 150 align.openParenCallSite = false align.openParenDefnSite = false -newlines.alwaysBeforeTopLevelStatements = true +newlines.alwaysBeforeTopLevelStatements = false rewrite.rules = [ - # ExpandImportSelectors RedundantBraces RedundantParens SortModifiers @@ -20,7 +20,7 @@ includeCurlyBraceInSelectChains = true includeNoParensInSelectChains = true rewriteTokens { - "=>" : "⇒" - "<-" : "←" - "->": "→" + "⇒" : "=>" + "←" : "<-" + "→" : "->" } diff --git a/CHANGELOG.md b/CHANGELOG.md index 963af2e24..5fd324029 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,198 +1,161 @@ # Change Log -## [3.0.1](https://github.com/TheHive-Project/Cortex/tree/3.0.1) (2020-01-20) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/3.0.0...HEAD) +## [3.1.0-RC1](https://github.com/TheHive-Project/Cortex/milestone/21) (2020-08-13) **Implemented enhancements:** -- Cortex logs the Play secret key at startup. [\#244](https://github.com/TheHive-Project/Cortex/issues/244) -- Analyzer reports "no output" when it fails [\#241](https://github.com/TheHive-Project/Cortex/issues/241) -- Docker image has many CVE's open against it [\#238](https://github.com/TheHive-Project/Cortex/issues/238) -- Remove Elasticsearch cluster configuration option [\#230](https://github.com/TheHive-Project/Cortex/pull/230) ([adl1995](https://github.com/adl1995)) -- Handle second/minute-rates limits on Flavors and Analyzers [\#164](https://github.com/TheHive-Project/Cortex/issues/164) +- Support of ElasticSearch 7 [\#279](https://github.com/TheHive-Project/Cortex/issues/279) **Fixed bugs:** -- Fix error message display for failed analyzers/responders [\#243](https://github.com/TheHive-Project/Cortex/issues/243) -- Remove reference to google fonts [\#242](https://github.com/TheHive-Project/Cortex/issues/242) -- Encoding issue causes invalid format for catalog file [\#240](https://github.com/TheHive-Project/Cortex/issues/240) -- Missing dependency for cluster [\#239](https://github.com/TheHive-Project/Cortex/issues/239) -- Old non-existent analysers showing in Cortex after an upgrade [\#234](https://github.com/TheHive-Project/Cortex/issues/234) +- OAuth2 SSO Login Broken [\#264](https://github.com/TheHive-Project/Cortex/issues/264) -## [3.0.0](https://github.com/TheHive-Project/Cortex/tree/3.0.0) (2019-09-05) +## [3.0.1](https://github.com/TheHive-Project/Cortex/milestone/24) (2020-04-24) -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/3.0.0-RC4...3.0.0) +**Implemented enhancements:** -**Fixed bugs:** +- Handle second/minute-rates limits on Flavors and Analyzers [\#164](https://github.com/TheHive-Project/Cortex/issues/164) +- Docker image has many CVE's open against it [\#238](https://github.com/TheHive-Project/Cortex/issues/238) +- Analyzer reports "no output" when it fails [\#241](https://github.com/TheHive-Project/Cortex/issues/241) +- Cortex logs the Play secret key at startup. 
[\#244](https://github.com/TheHive-Project/Cortex/issues/244) -- cortex 3.0.0-RC4 container : StreamSrv error popup spamming the setup page [\#210](https://github.com/TheHive-Project/Cortex/issues/210) +**Fixed bugs:** -## [3.0.0-RC4](https://github.com/TheHive-Project/Cortex/tree/3.0.0-RC4) (2019-07-10) +- Old non-existent analysers showing in Cortex after an upgrade [\#234](https://github.com/TheHive-Project/Cortex/issues/234) +- Missing dependency for cluster [\#239](https://github.com/TheHive-Project/Cortex/issues/239) +- Encoding issue causes invalid format for catalog file [\#240](https://github.com/TheHive-Project/Cortex/issues/240) +- Remove reference to google fonts [\#242](https://github.com/TheHive-Project/Cortex/issues/242) +- Fix error message display for failed analyzers/responders [\#243](https://github.com/TheHive-Project/Cortex/issues/243) -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/3.0.0-RC3...3.0.0-RC4) +## [3.0.0](https://github.com/TheHive-Project/Cortex/milestone/23) (2019-09-05) **Fixed bugs:** -- Yara analyzer configuration dialog broken [\#134](https://github.com/TheHive-Project/Cortex/issues/134) -- Responder run displayed as Analyzer run [\#207](https://github.com/TheHive-Project/Cortex/issues/207) -- docker version of cortex breaks when you don't create a user immediately [\#204](https://github.com/TheHive-Project/Cortex/issues/204) -- Login error after Cortex upgrade to 3 [\#199](https://github.com/TheHive-Project/Cortex/issues/199) +- cortex 3.0.0-RC4 container : StreamSrv error popup spamming the setup page [\#210](https://github.com/TheHive-Project/Cortex/issues/210) + +## [3.0.0-RC4](https://github.com/TheHive-Project/Cortex/milestone/22) (2019-07-11) **Closed issues:** -- PassiveTotal SSL Certificate History analyzer always report at least one record, even if there isn't one [\#209](https://github.com/TheHive-Project/Cortex/issues/209) -- Custom Responder not showing up in Responders Config Tab [\#201](https://github.com/TheHive-Project/Cortex/issues/201) -- Can't enable some "free" Analyzers [\#200](https://github.com/TheHive-Project/Cortex/issues/200) -- docker version of cortex prints a lot of errors for auth failures [\#205](https://github.com/TheHive-Project/Cortex/issues/205) - dockerhub sample uses the wrong port [\#203](https://github.com/TheHive-Project/Cortex/issues/203) +- docker version of cortex prints a lot of errors for auth failures [\#205](https://github.com/TheHive-Project/Cortex/issues/205) + +**Fixed bugs:** -## [3.0.0-RC3](https://github.com/TheHive-Project/Cortex/tree/3.0.0-RC3) (2019-06-05) +- Login error after Cortex upgrade to 3 [\#199](https://github.com/TheHive-Project/Cortex/issues/199) +- docker version of cortex breaks when you don't create a user immediately [\#204](https://github.com/TheHive-Project/Cortex/issues/204) +- Responder run displayed as Analyzer run [\#207](https://github.com/TheHive-Project/Cortex/issues/207) -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/3.0.0-RC2...3.0.0-RC3) +## [3.0.0-RC3](https://github.com/TheHive-Project/Cortex/milestone/20) (2019-06-28) **Implemented enhancements:** -- Improve job details page [\#195](https://github.com/TheHive-Project/Cortex/issues/195) -- Add support of ElasticSearch 6 [\#191](https://github.com/TheHive-Project/Cortex/issues/191) - Upgrade frontend libraries [\#190](https://github.com/TheHive-Project/Cortex/issues/190) +- Add support of ElasticSearch 6 [\#191](https://github.com/TheHive-Project/Cortex/issues/191) +- Improve 
job details page [\#195](https://github.com/TheHive-Project/Cortex/issues/195) **Fixed bugs:** - Get user detials via API is available to non-admin users [\#194](https://github.com/TheHive-Project/Cortex/issues/194) -## [3.0.0-RC2](https://github.com/TheHive-Project/Cortex/tree/3.0.0-RC2) (2019-05-03) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/3.0.0-RC1...3.0.0-RC2) +## [3.0.0-RC2](https://github.com/TheHive-Project/Cortex/milestone/19) (2019-05-03) **Fixed bugs:** -- Unable to load Analyzers with 3.0.0 [\#185](https://github.com/TheHive-Project/Cortex/issues/185) -- Cortex will fail to run analyzers [\#182](https://github.com/TheHive-Project/Cortex/issues/182) - Docker container exposes tcp/9000 instead of tcp/9001 [\#166](https://github.com/TheHive-Project/Cortex/issues/166) +- Cortex will fail to run analyzers [\#182](https://github.com/TheHive-Project/Cortex/issues/182) +- Unable to load Analyzers with 3.0.0 [\#185](https://github.com/TheHive-Project/Cortex/issues/185) -## [3.0.0-RC1](https://github.com/TheHive-Project/Cortex/tree/3.0.0-RC1) (2019-04-05) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.1.3...3.0.0-RC1) +## [3.0.0-RC1](https://github.com/TheHive-Project/Cortex/milestone/14) (2019-05-02) **Implemented enhancements:** -- Remove size limitations [\#178](https://github.com/TheHive-Project/Cortex/issues/178) -- Collapse job error messages by default in job history [\#171](https://github.com/TheHive-Project/Cortex/issues/171) +- File extraction [\#120](https://github.com/TheHive-Project/Cortex/issues/120) +- Single sign-on support for Cortex [\#165](https://github.com/TheHive-Project/Cortex/issues/165) - Update Copyright with year 2019 [\#168](https://github.com/TheHive-Project/Cortex/issues/168) +- Collapse job error messages by default in job history [\#171](https://github.com/TheHive-Project/Cortex/issues/171) +- Provide analyzers and responders packaged with docker [\#175](https://github.com/TheHive-Project/Cortex/issues/175) +- Use files to communicate with analyzer/responder [\#176](https://github.com/TheHive-Project/Cortex/issues/176) +- Remove size limitations [\#178](https://github.com/TheHive-Project/Cortex/issues/178) **Fixed bugs:** -- SSO: Authentication module not found [\#181](https://github.com/TheHive-Project/Cortex/issues/181) - Akka Dispatcher Blocked [\#170](https://github.com/TheHive-Project/Cortex/issues/170) +- SSO: Authentication module not found [\#181](https://github.com/TheHive-Project/Cortex/issues/181) -**Closed issues:** - -- Use files to communicate with analyzer/responder [\#176](https://github.com/TheHive-Project/Cortex/issues/176) -- Provide analyzers and responders packaged with docker [\#175](https://github.com/TheHive-Project/Cortex/issues/175) -- Single sign-on support for Cortex [\#165](https://github.com/TheHive-Project/Cortex/issues/165) -- File extraction [\#120](https://github.com/TheHive-Project/Cortex/issues/120) - -## [2.1.3](https://github.com/TheHive-Project/Cortex/tree/2.1.3) (2018-12-20) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.1.2...2.1.3) +## [2.1.3](https://github.com/TheHive-Project/Cortex/milestone/18) (2019-02-05) **Implemented enhancements:** -- Add configuration for drone continuous integration [\#156](https://github.com/TheHive-Project/Cortex/issues/156) - Add PAP property to jobs list [\#146](https://github.com/TheHive-Project/Cortex/issues/146) - -**Fixed bugs:** - -- Wrong checks of role when an user is created 
[\#158](https://github.com/TheHive-Project/Cortex/issues/158) -- Unable to disable invalid responders [\#157](https://github.com/TheHive-Project/Cortex/issues/157) -- PAP field is ignored from job modal [\#152](https://github.com/TheHive-Project/Cortex/issues/152) -- SinkDB analyzer could not find DIG in the Cortex docker image [\#147](https://github.com/TheHive-Project/Cortex/issues/147) -- GUI Search Function is broken [\#145](https://github.com/TheHive-Project/Cortex/issues/145) +- Add configuration for drone continuous integration [\#156](https://github.com/TheHive-Project/Cortex/issues/156) **Closed issues:** -- Systemd: cortex.service: Failed with result 'exit-code'. [\#155](https://github.com/TheHive-Project/Cortex/issues/155) - conf/logback.xml: Rotate logs [\#62](https://github.com/TheHive-Project/Cortex/issues/62) +- Build Error on NodeJS 8 [\#142](https://github.com/TheHive-Project/Cortex/issues/142) + +**Fixed bugs:** -## [2.1.2](https://github.com/TheHive-Project/Cortex/tree/2.1.2) (2018-10-12) +- GUI Search Function is broken [\#145](https://github.com/TheHive-Project/Cortex/issues/145) +- SinkDB analyzer could not find DIG in the Cortex docker image [\#147](https://github.com/TheHive-Project/Cortex/issues/147) +- PAP field is ignored from job modal [\#152](https://github.com/TheHive-Project/Cortex/issues/152) +- Unable to disable invalid responders [\#157](https://github.com/TheHive-Project/Cortex/issues/157) +- Wrong checks of role when an user is created [\#158](https://github.com/TheHive-Project/Cortex/issues/158) -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.1.1...2.1.2) +## [2.1.2](https://github.com/TheHive-Project/Cortex/milestone/17) (2018-10-12) **Fixed bugs:** - findSimilarJob function broken [\#144](https://github.com/TheHive-Project/Cortex/issues/144) -## [2.1.1](https://github.com/TheHive-Project/Cortex/tree/2.1.1) (2018-10-09) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.1.0...2.1.1) +## [2.1.1](https://github.com/TheHive-Project/Cortex/milestone/16) (2018-10-12) **Implemented enhancements:** -- Change Debian dependencies [\#141](https://github.com/TheHive-Project/Cortex/issues/141) -- Allow Cortex to use a custom root context [\#140](https://github.com/TheHive-Project/Cortex/issues/140) - Publish stable versions in beta package channels [\#138](https://github.com/TheHive-Project/Cortex/issues/138) +- Allow Cortex to use a custom root context [\#140](https://github.com/TheHive-Project/Cortex/issues/140) +- Change Debian dependencies [\#141](https://github.com/TheHive-Project/Cortex/issues/141) **Fixed bugs:** -- Fix Cache column in analyzers admin page [\#139](https://github.com/TheHive-Project/Cortex/issues/139) -- RPM update replace configuration file [\#137](https://github.com/TheHive-Project/Cortex/issues/137) - Console output should not be logged in syslog [\#136](https://github.com/TheHive-Project/Cortex/issues/136) +- RPM update replace configuration file [\#137](https://github.com/TheHive-Project/Cortex/issues/137) +- Fix Cache column in analyzers admin page [\#139](https://github.com/TheHive-Project/Cortex/issues/139) -## [2.1.0](https://github.com/TheHive-Project/Cortex/tree/2.1.0) (2018-09-25) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.1.0-RC1...2.1.0) +## [2.1.0](https://github.com/TheHive-Project/Cortex/milestone/15) (2018-09-25) **Implemented enhancements:** -- Show PAP value in the Org \> Analyzers screen [\#124](https://github.com/TheHive-Project/Cortex/issues/124) - 
Display cache configuration in analyzer admin page [\#123](https://github.com/TheHive-Project/Cortex/issues/123) +- Show PAP value in the Org > Analyzers screen [\#124](https://github.com/TheHive-Project/Cortex/issues/124) **Fixed bugs:** -- MISP API fails [\#109](https://github.com/TheHive-Project/Cortex/issues/109) -- File_Info issue [\#53](https://github.com/TheHive-Project/Cortex/issues/53) -- Temporary files are not removed at the end of job [\#129](https://github.com/TheHive-Project/Cortex/issues/129) - MISP fails to run analyzers [\#128](https://github.com/TheHive-Project/Cortex/issues/128) +- Temporary files are not removed at the end of job [\#129](https://github.com/TheHive-Project/Cortex/issues/129) -**Merged pull requests:** - -- Update resolvers in build.sbt to contain Maven as a dependency [\#130](https://github.com/TheHive-Project/Cortex/pull/130) ([adl1995](https://github.com/adl1995)) - -## [2.1.0-RC1](https://github.com/TheHive-Project/Cortex/tree/2.1.0-RC1) (2018-07-31) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.0.4...2.1.0-RC1) +## [2.1.0-RC1](https://github.com/TheHive-Project/Cortex/milestone/9) (2018-08-22) **Implemented enhancements:** +- PAP as an analyzer restriction [\#65](https://github.com/TheHive-Project/Cortex/issues/65) +- Consider providing checksums for the release files [\#105](https://github.com/TheHive-Project/Cortex/issues/105) +- Automated response via Cortex [\#110](https://github.com/TheHive-Project/Cortex/issues/110) - New TheHive-Project repository [\#112](https://github.com/TheHive-Project/Cortex/issues/112) -**Fixed bugs:** - -- Analyzer Configuration Only Showing Global Configuration [\#104](https://github.com/TheHive-Project/Cortex/issues/104) -- First analyze of a "file" always fail, must re-run the analyze a second time [\#117](https://github.com/TheHive-Project/Cortex/issues/117) -- Analyzers filter in Jobs History view is limited to 25 analyzers [\#116](https://github.com/TheHive-Project/Cortex/issues/116) -- Fix redirection from Migration page to login on 401 error [\#114](https://github.com/TheHive-Project/Cortex/issues/114) - **Closed issues:** -- Automatic observables extraction from analysis reports. 
[\#111](https://github.com/TheHive-Project/Cortex/issues/111) -- ImportError: No module named 'cortexutils' on V2.0.4 [\#102](https://github.com/TheHive-Project/Cortex/issues/102) -- Error occur from thehive project request to cortex project [\#101](https://github.com/TheHive-Project/Cortex/issues/101) -- Analyzers disappear after deactivation and can not get enabled [\#98](https://github.com/TheHive-Project/Cortex/issues/98) -- Application.conf doesn't have Yeti config nor allows for API Auth [\#54](https://github.com/TheHive-Project/Cortex/issues/54) -- endless loop of cortex analyser call [\#36](https://github.com/TheHive-Project/Cortex/issues/36) -- Automated response via Cortex [\#110](https://github.com/TheHive-Project/Cortex/issues/110) -- Consider providing checksums for the release files [\#105](https://github.com/TheHive-Project/Cortex/issues/105) -- PAP as an analyzer restriction [\#65](https://github.com/TheHive-Project/Cortex/issues/65) +- Unable to update user [\#106](https://github.com/TheHive-Project/Cortex/issues/106) +- Refreshing analyzers does not refresh definition if already defined [\#115](https://github.com/TheHive-Project/Cortex/issues/115) -**Merged pull requests:** - -- Update GitHub path [\#100](https://github.com/TheHive-Project/Cortex/pull/100) ([saadkadhi](https://github.com/saadkadhi)) +**Fixed bugs:** -## [2.0.4](https://github.com/TheHive-Project/Cortex/tree/2.0.4) (2018-04-13) +- Fix redirection from Migration page to login on 401 error [\#114](https://github.com/TheHive-Project/Cortex/issues/114) +- Analyzers filter in Jobs History view is limited to 25 analyzers [\#116](https://github.com/TheHive-Project/Cortex/issues/116) +- First analyze of a "file" always fail, must re-run the analyze a second time [\#117](https://github.com/TheHive-Project/Cortex/issues/117) -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.0.3...2.0.4) +## [2.0.4](https://github.com/TheHive-Project/Cortex/milestone/13) (2018-04-13) **Implemented enhancements:** @@ -200,204 +163,128 @@ **Fixed bugs:** -- Strictly filter the list of analyzers in the run dialog [\#95](https://github.com/TheHive-Project/Cortex/issues/95) -- Updating users by orgAdmin users fails silently [\#94](https://github.com/TheHive-Project/Cortex/issues/94) -- Fix analyzer configurations icons [\#93](https://github.com/TheHive-Project/Cortex/issues/93) -- Wrong page redirection [\#92](https://github.com/TheHive-Project/Cortex/issues/92) -- Sort analyzers list by name [\#91](https://github.com/TheHive-Project/Cortex/issues/91) -- Cortex 2.0.3 docker container having cortex analyzer errors [\#90](https://github.com/TheHive-Project/Cortex/issues/90) - Install python3 requirements for analyzers in public docker image [\#58](https://github.com/TheHive-Project/Cortex/issues/58) +- Cortex 2.0.3 docker container having cortex analyzer errors [\#90](https://github.com/TheHive-Project/Cortex/issues/90) +- Sort analyzers list by name [\#91](https://github.com/TheHive-Project/Cortex/issues/91) +- Wrong page redirection [\#92](https://github.com/TheHive-Project/Cortex/issues/92) +- Fix analyzer configurations icons [\#93](https://github.com/TheHive-Project/Cortex/issues/93) +- Updating users by orgAdmin users fails silently [\#94](https://github.com/TheHive-Project/Cortex/issues/94) +- Strictly filter the list of analyzers in the run dialog [\#95](https://github.com/TheHive-Project/Cortex/issues/95) -**Closed issues:** - -- Insufficient Rights To Perform This Action 
[\#87](https://github.com/TheHive-Project/Cortex/issues/87) - -## [2.0.3](https://github.com/TheHive-Project/Cortex/tree/2.0.3) (2018-04-09) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.0.2...2.0.3) +## [2.0.3](https://github.com/TheHive-Project/Cortex/milestone/12) (2018-04-12) **Implemented enhancements:** -- Allow arbitrary parameters for a job [\#86](https://github.com/TheHive-Project/Cortex/issues/86) +- Allow configuring auto artifacts extraction per analyzer [\#80](https://github.com/TheHive-Project/Cortex/issues/80) - Change of global config for proxy is not reflected in analyzer's configurations [\#81](https://github.com/TheHive-Project/Cortex/issues/81) +- Display existing analyzers with invalid definition [\#82](https://github.com/TheHive-Project/Cortex/issues/82) +- Allow specifying a cache period per analyzer [\#85](https://github.com/TheHive-Project/Cortex/issues/85) +- Allow arbitrary parameters for a job [\#86](https://github.com/TheHive-Project/Cortex/issues/86) **Fixed bugs:** +- Version Upgrade of Analyzer makes all Analyzers invisible for TheHive (Cortex2) [\#75](https://github.com/TheHive-Project/Cortex/issues/75) - Refresh Analyzers button not working [\#83](https://github.com/TheHive-Project/Cortex/issues/83) -- Version Upgrade of Analyzer makes all Analyzers invisible for TheHive \(Cortex2\) [\#75](https://github.com/TheHive-Project/Cortex/issues/75) - -**Closed issues:** -- Allow specifying a cache period per analyzer [\#85](https://github.com/TheHive-Project/Cortex/issues/85) -- Display existing analyzers with invalid definition [\#82](https://github.com/TheHive-Project/Cortex/issues/82) -- Allow configuring auto artifacts extraction per analyzer [\#80](https://github.com/TheHive-Project/Cortex/issues/80) - -## [2.0.2](https://github.com/TheHive-Project/Cortex/tree/2.0.2) (2018-04-04) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.0.1...2.0.2) +## [2.0.2](https://github.com/TheHive-Project/Cortex/milestone/11) (2018-04-04) **Fixed bugs:** -- Coretxutils and TypeError: argument of type 'bool' is not iterable [\#73](https://github.com/TheHive-Project/Cortex/issues/73) -- Silently failure when ElasticSearch is unreachable [\#76](https://github.com/TheHive-Project/Cortex/issues/76) -- Unable to disable analyzers [\#72](https://github.com/TheHive-Project/Cortex/issues/72) -- Cortex 2 is not passing proxy variable to analyzers [\#71](https://github.com/TheHive-Project/Cortex/issues/71) - Session collision when TheHive & Cortex 2 share the same URL [\#70](https://github.com/TheHive-Project/Cortex/issues/70) +- Cortex 2 is not passing proxy variable to analyzers [\#71](https://github.com/TheHive-Project/Cortex/issues/71) +- Unable to disable analyzers [\#72](https://github.com/TheHive-Project/Cortex/issues/72) +- Silently failure when ElasticSearch is unreachable [\#76](https://github.com/TheHive-Project/Cortex/issues/76) -## [2.0.1](https://github.com/TheHive-Project/Cortex/tree/2.0.1) (2018-03-30) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/2.0.0...2.0.1) +## [2.0.1](https://github.com/TheHive-Project/Cortex/milestone/10) (2018-03-30) **Fixed bugs:** -- File upload component not working [\#69](https://github.com/TheHive-Project/Cortex/issues/69) -- Packages contain obsolete configuration sample [\#68](https://github.com/TheHive-Project/Cortex/issues/68) - User can't change his password [\#67](https://github.com/TheHive-Project/Cortex/issues/67) +- Packages contain obsolete configuration sample 
[\#68](https://github.com/TheHive-Project/Cortex/issues/68) +- File upload component not working [\#69](https://github.com/TheHive-Project/Cortex/issues/69) -## [2.0.0](https://github.com/TheHive-Project/Cortex/tree/2.0.0) (2018-03-30) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.1.4...2.0.0) +## [2.0.0](https://github.com/TheHive-Project/Cortex/milestone/1) (2018-03-30) **Implemented enhancements:** +- Provide Secret Key auth to upstream service [\#2](https://github.com/TheHive-Project/Cortex/issues/2) +- Provide way to reload conf file for new API keys without shutdown. [\#3](https://github.com/TheHive-Project/Cortex/issues/3) +- Provide alternative paths for analyzers in addition to standard path. [\#4](https://github.com/TheHive-Project/Cortex/issues/4) +- Persistence and Report Caching [\#5](https://github.com/TheHive-Project/Cortex/issues/5) +- Limit Rates and Respect Quotas [\#6](https://github.com/TheHive-Project/Cortex/issues/6) +- Local, LDAP, AD and API Key Authentication [\#7](https://github.com/TheHive-Project/Cortex/issues/7) - Display analyzers only if necessary configuration values are set [\#14](https://github.com/TheHive-Project/Cortex/issues/14) **Fixed bugs:** - Error when clicking out of the "New Analysis" box [\#48](https://github.com/TheHive-Project/Cortex/issues/48) -**Closed issues:** +## [1.1.4](https://github.com/TheHive-Project/Cortex/milestone/8) (2017-09-15) -- AMD64 REPO 404 [\#64](https://github.com/TheHive-Project/Cortex/issues/64) -- Unable for Cortex to connected to MISP [\#61](https://github.com/TheHive-Project/Cortex/issues/61) -- Cortex crashed after a OutOfMemoryError [\#60](https://github.com/TheHive-Project/Cortex/issues/60) -- Malwareconfig Lookup and Yara Rule Additions [\#57](https://github.com/TheHive-Project/Cortex/issues/57) -- Shodan Analyzer Fails - Module cortexutils Not Found [\#55](https://github.com/TheHive-Project/Cortex/issues/55) -- API: Resource not found by Assets controller [\#47](https://github.com/TheHive-Project/Cortex/issues/47) -- Wrong MISP config in conf/application.sample [\#45](https://github.com/TheHive-Project/Cortex/issues/45) -- Local, LDAP, AD and API Key Authentication [\#7](https://github.com/TheHive-Project/Cortex/issues/7) -- Limit Rates and Respect Quotas [\#6](https://github.com/TheHive-Project/Cortex/issues/6) -- Persistence and Report Caching [\#5](https://github.com/TheHive-Project/Cortex/issues/5) -- Provide alternative paths for analyzers in addition to standard path. [\#4](https://github.com/TheHive-Project/Cortex/issues/4) -- Provide way to reload conf file for new API keys without shutdown. 
[\#3](https://github.com/TheHive-Project/Cortex/issues/3) -- Provide Secret Key auth to upstream service [\#2](https://github.com/TheHive-Project/Cortex/issues/2) - -**Merged pull requests:** - -- Add proxy configuration block [\#52](https://github.com/TheHive-Project/Cortex/pull/52) ([cemasirt](https://github.com/cemasirt)) -- Fixed Typo [\#46](https://github.com/TheHive-Project/Cortex/pull/46) ([steoleary](https://github.com/steoleary)) -- Adding WOT config sample [\#43](https://github.com/TheHive-Project/Cortex/pull/43) ([mthlvt](https://github.com/mthlvt)) - -## [1.1.4](https://github.com/TheHive-Project/Cortex/tree/1.1.4) (2017-09-15) +**Implemented enhancements:** -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.1.3...1.1.4) +- Disable analyzer in configuration file [\#32](https://github.com/TheHive-Project/Cortex/issues/32) +- Group ownership in Docker image prevents running on OpenShift [\#42](https://github.com/TheHive-Project/Cortex/issues/42) **Fixed bugs:** -- Display a error notification on analyzer start fail [\#39](https://github.com/TheHive-Project/Cortex/issues/39) - Cortex removes the input details from failure reports [\#38](https://github.com/TheHive-Project/Cortex/issues/38) +- Display a error notification on analyzer start fail [\#39](https://github.com/TheHive-Project/Cortex/issues/39) -**Closed issues:** - -- Group ownership in Docker image prevents running on OpenShift [\#42](https://github.com/TheHive-Project/Cortex/issues/42) -- Disable analyzer in configuration file [\#32](https://github.com/TheHive-Project/Cortex/issues/32) - -## [1.1.3](https://github.com/TheHive-Project/Cortex/tree/1.1.3) (2017-06-14) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/debian/1.1.2-2...1.1.3) +## [1.1.3](https://github.com/TheHive-Project/Cortex/milestone/7) (2017-06-29) **Fixed bugs:** -- Problem Start Cortex on Ubuntu 16.04 [\#35](https://github.com/TheHive-Project/Cortex/issues/35) - Error when parsing analyzer failure report [\#33](https://github.com/TheHive-Project/Cortex/issues/33) +- Problem Start Cortex on Ubuntu 16.04 [\#35](https://github.com/TheHive-Project/Cortex/issues/35) -## [debian/1.1.2-2](https://github.com/TheHive-Project/Cortex/tree/debian/1.1.2-2) (2017-05-24) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.1.2...debian/1.1.2-2) - -## [1.1.2](https://github.com/TheHive-Project/Cortex/tree/1.1.2) (2017-05-24) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/debian/1.1.1-2...1.1.2) +## [1.1.2](https://github.com/TheHive-Project/Cortex/milestone/6) (2017-06-12) **Implemented enhancements:** -- Add page loader [\#30](https://github.com/TheHive-Project/Cortex/issues/30) - Initialize MISP modules at startup [\#28](https://github.com/TheHive-Project/Cortex/issues/28) +- Add page loader [\#30](https://github.com/TheHive-Project/Cortex/issues/30) **Fixed bugs:** -- jobstatus from jobs within cortex are not updated when status changes [\#31](https://github.com/TheHive-Project/Cortex/issues/31) -- Cortex and MISP unclear and error-loop [\#29](https://github.com/TheHive-Project/Cortex/issues/29) - Error 500 in TheHive when a job is submited to Cortex [\#27](https://github.com/TheHive-Project/Cortex/issues/27) +- Cortex and MISP unclear and error-loop [\#29](https://github.com/TheHive-Project/Cortex/issues/29) +- jobstatus from jobs within cortex are not updated when status changes [\#31](https://github.com/TheHive-Project/Cortex/issues/31) -## 
[debian/1.1.1-2](https://github.com/TheHive-Project/Cortex/tree/debian/1.1.1-2) (2017-05-19) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/rpm/1.1.1-2...debian/1.1.1-2) - -## [rpm/1.1.1-2](https://github.com/TheHive-Project/Cortex/tree/rpm/1.1.1-2) (2017-05-19) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.1.1...rpm/1.1.1-2) - -**Fixed bugs:** - -- After Upgrade from Cortex 1.0.2 to 1.1.1 system does not come up [\#26](https://github.com/TheHive-Project/Cortex/issues/26) +## [1.1.1](https://github.com/TheHive-Project/Cortex/milestone/5) (2017-05-17) -## [1.1.1](https://github.com/TheHive-Project/Cortex/tree/1.1.1) (2017-05-17) +**Implemented enhancements:** -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.1.0...1.1.1) +- MISP integration [\#21](https://github.com/TheHive-Project/Cortex/issues/21) **Fixed bugs:** - Missing logos and favicons [\#25](https://github.com/TheHive-Project/Cortex/issues/25) -**Closed issues:** - -- Cortex 1.1.0 doesnt work with theHive 2.11.0 [\#24](https://github.com/TheHive-Project/Cortex/issues/24) -- MISP integration [\#21](https://github.com/TheHive-Project/Cortex/issues/21) - -## [1.1.0](https://github.com/TheHive-Project/Cortex/tree/1.1.0) (2017-05-12) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.0.2...1.1.0) +## [1.1.0](https://github.com/TheHive-Project/Cortex/milestone/2) (2017-05-15) **Implemented enhancements:** -- Add support to .deb and .rpm package generation [\#20](https://github.com/TheHive-Project/Cortex/issues/20) -- Scala code cleanup [\#19](https://github.com/TheHive-Project/Cortex/issues/19) - Display analyzers metadata [\#18](https://github.com/TheHive-Project/Cortex/issues/18) +- Scala code cleanup [\#19](https://github.com/TheHive-Project/Cortex/issues/19) +- Add support to .deb and .rpm package generation [\#20](https://github.com/TheHive-Project/Cortex/issues/20) **Closed issues:** -- Display Cortex version on the footer [\#23](https://github.com/TheHive-Project/Cortex/issues/23) - Use new logo and favicon [\#22](https://github.com/TheHive-Project/Cortex/issues/22) +- Display Cortex version on the footer [\#23](https://github.com/TheHive-Project/Cortex/issues/23) -## [1.0.2](https://github.com/TheHive-Project/Cortex/tree/1.0.2) (2017-04-19) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.0.1...1.0.2) +## [1.0.2](https://github.com/TheHive-Project/Cortex/milestone/4) (2017-04-18) **Fixed bugs:** -- Redirect to jobs list when a job is not found [\#16](https://github.com/TheHive-Project/Cortex/issues/16) -- Global section in configuration file is ignored [\#13](https://github.com/TheHive-Project/Cortex/issues/13) -- Secure the usage of angular-ui-notification library [\#12](https://github.com/TheHive-Project/Cortex/issues/12) - Jobs list API doesn't take into account the limit param [\#11](https://github.com/TheHive-Project/Cortex/issues/11) +- Secure the usage of angular-ui-notification library [\#12](https://github.com/TheHive-Project/Cortex/issues/12) +- Global section in configuration file is ignored [\#13](https://github.com/TheHive-Project/Cortex/issues/13) +- Redirect to jobs list when a job is not found [\#16](https://github.com/TheHive-Project/Cortex/issues/16) -**Closed issues:** - -- Support for cuckoo malware analysis plattform \(link analysis\) [\#17](https://github.com/TheHive-Project/Cortex/issues/17) -- Documentation on 'How to create an analyzer' 
[\#10](https://github.com/TheHive-Project/Cortex/issues/10) - -## [1.0.1](https://github.com/TheHive-Project/Cortex/tree/1.0.1) (2017-03-08) - -[Full Changelog](https://github.com/TheHive-Project/Cortex/compare/1.0.0...1.0.1) +## [1.0.1](https://github.com/TheHive-Project/Cortex/milestone/3) (2017-03-22) **Fixed bugs:** - Fix page scroll issues [\#9](https://github.com/TheHive-Project/Cortex/issues/9) - -**Closed issues:** - -- Missing install repertory [\#1](https://github.com/TheHive-Project/Cortex/issues/1) - -## [1.0.0](https://github.com/TheHive-Project/Cortex/tree/1.0.0) (2017-02-01) - -\* _This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)_ diff --git a/app/org/thp/cortex/Module.scala b/app/org/thp/cortex/Module.scala index af4c4cf79..640120a84 100644 --- a/app/org/thp/cortex/Module.scala +++ b/app/org/thp/cortex/Module.scala @@ -17,7 +17,7 @@ import org.thp.cortex.services._ import org.elastic4play.models.BaseModelDef import org.elastic4play.services.auth.MultiAuthSrv -import org.elastic4play.services.{UserSrv ⇒ EUserSrv, AuthSrv, MigrationOperations} +import org.elastic4play.services.{UserSrv => EUserSrv, AuthSrv, MigrationOperations} import org.thp.cortex.controllers.{AssetCtrl, AssetCtrlDev, AssetCtrlProd} import services.mappers.{MultiUserMapperSrv, UserMapper} @@ -41,8 +41,8 @@ class Module(environment: Environment, configuration: Configuration) extends Abs reflectionClasses .getSubTypesOf(classOf[BaseModelDef]) .asScala - .filterNot(c ⇒ Modifier.isAbstract(c.getModifiers)) - .foreach { modelClass ⇒ + .filterNot(c => Modifier.isAbstract(c.getModifiers)) + .foreach { modelClass => logger.info(s"Loading model $modelClass") modelBindings.addBinding.to(modelClass) if (classOf[AuditedModel].isAssignableFrom(modelClass)) { @@ -54,9 +54,9 @@ class Module(environment: Environment, configuration: Configuration) extends Abs reflectionClasses .getSubTypesOf(classOf[AuthSrv]) .asScala - .filterNot(c ⇒ Modifier.isAbstract(c.getModifiers) || c.isMemberClass) - .filterNot(c ⇒ c == classOf[MultiAuthSrv] || c == classOf[CortexAuthSrv]) - .foreach { authSrvClass ⇒ + .filterNot(c => Modifier.isAbstract(c.getModifiers) || c.isMemberClass) + .filterNot(c => c == classOf[MultiAuthSrv] || c == classOf[CortexAuthSrv]) + .foreach { authSrvClass => logger.info(s"Loading authentication module $authSrvClass") authBindings.addBinding.to(authSrvClass) } @@ -65,9 +65,9 @@ class Module(environment: Environment, configuration: Configuration) extends Abs reflectionClasses .getSubTypesOf(classOf[UserMapper]) .asScala - .filterNot(c ⇒ Modifier.isAbstract(c.getModifiers) || c.isMemberClass) - .filterNot(c ⇒ c == classOf[MultiUserMapperSrv]) - .foreach(mapperCls ⇒ ssoMapperBindings.addBinding.to(mapperCls)) + .filterNot(c => Modifier.isAbstract(c.getModifiers) || c.isMemberClass) + .filterNot(c => c == classOf[MultiUserMapperSrv]) + .foreach(mapperCls => ssoMapperBindings.addBinding.to(mapperCls)) if (environment.mode == Mode.Prod) bind[AssetCtrl].to[AssetCtrlProd] diff --git a/app/org/thp/cortex/controllers/AnalyzerConfigCtrl.scala b/app/org/thp/cortex/controllers/AnalyzerConfigCtrl.scala index 80ce00d1d..e948f1af2 100644 --- a/app/org/thp/cortex/controllers/AnalyzerConfigCtrl.scala +++ b/app/org/thp/cortex/controllers/AnalyzerConfigCtrl.scala @@ -13,7 +13,7 @@ import org.elastic4play.BadRequestError import org.elastic4play.controllers.{Authenticated, Fields, FieldsBodyParser, Renderer} @Singleton -class AnalyzerConfigCtrl 
@Inject()( +class AnalyzerConfigCtrl @Inject() ( analyzerConfigSrv: AnalyzerConfigSrv, userSrv: UserSrv, authenticated: Authenticated, @@ -23,34 +23,34 @@ class AnalyzerConfigCtrl @Inject()( implicit val ec: ExecutionContext ) extends AbstractController(components) { - def get(analyzerConfigName: String): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request ⇒ + def get(analyzerConfigName: String): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request => analyzerConfigSrv .getForUser(request.userId, analyzerConfigName) .map(renderer.toOutput(OK, _)) } - def list(): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request ⇒ + def list(): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request => analyzerConfigSrv .listConfigForUser(request.userId) - .map { bc ⇒ + .map { bc => renderer.toOutput( OK, bc.sortWith { - case (BaseConfig("global", _, _, _), _) ⇒ true - case (_, BaseConfig("global", _, _, _)) ⇒ false - case (BaseConfig(a, _, _, _), BaseConfig(b, _, _, _)) ⇒ a.compareTo(b) < 0 + case (BaseConfig("global", _, _, _), _) => true + case (_, BaseConfig("global", _, _, _)) => false + case (BaseConfig(a, _, _, _), BaseConfig(b, _, _, _)) => a.compareTo(b) < 0 } ) } } - def update(analyzerConfigName: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def update(analyzerConfigName: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request => request.body.getValue("config").flatMap(_.asOpt[JsObject]) match { - case Some(config) ⇒ + case Some(config) => analyzerConfigSrv .updateOrCreate(request.userId, analyzerConfigName, config) .map(renderer.toOutput(OK, _)) - case None ⇒ Future.failed(BadRequestError("attribute config has invalid format")) + case None => Future.failed(BadRequestError("attribute config has invalid format")) } } } diff --git a/app/org/thp/cortex/controllers/AnalyzerCtrl.scala b/app/org/thp/cortex/controllers/AnalyzerCtrl.scala index b967755a9..644f2c180 100644 --- a/app/org/thp/cortex/controllers/AnalyzerCtrl.scala +++ b/app/org/thp/cortex/controllers/AnalyzerCtrl.scala @@ -15,7 +15,7 @@ import org.elastic4play.services.JsonFormat.queryReads import org.elastic4play.services.{QueryDSL, QueryDef} @Singleton -class AnalyzerCtrl @Inject()( +class AnalyzerCtrl @Inject() ( workerSrv: WorkerSrv, userSrv: UserSrv, authenticated: Authenticated, @@ -26,7 +26,7 @@ class AnalyzerCtrl @Inject()( implicit val mat: Materializer ) extends AbstractController(components) { - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -35,54 +35,54 @@ class AnalyzerCtrl @Inject()( renderer.toOutput(OK, analyzers.map(analyzerJson(isAdmin)), analyzerTotal) } - def get(analyzerId: String): Action[AnyContent] = authenticated(Roles.read).async { request ⇒ + def get(analyzerId: String): Action[AnyContent] = authenticated(Roles.read).async { request => val isAdmin = request.roles.contains(Roles.orgAdmin) workerSrv .getForUser(request.userId, analyzerId) - .map(a ⇒ renderer.toOutput(OK, analyzerJson(isAdmin)(a))) + .map(a => renderer.toOutput(OK, analyzerJson(isAdmin)(a))) } private def analyzerJson(isAdmin: Boolean)(analyzer: Worker): JsObject = if 
(isAdmin) - analyzer.toJson + ("configuration" → Json.parse(analyzer.configuration())) + ("analyzerDefinitionId" → JsString(analyzer.workerDefinitionId())) + analyzer.toJson + ("configuration" -> Json.parse(analyzer.configuration())) + ("analyzerDefinitionId" -> JsString(analyzer.workerDefinitionId())) else - analyzer.toJson + ("analyzerDefinitionId" → JsString(analyzer.workerDefinitionId())) + analyzer.toJson + ("analyzerDefinitionId" -> JsString(analyzer.workerDefinitionId())) - def listForType(dataType: String): Action[AnyContent] = authenticated(Roles.read).async { request ⇒ + def listForType(dataType: String): Action[AnyContent] = authenticated(Roles.read).async { request => import org.elastic4play.services.QueryDSL._ val (responderList, responderCount) = workerSrv.findAnalyzersForUser(request.userId, "dataTypeList" ~= dataType, Some("all"), Nil) renderer.toOutput(OK, responderList.map(analyzerJson(isAdmin = false)), responderCount) } - def create(analyzerDefinitionId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def create(analyzerDefinitionId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request => for { - organizationId ← userSrv.getOrganizationId(request.userId) - workerDefinition ← Future.fromTry(workerSrv.getDefinition(analyzerDefinitionId)) - analyzer ← workerSrv.create(organizationId, workerDefinition, request.body) + organizationId <- userSrv.getOrganizationId(request.userId) + workerDefinition <- Future.fromTry(workerSrv.getDefinition(analyzerDefinitionId)) + analyzer <- workerSrv.create(organizationId, workerDefinition, request.body) } yield renderer.toOutput(CREATED, analyzerJson(isAdmin = false)(analyzer)) } - def listDefinitions: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { _ ⇒ + def listDefinitions: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { _ => val (analyzers, analyzerTotal) = workerSrv.listAnalyzerDefinitions renderer.toOutput(OK, analyzers, analyzerTotal) } - def scan: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin) { _ ⇒ + def scan: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin) { _ => workerSrv.rescan() NoContent } - def delete(analyzerId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request ⇒ + def delete(analyzerId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request => for { - analyzer ← workerSrv.getForUser(request.userId, analyzerId) - _ ← workerSrv.delete(analyzer) + analyzer <- workerSrv.getForUser(request.userId, analyzerId) + _ <- workerSrv.delete(analyzer) } yield NoContent } - def update(analyzerId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def update(analyzerId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request => for { - analyzer ← workerSrv.getForUser(request.userId, analyzerId) - updatedAnalyzer ← workerSrv.update(analyzer, request.body) + analyzer <- workerSrv.getForUser(request.userId, analyzerId) + updatedAnalyzer <- workerSrv.update(analyzer, request.body) } yield renderer.toOutput(OK, analyzerJson(isAdmin = true)(updatedAnalyzer)) } } diff --git a/app/org/thp/cortex/controllers/AssetCtrl.scala b/app/org/thp/cortex/controllers/AssetCtrl.scala index b3d9e29aa..065aac1c4 100644 --- a/app/org/thp/cortex/controllers/AssetCtrl.scala 
+++ b/app/org/thp/cortex/controllers/AssetCtrl.scala @@ -13,12 +13,12 @@ trait AssetCtrl { } @Singleton -class AssetCtrlProd @Inject()(errorHandler: HttpErrorHandler, meta: AssetsMetadata) extends Assets(errorHandler, meta) with AssetCtrl { +class AssetCtrlProd @Inject() (errorHandler: HttpErrorHandler, meta: AssetsMetadata) extends Assets(errorHandler, meta) with AssetCtrl { def get(file: String): Action[AnyContent] = at("/www", file) } @Singleton -class AssetCtrlDev @Inject()(environment: Environment)(implicit ec: ExecutionContext, fileMimeTypes: FileMimeTypes) +class AssetCtrlDev @Inject() (environment: Environment)(implicit ec: ExecutionContext, fileMimeTypes: FileMimeTypes) extends ExternalAssets(environment) with AssetCtrl { def get(file: String): Action[AnyContent] = at("www/dist", file) diff --git a/app/org/thp/cortex/controllers/AttachmentCtrl.scala b/app/org/thp/cortex/controllers/AttachmentCtrl.scala index a3b534ec2..f5fe38258 100644 --- a/app/org/thp/cortex/controllers/AttachmentCtrl.scala +++ b/app/org/thp/cortex/controllers/AttachmentCtrl.scala @@ -49,15 +49,15 @@ class AttachmentCtrl( * open the document directly. It must be used only for safe file */ @Timed("controllers.AttachmentCtrl.download") - def download(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ ⇒ + def download(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ => if (hash.startsWith("{{")) // angularjs hack NoContent else if (!name.getOrElse("").intersect(AttachmentAttributeFormat.forbiddenChar).isEmpty) mvc.Results.BadRequest("File name is invalid") else Result( - header = ResponseHeader(200, Map("Content-Disposition" → s"""attachment; filename="${URLEncoder - .encode(name.getOrElse(hash), "utf-8")}"""", "Content-Transfer-Encoding" → "binary")), + header = ResponseHeader(200, Map("Content-Disposition" -> s"""attachment; filename="${URLEncoder + .encode(name.getOrElse(hash), "utf-8")}"""", "Content-Transfer-Encoding" -> "binary")), body = HttpEntity.Streamed(attachmentSrv.source(hash), None, None) ) } @@ -68,7 +68,7 @@ class AttachmentCtrl( * File name can be specified (zip extension is append) */ @Timed("controllers.AttachmentCtrl.downloadZip") - def downloadZip(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ ⇒ + def downloadZip(hash: String, name: Option[String]): Action[AnyContent] = authenticated(Roles.read) { _ => if (!name.getOrElse("").intersect(AttachmentAttributeFormat.forbiddenChar).isEmpty) BadRequest("File name is invalid") else { @@ -88,10 +88,10 @@ class AttachmentCtrl( header = ResponseHeader( 200, Map( - "Content-Disposition" → s"""attachment; filename="${URLEncoder.encode(name.getOrElse(hash), "utf-8")}.zip"""", - "Content-Type" → "application/zip", - "Content-Transfer-Encoding" → "binary", - "Content-Length" → Files.size(f).toString + "Content-Disposition" -> s"""attachment; filename="${URLEncoder.encode(name.getOrElse(hash), "utf-8")}.zip"""", + "Content-Type" -> "application/zip", + "Content-Transfer-Encoding" -> "binary", + "Content-Length" -> Files.size(f).toString ) ), body = HttpEntity.Streamed(FileIO.fromPath(f), Some(Files.size(f)), Some("application/zip")) diff --git a/app/org/thp/cortex/controllers/AuthenticationCtrl.scala b/app/org/thp/cortex/controllers/AuthenticationCtrl.scala index d797e3619..c447f4bda 100644 --- a/app/org/thp/cortex/controllers/AuthenticationCtrl.scala +++ b/app/org/thp/cortex/controllers/AuthenticationCtrl.scala @@ -1,21 +1,21 @@ package 
org.thp.cortex.controllers -import scala.concurrent.{ExecutionContext, Future} - -import play.api.mvc._ - import javax.inject.{Inject, Singleton} -import org.thp.cortex.models.UserStatus -import org.thp.cortex.services.UserSrv - import org.elastic4play.controllers.{Authenticated, Fields, FieldsBodyParser, Renderer} import org.elastic4play.database.DBIndex import org.elastic4play.services.AuthSrv import org.elastic4play.services.JsonFormat.authContextWrites -import org.elastic4play.{AuthorizationError, MissingAttributeError, OAuth2Redirect, Timed} +import org.elastic4play.{AuthorizationError, MissingAttributeError, Timed} +import org.thp.cortex.models.UserStatus +import org.thp.cortex.services.UserSrv +import play.api.Configuration +import play.api.mvc._ + +import scala.concurrent.{ExecutionContext, Future} @Singleton -class AuthenticationCtrl @Inject()( +class AuthenticationCtrl @Inject() ( + configuration: Configuration, authSrv: AuthSrv, userSrv: UserSrv, authenticated: Authenticated, @@ -27,41 +27,40 @@ class AuthenticationCtrl @Inject()( ) extends AbstractController(components) { @Timed - def login: Action[Fields] = Action.async(fieldsBodyParser) { implicit request ⇒ + def login: Action[Fields] = Action.async(fieldsBodyParser) { implicit request => dbIndex.getIndexStatus.flatMap { - case false ⇒ Future.successful(Results.Status(520)) - case _ ⇒ + case false => Future.successful(Results.Status(520)) + case _ => for { - user ← request.body.getString("user").fold[Future[String]](Future.failed(MissingAttributeError("user")))(Future.successful) - password ← request.body.getString("password").fold[Future[String]](Future.failed(MissingAttributeError("password")))(Future.successful) - authContext ← authSrv.authenticate(user, password) + user <- request.body.getString("user").fold[Future[String]](Future.failed(MissingAttributeError("user")))(Future.successful) + password <- request.body.getString("password").fold[Future[String]](Future.failed(MissingAttributeError("password")))(Future.successful) + authContext <- authSrv.authenticate(user, password) } yield authenticated.setSessingUser(renderer.toOutput(OK, authContext), authContext) } } @Timed - def ssoLogin: Action[AnyContent] = Action.async { implicit request ⇒ + def ssoLogin: Action[AnyContent] = Action.async { implicit request => dbIndex.getIndexStatus.flatMap { - case false ⇒ Future.successful(Results.Status(520)) - case _ ⇒ - (for { - authContext ← authSrv.authenticate() - user ← userSrv.get(authContext.userId) - } yield { - if (user.status() == UserStatus.Ok) - authenticated.setSessingUser(Ok, authContext) - else - throw AuthorizationError("Your account is locked") - }) recover { - // A bit of a hack with the status code, so that Angular doesn't reject the origin - case OAuth2Redirect(redirectUrl, qp) ⇒ Redirect(redirectUrl, qp, status = OK) - case e ⇒ throw e - } + case false => Future.successful(Results.Status(520)) + case _ => + authSrv + .authenticate() + .flatMap { + case Right(authContext) => + userSrv.get(authContext.userId).map { user => + if (user.status() == UserStatus.Ok) + authenticated.setSessingUser(Redirect(configuration.get[String]("play.http.context").stripSuffix("/") + "/index.html"), authContext) + else + throw AuthorizationError("Your account is locked") + } + case Left(result) => Future.successful(result) + } } } @Timed - def logout = Action { + def logout: Action[AnyContent] = Action { Ok.withNewSession } } diff --git a/app/org/thp/cortex/controllers/DBListCtrl.scala 
diff --git a/app/org/thp/cortex/controllers/DBListCtrl.scala b/app/org/thp/cortex/controllers/DBListCtrl.scala
index 44325e20e..036475e20 100644
--- a/app/org/thp/cortex/controllers/DBListCtrl.scala
+++ b/app/org/thp/cortex/controllers/DBListCtrl.scala
@@ -14,7 +14,7 @@ import org.elastic4play.services.DBLists
 import org.elastic4play.MissingAttributeError

 @Singleton
-class DBListCtrl @Inject()(
+class DBListCtrl @Inject() (
     dblists: DBLists,
     authenticated: Authenticated,
     renderer: Renderer,
@@ -23,51 +23,51 @@ class DBListCtrl @Inject()(
     implicit val ec: ExecutionContext
 ) extends AbstractController(components) {

-  def list: Action[AnyContent] = authenticated(Roles.read).async { _ ⇒
-    dblists.listAll.map { listNames ⇒
+  def list: Action[AnyContent] = authenticated(Roles.read).async { _ =>
+    dblists.listAll.map { listNames =>
       renderer.toOutput(OK, listNames)
     }
   }

-  def listItems(listName: String): Action[AnyContent] = authenticated(Roles.read) { _ ⇒
+  def listItems(listName: String): Action[AnyContent] = authenticated(Roles.read) { _ =>
     val (src, _) = dblists(listName).getItems[JsValue]
     val items = src
-      .map { case (id, value) ⇒ s""""$id":$value""" }
+      .map { case (id, value) => s""""$id":$value""" }
       .intersperse("{", ",", "}")
     Ok.chunked(items).as("application/json")
   }

-  def addItem(listName: String): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒
-    request.body.getValue("value").fold(Future.successful(NoContent)) { value ⇒
-      dblists(listName).addItem(value).map { item ⇒
+  def addItem(listName: String): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request =>
+    request.body.getValue("value").fold(Future.successful(NoContent)) { value =>
+      dblists(listName).addItem(value).map { item =>
         renderer.toOutput(OK, item.id)
       }
     }
   }

-  def deleteItem(itemId: String): Action[AnyContent] = authenticated(Roles.superAdmin).async { implicit request ⇒
-    dblists.deleteItem(itemId).map { _ ⇒
+  def deleteItem(itemId: String): Action[AnyContent] = authenticated(Roles.superAdmin).async { implicit request =>
+    dblists.deleteItem(itemId).map { _ =>
       NoContent
     }
   }

-  def updateItem(itemId: String): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒
+  def updateItem(itemId: String): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request =>
     request
       .body
       .getValue("value")
-      .map { value ⇒
+      .map { value =>
         for {
-          item ← dblists.getItem(itemId)
-          _ ← dblists.deleteItem(item)
-          newItem ← dblists(item.dblist).addItem(value)
+          item <- dblists.getItem(itemId)
+          _ <- dblists.deleteItem(item)
+          newItem <- dblists(item.dblist).addItem(value)
         } yield renderer.toOutput(OK, newItem.id)
       }
       .getOrElse(Future.failed(MissingAttributeError("value")))
   }

-  def itemExists(listName: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒
+  def itemExists(listName: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request =>
     val itemKey = request.body.getString("key").getOrElse(throw MissingAttributeError("Parameter key is missing"))
     val itemValue = request.body.getValue("value").getOrElse(throw MissingAttributeError("Parameter value is missing"))
-    dblists(listName).exists(itemKey, itemValue).map(r ⇒ Ok(Json.obj("found" → r)))
+    dblists(listName).exists(itemKey, itemValue).map(r => Ok(Json.obj("found" -> r)))
   }
 }
diff --git a/app/org/thp/cortex/controllers/Home.scala b/app/org/thp/cortex/controllers/Home.scala
index 5e4dcfec8..edf225523 100644
---
a/app/org/thp/cortex/controllers/Home.scala +++ b/app/org/thp/cortex/controllers/Home.scala @@ -6,7 +6,7 @@ import play.api.mvc.{AbstractController, Action, AnyContent, ControllerComponent import javax.inject.{Inject, Singleton} @Singleton -class Home @Inject()(configuration: Configuration, components: ControllerComponents) extends AbstractController(components) { +class Home @Inject() (configuration: Configuration, components: ControllerComponents) extends AbstractController(components) { def redirect: Action[AnyContent] = Action { Redirect(configuration.get[String]("play.http.context").stripSuffix("/") + "/index.html") diff --git a/app/org/thp/cortex/controllers/JobCtrl.scala b/app/org/thp/cortex/controllers/JobCtrl.scala index b43142e42..f3a762217 100644 --- a/app/org/thp/cortex/controllers/JobCtrl.scala +++ b/app/org/thp/cortex/controllers/JobCtrl.scala @@ -2,6 +2,7 @@ package org.thp.cortex.controllers import scala.concurrent.duration.{Duration, FiniteDuration} import scala.concurrent.{ExecutionContext, Future} +import scala.concurrent.duration.DurationInt import play.api.http.Status import play.api.libs.json.{JsObject, JsString, JsValue, Json} @@ -24,7 +25,7 @@ import org.elastic4play.services.{QueryDSL, QueryDef} import org.elastic4play.utils.RichFuture @Singleton -class JobCtrl @Inject()( +class JobCtrl @Inject() ( jobSrv: JobSrv, @Named("audit") auditActor: ActorRef, fieldsBodyParser: FieldsBodyParser, @@ -38,15 +39,15 @@ class JobCtrl @Inject()( with Status { def list(dataTypeFilter: Option[String], dataFilter: Option[String], workerFilter: Option[String], range: Option[String]): Action[AnyContent] = - authenticated(Roles.read).async { implicit request ⇒ + authenticated(Roles.read).async { implicit request => val (jobs, jobTotal) = jobSrv.listForUser(request.userId, dataTypeFilter, dataFilter, workerFilter, range) renderer.toOutput(OK, jobs, jobTotal) } - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => import QueryDSL._ val deleteFilter = "status" ~!= "Deleted" - val query = request.body.getValue("query").fold[QueryDef](deleteFilter)(q ⇒ and(q.as[QueryDef], deleteFilter)) + val query = request.body.getValue("query").fold[QueryDef](deleteFilter)(q => and(q.as[QueryDef], deleteFilter)) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) val (jobs, total) = jobSrv.findForUser(request.userId, query, range, sort) @@ -54,36 +55,36 @@ class JobCtrl @Inject()( } - def get(jobId: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request ⇒ - jobSrv.getForUser(request.userId, jobId).map { job ⇒ + def get(jobId: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request => + jobSrv.getForUser(request.userId, jobId).map { job => renderer.toOutput(OK, job) } } - def delete(jobId: String): Action[AnyContent] = authenticated(Roles.analyze, Roles.orgAdmin).async { implicit request ⇒ + def delete(jobId: String): Action[AnyContent] = authenticated(Roles.analyze, Roles.orgAdmin).async { implicit request => jobSrv .getForUser(request.userId, jobId) - .flatMap(job ⇒ jobSrv.delete(job)) - .map(_ ⇒ NoContent) + .flatMap(job => jobSrv.delete(job)) + .map(_ => NoContent) } - def createResponderJob(workerId: String): Action[Fields] = authenticated(Roles.analyze).async(fieldsBodyParser) { implicit request ⇒ + def createResponderJob(workerId: String): Action[Fields] 
= authenticated(Roles.analyze).async(fieldsBodyParser) { implicit request => val fields = request.body val fieldsWithStringData = fields.getValue("data") match { - case Some(d) ⇒ fields.set("data", d.toString) - case None ⇒ fields + case Some(d) => fields.set("data", d.toString) + case None => fields } jobSrv .create(workerId, fieldsWithStringData) - .map { job ⇒ + .map { job => renderer.toOutput(OK, job) } } - def createAnalyzerJob(workerId: String): Action[Fields] = authenticated(Roles.analyze).async(fieldsBodyParser) { implicit request ⇒ + def createAnalyzerJob(workerId: String): Action[Fields] = authenticated(Roles.analyze).async(fieldsBodyParser) { implicit request => jobSrv .create(workerId, request.body) - .map { job ⇒ + .map { job => renderer.toOutput(OK, job) } } @@ -93,72 +94,72 @@ class JobCtrl @Inject()( private def getJobWithReport(userId: String, job: Job): Future[JsValue] = (job.status() match { - case JobStatus.Success ⇒ + case JobStatus.Success => for { - report ← jobSrv.getReport(job) + report <- jobSrv.getReport(job) (artifactSource, _) = jobSrv.findArtifacts(userId, job.id, QueryDSL.any, Some("all"), Nil) - artifacts ← artifactSource + artifacts <- artifactSource .collect { - case artifact if artifact.data().isDefined ⇒ + case artifact if artifact.data().isDefined => Json.obj( - "data" → artifact.data(), - "dataType" → artifact.dataType(), - "message" → artifact.message(), - "tags" → artifact.tags(), - "tlp" → artifact.tlp() + "data" -> artifact.data(), + "dataType" -> artifact.dataType(), + "message" -> artifact.message(), + "tags" -> artifact.tags(), + "tlp" -> artifact.tlp() ) - case artifact if artifact.attachment().isDefined ⇒ + case artifact if artifact.attachment().isDefined => val attachment = artifact.attachment().get Json.obj( - "dataType" → artifact.dataType(), - "message" → artifact.message(), - "tags" → artifact.tags(), - "tlp" → artifact.tlp(), - "attachment" → Json - .obj("contentType" → attachment.contentType, "id" → attachment.id, "name" → attachment.name, "size" → attachment.size) + "dataType" -> artifact.dataType(), + "message" -> artifact.message(), + "tags" -> artifact.tags(), + "tlp" -> artifact.tlp(), + "attachment" -> Json + .obj("contentType" -> attachment.contentType, "id" -> attachment.id, "name" -> attachment.name, "size" -> attachment.size) ) } .runWith(Sink.seq) } yield Json.obj( - "summary" → Json.parse(report.summary()), - "full" → Json.parse(report.full()), - "success" → true, - "artifacts" → artifacts, - "operations" → Json.parse(report.operations()) + "summary" -> Json.parse(report.summary()), + "full" -> Json.parse(report.full()), + "success" -> true, + "artifacts" -> artifacts, + "operations" -> Json.parse(report.operations()) ) - case JobStatus.Failure ⇒ + case JobStatus.Failure => val errorMessage = job.errorMessage().getOrElse("") - Future.successful(Json.obj("errorMessage" → errorMessage, "input" → job.input(), "success" → false)) - case JobStatus.InProgress ⇒ Future.successful(JsString("Running")) - case JobStatus.Waiting ⇒ Future.successful(JsString("Waiting")) - case JobStatus.Deleted ⇒ Future.successful(JsString("Deleted")) - }).map { report ⇒ - Json.toJson(job).as[JsObject] + ("report" → report) + Future.successful(Json.obj("errorMessage" -> errorMessage, "input" -> job.input(), "success" -> false)) + case JobStatus.InProgress => Future.successful(JsString("Running")) + case JobStatus.Waiting => Future.successful(JsString("Waiting")) + case JobStatus.Deleted => Future.successful(JsString("Deleted")) + }).map { report => + 
Json.toJson(job).as[JsObject] + ("report" -> report) } - def report(jobId: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request ⇒ + def report(jobId: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request => getJobWithReport(request.userId, jobId).map(Ok(_)) } - def waitReport(jobId: String, atMost: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request ⇒ + def waitReport(jobId: String, atMost: String): Action[AnyContent] = authenticated(Roles.read).async { implicit request => jobSrv .getForUser(request.userId, jobId) .flatMap { - case job if job.status() == JobStatus.InProgress || job.status() == JobStatus.Waiting ⇒ + case job if job.status() == JobStatus.InProgress || job.status() == JobStatus.Waiting => val duration = Duration(atMost).asInstanceOf[FiniteDuration] - implicit val timeout: Timeout = Timeout(duration) + implicit val timeout: Timeout = Timeout(duration + 1.second) (auditActor ? Register(jobId, duration)) .mapTo[JobEnded] - .map(_ ⇒ ()) + .map(_ => ()) .withTimeout(duration, ()) - .flatMap(_ ⇒ getJobWithReport(request.userId, jobId)) - case job ⇒ + .flatMap(_ => getJobWithReport(request.userId, jobId)) + case job => getJobWithReport(request.userId, job) } .map(Ok(_)) } - def listArtifacts(jobId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def listArtifacts(jobId: String): Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) diff --git a/app/org/thp/cortex/controllers/MispCtrl.scala b/app/org/thp/cortex/controllers/MispCtrl.scala index 47009d559..8af0db784 100644 --- a/app/org/thp/cortex/controllers/MispCtrl.scala +++ b/app/org/thp/cortex/controllers/MispCtrl.scala @@ -10,7 +10,7 @@ import play.api.mvc._ import scala.concurrent.{ExecutionContext, Future} -class MispCtrl @Inject()( +class MispCtrl @Inject() ( mispSrv: MispSrv, analyzerSrv: WorkerSrv, authenticated: Authenticated, @@ -22,25 +22,25 @@ class MispCtrl @Inject()( private[MispCtrl] lazy val logger = Logger(getClass) - def modules: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request ⇒ + def modules: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { implicit request => val (analyzers, analyzerCount) = mispSrv.moduleList renderer.toOutput(OK, analyzers, analyzerCount) } - def query: Action[JsValue] = authenticated(Roles.analyze)(parse.json).async { implicit request ⇒ + def query: Action[JsValue] = authenticated(Roles.analyze)(parse.json).async { implicit request => (request.body \ "module") .asOpt[String] - .fold(Future.successful(BadRequest("Module parameter is not present in request"))) { module ⇒ + .fold(Future.successful(BadRequest("Module parameter is not present in request"))) { module => request .body .as[JsObject] .fields .collectFirst { - case kv @ (k, _) if k != "module" ⇒ kv + case kv @ (k, _) if k != "module" => kv } .fold(Future.successful(BadRequest("Request doesn't contain data to analyze"))) { - case (mispType, dataJson) ⇒ - dataJson.asOpt[String].fold(Future.successful(BadRequest("Data has invalid type (expected string)"))) { data ⇒ + case (mispType, dataJson) => + dataJson.asOpt[String].fold(Future.successful(BadRequest("Data has invalid type (expected string)"))) { data => mispSrv .query(module, 
mispType, data) .map(Ok(_)) diff --git a/app/org/thp/cortex/controllers/OrganizationCtrl.scala b/app/org/thp/cortex/controllers/OrganizationCtrl.scala index a8d5127ec..29d721151 100644 --- a/app/org/thp/cortex/controllers/OrganizationCtrl.scala +++ b/app/org/thp/cortex/controllers/OrganizationCtrl.scala @@ -15,10 +15,10 @@ import org.elastic4play.{BadRequestError, NotFoundError} import org.elastic4play.controllers.{Authenticated, Fields, FieldsBodyParser, Renderer} import org.elastic4play.models.JsonFormat.baseModelEntityWrites import org.elastic4play.services.JsonFormat.{aggReads, queryReads} -import org.elastic4play.services.{UserSrv ⇒ _, _} +import org.elastic4play.services.{UserSrv => _, _} @Singleton -class OrganizationCtrl @Inject()( +class OrganizationCtrl @Inject() ( organizationSrv: OrganizationSrv, authSrv: AuthSrv, auxSrv: AuxSrv, @@ -33,43 +33,43 @@ class OrganizationCtrl @Inject()( private[OrganizationCtrl] lazy val logger = Logger(getClass) - def create: Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def create: Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request => organizationSrv .create(request.body) - .map(organization ⇒ renderer.toOutput(CREATED, organization)) + .map(organization => renderer.toOutput(CREATED, organization)) } - def get(organizationId: String): Action[Fields] = authenticated(Roles.superAdmin, Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def get(organizationId: String): Action[Fields] = authenticated(Roles.superAdmin, Roles.orgAdmin).async(fieldsBodyParser) { implicit request => val withStats = request.body.getBoolean("nstats").getOrElse(false) (for { - userOrganizationId ← if (request.roles.contains(Roles.superAdmin)) Future.successful(organizationId) + userOrganizationId <- if (request.roles.contains(Roles.superAdmin)) Future.successful(organizationId) else userSrv.getOrganizationId(request.userId) if userOrganizationId == organizationId - organization ← organizationSrv.get(organizationId) - organizationWithStats ← auxSrv(organization, 0, withStats, removeUnaudited = false) + organization <- organizationSrv.get(organizationId) + organizationWithStats <- auxSrv(organization, 0, withStats, removeUnaudited = false) } yield renderer.toOutput(OK, organizationWithStats)) - .recoverWith { case _: NoSuchElementException ⇒ Future.failed(NotFoundError(s"organization $organizationId not found")) } + .recoverWith { case _: NoSuchElementException => Future.failed(NotFoundError(s"organization $organizationId not found")) } } - def update(organizationId: String): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def update(organizationId: String): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request => if (organizationId == "cortex") Future.failed(BadRequestError("Cortex organization can't be updated")) else - organizationSrv.update(organizationId, request.body).map { organization ⇒ + organizationSrv.update(organizationId, request.body).map { organization => renderer.toOutput(OK, organization) } } - def delete(organizationId: String): Action[AnyContent] = authenticated(Roles.superAdmin).async { implicit request ⇒ + def delete(organizationId: String): Action[AnyContent] = authenticated(Roles.superAdmin).async { implicit request => if (organizationId == "cortex") Future.failed(BadRequestError("Cortex organization can't be removed")) else organizationSrv .delete(organizationId) - 
.map(_ ⇒ NoContent) + .map(_ => NoContent) } - def find: Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -79,9 +79,9 @@ class OrganizationCtrl @Inject()( renderer.toOutput(OK, organizationWithStats, total) } - def stats(): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def stats(): Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val aggs = request.body.getValue("stats").getOrElse(throw BadRequestError("Parameter \"stats\" is missing")).as[Seq[Agg]] - organizationSrv.stats(query, aggs).map(s ⇒ Ok(s)) + organizationSrv.stats(query, aggs).map(s => Ok(s)) } } diff --git a/app/org/thp/cortex/controllers/ResponderConfigCtrl.scala b/app/org/thp/cortex/controllers/ResponderConfigCtrl.scala index 52705f5c4..a4f232ba0 100644 --- a/app/org/thp/cortex/controllers/ResponderConfigCtrl.scala +++ b/app/org/thp/cortex/controllers/ResponderConfigCtrl.scala @@ -13,7 +13,7 @@ import org.elastic4play.BadRequestError import org.elastic4play.controllers.{Authenticated, Fields, FieldsBodyParser, Renderer} @Singleton -class ResponderConfigCtrl @Inject()( +class ResponderConfigCtrl @Inject() ( responderConfigSrv: ResponderConfigSrv, userSrv: UserSrv, authenticated: Authenticated, @@ -23,34 +23,34 @@ class ResponderConfigCtrl @Inject()( implicit val ec: ExecutionContext ) extends AbstractController(components) { - def get(analyzerConfigName: String): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request ⇒ + def get(analyzerConfigName: String): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request => responderConfigSrv .getForUser(request.userId, analyzerConfigName) .map(renderer.toOutput(OK, _)) } - def list(): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request ⇒ + def list(): Action[AnyContent] = authenticated(Roles.orgAdmin).async { request => responderConfigSrv .listConfigForUser(request.userId) - .map { bc ⇒ + .map { bc => renderer.toOutput( OK, bc.sortWith { - case (BaseConfig("global", _, _, _), _) ⇒ true - case (_, BaseConfig("global", _, _, _)) ⇒ false - case (BaseConfig(a, _, _, _), BaseConfig(b, _, _, _)) ⇒ a.compareTo(b) < 0 + case (BaseConfig("global", _, _, _), _) => true + case (_, BaseConfig("global", _, _, _)) => false + case (BaseConfig(a, _, _, _), BaseConfig(b, _, _, _)) => a.compareTo(b) < 0 } ) } } - def update(analyzerConfigName: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def update(analyzerConfigName: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request => request.body.getValue("config").flatMap(_.asOpt[JsObject]) match { - case Some(config) ⇒ + case Some(config) => responderConfigSrv .updateOrCreate(request.userId, analyzerConfigName, config) .map(renderer.toOutput(OK, _)) - case None ⇒ Future.failed(BadRequestError("attribute config has invalid format")) + case None => Future.failed(BadRequestError("attribute config has invalid format")) } } } diff --git a/app/org/thp/cortex/controllers/ResponderCtrl.scala 
b/app/org/thp/cortex/controllers/ResponderCtrl.scala index 694faa11b..e74648061 100644 --- a/app/org/thp/cortex/controllers/ResponderCtrl.scala +++ b/app/org/thp/cortex/controllers/ResponderCtrl.scala @@ -16,7 +16,7 @@ import org.elastic4play.services.JsonFormat.queryReads import org.elastic4play.services.{QueryDSL, QueryDef} @Singleton -class ResponderCtrl @Inject()( +class ResponderCtrl @Inject() ( workerSrv: WorkerSrv, userSrv: UserSrv, authenticated: Authenticated, @@ -27,7 +27,7 @@ class ResponderCtrl @Inject()( implicit val mat: Materializer ) extends AbstractController(components) { - def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { request ⇒ + def find: Action[Fields] = authenticated(Roles.read).async(fieldsBodyParser) { request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -36,71 +36,71 @@ class ResponderCtrl @Inject()( renderer.toOutput(OK, responders.map(responderJson(isAdmin)), responderTotal) } - def get(responderId: String): Action[AnyContent] = authenticated(Roles.read).async { request ⇒ + def get(responderId: String): Action[AnyContent] = authenticated(Roles.read).async { request => val isAdmin = request.roles.contains(Roles.orgAdmin) workerSrv .getForUser(request.userId, responderId) - .map(responder ⇒ renderer.toOutput(OK, responderJson(isAdmin)(responder))) + .map(responder => renderer.toOutput(OK, responderJson(isAdmin)(responder))) } private val emptyResponderDefinitionJson = - Json.obj("version" → "0.0", "description" → "unknown", "dataTypeList" → Nil, "author" → "unknown", "url" → "unknown", "license" → "unknown") + Json.obj("version" -> "0.0", "description" -> "unknown", "dataTypeList" -> Nil, "author" -> "unknown", "url" -> "unknown", "license" -> "unknown") private def responderJson(responder: Worker, responderDefinition: Option[WorkerDefinition]) = - responder.toJson ++ responderDefinition.fold(emptyResponderDefinitionJson) { ad ⇒ + responder.toJson ++ responderDefinition.fold(emptyResponderDefinitionJson) { ad => Json.obj( - "maxTlp" → (responder.config \ "max_tlp").asOpt[JsNumber], - "maxPap" → (responder.config \ "max_pap").asOpt[JsNumber], - "version" → ad.version, - "description" → ad.description, - "author" → ad.author, - "url" → ad.url, - "license" → ad.license, - "baseConfig" → ad.baseConfiguration + "maxTlp" -> (responder.config \ "max_tlp").asOpt[JsNumber], + "maxPap" -> (responder.config \ "max_pap").asOpt[JsNumber], + "version" -> ad.version, + "description" -> ad.description, + "author" -> ad.author, + "url" -> ad.url, + "license" -> ad.license, + "baseConfig" -> ad.baseConfiguration ) } private def responderJson(isAdmin: Boolean)(responder: Worker): JsObject = if (isAdmin) - responder.toJson + ("configuration" → Json.parse(responder.configuration())) + responder.toJson + ("configuration" -> Json.parse(responder.configuration())) else responder.toJson - def listForType(dataType: String): Action[AnyContent] = authenticated(Roles.read).async { request ⇒ + def listForType(dataType: String): Action[AnyContent] = authenticated(Roles.read).async { request => import org.elastic4play.services.QueryDSL._ val (responderList, responderCount) = workerSrv.findRespondersForUser(request.userId, "dataTypeList" ~= dataType, Some("all"), Nil) renderer.toOutput(OK, responderList.map(responderJson(false)), responderCount) } - def create(responderDefinitionId: String): Action[Fields] = 
authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def create(responderDefinitionId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request => for { - organizationId ← userSrv.getOrganizationId(request.userId) - workerDefinition ← Future.fromTry(workerSrv.getDefinition(responderDefinitionId)) - responder ← workerSrv.create(organizationId, workerDefinition, request.body) + organizationId <- userSrv.getOrganizationId(request.userId) + workerDefinition <- Future.fromTry(workerSrv.getDefinition(responderDefinitionId)) + responder <- workerSrv.create(organizationId, workerDefinition, request.body) } yield renderer.toOutput(CREATED, responderJson(responder, Some(workerDefinition))) } - def listDefinitions: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { _ ⇒ + def listDefinitions: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { _ => val (responders, responderTotal) = workerSrv.listResponderDefinitions renderer.toOutput(OK, responders, responderTotal) } - def scan: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin) { _ ⇒ + def scan: Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin) { _ => workerSrv.rescan() NoContent } - def delete(responderId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request ⇒ + def delete(responderId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request => for { - responder ← workerSrv.getForUser(request.userId, responderId) - _ ← workerSrv.delete(responder) + responder <- workerSrv.getForUser(request.userId, responderId) + _ <- workerSrv.delete(responder) } yield NoContent } - def update(responderId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request ⇒ + def update(responderId: String): Action[Fields] = authenticated(Roles.orgAdmin).async(fieldsBodyParser) { implicit request => for { - responder ← workerSrv.getForUser(request.userId, responderId) - updatedResponder ← workerSrv.update(responder, request.body) + responder <- workerSrv.getForUser(request.userId, responderId) + updatedResponder <- workerSrv.update(responder, request.body) } yield renderer.toOutput(OK, responderJson(isAdmin = true)(updatedResponder)) } } diff --git a/app/org/thp/cortex/controllers/StatusCtrl.scala b/app/org/thp/cortex/controllers/StatusCtrl.scala index a9d85792a..2e8900231 100644 --- a/app/org/thp/cortex/controllers/StatusCtrl.scala +++ b/app/org/thp/cortex/controllers/StatusCtrl.scala @@ -8,20 +8,18 @@ import play.api.libs.json.Json.toJsFieldJsValueWrapper import play.api.libs.json.{JsBoolean, JsString, Json} import play.api.mvc.{AbstractController, Action, AnyContent, ControllerComponents} -import com.sksamuel.elastic4s.http.ElasticDsl +import com.sksamuel.elastic4s.ElasticDsl import javax.inject.{Inject, Singleton} import org.elasticsearch.client.Node import org.thp.cortex.models.Worker -import org.elastic4play.database.DBIndex import org.elastic4play.services.AuthSrv import org.elastic4play.services.auth.MultiAuthSrv @Singleton -class StatusCtrl @Inject()( +class StatusCtrl @Inject() ( configuration: Configuration, authSrv: AuthSrv, - dbIndex: DBIndex, components: ControllerComponents, implicit val ec: ExecutionContext ) extends AbstractController(components) @@ -32,24 +30,24 @@ class StatusCtrl @Inject()( def get: Action[AnyContent] = Action { Ok( Json.obj( - "versions" → Json.obj( - 
"Cortex" → getVersion(classOf[Worker]), - "Elastic4Play" → getVersion(classOf[AuthSrv]), - "Play" → getVersion(classOf[AbstractController]), - "Elastic4s" → getVersion(classOf[ElasticDsl]), - "ElasticSearch client" → getVersion(classOf[Node]) + "versions" -> Json.obj( + "Cortex" -> getVersion(classOf[Worker]), + "Elastic4Play" -> getVersion(classOf[AuthSrv]), + "Play" -> getVersion(classOf[AbstractController]), + "Elastic4s" -> getVersion(classOf[ElasticDsl]), + "ElasticSearch client" -> getVersion(classOf[Node]) ), - "config" → Json.obj( - "protectDownloadsWith" → configuration.get[String]("datastore.attachment.password"), - "authType" → (authSrv match { - case multiAuthSrv: MultiAuthSrv ⇒ - multiAuthSrv.authProviders.map { a ⇒ + "config" -> Json.obj( + "protectDownloadsWith" -> configuration.get[String]("datastore.attachment.password"), + "authType" -> (authSrv match { + case multiAuthSrv: MultiAuthSrv => + multiAuthSrv.authProviders.map { a => JsString(a.name) } - case _ ⇒ JsString(authSrv.name) + case _ => JsString(authSrv.name) }), - "capabilities" → authSrv.capabilities.map(c ⇒ JsString(c.toString)), - "ssoAutoLogin" → JsBoolean(configuration.getOptional[Boolean]("auth.sso.autologin").getOrElse(false)) + "capabilities" -> authSrv.capabilities.map(c => JsString(c.toString)), + "ssoAutoLogin" -> JsBoolean(configuration.getOptional[Boolean]("auth.sso.autologin").getOrElse(false)) ) ) ) diff --git a/app/org/thp/cortex/controllers/StreamCtrl.scala b/app/org/thp/cortex/controllers/StreamCtrl.scala index 2059af71f..2a0450f5c 100644 --- a/app/org/thp/cortex/controllers/StreamCtrl.scala +++ b/app/org/thp/cortex/controllers/StreamCtrl.scala @@ -90,33 +90,34 @@ class StreamCtrl( * This call waits up to "refresh", if there is no event, return empty response */ @Timed("controllers.StreamCtrl.get") - def get(id: String): Action[AnyContent] = Action.async { implicit request ⇒ + def get(id: String): Action[AnyContent] = Action.async { implicit request => implicit val timeout: Timeout = Timeout(refresh + globalMaxWait + 1.second) if (!isValidStreamId(id)) { Future.successful(BadRequest("Invalid stream id")) } else { val futureStatus = authenticated.expirationStatus(request) match { - case ExpirationError if !migrationSrv.isMigrating ⇒ userSrv.getInitialUser(request).recoverWith { case _ => authenticated.getFromApiKey(request)}.map(_ ⇒ OK) - case _: ExpirationWarning ⇒ Future.successful(220) - case _ ⇒ Future.successful(OK) + case ExpirationError if !migrationSrv.isMigrating => + userSrv.getInitialUser(request).recoverWith { case _ => authenticated.getFromApiKey(request) }.map(_ => OK) + case _: ExpirationWarning => Future.successful(220) + case _ => Future.successful(OK) } - futureStatus.flatMap { status ⇒ + futureStatus.flatMap { status => (system.actorSelection(s"/user/stream-$id") ? 
StreamActor.GetOperations) map { - case StreamMessages(operations) ⇒ renderer.toOutput(status, operations) - case m ⇒ InternalServerError(s"Unexpected message : $m (${m.getClass})") + case StreamMessages(operations) => renderer.toOutput(status, operations) + case m => InternalServerError(s"Unexpected message : $m (${m.getClass})") } } } } @Timed("controllers.StreamCtrl.status") - def status = Action { implicit request ⇒ + def status = Action { implicit request => val status = authenticated.expirationStatus(request) match { - case ExpirationWarning(duration) ⇒ Json.obj("remaining" → duration.toSeconds, "warning" → true) - case ExpirationError ⇒ Json.obj("remaining" → 0, "warning" → true) - case ExpirationOk(duration) ⇒ Json.obj("remaining" → duration.toSeconds, "warning" → false) + case ExpirationWarning(duration) => Json.obj("remaining" -> duration.toSeconds, "warning" -> true) + case ExpirationError => Json.obj("remaining" -> 0, "warning" -> true) + case ExpirationOk(duration) => Json.obj("remaining" -> duration.toSeconds, "warning" -> false) } Ok(status) } diff --git a/app/org/thp/cortex/controllers/UserCtrl.scala b/app/org/thp/cortex/controllers/UserCtrl.scala index 5bd9033e0..21eca3f88 100644 --- a/app/org/thp/cortex/controllers/UserCtrl.scala +++ b/app/org/thp/cortex/controllers/UserCtrl.scala @@ -20,7 +20,7 @@ import org.elastic4play.services.{AuthContext, AuthSrv, QueryDSL, QueryDef} import org.elastic4play._ @Singleton -class UserCtrl @Inject()( +class UserCtrl @Inject() ( userSrv: UserSrv, authSrv: AuthSrv, organizationSrv: OrganizationSrv, @@ -35,72 +35,72 @@ class UserCtrl @Inject()( private[UserCtrl] lazy val logger = Logger(getClass) @Timed - def create: Action[Fields] = authenticated(Roles.orgAdmin, Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def create: Action[Fields] = authenticated(Roles.orgAdmin, Roles.superAdmin).async(fieldsBodyParser) { implicit request => (for { - userOrganizationId ← if (request.userId == "init") Future.successful("cortex") else userSrv.getOrganizationId(request.userId) + userOrganizationId <- if (request.userId == "init") Future.successful("cortex") else userSrv.getOrganizationId(request.userId) organizationId = request.body.getString("organization").getOrElse(userOrganizationId) // Check if organization is valid - organization ← organizationSrv.get(organizationId) + organization <- organizationSrv.get(organizationId) if organization.status() == OrganizationStatus.Active && (request.roles.contains(Roles.superAdmin) || (userOrganizationId == organizationId && !request.body.getStrings("roles").getOrElse(Nil).contains(Roles.superAdmin.name))) - user ← userSrv.create(request.body.set("organization", organizationId)) + user <- userSrv.create(request.body.set("organization", organizationId)) } yield renderer.toOutput(CREATED, user)) .recoverWith { - case _: NoSuchElementException ⇒ Future.failed(AuthorizationError("You are not authorized to perform this action")) + case _: NoSuchElementException => Future.failed(AuthorizationError("You are not authorized to perform this action")) } } @Timed - def get(userId: String): Action[AnyContent] = authenticated(Roles.read, Roles.superAdmin).async { implicit request ⇒ + def get(userId: String): Action[AnyContent] = authenticated(Roles.read, Roles.superAdmin).async { implicit request => val isSuperAdmin = request.authContext.roles.contains(Roles.superAdmin) (for { - user ← userSrv.get(userId) - organizationId ← userSrv.getOrganizationId(request.userId) + user <- userSrv.get(userId) + 
organizationId <- userSrv.getOrganizationId(request.userId) if isSuperAdmin || organizationId == user.organization() } yield renderer.toOutput(OK, user)) .recoverWith { - case _: NoSuchElementException ⇒ Future.failed(NotFoundError(s"user $userId not found")) + case _: NoSuchElementException => Future.failed(NotFoundError(s"user $userId not found")) } } @Timed - def update(userId: String): Action[Fields] = authenticated().async(fieldsBodyParser) { implicit request ⇒ + def update(userId: String): Action[Fields] = authenticated().async(fieldsBodyParser) { implicit request => val fields = request.body def superAdminChecks: Future[Unit] = for { - userOrganizationId ← fields.getString("organization").fold(userSrv.getOrganizationId(userId))(Future.successful) - organization ← organizationSrv.get(userOrganizationId) - _ ← if (organization.status() == OrganizationStatus.Active) Future.successful(()) + userOrganizationId <- fields.getString("organization").fold(userSrv.getOrganizationId(userId))(Future.successful) + organization <- organizationSrv.get(userOrganizationId) + _ <- if (organization.status() == OrganizationStatus.Active) Future.successful(()) else Future.failed(BadRequestError(s"Organization $userOrganizationId is locked")) // check roles and organization - _ ← fields.getStrings("roles").map(_.flatMap(Roles.withName)).fold(Future.successful(())) { - case roles if userOrganizationId == "cortex" && roles == Seq(Roles.superAdmin) ⇒ Future.successful(()) - case roles if userOrganizationId != "cortex" && !roles.contains(Roles.superAdmin) ⇒ Future.successful(()) - case _ if userOrganizationId == "cortex" ⇒ Future.failed(BadRequestError("The organization \"cortex\" can contain only superadmin users")) - case _ ⇒ Future.failed(BadRequestError("The organization \"cortex\" alone can contain superadmin users")) + _ <- fields.getStrings("roles").map(_.flatMap(Roles.withName)).fold(Future.successful(())) { + case roles if userOrganizationId == "cortex" && roles == Seq(Roles.superAdmin) => Future.successful(()) + case roles if userOrganizationId != "cortex" && !roles.contains(Roles.superAdmin) => Future.successful(()) + case _ if userOrganizationId == "cortex" => Future.failed(BadRequestError("The organization \"cortex\" can contain only superadmin users")) + case _ => Future.failed(BadRequestError("The organization \"cortex\" alone can contain superadmin users")) } // check status - _ ← fields.getString("status").fold(Future.successful(())) { - case _ if userId != request.userId ⇒ Future.successful(()) - case _ ⇒ Future.failed(BadRequestError("You can't modify your status")) + _ <- fields.getString("status").fold(Future.successful(())) { + case _ if userId != request.userId => Future.successful(()) + case _ => Future.failed(BadRequestError("You can't modify your status")) } } yield () def orgAdminChecks: Future[Unit] = for { - subjectUserOrganization ← userSrv.getOrganizationId(request.userId) - targetUserOrganization ← userSrv.getOrganizationId(userId) - _ ← if (subjectUserOrganization == targetUserOrganization) Future.successful(()) else Future.failed(NotFoundError(s"user $userId not found")) + subjectUserOrganization <- userSrv.getOrganizationId(request.userId) + targetUserOrganization <- userSrv.getOrganizationId(userId) + _ <- if (subjectUserOrganization == targetUserOrganization) Future.successful(()) else Future.failed(NotFoundError(s"user $userId not found")) // check roles - _ ← fields.getStrings("roles").map(_.flatMap(Roles.withName)).fold(Future.successful(())) { - case roles if 
!roles.contains(Roles.superAdmin) ⇒ Future.successful(()) - case _ ⇒ Future.failed(AuthorizationError("You can't give superadmin right to an user")) + _ <- fields.getStrings("roles").map(_.flatMap(Roles.withName)).fold(Future.successful(())) { + case roles if !roles.contains(Roles.superAdmin) => Future.successful(()) + case _ => Future.failed(AuthorizationError("You can't give superadmin right to an user")) } // check organization - _ ← if (fields.getString("organization").fold(true)(_ == targetUserOrganization)) Future.successful(()) + _ <- if (fields.getString("organization").fold(true)(_ == targetUserOrganization)) Future.successful(()) else Future.failed(AuthorizationError("You can't move an user to another organization")) } yield () @@ -117,76 +117,76 @@ class UserCtrl @Inject()( else Future.successful(()) for { - _ ← if (userId == request.authContext.userId) userChecks + _ <- if (userId == request.authContext.userId) userChecks else if (request.authContext.roles.contains(Roles.superAdmin)) superAdminChecks else if (request.authContext.roles.contains(Roles.orgAdmin)) orgAdminChecks else Future.failed(AuthorizationError("You are not permitted to change user settings")) - _ ← authChecks - user ← userSrv.update(userId, request.body) + _ <- authChecks + user <- userSrv.update(userId, request.body) } yield renderer.toOutput(OK, user) } @Timed - def setPassword(userId: String): Action[Fields] = authenticated(Roles.orgAdmin, Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def setPassword(userId: String): Action[Fields] = authenticated(Roles.orgAdmin, Roles.superAdmin).async(fieldsBodyParser) { implicit request => val isSuperAdmin = request.authContext.roles.contains(Roles.superAdmin) request .body .getString("password") - .fold(Future.failed[Result](MissingAttributeError("password"))) { password ⇒ + .fold(Future.failed[Result](MissingAttributeError("password"))) { password => for { - targetOrganization ← userSrv.getOrganizationId(userId) - userOrganization ← userSrv.getOrganizationId(request.userId) + targetOrganization <- userSrv.getOrganizationId(userId) + userOrganization <- userSrv.getOrganizationId(request.userId) if targetOrganization == userOrganization || isSuperAdmin - _ ← authSrv.setPassword(userId, password) + _ <- authSrv.setPassword(userId, password) } yield NoContent } - .recoverWith { case _: NoSuchElementException ⇒ Future.failed(NotFoundError(s"user $userId not found")) } + .recoverWith { case _: NoSuchElementException => Future.failed(NotFoundError(s"user $userId not found")) } } @Timed - def changePassword(userId: String): Action[Fields] = authenticated().async(fieldsBodyParser) { implicit request ⇒ + def changePassword(userId: String): Action[Fields] = authenticated().async(fieldsBodyParser) { implicit request => if (userId == request.authContext.userId) { for { - password ← request.body.getString("password").fold(Future.failed[String](MissingAttributeError("password")))(Future.successful) - currentPassword ← request + password <- request.body.getString("password").fold(Future.failed[String](MissingAttributeError("password")))(Future.successful) + currentPassword <- request .body .getString("currentPassword") .fold(Future.failed[String](MissingAttributeError("currentPassword")))(Future.successful) - _ ← authSrv.changePassword(userId, currentPassword, password) + _ <- authSrv.changePassword(userId, currentPassword, password) } yield NoContent } else Future.failed(AuthorizationError("You can't change password of another user")) } @Timed - def delete(userId: 
String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request ⇒ + def delete(userId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request => val isSuperAdmin = request.authContext.roles.contains(Roles.superAdmin) for { - targetOrganization ← userSrv.getOrganizationId(userId) - userOrganization ← userSrv.getOrganizationId(request.userId) - _ ← if (targetOrganization == userOrganization || isSuperAdmin) Future.successful(()) + targetOrganization <- userSrv.getOrganizationId(userId) + userOrganization <- userSrv.getOrganizationId(request.userId) + _ <- if (targetOrganization == userOrganization || isSuperAdmin) Future.successful(()) else Future.failed(NotFoundError(s"user $userId not found")) - _ ← if (userId != request.userId) Future.successful(()) else Future.failed(BadRequestError(s"You cannot disable your own account")) - _ ← userSrv.delete(userId) + _ <- if (userId != request.userId) Future.successful(()) else Future.failed(BadRequestError(s"You cannot disable your own account")) + _ <- userSrv.delete(userId) } yield NoContent } @Timed - def currentUser: Action[AnyContent] = Action.async { implicit request ⇒ + def currentUser: Action[AnyContent] = Action.async { implicit request => for { - authContext ← authenticated.getContext(request) - user ← userSrv.get(authContext.userId) + authContext <- authenticated.getContext(request) + user <- userSrv.get(authContext.userId) preferences = Try(Json.parse(user.preferences())) .getOrElse { logger.warn(s"User ${authContext.userId} has invalid preference format: ${user.preferences()}") JsObject.empty } - json = user.toJson + ("preferences" → preferences) + json = user.toJson + ("preferences" -> preferences) } yield renderer.toOutput(OK, json) } @Timed - def find: Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request ⇒ + def find: Action[Fields] = authenticated(Roles.superAdmin).async(fieldsBodyParser) { implicit request => val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) val range = request.body.getString("range") val sort = request.body.getStrings("sort").getOrElse(Nil) @@ -196,7 +196,7 @@ class UserCtrl @Inject()( } def findForOrganization(organizationId: String): Action[Fields] = authenticated(Roles.orgAdmin, Roles.superAdmin).async(fieldsBodyParser) { - implicit request ⇒ + implicit request => import org.elastic4play.services.QueryDSL._ val isSuperAdmin = request.roles.contains(Roles.superAdmin) val query = request.body.getValue("query").fold[QueryDef](QueryDSL.any)(_.as[QueryDef]) @@ -212,37 +212,37 @@ class UserCtrl @Inject()( if (authContext.roles.contains(Roles.superAdmin)) Future.successful(()) else (for { - userOrganization1 ← userSrv.getOrganizationId(authContext.userId) - userOrganization2 ← userSrv.getOrganizationId(userId) + userOrganization1 <- userSrv.getOrganizationId(authContext.userId) + userOrganization2 <- userSrv.getOrganizationId(userId) if userOrganization1 == userOrganization2 } yield ()) - .recoverWith { case _ ⇒ Future.failed(NotFoundError(s"user $userId not found")) } + .recoverWith { case _ => Future.failed(NotFoundError(s"user $userId not found")) } @Timed - def getKey(userId: String): Action[AnyContent] = authenticated().async { implicit request ⇒ + def getKey(userId: String): Action[AnyContent] = authenticated().async { implicit request => for { - _ ← checkUserOrganization(userId) - _ ← if (userId == request.userId || 
request.roles.contains(Roles.orgAdmin) || request.roles.contains(Roles.superAdmin)) Future.successful(()) + _ <- checkUserOrganization(userId) + _ <- if (userId == request.userId || request.roles.contains(Roles.orgAdmin) || request.roles.contains(Roles.superAdmin)) Future.successful(()) else Future.failed(AuthorizationError("You are not authorized to perform this operation")) - key ← authSrv.getKey(userId) + key <- authSrv.getKey(userId) } yield Ok(key) } @Timed - def removeKey(userId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request ⇒ + def removeKey(userId: String): Action[AnyContent] = authenticated(Roles.orgAdmin, Roles.superAdmin).async { implicit request => for { - _ ← checkUserOrganization(userId) - _ ← authSrv.removeKey(userId) + _ <- checkUserOrganization(userId) + _ <- authSrv.removeKey(userId) } yield NoContent } @Timed - def renewKey(userId: String): Action[AnyContent] = authenticated().async { implicit request ⇒ + def renewKey(userId: String): Action[AnyContent] = authenticated().async { implicit request => for { - _ ← checkUserOrganization(userId) - _ ← if (userId == request.userId || request.roles.contains(Roles.orgAdmin) || request.roles.contains(Roles.superAdmin)) Future.successful(()) + _ <- checkUserOrganization(userId) + _ <- if (userId == request.userId || request.roles.contains(Roles.orgAdmin) || request.roles.contains(Roles.superAdmin)) Future.successful(()) else Future.failed(AuthorizationError("You are not authorized to perform this operation")) - key ← authSrv.renewKey(userId) + key <- authSrv.renewKey(userId) } yield Ok(key) } } diff --git a/app/org/thp/cortex/models/Artifact.scala b/app/org/thp/cortex/models/Artifact.scala index 8159dfcde..b89f20810 100644 --- a/app/org/thp/cortex/models/Artifact.scala +++ b/app/org/thp/cortex/models/Artifact.scala @@ -4,9 +4,9 @@ import javax.inject.{Inject, Singleton} import play.api.libs.json.JsObject -import org.elastic4play.models.{AttributeDef, EntityDef, AttributeFormat ⇒ F, AttributeOption ⇒ O, ChildModelDef} +import org.elastic4play.models.{AttributeDef, EntityDef, AttributeFormat => F, AttributeOption => O, ChildModelDef} -trait ArtifactAttributes { _: AttributeDef ⇒ +trait ArtifactAttributes { _: AttributeDef => val dataType = attribute("dataType", F.stringFmt, "Type of the artifact", O.readonly) val data = optionalAttribute("data", F.rawFmt, "Content of the artifact", O.readonly) val attachment = optionalAttribute("attachment", F.attachmentFmt, "Artifact file content", O.readonly) @@ -16,7 +16,7 @@ trait ArtifactAttributes { _: AttributeDef ⇒ } @Singleton -class ArtifactModel @Inject()(reportModel: ReportModel) +class ArtifactModel @Inject() (reportModel: ReportModel) extends ChildModelDef[ArtifactModel, Artifact, ReportModel, Report](reportModel, "artifact", "Artifact", "/artifact") with ArtifactAttributes {} diff --git a/app/org/thp/cortex/models/Audit.scala b/app/org/thp/cortex/models/Audit.scala index d43ca5f5a..cf94b1eca 100644 --- a/app/org/thp/cortex/models/Audit.scala +++ b/app/org/thp/cortex/models/Audit.scala @@ -21,30 +21,30 @@ import org.elastic4play.models.{ ObjectAttributeFormat, OptionalAttributeFormat, StringAttributeFormat, - AttributeOption ⇒ O + AttributeOption => O } import org.elastic4play.services.AuditableAction import org.elastic4play.services.JsonFormat.auditableActionFormat -trait AuditedModel { self: BaseModelDef ⇒ +trait AuditedModel { self: BaseModelDef => def attributes: Seq[Attribute[_]] lazy val auditedAttributes: Map[String, 
Attribute[_]] = - attributes.collect { case a if !a.isUnaudited ⇒ a.attributeName → a }.toMap + attributes.collect { case a if !a.isUnaudited => a.attributeName -> a }.toMap def selectAuditedAttributes(attrs: JsObject) = JsObject { attrs.fields.flatMap { - case (attrName, value) ⇒ + case (attrName, value) => val attrNames = attrName.split("\\.").toSeq - auditedAttributes.get(attrNames.head).map { _ ⇒ + auditedAttributes.get(attrNames.head).map { _ => val reverseNames = attrNames.reverse - reverseNames.drop(1).foldLeft(reverseNames.head → value)((jsTuple, name) ⇒ name → JsObject(Seq(jsTuple))) + reverseNames.drop(1).foldLeft(reverseNames.head -> value)((jsTuple, name) => name -> JsObject(Seq(jsTuple))) } } } } -trait AuditAttributes { _: AttributeDef ⇒ +trait AuditAttributes { _: AttributeDef => def detailsAttributes: Seq[Attribute[_]] val operation: A[AuditableAction.Value] = attribute("operation", AttributeFormat.enumFmt(AuditableAction), "Operation", O.readonly) @@ -70,14 +70,14 @@ class AuditModel(auditName: String, auditedModels: immutable.Set[AuditedModel]) def mergeAttributeFormat(context: String, format1: AttributeFormat[_], format2: AttributeFormat[_]): Option[AttributeFormat[_]] = (format1, format2) match { - case (OptionalAttributeFormat(f1), f2) ⇒ mergeAttributeFormat(context, f1, f2) - case (f1, OptionalAttributeFormat(f2)) ⇒ mergeAttributeFormat(context, f1, f2) - case (MultiAttributeFormat(f1), MultiAttributeFormat(f2)) ⇒ mergeAttributeFormat(context, f1, f2).map(MultiAttributeFormat(_)) - case (f1, EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_)) ⇒ mergeAttributeFormat(context, f1, StringAttributeFormat) - case (EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_), f2) ⇒ mergeAttributeFormat(context, StringAttributeFormat, f2) - case (ObjectAttributeFormat(subAttributes1), ObjectAttributeFormat(subAttributes2)) ⇒ mergeAttributes(context, subAttributes1 ++ subAttributes2) - case (f1, f2) if f1 == f2 ⇒ Some(f1) - case (f1, f2) ⇒ + case (OptionalAttributeFormat(f1), f2) => mergeAttributeFormat(context, f1, f2) + case (f1, OptionalAttributeFormat(f2)) => mergeAttributeFormat(context, f1, f2) + case (MultiAttributeFormat(f1), MultiAttributeFormat(f2)) => mergeAttributeFormat(context, f1, f2).map(MultiAttributeFormat(_)) + case (f1, EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_)) => mergeAttributeFormat(context, f1, StringAttributeFormat) + case (EnumerationAttributeFormat(_) | ListEnumerationAttributeFormat(_), f2) => mergeAttributeFormat(context, StringAttributeFormat, f2) + case (ObjectAttributeFormat(subAttributes1), ObjectAttributeFormat(subAttributes2)) => mergeAttributes(context, subAttributes1 ++ subAttributes2) + case (f1, f2) if f1 == f2 => Some(f1) + case (f1, f2) => logger.warn(s"Attribute $f1 != $f2") None @@ -87,22 +87,22 @@ class AuditModel(auditName: String, auditedModels: immutable.Set[AuditedModel]) val mergeAttributes: Iterable[Option[Attribute[_]]] = attributes .groupBy(_.attributeName) .map { - case (_name, _attributes) ⇒ + case (_name, _attributes) => _attributes - .map(a ⇒ Some(a.format)) + .map(a => Some(a.format)) .reduce[Option[AttributeFormat[_]]] { - case (Some(f1), Some(f2)) ⇒ mergeAttributeFormat(context + "." + _name, f1, f2) - case _ ⇒ None + case (Some(f1), Some(f2)) => mergeAttributeFormat(context + "." 
+ _name, f1, f2) + case _ => None } .map { - case oaf: OptionalAttributeFormat[_] ⇒ oaf: AttributeFormat[_] - case maf: MultiAttributeFormat[_] ⇒ maf: AttributeFormat[_] - case f ⇒ OptionalAttributeFormat(f): AttributeFormat[_] + case oaf: OptionalAttributeFormat[_] => oaf: AttributeFormat[_] + case maf: MultiAttributeFormat[_] => maf: AttributeFormat[_] + case f => OptionalAttributeFormat(f): AttributeFormat[_] } - .map(format ⇒ Attribute("audit", _name, format, Nil, None, "")) + .map(format => Attribute("audit", _name, format, Nil, None, "")) .orElse { logger.error( - s"Mapping is not consistent on attribute $context:\n${_attributes.map(a ⇒ a.modelName + "/" + a.attributeName + ": " + a.format.name).mkString("\n")}" + s"Mapping is not consistent on attribute $context:\n${_attributes.map(a => a.modelName + "/" + a.attributeName + ": " + a.format.name).mkString("\n")}" ) None } @@ -119,7 +119,7 @@ class AuditModel(auditName: String, auditedModels: immutable.Set[AuditedModel]) "audit", auditedModels .flatMap(_.attributes) - .filter(a ⇒ a.isModel && !a.isUnaudited) + .filter(a => a.isModel && !a.isUnaudited) .toSeq ).map(_.subAttributes) .getOrElse(Nil) diff --git a/app/org/thp/cortex/models/BaseConfig.scala b/app/org/thp/cortex/models/BaseConfig.scala index 1764b5cae..1f5164955 100644 --- a/app/org/thp/cortex/models/BaseConfig.scala +++ b/app/org/thp/cortex/models/BaseConfig.scala @@ -12,19 +12,19 @@ case class BaseConfig(name: String, workerNames: Seq[String], items: Seq[Configu } object BaseConfig { - implicit val writes: Writes[BaseConfig] = Writes[BaseConfig] { baseConfig ⇒ + implicit val writes: Writes[BaseConfig] = Writes[BaseConfig] { baseConfig => Json.obj( - "name" → baseConfig.name, - "workers" → baseConfig.workerNames, - "configurationItems" → baseConfig.items, - "config" → baseConfig.config.fold(JsObject.empty)(_.jsonConfig) + "name" -> baseConfig.name, + "workers" -> baseConfig.workerNames, + "configurationItems" -> baseConfig.items, + "config" -> baseConfig.config.fold(JsObject.empty)(_.jsonConfig) ) } def global(tpe: WorkerType.Type, configuration: Configuration): BaseConfig = { val typedItems = tpe match { - case WorkerType.responder ⇒ Nil - case WorkerType.analyzer ⇒ + case WorkerType.responder => Nil + case WorkerType.analyzer => Seq( ConfigurationDefinitionItem( "auto_extract_artifacts", @@ -40,7 +40,7 @@ object BaseConfig { WorkerConfigItemType.number, multi = false, required = false, - configuration.getOptional[Duration]("cache.job").map(d ⇒ JsNumber(d.toMinutes)) + configuration.getOptional[Duration]("cache.job").map(d => JsNumber(d.toMinutes)) ) ) } @@ -57,7 +57,7 @@ object BaseConfig { WorkerConfigItemType.number, multi = false, required = false, - configuration.getOptional[Duration]("job.timeout").map(d ⇒ JsNumber(d.toMinutes)) + configuration.getOptional[Duration]("job.timeout").map(d => JsNumber(d.toMinutes)) ) ), None diff --git a/app/org/thp/cortex/models/Job.scala b/app/org/thp/cortex/models/Job.scala index 2bdfe17e2..eaaea43b4 100644 --- a/app/org/thp/cortex/models/Job.scala +++ b/app/org/thp/cortex/models/Job.scala @@ -8,7 +8,7 @@ import javax.inject.{Inject, Singleton} import org.thp.cortex.models.JsonFormat.workerTypeFormat import org.elastic4play.models.JsonFormat.enumFormat -import org.elastic4play.models.{AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O} object JobStatus extends 
Enumeration with HiveEnumeration { type Type = Value @@ -17,7 +17,7 @@ object JobStatus extends Enumeration with HiveEnumeration { } trait JobAttributes { - _: AttributeDef ⇒ + _: AttributeDef => val workerDefinitionId = attribute("workerDefinitionId", F.stringFmt, "Worker definition id", O.readonly) val workerId = attribute("workerId", F.stringFmt, "Worker id", O.readonly) val workerName = attribute("workerName", F.stringFmt, "Worker name", O.readonly) @@ -41,9 +41,9 @@ trait JobAttributes { } @Singleton -class JobModel @Inject()() extends ModelDef[JobModel, Job]("job", "Job", "/job") with JobAttributes with AuditedModel { +class JobModel @Inject() () extends ModelDef[JobModel, Job]("job", "Job", "/job") with JobAttributes with AuditedModel { - override val removeAttribute: JsObject = Json.obj("status" → JobStatus.Deleted) + override val removeAttribute: JsObject = Json.obj("status" -> JobStatus.Deleted) override def defaultSortBy: Seq[String] = Seq("-createdAt") } @@ -52,19 +52,18 @@ class Job(model: JobModel, attributes: JsObject) extends EntityDef[JobModel, Job val params: JsObject = Try(Json.parse(parameters()).as[JsObject]).getOrElse(JsObject.empty) override def toJson: JsObject = { - val output = input().fold(super.toJson)( - i ⇒ - super.toJson + - ("input" → Json.parse(i)) + val output = input().fold(super.toJson)(i => + super.toJson + + ("input" -> Json.parse(i)) ) + - ("parameters" → params) + - ("analyzerId" → JsString(workerId())) + - ("analyzerName" → JsString(workerName())) + - ("analyzerDefinitionId" → JsString(workerDefinitionId())) + - ("date" → Json.toJson(createdAt)) + ("parameters" -> params) + + ("analyzerId" -> JsString(workerId())) + + ("analyzerName" -> JsString(workerName())) + + ("analyzerDefinitionId" -> JsString(workerDefinitionId())) + + ("date" -> Json.toJson(createdAt)) data() match { - case Some(d) if tpe() == WorkerType.responder ⇒ output + ("data" → Json.parse(d)) - case _ ⇒ output + case Some(d) if tpe() == WorkerType.responder => output + ("data" -> Json.parse(d)) + case _ => output } } } diff --git a/app/org/thp/cortex/models/JsonFormat.scala b/app/org/thp/cortex/models/JsonFormat.scala index e455bed5f..b51d728bd 100644 --- a/app/org/thp/cortex/models/JsonFormat.scala +++ b/app/org/thp/cortex/models/JsonFormat.scala @@ -6,10 +6,10 @@ import org.elastic4play.models.JsonFormat.enumFormat import org.elastic4play.services.Role object JsonFormat { - private val roleWrites: Writes[Role] = Writes((role: Role) ⇒ JsString(role.name.toLowerCase())) + private val roleWrites: Writes[Role] = Writes((role: Role) => JsString(role.name.toLowerCase())) private val roleReads: Reads[Role] = Reads { - case JsString(s) if Roles.isValid(s) ⇒ JsSuccess(Roles.withName(s).get) - case _ ⇒ JsError(Seq(JsPath → Seq(JsonValidationError(s"error.expected.role(${Roles.roleNames}")))) + case JsString(s) if Roles.isValid(s) => JsSuccess(Roles.withName(s).get) + case _ => JsError(Seq(JsPath -> Seq(JsonValidationError(s"error.expected.role(${Roles.roleNames}")))) } implicit val roleFormat: Format[Role] = Format[Role](roleReads, roleWrites) implicit val workerTypeFormat: Format[WorkerType.Value] = enumFormat(WorkerType) diff --git a/app/org/thp/cortex/models/Migration.scala b/app/org/thp/cortex/models/Migration.scala index bad438b00..77fdd5171 100644 --- a/app/org/thp/cortex/models/Migration.scala +++ b/app/org/thp/cortex/models/Migration.scala @@ -11,79 +11,77 @@ import org.thp.cortex.services.{OrganizationSrv, UserSrv, WorkerSrv} import org.elastic4play.controllers.Fields import 
org.elastic4play.services.Operation._ -import org.elastic4play.services.{DatabaseState, IndexType, MigrationOperations, Operation} +import org.elastic4play.services.{DatabaseState, MigrationOperations, Operation} import org.elastic4play.utils.Hasher @Singleton -class Migration @Inject()(userSrv: UserSrv, organizationSrv: OrganizationSrv, workerSrv: WorkerSrv, implicit val ec: ExecutionContext) +class Migration @Inject() (userSrv: UserSrv, organizationSrv: OrganizationSrv, workerSrv: WorkerSrv, implicit val ec: ExecutionContext) extends MigrationOperations { - lazy val logger = Logger(getClass) + lazy val logger: Logger = Logger(getClass) def beginMigration(version: Int): Future[Unit] = Future.successful(()) def endMigration(version: Int): Future[Unit] = - userSrv.inInitAuthContext { implicit authContext ⇒ + userSrv.inInitAuthContext { implicit authContext => organizationSrv - .create(Fields(Json.obj("name" → "cortex", "description" → "Default organization", "status" → "Active"))) - .transform(_ ⇒ Success(())) // ignore errors (already exist) + .create(Fields(Json.obj("name" -> "cortex", "description" -> "Default organization", "status" -> "Active"))) + .transform(_ => Success(())) // ignore errors (already exist) } - override def indexType(version: Int): IndexType.Value = if (version > 3) IndexType.indexWithoutMappingTypes else IndexType.indexWithMappingTypes - val operations: PartialFunction[DatabaseState, Seq[Operation]] = { - case DatabaseState(1) ⇒ + case DatabaseState(1) => val hasher = Hasher("MD5") Seq( // add type to analyzer - addAttribute("analyzer", "type" → JsString("analyzer")), + addAttribute("analyzer", "type" -> JsString("analyzer")), renameAttribute("job", "workerDefinitionId", "analyzerDefinitionId"), renameAttribute("job", "workerId", "analyzerId"), renameAttribute("job", "workerName", "analyzerName"), - addAttribute("job", "type" → JsString(WorkerType.analyzer.toString)), - addAttribute("report", "operations" → JsString("[]")), + addAttribute("job", "type" -> JsString(WorkerType.analyzer.toString)), + addAttribute("report", "operations" -> JsString("[]")), renameEntity("analyzer", "worker"), renameAttribute("worker", "workerDefinitionId", "analyzerDefinitionId"), - addAttribute("worker", "type" → JsString(WorkerType.analyzer.toString)), - mapEntity("worker") { worker ⇒ + addAttribute("worker", "type" -> JsString(WorkerType.analyzer.toString)), + mapEntity("worker") { worker => val id = for { - organizationId ← (worker \ "_parent").asOpt[String] - name ← (worker \ "name").asOpt[String] - tpe ← (worker \ "type").asOpt[String] + organizationId <- (worker \ "_parent").asOpt[String] + name <- (worker \ "name").asOpt[String] + tpe <- (worker \ "type").asOpt[String] } yield hasher.fromString(s"${organizationId}_${name}_$tpe").head.toString - worker + ("_id" → JsString(id.getOrElse(""))) + worker + ("_id" -> JsString(id.getOrElse(""))) }, renameEntity("analyzerConfig", "workerConfig"), - addAttribute("workerConfig", "type" → JsString(WorkerType.analyzer.toString)) + addAttribute("workerConfig", "type" -> JsString(WorkerType.analyzer.toString)) ) - case DatabaseState(2) ⇒ - Seq(mapEntity("worker") { worker ⇒ + case DatabaseState(2) => + Seq(mapEntity("worker") { worker => val definitionId = (worker \ "workerDefinitionId").asOpt[String] definitionId .flatMap(workerSrv.getDefinition(_).toOption) .fold { logger.warn(s"no definition found for worker ${definitionId.getOrElse(worker)}. 
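// Illustrative sketch (not from the patch): the DatabaseState(1) step above re-keys "worker"
// documents with a stable id derived from MD5("organizationId_name_type"). elastic4play's
// Hasher is replaced by java.security.MessageDigest so this stands alone; the lower-case hex
// rendering is assumed to match what Hasher("MD5") produces.
import java.security.MessageDigest

object WorkerIdSketch {
  def workerId(organizationId: String, name: String, tpe: String): String = {
    val digest = MessageDigest.getInstance("MD5").digest(s"${organizationId}_${name}_$tpe".getBytes("UTF-8"))
    digest.map(b => f"$b%02x").mkString
  }

  def main(args: Array[String]): Unit = {
    // Same inputs always give the same id, so re-running the migration cannot create duplicates.
    println(workerId("cortex", "SomeAnalyzer_1_0", "analyzer"))
    println(workerId("cortex", "SomeAnalyzer_1_0", "analyzer")) // identical output
  }
}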
You should probably have to disable and re-enable it") worker - } { definition ⇒ + } { definition => worker + - ("version" → JsString(definition.version)) + - ("author" → JsString(definition.author)) + - ("url" → JsString(definition.url)) + - ("license" → JsString(definition.license)) + - ("command" → definition.command.fold[JsValue](JsNull)(c ⇒ JsString(c.toString))) + - ("dockerImage" → definition.dockerImage.fold[JsValue](JsNull)(JsString.apply)) + - ("baseConfig" → definition.baseConfiguration.fold[JsValue](JsNull)(JsString.apply)) + ("version" -> JsString(definition.version)) + + ("author" -> JsString(definition.author)) + + ("url" -> JsString(definition.url)) + + ("license" -> JsString(definition.license)) + + ("command" -> definition.command.fold[JsValue](JsNull)(c => JsString(c.toString))) + + ("dockerImage" -> definition.dockerImage.fold[JsValue](JsNull)(JsString.apply)) + + ("baseConfig" -> definition.baseConfiguration.fold[JsValue](JsNull)(JsString.apply)) } }) - case DatabaseState(3) ⇒ + case DatabaseState(3) => Seq( - mapEntity("sequence") { seq ⇒ + mapEntity("sequence") { seq => val oldId = (seq \ "_id").as[String] val counter = (seq \ "counter").as[JsNumber] seq - "counter" - "_routing" + - ("_id" → JsString("sequence_" + oldId)) + - ("sequenceCounter" → counter) + ("_id" -> JsString("sequence_" + oldId)) + + ("sequenceCounter" -> counter) } ) } diff --git a/app/org/thp/cortex/models/Organization.scala b/app/org/thp/cortex/models/Organization.scala index 32a500e04..32934491f 100644 --- a/app/org/thp/cortex/models/Organization.scala +++ b/app/org/thp/cortex/models/Organization.scala @@ -8,7 +8,7 @@ import play.api.Logger import play.api.libs.json.{JsNumber, JsObject, JsString, Json} import org.elastic4play.models.JsonFormat.enumFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O} import org.elastic4play.services.FindSrv object OrganizationStatus extends Enumeration with HiveEnumeration { @@ -17,7 +17,7 @@ object OrganizationStatus extends Enumeration with HiveEnumeration { implicit val reads = enumFormat(this) } -trait OrganizationAttributes { _: AttributeDef ⇒ +trait OrganizationAttributes { _: AttributeDef => val name = attribute("name", F.stringFmt, "Organization name", O.form) val _id = attribute("_id", F.stringFmt, "Organization name", O.model) val description = attribute("description", F.textFmt, "Organization description") @@ -25,7 +25,7 @@ trait OrganizationAttributes { _: AttributeDef ⇒ } @Singleton -class OrganizationModel @Inject()( +class OrganizationModel @Inject() ( findSrv: FindSrv, userModelProvider: Provider[UserModel], workerModelProvider: Provider[WorkerModel], @@ -37,49 +37,49 @@ class OrganizationModel @Inject()( private lazy val logger = Logger(getClass) lazy val userModel = userModelProvider.get lazy val workerModel = workerModelProvider.get - override def removeAttribute = Json.obj("status" → "Locked") + override def removeAttribute = Json.obj("status" -> "Locked") override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = Future.successful { - (attrs \ "name").asOpt[JsString].fold(attrs) { orgName ⇒ - attrs - "name" + ("_id" → orgName) + (attrs \ "name").asOpt[JsString].fold(attrs) { orgName => + attrs - "name" + ("_id" -> orgName) } } private def buildUserStats(organization: Organization): 
Future[JsObject] = { import org.elastic4play.services.QueryDSL._ findSrv(userModel, "organization" ~= organization.id, groupByField("status", selectCount)) - .map { userStatsJson ⇒ + .map { userStatsJson => val (userCount, userStats) = userStatsJson.value.foldLeft((0L, JsObject.empty)) { - case ((total, s), (key, value)) ⇒ + case ((total, s), (key, value)) => val count = (value \ "count").as[Long] - (total + count, s + (key → JsNumber(count))) + (total + count, s + (key -> JsNumber(count))) } - Json.obj("users" → (userStats + ("total" → JsNumber(userCount)))) + Json.obj("users" -> (userStats + ("total" -> JsNumber(userCount)))) } } private def buildWorkerStats(organization: Organization): Future[JsObject] = { import org.elastic4play.services.QueryDSL._ findSrv(workerModel, withParent(organization), groupByField("status", selectCount)) - .map { workerStatsJson ⇒ + .map { workerStatsJson => val (workerCount, workerStats) = workerStatsJson.value.foldLeft((0L, JsObject.empty)) { - case ((total, s), (key, value)) ⇒ + case ((total, s), (key, value)) => val count = (value \ "count").as[Long] - (total + count, s + (key → JsNumber(count))) + (total + count, s + (key -> JsNumber(count))) } - Json.obj("workers" → (workerStats + ("total" → JsNumber(workerCount)))) + Json.obj("workers" -> (workerStats + ("total" -> JsNumber(workerCount)))) } } override def getStats(entity: BaseEntity): Future[JsObject] = entity match { - case organization: Organization ⇒ + case organization: Organization => for { - userStats ← buildUserStats(organization) - workerStats ← buildWorkerStats(organization) + userStats <- buildUserStats(organization) + workerStats <- buildWorkerStats(organization) } yield userStats ++ workerStats - case other ⇒ + case other => logger.warn(s"Request caseStats from a non-case entity ?! 
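// Illustrative sketch (not from the patch): the foldLeft used by buildUserStats/buildWorkerStats
// above — per-status aggregation buckets are folded into one object plus a running total.
// The bucket shape (status -> {"count": n}) mimics what findSrv/groupByField returns; the
// sample numbers are invented.
import play.api.libs.json._

object StatsFoldSketch {
  def summarize(buckets: JsObject): JsObject = {
    val (total, perStatus) = buckets.value.foldLeft((0L, JsObject.empty)) {
      case ((sum, acc), (status, bucket)) =>
        val count = (bucket \ "count").as[Long]
        (sum + count, acc + (status -> JsNumber(count)))
    }
    Json.obj("users" -> (perStatus + ("total" -> JsNumber(total))))
  }

  def main(args: Array[String]): Unit = {
    val buckets = Json.obj("Ok" -> Json.obj("count" -> 3), "Locked" -> Json.obj("count" -> 1))
    println(summarize(buckets)) // users: Ok -> 3, Locked -> 1, total -> 4
  }
}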
${other.getClass}:$other") Future.successful(Json.obj()) } @@ -89,5 +89,5 @@ class OrganizationModel @Inject()( class Organization(model: OrganizationModel, attributes: JsObject) extends EntityDef[OrganizationModel, Organization](model, attributes) with OrganizationAttributes { - override def toJson: JsObject = super.toJson + ("name" → JsString(id)) + override def toJson: JsObject = super.toJson + ("name" -> JsString(id)) } diff --git a/app/org/thp/cortex/models/Report.scala b/app/org/thp/cortex/models/Report.scala index c1959e98a..73f90dfba 100644 --- a/app/org/thp/cortex/models/Report.scala +++ b/app/org/thp/cortex/models/Report.scala @@ -4,16 +4,16 @@ import javax.inject.{Inject, Singleton} import play.api.libs.json.JsObject -import org.elastic4play.models.{AttributeDef, EntityDef, AttributeFormat ⇒ F, AttributeOption ⇒ O, ChildModelDef} +import org.elastic4play.models.{AttributeDef, EntityDef, AttributeFormat => F, AttributeOption => O, ChildModelDef} -trait ReportAttributes { _: AttributeDef ⇒ +trait ReportAttributes { _: AttributeDef => val full = attribute("full", F.rawFmt, "Full content of the report", O.readonly) val summary = attribute("summary", F.rawFmt, "Summary of the report", O.readonly) val operations = attribute("operations", F.rawFmt, "Update operations applied at the end of the job", "[]", O.unaudited) } @Singleton -class ReportModel @Inject()(jobModel: JobModel) +class ReportModel @Inject() (jobModel: JobModel) extends ChildModelDef[ReportModel, Report, JobModel, Job](jobModel, "report", "Report", "/report") with ReportAttributes {} diff --git a/app/org/thp/cortex/models/Roles.scala b/app/org/thp/cortex/models/Roles.scala index 0c906f2b4..3116d4be0 100644 --- a/app/org/thp/cortex/models/Roles.scala +++ b/app/org/thp/cortex/models/Roles.scala @@ -2,8 +2,8 @@ package org.thp.cortex.models import play.api.libs.json.{JsString, JsValue} -import com.sksamuel.elastic4s.http.ElasticDsl.keywordField -import com.sksamuel.elastic4s.mappings.KeywordField +import com.sksamuel.elastic4s.ElasticDsl.keywordField +import com.sksamuel.elastic4s.requests.mappings.KeywordField import org.scalactic.{Every, Good, One, Or} import org.elastic4play.{AttributeError, InvalidFormatAttributeError} @@ -32,8 +32,8 @@ object Roles { object RoleAttributeFormat extends AttributeFormat[Role]("role") { override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match { - case JsString(v) if subNames.isEmpty && Roles.isValid(v) ⇒ Good(value) - case _ ⇒ formatError(JsonInputValue(value)) + case JsString(v) if subNames.isEmpty && Roles.isValid(v) => Good(value) + case _ => formatError(JsonInputValue(value)) } override def fromInputValue(subNames: Seq[String], value: InputValue): Role Or Every[AttributeError] = @@ -41,10 +41,10 @@ object RoleAttributeFormat extends AttributeFormat[Role]("role") { formatError(value) else (value match { - case StringInputValue(Seq(v)) ⇒ Good(v) - case JsonInputValue(JsString(v)) ⇒ Good(v) - case _ ⇒ formatError(value) - }).flatMap(v ⇒ Roles.withName(v).fold[Role Or Every[AttributeError]](formatError(value))(role ⇒ Good(role))) + case StringInputValue(Seq(v)) => Good(v) + case JsonInputValue(JsString(v)) => Good(v) + case _ => formatError(value) + }).flatMap(v => Roles.withName(v).fold[Role Or Every[AttributeError]](formatError(value))(role => Good(role))) override def elasticType(attributeName: String): KeywordField = keywordField(attributeName) } diff --git a/app/org/thp/cortex/models/TlpAttributeFormat.scala 
b/app/org/thp/cortex/models/TlpAttributeFormat.scala index 7ba81d17c..5db5ea7cd 100644 --- a/app/org/thp/cortex/models/TlpAttributeFormat.scala +++ b/app/org/thp/cortex/models/TlpAttributeFormat.scala @@ -25,21 +25,21 @@ object TlpAttributeFormat extends NumberAttributeFormat { ) override def checkJson(subNames: Seq[String], value: JsValue): Or[JsValue, One[InvalidFormatAttributeError]] = value match { - case JsNumber(v) if subNames.isEmpty && isValidValue(v.toLong) ⇒ Good(value) - case _ ⇒ formatError(JsonInputValue(value)) + case JsNumber(v) if subNames.isEmpty && isValidValue(v.toLong) => Good(value) + case _ => formatError(JsonInputValue(value)) } override def fromInputValue(subNames: Seq[String], value: InputValue): Long Or Every[AttributeError] = value match { - case StringInputValue(Seq(v)) if subNames.isEmpty ⇒ + case StringInputValue(Seq(v)) if subNames.isEmpty => try { val longValue = v.toLong if (isValidValue(longValue)) Good(longValue) else formatError(value) } catch { - case _: Throwable ⇒ formatError(value) + case _: Throwable => formatError(value) } - case JsonInputValue(JsNumber(v)) ⇒ Good(v.longValue) - case _ ⇒ formatError(value) + case JsonInputValue(JsNumber(v)) => Good(v.longValue) + case _ => formatError(value) } } diff --git a/app/org/thp/cortex/models/User.scala b/app/org/thp/cortex/models/User.scala index 1f7af2afb..28a9bc592 100644 --- a/app/org/thp/cortex/models/User.scala +++ b/app/org/thp/cortex/models/User.scala @@ -5,8 +5,8 @@ import scala.concurrent.Future import play.api.libs.json.{JsArray, JsBoolean, JsObject, JsString} import org.elastic4play.models.JsonFormat.enumFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat ⇒ F, AttributeOption ⇒ O} -import org.elastic4play.services.{User ⇒ EUser} +import org.elastic4play.models.{AttributeDef, BaseEntity, EntityDef, HiveEnumeration, ModelDef, AttributeFormat => F, AttributeOption => O} +import org.elastic4play.services.{User => EUser} object UserStatus extends Enumeration with HiveEnumeration { type Type = Value @@ -14,7 +14,7 @@ object UserStatus extends Enumeration with HiveEnumeration { implicit val reads = enumFormat(this) } -trait UserAttributes { _: AttributeDef ⇒ +trait UserAttributes { _: AttributeDef => val login = attribute("login", F.userFmt, "Login of the user", O.form) val userId = attribute("_id", F.stringFmt, "User id (login)", O.model) val key = optionalAttribute("key", F.stringFmt, "API key", O.sensitive, O.unaudited) @@ -29,8 +29,8 @@ trait UserAttributes { _: AttributeDef ⇒ class UserModel extends ModelDef[UserModel, User]("user", "User", "/user") with UserAttributes with AuditedModel { - private def setUserId(attrs: JsObject) = (attrs \ "login").asOpt[JsString].fold(attrs) { login ⇒ - attrs - "login" + ("_id" → login) + private def setUserId(attrs: JsObject) = (attrs \ "login").asOpt[JsString].fold(attrs) { login => + attrs - "login" + ("_id" -> login) } override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = Future.successful(setUserId(attrs)) @@ -42,7 +42,7 @@ class User(model: UserModel, attributes: JsObject) extends EntityDef[UserModel, override def toJson: JsObject = super.toJson + - ("roles" → JsArray(roles().map(r ⇒ JsString(r.name.toLowerCase())))) + - ("hasKey" → JsBoolean(key().isDefined)) + - ("hasPassword" → JsBoolean(password().isDefined)) + ("roles" -> JsArray(roles().map(r => JsString(r.name.toLowerCase())))) + + ("hasKey" -> JsBoolean(key().isDefined)) + + ("hasPassword" -> 
JsBoolean(password().isDefined)) } diff --git a/app/org/thp/cortex/models/Worker.scala b/app/org/thp/cortex/models/Worker.scala index e5085c59f..381f086ba 100644 --- a/app/org/thp/cortex/models/Worker.scala +++ b/app/org/thp/cortex/models/Worker.scala @@ -2,7 +2,7 @@ package org.thp.cortex.models import javax.inject.{Inject, Singleton} import org.elastic4play.models.JsonFormat.enumFormat -import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, BaseEntity, ChildModelDef, EntityDef, HiveEnumeration, AttributeFormat => F, AttributeOption => O} import org.elastic4play.utils.Hasher import org.thp.cortex.models.JsonFormat.workerTypeFormat import play.api.libs.json.{JsObject, JsString, Json} @@ -25,7 +25,7 @@ object WorkerType extends Enumeration with HiveEnumeration { val analyzer, responder = Value } -trait WorkerAttributes { _: AttributeDef ⇒ +trait WorkerAttributes { _: AttributeDef => val workerId = attribute("_id", F.stringFmt, "Worker id", O.model) val name = attribute("name", F.stringFmt, "Worker name") val vers = attribute("version", F.stringFmt, "Worker version", O.readonly) @@ -47,18 +47,18 @@ trait WorkerAttributes { _: AttributeDef ⇒ } @Singleton -class WorkerModel @Inject()(organizationModel: OrganizationModel) +class WorkerModel @Inject() (organizationModel: OrganizationModel) extends ChildModelDef[WorkerModel, Worker, OrganizationModel, Organization](organizationModel, "worker", "Worker", "/worker") with WorkerAttributes with AuditedModel { override def creationHook(parent: Option[BaseEntity], attrs: JsObject): Future[JsObject] = { val hasher = Hasher("md5") val id = for { - organizationId ← parent.map(_.id) - name ← (attrs \ "name").asOpt[String] - tpe ← (attrs \ "type").asOpt[String] + organizationId <- parent.map(_.id) + name <- (attrs \ "name").asOpt[String] + tpe <- (attrs \ "type").asOpt[String] } yield hasher.fromString(s"${organizationId}_${name}_$tpe").head.toString - Future.successful(attrs + ("_id" → JsString(id.getOrElse("")))) + Future.successful(attrs + ("_id" -> JsString(id.getOrElse("")))) } } diff --git a/app/org/thp/cortex/models/WorkerConfig.scala b/app/org/thp/cortex/models/WorkerConfig.scala index a98eedd61..c452393de 100644 --- a/app/org/thp/cortex/models/WorkerConfig.scala +++ b/app/org/thp/cortex/models/WorkerConfig.scala @@ -4,18 +4,18 @@ import javax.inject.{Inject, Singleton} import play.api.libs.json.{JsObject, Json} -import org.elastic4play.models.{AttributeDef, ChildModelDef, EntityDef, AttributeFormat ⇒ F, AttributeOption ⇒ O} +import org.elastic4play.models.{AttributeDef, ChildModelDef, EntityDef, AttributeFormat => F, AttributeOption => O} import org.thp.cortex.models.JsonFormat.workerTypeFormat -trait WorkerConfigAttributes { _: AttributeDef ⇒ +trait WorkerConfigAttributes { _: AttributeDef => val name = attribute("name", F.stringFmt, "Worker name") val config = attribute("config", F.rawFmt, "Configuration of worker", O.sensitive) val tpe = attribute("type", F.enumFmt(WorkerType), "", O.readonly) } @Singleton -class WorkerConfigModel @Inject()(organizationModel: OrganizationModel) +class WorkerConfigModel @Inject() (organizationModel: OrganizationModel) extends ChildModelDef[WorkerConfigModel, WorkerConfig, OrganizationModel, Organization]( organizationModel, "workerConfig", diff --git a/app/org/thp/cortex/models/WorkerDefinition.scala b/app/org/thp/cortex/models/WorkerDefinition.scala index 2eb22fdb6..7d0300f10 
100644 --- a/app/org/thp/cortex/models/WorkerDefinition.scala +++ b/app/org/thp/cortex/models/WorkerDefinition.scala @@ -36,11 +36,11 @@ case class ConfigurationDefinitionItem( private def check(v: JsValue): JsValue Or Every[AttributeError] = { import WorkerConfigItemType._ v match { - case _: JsString if `type` == string || `type` == text ⇒ Good(v) - case _: JsNumber if `type` == number ⇒ Good(v) - case _: JsBoolean if `type` == boolean ⇒ Good(v) - case JsNull if !isRequired ⇒ Good(v) - case _ ⇒ Bad(One(InvalidFormatAttributeError(s"$name[]", `type`.toString, JsonInputValue(v)))) + case _: JsString if `type` == string || `type` == text => Good(v) + case _: JsNumber if `type` == number => Good(v) + case _: JsBoolean if `type` == boolean => Good(v) + case JsNull if !isRequired => Good(v) + case _ => Bad(One(InvalidFormatAttributeError(s"$name[]", `type`.toString, JsonInputValue(v)))) } } @@ -49,14 +49,14 @@ case class ConfigurationDefinitionItem( .toOption .orElse(defaultValue) .map { - case JsArray(values) if isMulti ⇒ values.validatedBy(check).map(a ⇒ name → JsArray(a)) - case value if !isMulti ⇒ check(value).map(name → _) - case value ⇒ Bad(One(InvalidFormatAttributeError(name, `type`.toString, JsonInputValue(value)))) + case JsArray(values) if isMulti => values.validatedBy(check).map(a => name -> JsArray(a)) + case value if !isMulti => check(value).map(name -> _) + case value => Bad(One(InvalidFormatAttributeError(name, `type`.toString, JsonInputValue(value)))) } .getOrElse { - if (isMulti) Good(name → JsArray.empty) + if (isMulti) Good(name -> JsArray.empty) else if (isRequired) Bad(One(MissingAttributeError(name))) - else Good(name → JsNull) + else Good(name -> JsNull) } } @@ -113,20 +113,20 @@ object WorkerDefinition { reads.map(List(_)) orElse Reads.list(reads) } - implicit val writes: Writes[WorkerDefinition] = Writes[WorkerDefinition] { workerDefinition ⇒ + implicit val writes: Writes[WorkerDefinition] = Writes[WorkerDefinition] { workerDefinition => Json.obj( - "id" → workerDefinition.id, - "name" → workerDefinition.name, - "version" → workerDefinition.version, - "description" → workerDefinition.description, - "dataTypeList" → workerDefinition.dataTypeList, - "author" → workerDefinition.author, - "url" → workerDefinition.url, - "license" → workerDefinition.license, - "baseConfig" → workerDefinition.baseConfiguration, - "configurationItems" → workerDefinition.configurationItems, - "dockerImage" → workerDefinition.dockerImage, - "command" → workerDefinition.command.map(_.getFileName.toString) + "id" -> workerDefinition.id, + "name" -> workerDefinition.name, + "version" -> workerDefinition.version, + "description" -> workerDefinition.description, + "dataTypeList" -> workerDefinition.dataTypeList, + "author" -> workerDefinition.author, + "url" -> workerDefinition.url, + "license" -> workerDefinition.license, + "baseConfig" -> workerDefinition.baseConfiguration, + "configurationItems" -> workerDefinition.configurationItems, + "dockerImage" -> workerDefinition.dockerImage, + "command" -> workerDefinition.command.map(_.getFileName.toString) ) } } diff --git a/app/org/thp/cortex/services/AnalyzerConfigSrv.scala b/app/org/thp/cortex/services/AnalyzerConfigSrv.scala index 0f59b4f5e..0ec6629af 100644 --- a/app/org/thp/cortex/services/AnalyzerConfigSrv.scala +++ b/app/org/thp/cortex/services/AnalyzerConfigSrv.scala @@ -11,7 +11,7 @@ import org.thp.cortex.models.{BaseConfig, WorkerConfigModel, WorkerType} import org.elastic4play.services.{CreateSrv, FindSrv, UpdateSrv} @Singleton -class 
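// Illustrative sketch (not from the patch): the shape of ConfigurationDefinitionItem.check in
// the hunk above — an incoming JsValue is accepted only if it matches the declared item type,
// with JsNull allowed for optional items. scalactic's Good/Bad/One is replaced by Either and
// the item names are examples, so the snippet needs nothing beyond play-json.
import play.api.libs.json._

object ConfigItemCheckSketch {
  sealed trait ItemType
  case object StringItem  extends ItemType
  case object NumberItem  extends ItemType
  case object BooleanItem extends ItemType

  def check(name: String, tpe: ItemType, required: Boolean, v: JsValue): Either[String, JsValue] =
    (tpe, v) match {
      case (StringItem, _: JsString)   => Right(v)
      case (NumberItem, _: JsNumber)   => Right(v)
      case (BooleanItem, _: JsBoolean) => Right(v)
      case (_, JsNull) if !required    => Right(v) // optional items may stay null
      case _                           => Left(s"$name: expected $tpe, got $v")
    }

  def main(args: Array[String]): Unit = {
    println(check("auto_extract_artifacts", BooleanItem, required = false, JsBoolean(true))) // Right(true)
    println(check("jobTimeout", NumberItem, required = true, JsString("30")))                // Left(...)
  }
}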
AnalyzerConfigSrv @Inject()( +class AnalyzerConfigSrv @Inject() ( val configuration: Configuration, val workerConfigModel: WorkerConfigModel, val userSrv: UserSrv, diff --git a/app/org/thp/cortex/services/AuditSrv.scala b/app/org/thp/cortex/services/AuditSrv.scala index 7dbb9c3e7..8a73e7aff 100644 --- a/app/org/thp/cortex/services/AuditSrv.scala +++ b/app/org/thp/cortex/services/AuditSrv.scala @@ -20,7 +20,7 @@ object AuditActor { } @Singleton -class AuditActor @Inject()(eventSrv: EventSrv, implicit val ec: ExecutionContext) extends Actor { +class AuditActor @Inject() (eventSrv: EventSrv, implicit val ec: ExecutionContext) extends Actor { import AuditActor._ @@ -41,22 +41,22 @@ class AuditActor @Inject()(eventSrv: EventSrv, implicit val ec: ExecutionContext } override def receive: Receive = { - case Register(jobId, timeout) ⇒ + case Register(jobId, timeout) => logger.info(s"Register new listener for job $jobId ($sender)") val newActorList = registration.getOrElse(jobId, Nil) :+ sender - registration += (jobId → newActorList) + registration += (jobId -> newActorList) context.system.scheduler.scheduleOnce(timeout, self, Unregister(jobId, sender)) - case Unregister(jobId, actorRef) ⇒ + case Unregister(jobId, actorRef) => logger.info(s"Unregister listener for job $jobId ($actorRef)") val newActorList = registration.getOrElse(jobId, Nil).filterNot(_ == actorRef) - registration += (jobId → newActorList) + registration += (jobId -> newActorList) - case AuditOperation(EntityExtractor(model, id, routing), action, details, authContext, date) ⇒ + case AuditOperation(EntityExtractor(model, id, routing), action, details, authContext, date) => if (model.modelName == "job" && action == AuditableAction.Update) { logger.info(s"Job $id has be updated (${details \ "status"})") val status = (details \ "status").asOpt[JobStatus.Type].getOrElse(JobStatus.InProgress) - if (status != JobStatus.InProgress) registration.getOrElse(id, Nil).foreach { aref ⇒ + if (status != JobStatus.InProgress) registration.getOrElse(id, Nil).foreach { aref => aref ! 
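// Illustrative sketch (not from the patch): the bookkeeping AuditActor keeps above — a map
// from job id to the listeners waiting for that job, appended on Register and filtered on
// Unregister. akka.actor.ActorRef is stood in for by String so the snippet runs without Akka.
object ListenerRegistrySketch {
  type Listener = String // stand-in for akka.actor.ActorRef

  def register(reg: Map[String, Seq[Listener]], jobId: String, l: Listener): Map[String, Seq[Listener]] =
    reg + (jobId -> (reg.getOrElse(jobId, Nil) :+ l))

  def unregister(reg: Map[String, Seq[Listener]], jobId: String, l: Listener): Map[String, Seq[Listener]] =
    reg + (jobId -> reg.getOrElse(jobId, Nil).filterNot(_ == l))

  def main(args: Array[String]): Unit = {
    val r1 = register(Map.empty, "job-1", "listener-A")
    val r2 = register(r1, "job-1", "listener-B")
    println(unregister(r2, "job-1", "listener-A")) // Map(job-1 -> List(listener-B))
  }
}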
JobEnded(id, status) } } diff --git a/app/org/thp/cortex/services/CSRFFilter.scala b/app/org/thp/cortex/services/CSRFFilter.scala index 4c78931a7..65c65d535 100644 --- a/app/org/thp/cortex/services/CSRFFilter.scala +++ b/app/org/thp/cortex/services/CSRFFilter.scala @@ -5,9 +5,9 @@ import javax.inject.{Inject, Provider, Singleton} import play.api.Logger import play.api.http.SessionConfiguration import play.api.libs.crypto.CSRFTokenSigner -import play.filters.csrf.{CSRFFilter ⇒ PCSRFFilter} +import play.filters.csrf.{CSRFFilter => PCSRFFilter} import play.api.mvc.RequestHeader -import play.filters.csrf.CSRF.{ErrorHandler ⇒ CSRFErrorHandler, TokenProvider} +import play.filters.csrf.CSRF.{ErrorHandler => CSRFErrorHandler, TokenProvider} import play.filters.csrf.CSRFConfig import akka.stream.Materializer @@ -27,7 +27,7 @@ object CSRFFilter { } @Singleton -class CSRFFilter @Inject()( +class CSRFFilter @Inject() ( config: Provider[CSRFConfig], tokenSignerProvider: Provider[CSRFTokenSigner], sessionConfiguration: SessionConfiguration, diff --git a/app/org/thp/cortex/services/CortexAuthSrv.scala b/app/org/thp/cortex/services/CortexAuthSrv.scala index 4533239a3..57f44e2bf 100644 --- a/app/org/thp/cortex/services/CortexAuthSrv.scala +++ b/app/org/thp/cortex/services/CortexAuthSrv.scala @@ -15,7 +15,7 @@ object CortexAuthSrv { def getAuthSrv(authTypes: Seq[String], authModules: immutable.Set[AuthSrv]): Seq[AuthSrv] = ("key" +: authTypes.filterNot(_ == "key")) - .flatMap { authType ⇒ + .flatMap { authType => authModules .find(_.name == authType) .orElse { @@ -26,7 +26,7 @@ object CortexAuthSrv { } @Singleton -class CortexAuthSrv @Inject()( +class CortexAuthSrv @Inject() ( configuration: Configuration, authModules: immutable.Set[AuthSrv], userSrv: UserSrv, diff --git a/app/org/thp/cortex/services/DockerJobRunnerSrv.scala b/app/org/thp/cortex/services/DockerJobRunnerSrv.scala index 6e75449a7..df3424a0e 100644 --- a/app/org/thp/cortex/services/DockerJobRunnerSrv.scala +++ b/app/org/thp/cortex/services/DockerJobRunnerSrv.scala @@ -47,7 +47,7 @@ class DockerJobRunnerSrv(client: DockerClient, autoUpdate: Boolean, implicit val logger.info(s"Docker is available:\n${client.info()}") true }.recover { - case error ⇒ + case error => logger.info(s"Docker is not available", error) false }.get @@ -91,21 +91,21 @@ class DockerJobRunnerSrv(client: DockerClient, autoUpdate: Boolean, implicit val client.waitContainer(containerCreation.id()) () }.andThen { - case r ⇒ + case r => val outputFile = jobDirectory.resolve("output").resolve("output.json") if (!Files.exists(outputFile) || Files.size(outputFile) == 0) { val output = Try(client.logs(containerCreation.id(), LogsParam.stdout(), LogsParam.stderr()).readFully()) - .fold(e ⇒ s"Container logs can't be read (${e.getMessage})", identity) - val message = r.fold(e ⇒ s"Docker creation error: ${e.getMessage}\n$output", _ ⇒ output) + .fold(e => s"Container logs can't be read (${e.getMessage})", identity) + val message = r.fold(e => s"Docker creation error: ${e.getMessage}\n$output", _ => output) - val report = Json.obj("success" → false, "errorMessage" → message) + val report = Json.obj("success" -> false, "errorMessage" -> message) Files.write(jobDirectory.resolve("output").resolve("output.json"), report.toString.getBytes(StandardCharsets.UTF_8)) } } timeout - .fold(execution)(t ⇒ execution.withTimeout(t, client.stopContainer(containerCreation.id(), 3))) + .fold(execution)(t => execution.withTimeout(t, client.stopContainer(containerCreation.id(), 3))) .andThen { - case _ ⇒ 
client.removeContainer(containerCreation.id(), DockerClient.RemoveContainerParam.forceKill()) + case _ => client.removeContainer(containerCreation.id(), DockerClient.RemoveContainerParam.forceKill()) } } diff --git a/app/org/thp/cortex/services/ErrorHandler.scala b/app/org/thp/cortex/services/ErrorHandler.scala index 0d8c8fb72..fefa006b4 100644 --- a/app/org/thp/cortex/services/ErrorHandler.scala +++ b/app/org/thp/cortex/services/ErrorHandler.scala @@ -39,42 +39,42 @@ class ErrorHandler extends HttpErrorHandler { def toErrorResult(ex: Throwable): Option[(Int, JsValue)] = ex match { - case AuthenticationError(message) ⇒ Some(Status.UNAUTHORIZED → Json.obj("type" → "AuthenticationError", "message" → message)) - case AuthorizationError(message) ⇒ Some(Status.FORBIDDEN → Json.obj("type" → "AuthorizationError", "message" → message)) - case UpdateError(_, message, attributes) ⇒ - Some(Status.INTERNAL_SERVER_ERROR → Json.obj("type" → "UpdateError", "message" → message, "object" → attributes)) - case rle: RateLimitExceeded ⇒ Some(Status.TOO_MANY_REQUESTS → Json.obj("type" → "RateLimitExceeded", "message" → rle.getMessage)) - case InternalError(message) ⇒ Some(Status.INTERNAL_SERVER_ERROR → Json.obj("type" → "InternalError", "message" → message)) - case nfe: NumberFormatException ⇒ - Some(Status.BAD_REQUEST → Json.obj("type" → "NumberFormatException", "message" → ("Invalid format " + nfe.getMessage))) - case NotFoundError(message) ⇒ Some(Status.NOT_FOUND → Json.obj("type" → "NotFoundError", "message" → message)) - case BadRequestError(message) ⇒ Some(Status.BAD_REQUEST → Json.obj("type" → "BadRequest", "message" → message)) - case SearchError(message) ⇒ Some(Status.BAD_REQUEST → Json.obj("type" → "SearchError", "message" → s"$message")) - case ace: AttributeCheckingError ⇒ Some(Status.BAD_REQUEST → Json.toJson(ace)) - case iae: IllegalArgumentException ⇒ Some(Status.BAD_REQUEST → Json.obj("type" → "IllegalArgument", "message" → iae.getMessage)) - case _: ConnectException ⇒ - Some(Status.INTERNAL_SERVER_ERROR → Json.obj("type" → "NoNodeAvailable", "message" → "ElasticSearch cluster is unreachable")) - case CreateError(_, message, attributes) ⇒ - Some(Status.INTERNAL_SERVER_ERROR → Json.obj("type" → "CreateError", "message" → message, "object" → attributes)) - case ErrorWithObject(tpe, message, obj) ⇒ Some(Status.BAD_REQUEST → Json.obj("type" → tpe, "message" → message, "object" → obj)) - case GetError(message) ⇒ Some(Status.INTERNAL_SERVER_ERROR → Json.obj("type" → "GetError", "message" → message)) - case MultiError(message, exceptions) ⇒ - val suberrors = exceptions.map(e ⇒ toErrorResult(e)).collect { - case Some((_, j)) ⇒ j + case AuthenticationError(message) => Some(Status.UNAUTHORIZED -> Json.obj("type" -> "AuthenticationError", "message" -> message)) + case AuthorizationError(message) => Some(Status.FORBIDDEN -> Json.obj("type" -> "AuthorizationError", "message" -> message)) + case UpdateError(_, message, attributes) => + Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "UpdateError", "message" -> message, "object" -> attributes)) + case rle: RateLimitExceeded => Some(Status.TOO_MANY_REQUESTS -> Json.obj("type" -> "RateLimitExceeded", "message" -> rle.getMessage)) + case InternalError(message) => Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "InternalError", "message" -> message)) + case nfe: NumberFormatException => + Some(Status.BAD_REQUEST -> Json.obj("type" -> "NumberFormatException", "message" -> ("Invalid format " + nfe.getMessage))) + case NotFoundError(message) => 
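// Illustrative sketch (not from the patch): the pattern ErrorHandler.toErrorResult follows in
// the hunk above — known error types become an HTTP status plus a small JSON body, and unknown
// exceptions are unwrapped via getCause. The two error classes here are local stand-ins, not
// the elastic4play ones.
import play.api.libs.json._

object ErrorMappingSketch {
  final case class AuthenticationError(message: String) extends Exception(message)
  final case class NotFoundError(message: String)       extends Exception(message)

  def toErrorResult(ex: Throwable): Option[(Int, JsValue)] = ex match {
    case AuthenticationError(m) => Some(401 -> Json.obj("type" -> "AuthenticationError", "message" -> m))
    case NotFoundError(m)       => Some(404 -> Json.obj("type" -> "NotFoundError", "message" -> m))
    case t: Throwable           => Option(t.getCause).flatMap(toErrorResult) // look at the cause, as the real handler does
  }

  def main(args: Array[String]): Unit = {
    println(toErrorResult(NotFoundError("job 42 not found")))
    println(toErrorResult(new RuntimeException(AuthenticationError("bad key")))) // unwraps the cause
  }
}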
Some(Status.NOT_FOUND -> Json.obj("type" -> "NotFoundError", "message" -> message)) + case BadRequestError(message) => Some(Status.BAD_REQUEST -> Json.obj("type" -> "BadRequest", "message" -> message)) + case SearchError(message) => Some(Status.BAD_REQUEST -> Json.obj("type" -> "SearchError", "message" -> s"$message")) + case ace: AttributeCheckingError => Some(Status.BAD_REQUEST -> Json.toJson(ace)) + case iae: IllegalArgumentException => Some(Status.BAD_REQUEST -> Json.obj("type" -> "IllegalArgument", "message" -> iae.getMessage)) + case _: ConnectException => + Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "NoNodeAvailable", "message" -> "ElasticSearch cluster is unreachable")) + case CreateError(_, message, attributes) => + Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "CreateError", "message" -> message, "object" -> attributes)) + case ErrorWithObject(tpe, message, obj) => Some(Status.BAD_REQUEST -> Json.obj("type" -> tpe, "message" -> message, "object" -> obj)) + case GetError(message) => Some(Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> "GetError", "message" -> message)) + case MultiError(message, exceptions) => + val suberrors = exceptions.map(e => toErrorResult(e)).collect { + case Some((_, j)) => j } - Some(Status.MULTI_STATUS → Json.obj("type" → "MultiError", "error" → message, "suberrors" → suberrors)) - case JobNotFoundError(jobId) ⇒ Some(Status.NOT_FOUND → Json.obj("type" → "JobNotFoundError", "message" → s"Job $jobId not found")) - case WorkerNotFoundError(analyzerId) ⇒ - Some(Status.NOT_FOUND → Json.obj("type" → "AnalyzerNotFoundError", "message" → s"analyzer $analyzerId not found")) - case IndexNotFoundException ⇒ Some(520 → JsNull) - case t: Throwable ⇒ Option(t.getCause).flatMap(toErrorResult) + Some(Status.MULTI_STATUS -> Json.obj("type" -> "MultiError", "error" -> message, "suberrors" -> suberrors)) + case JobNotFoundError(jobId) => Some(Status.NOT_FOUND -> Json.obj("type" -> "JobNotFoundError", "message" -> s"Job $jobId not found")) + case WorkerNotFoundError(analyzerId) => + Some(Status.NOT_FOUND -> Json.obj("type" -> "AnalyzerNotFoundError", "message" -> s"analyzer $analyzerId not found")) + case IndexNotFoundException => Some(520 -> JsNull) + case t: Throwable => Option(t.getCause).flatMap(toErrorResult) } def toResult[C](status: Int, c: C)(implicit writeable: Writeable[C]) = Result(header = ResponseHeader(status), body = writeable.toEntity(c)) def onServerError(request: RequestHeader, exception: Throwable): Future[Result] = { val (status, body) = toErrorResult(exception).getOrElse( - Status.INTERNAL_SERVER_ERROR → Json.obj("type" → exception.getClass.getName, "message" → exception.getMessage) + Status.INTERNAL_SERVER_ERROR -> Json.obj("type" -> exception.getClass.getName, "message" -> exception.getMessage) ) logger.info(s"${request.method} ${request.uri} returned $status", exception) Future.successful(toResult(status, body)) diff --git a/app/org/thp/cortex/services/JobRunnerSrv.scala b/app/org/thp/cortex/services/JobRunnerSrv.scala index b1d172d03..b939f56f1 100644 --- a/app/org/thp/cortex/services/JobRunnerSrv.scala +++ b/app/org/thp/cortex/services/JobRunnerSrv.scala @@ -24,7 +24,7 @@ import org.elastic4play.controllers.{Fields, FileInputValue} import org.elastic4play.database.ModifyConfig import org.elastic4play.services.{AttachmentSrv, AuthContext, CreateSrv, UpdateSrv} -class JobRunnerSrv @Inject()( +class JobRunnerSrv @Inject() ( config: Configuration, reportModel: ReportModel, artifactModel: ArtifactModel, @@ -48,14 +48,14 @@ class 
JobRunnerSrv @Inject()( .getOrElse(Seq("docker", "process")) .map(_.toLowerCase) .collect { - case "docker" if dockerJobRunnerSrv.isAvailable ⇒ "docker" - case "process" ⇒ - Seq("", "2", "3").foreach { pythonVersion ⇒ + case "docker" if dockerJobRunnerSrv.isAvailable => "docker" + case "process" => + Seq("", "2", "3").foreach { pythonVersion => val cortexUtilsVersion = processJobRunnerSrv.checkCortexUtilsVersion(pythonVersion) cortexUtilsVersion.fold(logger.warn(s"The package cortexutils for python$pythonVersion hasn't been found")) { - case (major, minor, patch) if major >= 2 ⇒ + case (major, minor, patch) if major >= 2 => logger.info(s"The package cortexutils for python$pythonVersion has valid version: $major.$minor.$patch") - case (major, minor, patch) ⇒ + case (major, minor, patch) => logger.error( s"The package cortexutils for python$pythonVersion has invalid version: $major.$minor.$patch. Cortex 2 requires cortexutils >= 2.0" ) @@ -85,7 +85,7 @@ class JobRunnerSrv @Inject()( Files.walkFileTree(directory, deleteVisitor) () } catch { - case t: Throwable ⇒ logger.warn(s"Fail to remove temporary files ($directory) : $t") + case t: Throwable => logger.warn(s"Fail to remove temporary files ($directory) : $t") } private def prepareJobFolder(worker: Worker, job: Job): Future[Path] = { @@ -95,56 +95,53 @@ class JobRunnerSrv @Inject()( job .attachment() - .map { attachment ⇒ + .map { attachment => val attachmentFile = Files.createTempFile(inputJobFolder, "attachment", "") attachmentSrv .source(attachment.id) .runWith(FileIO.toPath(attachmentFile)) - .flatMap { - case ioresult if ioresult.status.isSuccess ⇒ Future.successful(Some(attachmentFile)) - case ioresult ⇒ Future.failed(ioresult.getError) - } + .map(_ => Some(attachmentFile)) } .getOrElse(Future.successful(None)) .map { - case Some(file) ⇒ - Json.obj("file" → file.getFileName.toString, "filename" → job.attachment().get.name, "contentType" → job.attachment().get.contentType) - case None if job.data().nonEmpty && job.tpe() == WorkerType.responder ⇒ - Json.obj("data" → Json.parse(job.data().get)) - case None if job.data().nonEmpty && job.tpe() == WorkerType.analyzer ⇒ - Json.obj("data" → job.data().get) + case Some(file) => + Json.obj("file" -> file.getFileName.toString, "filename" -> job.attachment().get.name, "contentType" -> job.attachment().get.contentType) + case None if job.data().nonEmpty && job.tpe() == WorkerType.responder => + Json.obj("data" -> Json.parse(job.data().get)) + case None if job.data().nonEmpty && job.tpe() == WorkerType.analyzer => + Json.obj("data" -> job.data().get) } - .map { artifact ⇒ - val proxy_http = (worker.config \ "proxy_http").asOpt[String].fold(JsObject.empty) { proxy ⇒ - Json.obj("proxy" → Json.obj("http" → proxy)) + .map { artifact => + val proxy_http = (worker.config \ "proxy_http").asOpt[String].fold(JsObject.empty) { proxy => + Json.obj("proxy" -> Json.obj("http" -> proxy)) } - val proxy_https = (worker.config \ "proxy_https").asOpt[String].fold(JsObject.empty) { proxy ⇒ - Json.obj("proxy" → Json.obj("https" → proxy)) + val proxy_https = (worker.config \ "proxy_https").asOpt[String].fold(JsObject.empty) { proxy => + Json.obj("proxy" -> Json.obj("https" -> proxy)) } val config = workerSrv .getDefinition(worker.workerDefinitionId()) - .fold(_ ⇒ JsObject.empty, _.configuration) + .fold(_ => JsObject.empty, _.configuration) .deepMerge(worker.config) .deepMerge(proxy_http) .deepMerge(proxy_https) - (worker.config \ "cacerts").asOpt[String].foreach { cacerts ⇒ + (worker.config \ 
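// Illustrative sketch (not from the patch): the version gate JobRunnerSrv applies above to the
// cortexutils package — only major version >= 2 is accepted. checkCortexUtilsVersion itself
// shells out to python, so it is replaced here by a plain "x.y.z" parser; the sample version
// strings are invented.
object CortexUtilsVersionSketch {
  def parse(v: String): Option[(Int, Int, Int)] =
    v.split('.') match {
      case Array(ma, mi, pa) => scala.util.Try((ma.toInt, mi.toInt, pa.toInt)).toOption
      case _                 => None
    }

  def isSupported(v: String): Boolean = parse(v).exists { case (major, _, _) => major >= 2 }

  def main(args: Array[String]): Unit = {
    println(isSupported("2.1.3")) // true
    println(isSupported("1.3.0")) // false: Cortex 2 requires cortexutils >= 2.0
  }
}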
"cacerts").asOpt[String].foreach { cacerts => val cacertsFile = jobFolder.resolve("input").resolve("cacerts") Files.write(cacertsFile, cacerts.getBytes) } artifact + - ("dataType" → JsString(job.dataType())) + - ("tlp" → JsNumber(job.tlp())) + - ("pap" → JsNumber(job.pap())) + - ("message" → JsString(job.message().getOrElse(""))) + - ("parameters" → job.params) + - ("config" → config) + ("dataType" -> JsString(job.dataType())) + + ("tlp" -> JsNumber(job.tlp())) + + ("pap" -> JsNumber(job.pap())) + + ("message" -> JsString(job.message().getOrElse(""))) + + ("parameters" -> job.params) + + ("config" -> config) } - .map { input ⇒ + .map { input => Files.write(inputJobFolder.resolve("input.json"), input.toString.getBytes(StandardCharsets.UTF_8)) jobFolder } .recoverWith { - case error ⇒ + case error => delete(jobFolder) Future.failed(error) } @@ -169,29 +166,29 @@ class JobRunnerSrv @Inject()( .set("summary", summaryReport) .set("operations", JsArray(operations).toString) createSrv[ReportModel, Report, Job](reportModel, job, reportFields) - .flatMap { report ⇒ + .flatMap { report => Future.sequence { for { - artifact ← artifacts - dataType ← (artifact \ "dataType").asOpt[String] - fields ← dataType match { - case "file" ⇒ + artifact <- artifacts + dataType <- (artifact \ "dataType").asOpt[String] + fields <- dataType match { + case "file" => for { - name ← (artifact \ "filename").asOpt[String] - file ← (artifact \ "file").asOpt[String] + name <- (artifact \ "filename").asOpt[String] + file <- (artifact \ "file").asOpt[String] path = jobFolder.resolve("output").resolve(file) if Files.exists(path) && path.getParent == jobFolder.resolve("output") contentType = (artifact \ "contentType").asOpt[String].getOrElse("application/octet-stream") fiv = FileInputValue(name, path, contentType) } yield Fields(artifact - "filename" - "file" - "contentType").set("attachment", fiv) - case _ ⇒ Some(Fields(artifact)) + case _ => Some(Fields(artifact)) } } yield createSrv[ArtifactModel, Artifact, Report](artifactModel, report, fields) } } .transformWith { - case Failure(e) ⇒ endJob(job, JobStatus.Failure, Some(s"Report creation failure: $e")) - case _ ⇒ endJob(job, JobStatus.Success) + case Failure(e) => endJob(job, JobStatus.Failure, Some(s"Report creation failure: $e")) + case _ => endJob(job, JobStatus.Success) } } else { endJob(job, JobStatus.Failure, (report \ "errorMessage").asOpt[String], (report \ "input").asOpt[JsValue].map(_.toString)) @@ -202,33 +199,33 @@ class JobRunnerSrv @Inject()( } def run(worker: Worker, job: Job)(implicit authContext: AuthContext): Future[Job] = - prepareJobFolder(worker, job).flatMap { jobFolder ⇒ + prepareJobFolder(worker, job).flatMap { jobFolder => val executionContext = worker.tpe() match { - case WorkerType.analyzer ⇒ analyzerExecutionContext - case WorkerType.responder ⇒ responderExecutionContext + case WorkerType.analyzer => analyzerExecutionContext + case WorkerType.responder => responderExecutionContext } val finishedJob = for { - _ ← startJob(job) - j ← runners + _ <- startJob(job) + j <- runners .foldLeft[Option[Future[Unit]]](None) { - case (None, "docker") ⇒ + case (None, "docker") => worker .dockerImage() - .map(dockerImage ⇒ dockerJobRunnerSrv.run(jobFolder, dockerImage, job, worker.jobTimeout().map(_.minutes))(executionContext)) + .map(dockerImage => dockerJobRunnerSrv.run(jobFolder, dockerImage, job, worker.jobTimeout().map(_.minutes))(executionContext)) .orElse { logger.warn(s"worker ${worker.id} can't be run with docker (doesn't have image)") None } - case 
(None, "process") ⇒ + case (None, "process") => worker .command() - .map(command ⇒ processJobRunnerSrv.run(jobFolder, command, job, worker.jobTimeout().map(_.minutes))(executionContext)) + .map(command => processJobRunnerSrv.run(jobFolder, command, job, worker.jobTimeout().map(_.minutes))(executionContext)) .orElse { logger.warn(s"worker ${worker.id} can't be run with process (doesn't have image)") None } - case (j: Some[_], _) ⇒ j - case (None, runner) ⇒ + case (j: Some[_], _) => j + case (None, runner) => logger.warn(s"Unknown job runner: $runner") None @@ -236,13 +233,13 @@ class JobRunnerSrv @Inject()( .getOrElse(Future.failed(BadRequestError("Worker cannot be run"))) } yield j finishedJob - .transformWith { r ⇒ + .transformWith { r => r.fold( - error ⇒ endJob(job, JobStatus.Failure, Option(error.getMessage), Some(readFile(jobFolder.resolve("input").resolve("input.json")))), - _ ⇒ extractReport(jobFolder, job) + error => endJob(job, JobStatus.Failure, Option(error.getMessage), Some(readFile(jobFolder.resolve("input").resolve("input.json")))), + _ => extractReport(jobFolder, job) ) } - .andThen { case _ ⇒ delete(jobFolder) } + .andThen { case _ => delete(jobFolder) } } private def readFile(input: Path): String = new String(Files.readAllBytes(input), StandardCharsets.UTF_8) diff --git a/app/org/thp/cortex/services/JobSrv.scala b/app/org/thp/cortex/services/JobSrv.scala index 1b2b6944b..cbf470bdb 100644 --- a/app/org/thp/cortex/services/JobSrv.scala +++ b/app/org/thp/cortex/services/JobSrv.scala @@ -76,27 +76,27 @@ class JobSrv( private def runPreviousJobs(): Unit = { import org.elastic4play.services.QueryDSL._ - userSrv.inInitAuthContext { implicit authContext ⇒ + userSrv.inInitAuthContext { implicit authContext => find("status" ~= JobStatus.Waiting, Some("all"), Nil) ._1 - .runForeach { job ⇒ + .runForeach { job => (for { - worker ← workerSrv.get(job.workerId()) - updatedJob ← jobRunnerSrv.run(worker, job) + worker <- workerSrv.get(job.workerId()) + updatedJob <- jobRunnerSrv.run(worker, job) } yield updatedJob) .onComplete { - case Success(j) ⇒ logger.info(s"Job ${job.id} has finished with status ${j.status()}") - case Failure(e) ⇒ logger.error(s"Job ${job.id} has failed", e) + case Success(j) => logger.info(s"Job ${job.id} has finished with status ${j.status()}") + case Failure(e) => logger.error(s"Job ${job.id} has failed", e) } } } } - private def withUserFilter[A](userId: String)(x: String ⇒ (Source[A, NotUsed], Future[Long])): (Source[A, NotUsed], Future[Long]) = { + private def withUserFilter[A](userId: String)(x: String => (Source[A, NotUsed], Future[Long])): (Source[A, NotUsed], Future[Long]) = { val a = userSrv.getOrganizationId(userId).map(x) - val aSource = Source.fromFutureSource(a.map(_._1)).mapMaterializedValue(_ ⇒ NotUsed) + val aSource = Source.futureSource(a.map(_._1)).mapMaterializedValue(_ => NotUsed) val aTotal = a.flatMap(_._2) - aSource → aTotal + aSource -> aTotal } def listForUser( @@ -112,7 +112,7 @@ class JobSrv( and( dataTypeFilter.map("dataType" like _).toList ::: dataFilter.map("data" like _).toList ::: - analyzerFilter.map(af ⇒ or("workerId" like af, "workerName" like af)).toList + analyzerFilter.map(af => or("workerId" like af, "workerName" like af)).toList ), range, Nil @@ -127,7 +127,7 @@ class JobSrv( sortBy: Seq[String] ): (Source[Artifact, NotUsed], Future[Long]) = { import org.elastic4play.services.QueryDSL._ - withUserFilter(userId) { organizationId ⇒ + withUserFilter(userId) { organizationId => findSrv[ArtifactModel, Artifact]( artifactModel, 
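// Illustrative sketch (not from the patch): the guard extractReport applies (see the "file"
// artifact case above) before attaching a reported file — the file must exist and its parent
// must be exactly the job's output directory, so a report cannot reference files outside the
// job folder. Directory and file names below are invented for the demo.
import java.nio.file.{Files, Path}

object OutputPathCheckSketch {
  def acceptedArtifactFile(jobFolder: Path, fileName: String): Option[Path] = {
    val output = jobFolder.resolve("output")
    val path   = output.resolve(fileName).normalize() // normalize() added here to make the traversal case visible
    if (Files.exists(path) && path.getParent == output) Some(path) else None
  }

  def main(args: Array[String]): Unit = {
    val job = Files.createTempDirectory("cortex-job")
    Files.createDirectories(job.resolve("output"))
    Files.write(job.resolve("output").resolve("sample.txt"), "hello".getBytes("UTF-8"))
    println(acceptedArtifactFile(job, "sample.txt"))      // Some(.../output/sample.txt)
    println(acceptedArtifactFile(job, "../input/secret")) // None: resolves outside <job>/output
  }
}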
and(queryDef, parent("report", parent("job", and(withId(jobId), "organization" ~= organizationId)))), @@ -141,7 +141,7 @@ class JobSrv( findSrv[JobModel, Job](jobModel, queryDef, range, sortBy) def findForUser(userId: String, queryDef: QueryDef, range: Option[String], sortBy: Seq[String]): (Source[Job, NotUsed], Future[Long]) = - withUserFilter(userId) { organizationId ⇒ + withUserFilter(userId) { organizationId => findForOrganization(organizationId, queryDef, range, sortBy) } @@ -159,13 +159,13 @@ class JobSrv( def getForUser(userId: String, jobId: String): Future[Job] = { import org.elastic4play.services.QueryDSL._ - withUserFilter(userId) { organizationId ⇒ + withUserFilter(userId) { organizationId => findForOrganization(organizationId, withId(jobId), Some("0-1"), Nil) }._1 .runWith(Sink.headOption) .flatMap { - case Some(j) ⇒ Future.successful(j) - case None ⇒ Future.failed(NotFoundError(s"job $jobId not found")) + case Some(j) => Future.successful(j) + case None => Future.failed(NotFoundError(s"job $jobId not found")) } } @@ -174,11 +174,11 @@ class JobSrv( def legacyCreate(worker: Worker, attributes: JsObject, fields: Fields)(implicit authContext: AuthContext): Future[Job] = { val dataType = Or.from((attributes \ "dataType").asOpt[String], One(MissingAttributeError("dataType"))) val dataFiv = fields.get("data") match { - case Some(fiv: FileInputValue) ⇒ Good(Right(fiv)) - case Some(StringInputValue(Seq(data))) ⇒ Good(Left(data)) - case Some(JsonInputValue(JsString(data))) ⇒ Good(Left(data)) - case Some(iv) ⇒ Bad(One(InvalidFormatAttributeError("data", "string/attachment", iv))) - case None ⇒ Bad(One(MissingAttributeError("data"))) + case Some(fiv: FileInputValue) => Good(Right(fiv)) + case Some(StringInputValue(Seq(data))) => Good(Left(data)) + case Some(JsonInputValue(JsString(data))) => Good(Left(data)) + case Some(iv) => Bad(One(InvalidFormatAttributeError("data", "string/attachment", iv))) + case None => Bad(One(MissingAttributeError("data"))) } val tlp = (attributes \ "tlp").asOpt[Long].getOrElse(2L) val pap = (attributes \ "pap").asOpt[Long].getOrElse(2L) @@ -187,11 +187,11 @@ class JobSrv( val label = (attributes \ "label").asOpt[String] val force = fields.getBoolean("force").getOrElse(false) withGood(dataType, dataFiv) { - case (dt, Right(fiv)) ⇒ dt → attachmentSrv.save(fiv).map(Right.apply) - case (dt, Left(data)) ⇒ dt → Future.successful(Left(data)) + case (dt, Right(fiv)) => dt -> attachmentSrv.save(fiv).map(Right.apply) + case (dt, Left(data)) => dt -> Future.successful(Left(data)) }.fold( - typeDataAttachment ⇒ typeDataAttachment._2.flatMap(da ⇒ create(worker, typeDataAttachment._1, da, tlp, pap, message, parameters, label, force)), - errors ⇒ { + typeDataAttachment => typeDataAttachment._2.flatMap(da => create(worker, typeDataAttachment._1, da, tlp, pap, message, parameters, label, force)), + errors => { val attributeError = AttributeCheckingError("job", errors) logger.error("legacy job create fails", attributeError) Future.failed(attributeError) @@ -200,7 +200,7 @@ class JobSrv( } def create(workerId: String, fields: Fields)(implicit authContext: AuthContext): Future[Job] = - workerSrv.getForUser(authContext.userId, workerId).flatMap { worker ⇒ + workerSrv.getForUser(authContext.userId, workerId).flatMap { worker => /* In Cortex 1, fields looks like: { @@ -231,15 +231,15 @@ class JobSrv( "optional parameters": "value" } */ - fields.getValue("attributes").map(attributes ⇒ legacyCreate(worker, attributes.as[JsObject], fields)).getOrElse { + 
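// Illustrative sketch (not from the patch): Source.futureSource, which the diff swaps in for
// the old Source.fromFutureSource in withUserFilter above. It flattens a Future[Source[...]]
// into a Source; the trivial future below stands in for "resolve the organisation, then query".
import akka.NotUsed
import akka.actor.ActorSystem
import akka.stream.scaladsl.{Sink, Source}
import scala.concurrent.{Await, Future}
import scala.concurrent.duration._

object FutureSourceSketch {
  def main(args: Array[String]): Unit = {
    implicit val system: ActorSystem = ActorSystem("sketch")

    val futureSource: Future[Source[Int, NotUsed]] = Future.successful(Source(1 to 3))

    // Same shape as the diff: flatten, then discard the Future materialised value.
    val flattened: Source[Int, NotUsed] =
      Source.futureSource(futureSource).mapMaterializedValue(_ => NotUsed)

    println(Await.result(flattened.runWith(Sink.seq), 5.seconds)) // Vector(1, 2, 3)
    system.terminate()
  }
}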
fields.getValue("attributes").map(attributes => legacyCreate(worker, attributes.as[JsObject], fields)).getOrElse { val dataType = Or.from(fields.getString("dataType"), One(MissingAttributeError("dataType"))) val dataFiv = (fields.get("data"), fields.getString("data"), fields.get("attachment")) match { - case (_, Some(data), None) ⇒ Good(Left(data)) - case (_, None, Some(fiv: FileInputValue)) ⇒ Good(Right(fiv)) - case (Some(fiv: FileInputValue), None, _) ⇒ Good(Right(fiv)) - case (_, None, Some(other)) ⇒ Bad(One(InvalidFormatAttributeError("attachment", "attachment", other))) - case (_, _, Some(fiv)) ⇒ Bad(One(InvalidFormatAttributeError("data/attachment", "string/attachment", fiv))) - case (_, None, None) ⇒ Bad(One(MissingAttributeError("data/attachment"))) + case (_, Some(data), None) => Good(Left(data)) + case (_, None, Some(fiv: FileInputValue)) => Good(Right(fiv)) + case (Some(fiv: FileInputValue), None, _) => Good(Right(fiv)) + case (_, None, Some(other)) => Bad(One(InvalidFormatAttributeError("attachment", "attachment", other))) + case (_, _, Some(fiv)) => Bad(One(InvalidFormatAttributeError("data/attachment", "string/attachment", fiv))) + case (_, None, None) => Bad(One(MissingAttributeError("data/attachment"))) } val tlp = fields.getLong("tlp").getOrElse(2L) @@ -249,19 +249,19 @@ class JobSrv( val parameters = fields .getValue("parameters") .collect { - case obj: JsObject ⇒ obj + case obj: JsObject => obj } .getOrElse(JsObject.empty) withGood(dataType, dataFiv) { - case (dt, Right(fiv)) ⇒ dt → attachmentSrv.save(fiv).map(Right.apply) - case (dt, Left(data)) ⇒ dt → Future.successful(Left(data)) + case (dt, Right(fiv)) => dt -> attachmentSrv.save(fiv).map(Right.apply) + case (dt, Left(data)) => dt -> Future.successful(Left(data)) }.fold( - typeDataAttachment ⇒ + typeDataAttachment => typeDataAttachment ._2 - .flatMap(da ⇒ create(worker, typeDataAttachment._1, da, tlp, pap, message, parameters, fields.getString("label"), force)), - errors ⇒ Future.failed(AttributeCheckingError("job", errors)) + .flatMap(da => create(worker, typeDataAttachment._1, da, tlp, pap, message, parameters, fields.getString("label"), force)), + errors => Future.failed(AttributeCheckingError("job", errors)) ) } } @@ -279,40 +279,40 @@ class JobSrv( )(implicit authContext: AuthContext): Future[Job] = { val previousJob = findSimilarJob(worker, dataType, dataAttachment, tlp, parameters, force) previousJob.flatMap { - case Right(job) ⇒ Future.successful(job) - case Left(cacheTag) ⇒ + case Right(job) => Future.successful(job) + case Left(cacheTag) => isUnderRateLimit(worker).flatMap { - case true ⇒ + case true => val fields = Fields( Json.obj( - "workerDefinitionId" → worker.workerDefinitionId(), - "workerId" → worker.id, - "workerName" → worker.name(), - "organization" → worker.parentId, - "status" → JobStatus.Waiting, - "dataType" → dataType, - "tlp" → tlp, - "pap" → pap, - "message" → message, - "parameters" → parameters.toString, - "type" → worker.tpe(), - "cacheTag" → cacheTag + "workerDefinitionId" -> worker.workerDefinitionId(), + "workerId" -> worker.id, + "workerName" -> worker.name(), + "organization" -> worker.parentId, + "status" -> JobStatus.Waiting, + "dataType" -> dataType, + "tlp" -> tlp, + "pap" -> pap, + "message" -> message, + "parameters" -> parameters.toString, + "type" -> worker.tpe(), + "cacheTag" -> cacheTag ) ).set("label", label.map(JsString.apply)) val fieldWithData = dataAttachment match { - case Left(data) ⇒ fields.set("data", data) - case Right(attachment) ⇒ fields.set("attachment", 
AttachmentInputValue(attachment)) + case Left(data) => fields.set("data", data) + case Right(attachment) => fields.set("attachment", AttachmentInputValue(attachment)) } createSrv[JobModel, Job](jobModel, fieldWithData).andThen { - case Success(job) ⇒ + case Success(job) => jobRunnerSrv .run(worker, job) .onComplete { - case Success(j) ⇒ logger.info(s"Job ${job.id} has finished with status ${j.status()}") - case Failure(e) ⇒ logger.error(s"Job ${job.id} has failed", e) + case Success(j) => logger.info(s"Job ${job.id} has finished with status ${j.status()}") + case Failure(e) => logger.error(s"Job ${job.id} has failed", e) } } - case false ⇒ + case false => Future.failed(RateLimitExceeded(worker)) } @@ -321,13 +321,13 @@ class JobSrv( private def isUnderRateLimit(worker: Worker): Future[Boolean] = (for { - rate ← worker.rate() - rateUnit ← worker.rateUnit() + rate <- worker.rate() + rateUnit <- worker.rateUnit() } yield { import org.elastic4play.services.QueryDSL._ val now = new Date().getTime logger.info(s"Checking rate limit on worker ${worker.name()} from ${new Date(now - rateUnit.id.toLong * 24 * 60 * 60 * 1000)}") - stats(and("createdAt" ~>= (now - rateUnit.id.toLong * 1000), "workerId" ~= worker.id), Seq(selectCount)).map { s ⇒ + stats(and("createdAt" ~>= (now - rateUnit.id.toLong * 1000), "workerId" ~= worker.id), Seq(selectCount)).map { s => val count = (s \ "count").as[Long] logger.info(s"$count analysis found (limit is $rate)") count < rate @@ -343,7 +343,7 @@ class JobSrv( force: Boolean ): Future[Either[String, Job]] = { val cacheTag = Hasher("MD5") - .fromString(s"${worker.id}|$dataType|$tlp|${dataAttachment.fold(data ⇒ data, attachment ⇒ attachment.id)}|$parameters") + .fromString(s"${worker.id}|$dataType|$tlp|${dataAttachment.fold(data => data, attachment => attachment.id)}|$parameters") .head .toString() lazy val cache = worker.jobCache().fold(jobCache)(_.minutes) @@ -362,7 +362,7 @@ class JobSrv( Some("0-1"), Seq("-createdAt") )._1 - .map(j ⇒ new Job(jobModel, j.attributes + ("fromCache" → JsBoolean(true)))) + .map(j => new Job(jobModel, j.attributes + ("fromCache" -> JsBoolean(true)))) .runWith(Sink.headOption) .map(_.toRight(cacheTag)) } diff --git a/app/org/thp/cortex/services/KeyAuthSrv.scala b/app/org/thp/cortex/services/KeyAuthSrv.scala index 4a6ce0c9d..a8dc8e130 100644 --- a/app/org/thp/cortex/services/KeyAuthSrv.scala +++ b/app/org/thp/cortex/services/KeyAuthSrv.scala @@ -17,7 +17,7 @@ import org.elastic4play.services.{AuthCapability, AuthContext, AuthSrv} import org.elastic4play.{AuthenticationError, BadRequestError} @Singleton -class KeyAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext, implicit val mat: Materializer) extends AuthSrv { +class KeyAuthSrv @Inject() (userSrv: UserSrv, implicit val ec: ExecutionContext, implicit val mat: Materializer) extends AuthSrv { override val name = "key" final protected def generateKey(): String = { @@ -37,19 +37,19 @@ class KeyAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext, .filter(_.key().contains(key)) .runWith(Sink.headOption) .flatMap { - case Some(user) ⇒ userSrv.getFromUser(request, user, name) - case None ⇒ Future.failed(AuthenticationError("Authentication failure")) + case Some(user) => userSrv.getFromUser(request, user, name) + case None => Future.failed(AuthenticationError("Authentication failure")) } } override def renewKey(username: String)(implicit authContext: AuthContext): Future[String] = { val newKey = generateKey() - userSrv.update(username, Fields.empty.set("key", 
newKey)).map(_ ⇒ newKey) + userSrv.update(username, Fields.empty.set("key", newKey)).map(_ => newKey) } override def getKey(username: String)(implicit authContext: AuthContext): Future[String] = userSrv.get(username).map(_.key().getOrElse(throw BadRequestError(s"User $username hasn't key"))) override def removeKey(username: String)(implicit authContext: AuthContext): Future[Unit] = - userSrv.update(username, Fields.empty.set("key", JsArray())).map(_ ⇒ ()) + userSrv.update(username, Fields.empty.set("key", JsArray())).map(_ => ()) } diff --git a/app/org/thp/cortex/services/LocalAuthSrv.scala b/app/org/thp/cortex/services/LocalAuthSrv.scala index d1b19a7f5..ae1af07b6 100644 --- a/app/org/thp/cortex/services/LocalAuthSrv.scala +++ b/app/org/thp/cortex/services/LocalAuthSrv.scala @@ -16,27 +16,27 @@ import org.elastic4play.utils.Hasher import org.elastic4play.{AuthenticationError, AuthorizationError} @Singleton -class LocalAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext, implicit val mat: Materializer) extends AuthSrv { +class LocalAuthSrv @Inject() (userSrv: UserSrv, implicit val ec: ExecutionContext, implicit val mat: Materializer) extends AuthSrv { val name = "local" override val capabilities = Set(AuthCapability.changePassword, AuthCapability.setPassword) private[services] def doAuthenticate(user: User, password: String): Boolean = user.password().map(_.split(",", 2)).fold(false) { - case Array(seed, pwd) ⇒ + case Array(seed, pwd) => val hash = Hasher("SHA-256").fromString(seed + password).head.toString hash == pwd - case _ ⇒ false + case _ => false } override def authenticate(username: String, password: String)(implicit request: RequestHeader): Future[AuthContext] = - userSrv.get(username).flatMap { user ⇒ + userSrv.get(username).flatMap { user => if (doAuthenticate(user, password)) userSrv.getFromUser(request, user, name) else Future.failed(AuthenticationError("Authentication failure")) } override def changePassword(username: String, oldPassword: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] = - userSrv.get(username).flatMap { user ⇒ + userSrv.get(username).flatMap { user => if (doAuthenticate(user, oldPassword)) setPassword(username, newPassword) else Future.failed(AuthorizationError("Authentication failure")) } @@ -44,6 +44,6 @@ class LocalAuthSrv @Inject()(userSrv: UserSrv, implicit val ec: ExecutionContext override def setPassword(username: String, newPassword: String)(implicit authContext: AuthContext): Future[Unit] = { val seed = Random.nextString(10).replace(',', '!') val newHash = seed + "," + Hasher("SHA-256").fromString(seed + newPassword).head.toString - userSrv.update(username, Fields.empty.set("password", newHash)).map(_ ⇒ ()) + userSrv.update(username, Fields.empty.set("password", newHash)).map(_ => ()) } } diff --git a/app/org/thp/cortex/services/MispSrv.scala b/app/org/thp/cortex/services/MispSrv.scala index 1f33ac5d6..bda43b571 100644 --- a/app/org/thp/cortex/services/MispSrv.scala +++ b/app/org/thp/cortex/services/MispSrv.scala @@ -21,7 +21,7 @@ import org.elastic4play.NotFoundError import org.elastic4play.services._ @Singleton -class MispSrv @Inject()( +class MispSrv @Inject() ( workerSrv: WorkerSrv, attachmentSrv: AttachmentSrv, jobSrv: JobSrv, @@ -36,21 +36,21 @@ class MispSrv @Inject()( val (analyzers, analyzerCount) = workerSrv.findAnalyzersForUser(authContext.userId, QueryDSL.any, Some("all"), Nil) val mispAnalyzers = analyzers - .map { analyzer ⇒ + .map { analyzer => Json.obj( - "name" → analyzer.name(), - 
"type" → "cortex", - "mispattributes" → Json.obj("input" → analyzer.dataTypeList().flatMap(dataType2mispType).distinct, "output" → Json.arr()), - "meta" → Json.obj( - "module-type" → Json.arr("cortex"), - "description" → analyzer.description(), - "author" → analyzer.author(), - "version" → analyzer.vers(), - "config" → Json.arr() + "name" -> analyzer.name(), + "type" -> "cortex", + "mispattributes" -> Json.obj("input" -> analyzer.dataTypeList().flatMap(dataType2mispType).distinct, "output" -> Json.arr()), + "meta" -> Json.obj( + "module-type" -> Json.arr("cortex"), + "description" -> analyzer.description(), + "author" -> analyzer.author(), + "version" -> analyzer.vers(), + "config" -> Json.arr() ) ) } - mispAnalyzers → analyzerCount + mispAnalyzers -> analyzerCount } def query(module: String, mispType: String, data: String)(implicit authContext: AuthContext): Future[JsObject] = { @@ -60,55 +60,55 @@ class MispSrv @Inject()( val duration = 20.minutes // TODO configurable for { - analyzer ← workerSrv.findAnalyzersForUser(authContext.userId, "name" ~= module, Some("0-1"), Nil)._1.runWith(Sink.headOption) - job ← analyzer + analyzer <- workerSrv.findAnalyzersForUser(authContext.userId, "name" ~= module, Some("0-1"), Nil)._1.runWith(Sink.headOption) + job <- analyzer .map(jobSrv.create(_, mispType2dataType(mispType), artifact, 0, 0, "", JsObject.empty, None, force = false)) .getOrElse(Future.failed(NotFoundError(s"Module $module not found"))) - _ ← auditActor.ask(Register(job.id, duration))(Timeout(duration)) - updatedJob ← jobSrv.getForUser(authContext.userId, job.id) - mispOutput ← toMispOutput(authContext.userId, updatedJob) + _ <- auditActor.ask(Register(job.id, duration))(Timeout(duration)) + updatedJob <- jobSrv.getForUser(authContext.userId, job.id) + mispOutput <- toMispOutput(authContext.userId, updatedJob) } yield mispOutput } private def toMispOutput(userId: String, job: Job): Future[JsObject] = job.status() match { - case JobStatus.Success ⇒ + case JobStatus.Success => for { - report ← jobSrv.getReport(job) - artifacts ← jobSrv + report <- jobSrv.getReport(job) + artifacts <- jobSrv .findArtifacts(userId, job.id, QueryDSL.any, Some("all"), Nil) ._1 - .map { artifact ⇒ + .map { artifact => toMispOutput(artifact) } .runWith(Sink.seq) - reportJson = Json.obj("full" → report.full(), "summary" → report.summary()) - cortexAttribute = Json.obj("types" → Json.arr("cortex"), "values" → Json.arr(reportJson.toString)) - } yield Json.obj("results" → (artifacts :+ cortexAttribute)) - case JobStatus.Waiting ⇒ Future.successful(Json.obj("error" → "This job hasn't finished yet")) - case JobStatus.Deleted ⇒ Future.successful(Json.obj("error" → "This job has been deleted")) - case JobStatus.Failure ⇒ + reportJson = Json.obj("full" -> report.full(), "summary" -> report.summary()) + cortexAttribute = Json.obj("types" -> Json.arr("cortex"), "values" -> Json.arr(reportJson.toString)) + } yield Json.obj("results" -> (artifacts :+ cortexAttribute)) + case JobStatus.Waiting => Future.successful(Json.obj("error" -> "This job hasn't finished yet")) + case JobStatus.Deleted => Future.successful(Json.obj("error" -> "This job has been deleted")) + case JobStatus.Failure => val message = job.errorMessage().getOrElse("This job has failed, without message!") - Future.successful(Json.obj("error" → message)) - case JobStatus.InProgress ⇒ Future.successful(Json.obj("error" → "This job hasn't finished yet")) + Future.successful(Json.obj("error" -> message)) + case JobStatus.InProgress => 
Future.successful(Json.obj("error" -> "This job hasn't finished yet")) } private def toArtifact(mispType: String, data: String): Either[String, Attachment] = mispType2dataType(mispType) match { - case "file" if mispType == "malware-sample" ⇒ ??? // TODO - case "file" ⇒ - val FAttachment = attachmentSrv.save("noname", "application/octet-stream", Base64.decodeBase64(data)).map { a ⇒ + case "file" if mispType == "malware-sample" => ??? // TODO + case "file" => + val FAttachment = attachmentSrv.save("noname", "application/octet-stream", Base64.decodeBase64(data)).map { a => a } Right(Await.result(FAttachment, 10.seconds)) - case _ ⇒ Left(data) + case _ => Left(data) } private def toMispOutput(artifact: Artifact): JsObject = (artifact.data(), artifact.attachment()) match { - case (Some(data), None) ⇒ Json.obj("types" → dataType2mispType(artifact.dataType()), "values" → Json.arr(data)) + case (Some(data), None) => Json.obj("types" -> dataType2mispType(artifact.dataType()), "values" -> Json.arr(data)) //case (None, Some(_)) ⇒ ??? - case _ ⇒ ??? + case _ => ??? } private def mispType2dataType(mispType: String): String = @@ -131,141 +131,141 @@ class MispSrv @Inject()( } private val typeLookup = Map( - "md5" → "hash", - "sha1" → "hash", - "sha256" → "hash", - "filename" → "filename", - "pdb" → "other", - "filename|md5" → "other", - "filename|sha1" → "other", - "filename|sha256" → "other", - "ip-src" → "ip", - "ip-dst" → "ip", - "hostname" → "fqdn", - "domain" → "domain", - "domain|ip" → "other", - "email-src" → "mail", - "email-dst" → "mail", - "email-subject" → "mail_subject", - "email-attachment" → "other", - "float" → "other", - "url" → "url", - "http-method" → "other", - "user-agent" → "user-agent", - "regkey" → "registry", - "regkey|value" → "registry", - "AS" → "other", - "snort" → "other", - "pattern-in-file" → "other", - "pattern-in-traffic" → "other", - "pattern-in-memory" → "other", - "yara" → "other", - "sigma" → "other", - "vulnerability" → "other", - "attachment" → "file", - "malware-sample" → "file", - "link" → "other", - "comment" → "other", - "text" → "other", - "hex" → "other", - "other" → "other", - "named" → "other", - "mutex" → "other", - "target-user" → "other", - "target-email" → "mail", - "target-machine" → "fqdn", - "target-org" → "other", - "target-location" → "other", - "target-external" → "other", - "btc" → "other", - "iban" → "other", - "bic" → "other", - "bank-account-nr" → "other", - "aba-rtn" → "other", - "bin" → "other", - "cc-number" → "other", - "prtn" → "other", - "threat-actor" → "other", - "campaign-name" → "other", - "campaign-id" → "other", - "malware-type" → "other", - "uri" → "uri_path", - "authentihash" → "other", - "ssdeep" → "hash", - "imphash" → "hash", - "pehash" → "hash", - "impfuzzy" → "hash", - "sha224" → "hash", - "sha384" → "hash", - "sha512" → "hash", - "sha512/224" → "hash", - "sha512/256" → "hash", - "tlsh" → "other", - "filename|authentihash" → "other", - "filename|ssdeep" → "other", - "filename|imphash" → "other", - "filename|impfuzzy" → "other", - "filename|pehash" → "other", - "filename|sha224" → "other", - "filename|sha384" → "other", - "filename|sha512" → "other", - "filename|sha512/224" → "other", - "filename|sha512/256" → "other", - "filename|tlsh" → "other", - "windows-scheduled-task" → "other", - "windows-service-name" → "other", - "windows-service-displayname" → "other", - "whois-registrant-email" → "mail", - "whois-registrant-phone" → "other", - "whois-registrant-name" → "other", - "whois-registrar" → "other", - "whois-creation-date" 
→ "other", - "x509-fingerprint-sha1" → "other", - "dns-soa-email" → "other", - "size-in-bytes" → "other", - "counter" → "other", - "datetime" → "other", - "cpe" → "other", - "port" → "other", - "ip-dst|port" → "other", - "ip-src|port" → "other", - "hostname|port" → "other", - "email-dst-display-name" → "other", - "email-src-display-name" → "other", - "email-header" → "other", - "email-reply-to" → "other", - "email-x-mailer" → "other", - "email-mime-boundary" → "other", - "email-thread-index" → "other", - "email-message-id" → "other", - "github-username" → "other", - "github-repository" → "other", - "github-organisation" → "other", - "jabber-id" → "other", - "twitter-id" → "other", - "first-name" → "other", - "middle-name" → "other", - "last-name" → "other", - "date-of-birth" → "other", - "place-of-birth" → "other", - "gender" → "other", - "passport-number" → "other", - "passport-country" → "other", - "passport-expiration" → "other", - "redress-number" → "other", - "nationality" → "other", - "visa-number" → "other", - "issue-date-of-the-visa" → "other", - "primary-residence" → "other", - "country-of-residence" → "other", - "special-service-request" → "other", - "frequent-flyer-number" → "other", - "travel-details" → "other", - "payment-details" → "other", - "place-port-of-original-embarkation" → "other", - "place-port-of-clearance" → "other", - "place-port-of-onward-foreign-destination" → "other", - "passenger-name-record-locator-number" → "other", - "mobile-application-id" → "other" + "md5" -> "hash", + "sha1" -> "hash", + "sha256" -> "hash", + "filename" -> "filename", + "pdb" -> "other", + "filename|md5" -> "other", + "filename|sha1" -> "other", + "filename|sha256" -> "other", + "ip-src" -> "ip", + "ip-dst" -> "ip", + "hostname" -> "fqdn", + "domain" -> "domain", + "domain|ip" -> "other", + "email-src" -> "mail", + "email-dst" -> "mail", + "email-subject" -> "mail_subject", + "email-attachment" -> "other", + "float" -> "other", + "url" -> "url", + "http-method" -> "other", + "user-agent" -> "user-agent", + "regkey" -> "registry", + "regkey|value" -> "registry", + "AS" -> "other", + "snort" -> "other", + "pattern-in-file" -> "other", + "pattern-in-traffic" -> "other", + "pattern-in-memory" -> "other", + "yara" -> "other", + "sigma" -> "other", + "vulnerability" -> "other", + "attachment" -> "file", + "malware-sample" -> "file", + "link" -> "other", + "comment" -> "other", + "text" -> "other", + "hex" -> "other", + "other" -> "other", + "named" -> "other", + "mutex" -> "other", + "target-user" -> "other", + "target-email" -> "mail", + "target-machine" -> "fqdn", + "target-org" -> "other", + "target-location" -> "other", + "target-external" -> "other", + "btc" -> "other", + "iban" -> "other", + "bic" -> "other", + "bank-account-nr" -> "other", + "aba-rtn" -> "other", + "bin" -> "other", + "cc-number" -> "other", + "prtn" -> "other", + "threat-actor" -> "other", + "campaign-name" -> "other", + "campaign-id" -> "other", + "malware-type" -> "other", + "uri" -> "uri_path", + "authentihash" -> "other", + "ssdeep" -> "hash", + "imphash" -> "hash", + "pehash" -> "hash", + "impfuzzy" -> "hash", + "sha224" -> "hash", + "sha384" -> "hash", + "sha512" -> "hash", + "sha512/224" -> "hash", + "sha512/256" -> "hash", + "tlsh" -> "other", + "filename|authentihash" -> "other", + "filename|ssdeep" -> "other", + "filename|imphash" -> "other", + "filename|impfuzzy" -> "other", + "filename|pehash" -> "other", + "filename|sha224" -> "other", + "filename|sha384" -> "other", + "filename|sha512" -> "other", + 
"filename|sha512/224" -> "other", + "filename|sha512/256" -> "other", + "filename|tlsh" -> "other", + "windows-scheduled-task" -> "other", + "windows-service-name" -> "other", + "windows-service-displayname" -> "other", + "whois-registrant-email" -> "mail", + "whois-registrant-phone" -> "other", + "whois-registrant-name" -> "other", + "whois-registrar" -> "other", + "whois-creation-date" -> "other", + "x509-fingerprint-sha1" -> "other", + "dns-soa-email" -> "other", + "size-in-bytes" -> "other", + "counter" -> "other", + "datetime" -> "other", + "cpe" -> "other", + "port" -> "other", + "ip-dst|port" -> "other", + "ip-src|port" -> "other", + "hostname|port" -> "other", + "email-dst-display-name" -> "other", + "email-src-display-name" -> "other", + "email-header" -> "other", + "email-reply-to" -> "other", + "email-x-mailer" -> "other", + "email-mime-boundary" -> "other", + "email-thread-index" -> "other", + "email-message-id" -> "other", + "github-username" -> "other", + "github-repository" -> "other", + "github-organisation" -> "other", + "jabber-id" -> "other", + "twitter-id" -> "other", + "first-name" -> "other", + "middle-name" -> "other", + "last-name" -> "other", + "date-of-birth" -> "other", + "place-of-birth" -> "other", + "gender" -> "other", + "passport-number" -> "other", + "passport-country" -> "other", + "passport-expiration" -> "other", + "redress-number" -> "other", + "nationality" -> "other", + "visa-number" -> "other", + "issue-date-of-the-visa" -> "other", + "primary-residence" -> "other", + "country-of-residence" -> "other", + "special-service-request" -> "other", + "frequent-flyer-number" -> "other", + "travel-details" -> "other", + "payment-details" -> "other", + "place-port-of-original-embarkation" -> "other", + "place-port-of-clearance" -> "other", + "place-port-of-onward-foreign-destination" -> "other", + "passenger-name-record-locator-number" -> "other", + "mobile-application-id" -> "other" ) } diff --git a/app/org/thp/cortex/services/OAuth2Srv.scala b/app/org/thp/cortex/services/OAuth2Srv.scala index 62099d471..ae30d3348 100644 --- a/app/org/thp/cortex/services/OAuth2Srv.scala +++ b/app/org/thp/cortex/services/OAuth2Srv.scala @@ -1,19 +1,18 @@ package org.thp.cortex.services - -import scala.concurrent.{ExecutionContext, Future} - -import play.api.http.Status -import play.api.libs.json.{JsObject, JsValue} -import play.api.libs.ws.WSClient -import play.api.mvc.RequestHeader -import play.api.{Configuration, Logger} +import java.util.UUID import akka.stream.Materializer import javax.inject.{Inject, Singleton} +import org.elastic4play.services.{AuthContext, AuthSrv} +import org.elastic4play.{AuthenticationError, BadRequestError, NotFoundError} import org.thp.cortex.services.mappers.UserMapper +import play.api.libs.json.JsObject +import play.api.libs.ws.WSClient +import play.api.mvc.{RequestHeader, Result, Results} +import play.api.{Configuration, Logger} -import org.elastic4play.services.{AuthContext, AuthSrv} -import org.elastic4play.{AuthenticationError, AuthorizationError, OAuth2Redirect} +import scala.concurrent.{ExecutionContext, Future} +import scala.util.{Failure, Success} case class OAuth2Config( clientId: String, @@ -25,6 +24,8 @@ case class OAuth2Config( tokenUrl: String, userUrl: String, scope: String, + authorizationHeader: String, + autoupdate: Boolean, autocreate: Boolean ) @@ -32,17 +33,32 @@ object OAuth2Config { def apply(configuration: Configuration): Option[OAuth2Config] = for { - clientId ← configuration.getOptional[String]("auth.oauth2.clientId") - 
clientSecret ← configuration.getOptional[String]("auth.oauth2.clientSecret") - redirectUri ← configuration.getOptional[String]("auth.oauth2.redirectUri") - responseType ← configuration.getOptional[String]("auth.oauth2.responseType") - grantType ← configuration.getOptional[String]("auth.oauth2.grantType") - authorizationUrl ← configuration.getOptional[String]("auth.oauth2.authorizationUrl") - userUrl ← configuration.getOptional[String]("auth.oauth2.userUrl") - tokenUrl ← configuration.getOptional[String]("auth.oauth2.tokenUrl") - scope ← configuration.getOptional[String]("auth.oauth2.scope") - autocreate = configuration.getOptional[Boolean]("auth.sso.autocreate").getOrElse(false) - } yield OAuth2Config(clientId, clientSecret, redirectUri, responseType, grantType, authorizationUrl, tokenUrl, userUrl, scope, autocreate) + clientId <- configuration.getOptional[String]("auth.oauth2.clientId") + clientSecret <- configuration.getOptional[String]("auth.oauth2.clientSecret") + redirectUri <- configuration.getOptional[String]("auth.oauth2.redirectUri") + responseType <- configuration.getOptional[String]("auth.oauth2.responseType") + grantType = configuration.getOptional[String]("auth.oauth2.grantType").getOrElse("authorization_code") + authorizationUrl <- configuration.getOptional[String]("auth.oauth2.authorizationUrl") + tokenUrl <- configuration.getOptional[String]("auth.oauth2.tokenUrl") + userUrl <- configuration.getOptional[String]("auth.oauth2.userUrl") + scope <- configuration.getOptional[String]("auth.oauth2.scope") + authorizationHeader = configuration.getOptional[String]("auth.oauth2.authorizationHeader").getOrElse("Bearer") + autocreate = configuration.getOptional[Boolean]("auth.sso.autocreate").getOrElse(false) + autoupdate = configuration.getOptional[Boolean]("auth.sso.autoupdate").getOrElse(false) + } yield OAuth2Config( + clientId, + clientSecret, + redirectUri, + responseType, + grantType, + authorizationUrl, + tokenUrl, + userUrl, + scope, + authorizationHeader, + autoupdate, + autocreate + ) } @Singleton @@ -63,95 +79,149 @@ class OAuth2Srv( val Oauth2TokenQueryString = "code" - private def withOAuth2Config[A](body: OAuth2Config ⇒ Future[A]): Future[A] = + private def withOAuth2Config[A](body: OAuth2Config => Future[A]): Future[A] = oauth2Config.fold[Future[A]](Future.failed(AuthenticationError("OAuth2 not configured properly")))(body) - override def authenticate()(implicit request: RequestHeader): Future[AuthContext] = - withOAuth2Config { cfg ⇒ - request - .queryString - .get(Oauth2TokenQueryString) - .flatMap(_.headOption) - .fold(createOauth2Redirect(cfg.clientId)) { code ⇒ - getAuthTokenAndAuthenticate(cfg.clientId, code) + override def authenticate()(implicit request: RequestHeader): Future[Either[Result, AuthContext]] = + withOAuth2Config { oauth2Config => + if (!isSecuredAuthCode(request)) { + logger.debug("Code or state is not provided, redirect to authorizationUrl") + Future.successful(Left(authRedirect(oauth2Config))) + } else { + (for { + token <- getToken(oauth2Config, request) + userData <- getUserData(oauth2Config, token) + authContext <- authenticate(oauth2Config, request, userData) + } yield Right(authContext)).recoverWith { + case error => Future.failed(AuthenticationError(s"OAuth2 authentication failure: ${error.getMessage}")) } + } } - private def getAuthTokenAndAuthenticate(clientId: String, code: String)(implicit request: RequestHeader): Future[AuthContext] = { - logger.debug("Getting user token with the code from the response!") - withOAuth2Config { cfg ⇒ -
val acceptHeader = "Accept" → cfg.responseType - ws.url(cfg.tokenUrl) - .addHttpHeaders(acceptHeader) - .post( - Map( - "code" → code, - "grant_type" → cfg.grantType, - "client_secret" → cfg.clientSecret, - "redirect_uri" → cfg.redirectUri, - "client_id" → clientId - ) - ) - .recoverWith { - case error ⇒ - logger.error(s"Token verification failure", error) - Future.failed(AuthenticationError("Token verification failure")) - } - .flatMap { r ⇒ - r.status match { - case Status.OK ⇒ - val accessToken = (r.json \ "access_token").asOpt[String].getOrElse("") - val authHeader = "Authorization" → s"bearer $accessToken" - ws.url(cfg.userUrl) - .addHttpHeaders(authHeader) - .get() - .flatMap { userResponse ⇒ - if (userResponse.status != Status.OK) { - Future.failed(AuthenticationError(s"unexpected response from server: ${userResponse.status} ${userResponse.body}")) - } else { - val response = userResponse.json.asInstanceOf[JsObject] - getOrCreateUser(response, authHeader) - } - } - case _ ⇒ - logger.error(s"unexpected response from server: ${r.status} ${r.body}") - Future.failed(AuthenticationError("unexpected response from server")) - } - } - } + private def isSecuredAuthCode(request: RequestHeader): Boolean = + request.queryString.contains("code") && request.queryString.contains("state") + + /** + * Filter checking whether we initiate the OAuth2 process + * and redirecting to OAuth2 server if necessary + * @return + */ + private def authRedirect(oauth2Config: OAuth2Config): Result = { + val state = UUID.randomUUID().toString + val queryStringParams = Map[String, Seq[String]]( + "scope" -> Seq(oauth2Config.scope), + "response_type" -> Seq(oauth2Config.responseType), + "redirect_uri" -> Seq(oauth2Config.redirectUri), + "client_id" -> Seq(oauth2Config.clientId), + "state" -> Seq(state) + ) + + logger.debug(s"Redirecting to ${oauth2Config.redirectUri} with $queryStringParams and state $state") + Results + .Redirect(oauth2Config.authorizationUrl, queryStringParams, status = 302) + .withSession("state" -> state) } - private def getOrCreateUser(response: JsValue, authHeader: (String, String))(implicit request: RequestHeader): Future[AuthContext] = - withOAuth2Config { cfg ⇒ - ssoMapper.getUserFields(response, Some(authHeader)).flatMap { userFields ⇒ - val userId = userFields.getString("login").getOrElse("") - userSrv - .get(userId) - .flatMap(user ⇒ { - userSrv.getFromUser(request, user, name) - }) - .recoverWith { - case authErr: AuthorizationError ⇒ Future.failed(authErr) - case _ if cfg.autocreate ⇒ - userSrv.inInitAuthContext { implicit authContext ⇒ - userSrv - .create(userFields) - .flatMap(user ⇒ { - userSrv.getFromUser(request, user, name) - }) - } - } + /** + * Enriching the initial request with OAuth2 token gotten + * from OAuth2 code + * @return + */ + private def getToken[A](oauth2Config: OAuth2Config, request: RequestHeader): Future[String] = { + val token = + for { + state <- request.session.get("state") + stateQs <- request.queryString.get("state").flatMap(_.headOption) + if state == stateQs + } yield request.queryString.get("code").flatMap(_.headOption) match { + case Some(code) => + logger.debug(s"Attempting to retrieve OAuth2 token from ${oauth2Config.tokenUrl} with code $code") + getAuthTokenFromCode(oauth2Config, code, state) + .map { t => + logger.trace(s"Got token $t") + t + } + case None => + Future.failed(AuthenticationError(s"OAuth2 server code missing ${request.queryString.get("error")}")) } - } + token.getOrElse(Future.failed(BadRequestError("OAuth2 states mismatch"))) + } - 
private def createOauth2Redirect(clientId: String): Future[AuthContext] = - withOAuth2Config { cfg ⇒ - val queryStringParams = Map[String, Seq[String]]( - "scope" → Seq(cfg.scope), - "response_type" → Seq(cfg.responseType), - "redirect_uri" → Seq(cfg.redirectUri), - "client_id" → Seq(clientId) + /** + * Querying the OAuth2 server for a token + * @param code the previously obtained code + * @return + */ + private def getAuthTokenFromCode(oauth2Config: OAuth2Config, code: String, state: String): Future[String] = { + logger.trace(s""" + |Request to ${oauth2Config.tokenUrl} with + | code: $code + | grant_type: ${oauth2Config.grantType} + | client_secret: ${oauth2Config.clientSecret} + | redirect_uri: ${oauth2Config.redirectUri} + | client_id: ${oauth2Config.clientId} + | state: $state + |""".stripMargin) + ws.url(oauth2Config.tokenUrl) + .withHttpHeaders("Accept" -> "application/json") + .post( + Map( + "code" -> code, + "grant_type" -> oauth2Config.grantType, + "client_secret" -> oauth2Config.clientSecret, + "redirect_uri" -> oauth2Config.redirectUri, + "client_id" -> oauth2Config.clientId, + "state" -> state + ) ) - Future.failed(OAuth2Redirect(cfg.authorizationUrl, queryStringParams)) - } + .transform { + case Success(r) if r.status == 200 => Success((r.json \ "access_token").asOpt[String].getOrElse("")) + case Failure(error) => Failure(AuthenticationError(s"OAuth2 token verification failure ${error.getMessage}")) + case Success(r) => Failure(AuthenticationError(s"OAuth2/token unexpected response from server (${r.status} ${r.statusText})")) + } + } + + /** + * Client query for user data with OAuth2 token + * @param token the token + * @return + */ + private def getUserData(oauth2Config: OAuth2Config, token: String): Future[JsObject] = { + logger.trace(s"Request to ${oauth2Config.userUrl} with authorization header: ${oauth2Config.authorizationHeader} $token") + ws.url(oauth2Config.userUrl) + .addHttpHeaders("Authorization" -> s"${oauth2Config.authorizationHeader} $token") + .get() + .transform { + case Success(r) if r.status == 200 => Success(r.json.as[JsObject]) + case Failure(error) => Failure(AuthenticationError(s"OAuth2 user data fetch failure ${error.getMessage}")) + case Success(r) => Failure(AuthenticationError(s"OAuth2/userinfo unexpected response from server (${r.status} ${r.statusText})")) + } + } + + private def authenticate(oauth2Config: OAuth2Config, request: RequestHeader, userData: JsObject): Future[AuthContext] = + for { + userFields <- ssoMapper.getUserFields(userData) + login <- userFields.getString("login").fold(Future.failed[String](AuthenticationError("")))(Future.successful) + user <- userSrv + .get(login) + .flatMap { + case u if oauth2Config.autoupdate => + logger.debug(s"Updating OAuth/OIDC user") + userSrv.inInitAuthContext { implicit authContext => + // Only update name and roles, not login (can't change it) + userSrv + .update(u, userFields.unset("login")) + + } + case u => Future.successful(u) + } + .recoverWith { + case _: NotFoundError if oauth2Config.autocreate => + logger.debug(s"Creating OAuth/OIDC user") + userSrv.inInitAuthContext { implicit authContext => + userSrv.create(userFields.set("login", userFields.getString("login").get.toLowerCase)) + } + } + authContext <- userSrv.getFromUser(request, user, name) + } yield authContext } diff --git a/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala b/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala index 444ea1b01..8870d1743 100644 --- a/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala +++ 
b/app/org/thp/cortex/services/ProcessJobRunnerSrv.scala @@ -16,7 +16,7 @@ import scala.sys.process.{Process, ProcessLogger, _} import scala.util.Try @Singleton -class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) { +class ProcessJobRunnerSrv @Inject() (implicit val system: ActorSystem) { lazy val logger = Logger(getClass) @@ -27,7 +27,7 @@ class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) { (s"pip$pythonVersion" :: "show" :: "cortexutils" :: Nil) .lineStream .collectFirst { - case pythonPackageVersionRegex(major, minor, patch) ⇒ (major.toInt, minor.toInt, patch.toInt) + case pythonPackageVersionRegex(major, minor, patch) => (major.toInt, minor.toInt, patch.toInt) } }.getOrElse(None) @@ -36,7 +36,7 @@ class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) { val output = StringBuilder.newBuilder logger.info(s"Execute $command in $baseDirectory, timeout is ${timeout.fold("none")(_.toString)}") val process = Process(Seq(command, jobDirectory.toString), baseDirectory.toFile) - .run(ProcessLogger { s ⇒ + .run(ProcessLogger { s => logger.info(s" Job ${job.id}: $s") output ++= s }) @@ -45,24 +45,24 @@ class ProcessJobRunnerSrv @Inject()(implicit val system: ActorSystem) { process.exitValue() () } - .map { _ ⇒ + .map { _ => val outputFile = jobDirectory.resolve("output").resolve("output.json") if (!Files.exists(outputFile) || Files.size(outputFile) == 0) { - val report = Json.obj("success" → false, "errorMessage" → output.toString) + val report = Json.obj("success" -> false, "errorMessage" -> output.toString) Files.write(outputFile, report.toString.getBytes(StandardCharsets.UTF_8)) } () } .recoverWith { - case error ⇒ + case error => logger.error(s"Execution of command $command failed", error) Future.apply { - val report = Json.obj("success" → false, "errorMessage" → s"${error.getMessage}\n$output") + val report = Json.obj("success" -> false, "errorMessage" -> s"${error.getMessage}\n$output") Files.write(jobDirectory.resolve("output").resolve("output.json"), report.toString.getBytes(StandardCharsets.UTF_8)) () } } - timeout.fold(execution)(t ⇒ execution.withTimeout(t, killProcess(process))) + timeout.fold(execution)(t => execution.withTimeout(t, killProcess(process))) } def killProcess(process: Process): Unit = { diff --git a/app/org/thp/cortex/services/ResponderConfigSrv.scala b/app/org/thp/cortex/services/ResponderConfigSrv.scala index 6da517243..9a9c9fdd2 100644 --- a/app/org/thp/cortex/services/ResponderConfigSrv.scala +++ b/app/org/thp/cortex/services/ResponderConfigSrv.scala @@ -11,7 +11,7 @@ import org.thp.cortex.models.{BaseConfig, WorkerConfigModel, WorkerType} import org.elastic4play.services.{CreateSrv, FindSrv, UpdateSrv} @Singleton -class ResponderConfigSrv @Inject()( +class ResponderConfigSrv @Inject() ( val configuration: Configuration, val workerConfigModel: WorkerConfigModel, val userSrv: UserSrv, diff --git a/app/org/thp/cortex/services/StreamMessage.scala b/app/org/thp/cortex/services/StreamMessage.scala index 826ae0c8c..0b6cb53cc 100644 --- a/app/org/thp/cortex/services/StreamMessage.scala +++ b/app/org/thp/cortex/services/StreamMessage.scala @@ -28,29 +28,29 @@ case class AuditOperationGroup( val modelSummary = summary.getOrElse(operation.entity.model.modelName, Map.empty[String, Int]) val actionCount = modelSummary.getOrElse(operation.action.toString, 0) copy( - summary = summary + (operation.entity.model.modelName → (modelSummary + - (operation.action.toString → (actionCount + 1)))) + summary = summary + 
(operation.entity.model.modelName -> (modelSummary + + (operation.action.toString -> (actionCount + 1)))) ) } def makeReady: AuditOperationGroup = copy(isReady = true) - def toJson(implicit ec: ExecutionContext): Future[JsObject] = obj.map { o ⇒ + def toJson(implicit ec: ExecutionContext): Future[JsObject] = obj.map { o => Json.obj( - "base" → Json.obj( - "objectId" → operation.entity.id, - "objectType" → operation.entity.model.modelName, - "operation" → operation.action, - "startDate" → operation.date, - "rootId" → operation.entity.routing, - "user" → operation.authContext.userId, - "createdBy" → operation.authContext.userId, - "createdAt" → operation.date, - "requestId" → operation.authContext.requestId, - "object" → o, - "details" → auditedAttributes + "base" -> Json.obj( + "objectId" -> operation.entity.id, + "objectType" -> operation.entity.model.modelName, + "operation" -> operation.action, + "startDate" -> operation.date, + "rootId" -> operation.entity.routing, + "user" -> operation.authContext.userId, + "createdBy" -> operation.authContext.userId, + "createdAt" -> operation.date, + "requestId" -> operation.authContext.requestId, + "object" -> o, + "details" -> auditedAttributes ), - "summary" → summary + "summary" -> summary ) } } @@ -64,15 +64,15 @@ object AuditOperationGroup { .details .fields .map { - case (name, value) ⇒ + case (name, value) => val baseName = name.split("\\.").head (name, value, operation.entity.model.attributes.find(_.attributeName == baseName)) } - .collect { case (name, value, Some(attr)) if !attr.isUnaudited ⇒ (name, value) } + .collect { case (name, value, Some(attr)) if !attr.isUnaudited => (name, value) } } val obj = auxSrv(operation.entity, 10, withStats = false, removeUnaudited = true) .recover { - case error ⇒ + case error => logger.error("auxSrv fails", error) JsObject.empty } @@ -81,7 +81,7 @@ object AuditOperationGroup { operation, auditedAttributes, obj, - Map(operation.entity.model.modelName → Map(operation.action.toString → 1)), + Map(operation.entity.model.modelName -> Map(operation.action.toString -> 1)), false ) } @@ -101,7 +101,7 @@ case class MigrationEventGroup(tableName: String, current: Long, total: Long) ex def toJson(implicit ec: ExecutionContext): Future[JsObject] = Future.successful( - Json.obj("base" → Json.obj("rootId" → current, "objectType" → "migration", "tableName" → tableName, "current" → current, "total" → total)) + Json.obj("base" -> Json.obj("rootId" -> current, "objectType" -> "migration", "tableName" -> tableName, "current" -> current, "total" -> total)) ) } diff --git a/app/org/thp/cortex/services/StreamSrv.scala b/app/org/thp/cortex/services/StreamSrv.scala index 335029b98..de418907b 100644 --- a/app/org/thp/cortex/services/StreamSrv.scala +++ b/app/org/thp/cortex/services/StreamSrv.scala @@ -17,7 +17,7 @@ import scala.concurrent.{ExecutionContext, Future} * This actor monitors dead messages and log them */ @Singleton -class DeadLetterMonitoringActor @Inject()(system: ActorSystem) extends Actor { +class DeadLetterMonitoringActor @Inject() (system: ActorSystem) extends Actor { private[DeadLetterMonitoringActor] lazy val logger = Logger(getClass) override def preStart(): Unit = { @@ -31,10 +31,10 @@ class DeadLetterMonitoringActor @Inject()(system: ActorSystem) extends Actor { } override def receive: Receive = { - case DeadLetter(StreamActor.GetOperations, sender, recipient) ⇒ + case DeadLetter(StreamActor.GetOperations, sender, recipient) => logger.warn(s"receive dead GetOperations message, $sender → $recipient") sender 
! StreamActor.StreamNotFound - case other ⇒ + case other => logger.error(s"receive dead message : $other") } } @@ -123,16 +123,16 @@ class StreamActor( private def normalizeOperation(operation: AuditOperation) = { val auditedDetails = operation.details.fields.flatMap { - case (attrName, value) ⇒ + case (attrName, value) => val attrNames = attrName.split("\\.").toSeq operation .entity .model .attributes - .find(a ⇒ a.attributeName == attrNames.head && !a.isUnaudited) - .map { _ ⇒ + .find(a => a.attributeName == attrNames.head && !a.isUnaudited) + .map { _ => val reverseNames = attrNames.reverse - reverseNames.drop(1).foldLeft(reverseNames.head → value)((jsTuple, name) ⇒ name → JsObject(Seq(jsTuple))) + reverseNames.drop(1).foldLeft(reverseNames.head -> value)((jsTuple, name) => name -> JsObject(Seq(jsTuple))) } } operation.copy(details = JsObject(auditedDetails)) @@ -140,80 +140,80 @@ class StreamActor( private def receiveWithState(waitingRequest: Option[WaitingRequest], currentMessages: Map[String, Option[StreamMessageGroup[_]]]): Receive = { /* End of HTTP request, mark received messages to ready*/ - case Commit(requestId) ⇒ + case Commit(requestId) => currentMessages.get(requestId).foreach { - case Some(message) ⇒ - context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + (requestId → Some(message.makeReady)))) - case None ⇒ + case Some(message) => + context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + (requestId -> Some(message.makeReady)))) + case None => } /* Migration process event */ - case event: MigrationEvent ⇒ + case event: MigrationEvent => val newMessages = currentMessages.get(event.modelName).flatten.fold(MigrationEventGroup(event)) { - case e: MigrationEventGroup ⇒ e :+ event + case e: MigrationEventGroup => e :+ event } - context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + (event.modelName → Some(newMessages)))) + context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + (event.modelName -> Some(newMessages)))) /* Database migration has just finished */ - case EndOfMigrationEvent ⇒ - context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + ("end" → Some(MigrationEventGroup.endOfMigration)))) + case EndOfMigrationEvent => + context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + ("end" -> Some(MigrationEventGroup.endOfMigration)))) /* */ - case operation: AuditOperation ⇒ + case operation: AuditOperation => val requestId = operation.authContext.requestId val normalizedOperation = normalizeOperation(operation) logger.debug(s"Receiving audit operation : $operation ⇒ $normalizedOperation") val updatedOperationGroup = currentMessages.get(requestId) match { - case None ⇒ + case None => logger.debug("Operation that comes after the end of request, make operation ready to send") AuditOperationGroup(auxSrv, normalizedOperation).makeReady // Operation that comes after the end of request - case Some(None) ⇒ + case Some(None) => logger.debug("First operation of the request, creating operation group") AuditOperationGroup(auxSrv, normalizedOperation) // First operation related to the given request - case Some(Some(aog: AuditOperationGroup)) ⇒ + case Some(Some(aog: AuditOperationGroup)) => logger.debug("Operation included in existing group") aog :+ normalizedOperation - case _ ⇒ + case _ => logger.debug("Impossible") sys.error("") } - context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + (requestId → 
Some(updatedOperationGroup)))) + context.become(receiveWithState(waitingRequest.map(_.renew), currentMessages + (requestId -> Some(updatedOperationGroup)))) - case GetOperations ⇒ + case GetOperations => renewExpiration() - waitingRequest.foreach { wr ⇒ + waitingRequest.foreach { wr => wr.submit(Nil) logger.error("Multiple requests !") } context.become(receiveWithState(Some(new WaitingRequest(sender)), currentMessages)) - case Submit ⇒ + case Submit => waitingRequest match { - case Some(wr) ⇒ + case Some(wr) => val (readyMessages, pendingMessages) = currentMessages.partition(_._2.fold(false)(_.isReady)) - Future.sequence(readyMessages.values.map(_.get.toJson)).foreach(messages ⇒ wr.submit(messages.toSeq)) + Future.sequence(readyMessages.values.map(_.get.toJson)).foreach(messages => wr.submit(messages.toSeq)) context.become(receiveWithState(None, pendingMessages)) - case None ⇒ + case None => logger.error("No request to submit !") } - case Initialize(requestId) ⇒ context.become(receiveWithState(waitingRequest, currentMessages + (requestId → None))) - case message ⇒ logger.warn(s"Unexpected message $message (${message.getClass})") + case Initialize(requestId) => context.become(receiveWithState(waitingRequest, currentMessages + (requestId -> None))) + case message => logger.warn(s"Unexpected message $message (${message.getClass})") } def receive: Receive = receiveWithState(None, Map.empty[String, Option[StreamMessageGroup[_]]]) } @Singleton -class StreamFilter @Inject()(eventSrv: EventSrv, implicit val mat: Materializer, implicit val ec: ExecutionContext) extends Filter { +class StreamFilter @Inject() (eventSrv: EventSrv, implicit val mat: Materializer, implicit val ec: ExecutionContext) extends Filter { private[StreamFilter] lazy val logger = Logger(getClass) - def apply(nextFilter: RequestHeader ⇒ Future[Result])(requestHeader: RequestHeader): Future[Result] = { + def apply(nextFilter: RequestHeader => Future[Result])(requestHeader: RequestHeader): Future[Result] = { val requestId = Instance.getRequestId(requestHeader) eventSrv.publish(StreamActor.Initialize(requestId)) nextFilter(requestHeader).andThen { - case _ ⇒ eventSrv.publish(StreamActor.Commit(requestId)) + case _ => eventSrv.publish(StreamActor.Commit(requestId)) } } } diff --git a/app/org/thp/cortex/services/UserSrv.scala b/app/org/thp/cortex/services/UserSrv.scala index 47f137de3..dbb943837 100644 --- a/app/org/thp/cortex/services/UserSrv.scala +++ b/app/org/thp/cortex/services/UserSrv.scala @@ -15,7 +15,7 @@ import org.thp.cortex.models._ import org.elastic4play.controllers.Fields import org.elastic4play.database.{DBIndex, ModifyConfig} -import org.elastic4play.services.{User ⇒ EUser, UserSrv ⇒ EUserSrv, _} +import org.elastic4play.services.{User => EUser, UserSrv => EUserSrv, _} import org.elastic4play.utils.Instance import org.elastic4play.{AuthenticationError, AuthorizationError, NotFoundError} @@ -75,41 +75,41 @@ class UserSrv( } override def getFromId(request: RequestHeader, userId: String, authMethod: String): Future[AuthContext] = - get(userId).flatMap { user ⇒ + get(userId).flatMap { user => getFromUser(request, user, authMethod) } override def getFromUser(request: RequestHeader, user: EUser, authMethod: String): Future[AuthContext] = user match { - case u: User if u.status() == UserStatus.Ok ⇒ + case u: User if u.status() == UserStatus.Ok => organizationSrv.get(u.organization()).flatMap { - case o if o.status() == OrganizationStatus.Active ⇒ + case o if o.status() == OrganizationStatus.Active => 
Future.successful(AuthContextImpl(user.id, user.getUserName, Instance.getRequestId(request), user.getRoles, authMethod)) - case _ ⇒ Future.failed(AuthorizationError("Your account is locked")) + case _ => Future.failed(AuthorizationError("Your account is locked")) } - case _ ⇒ Future.failed(AuthorizationError("Your account is locked")) + case _ => Future.failed(AuthorizationError("Your account is locked")) } override def getInitialUser(request: RequestHeader): Future[AuthContext] = dbIndex.getSize(userModel.modelName).map { - case size if size > 0 ⇒ throw AuthenticationError(s"Use of initial user is forbidden because users exist in database") - case _ ⇒ AuthContextImpl("init", "", Instance.getRequestId(request), Roles.roles, "init") + case size if size > 0 => throw AuthenticationError(s"Use of initial user is forbidden because users exist in database") + case _ => AuthContextImpl("init", "", Instance.getRequestId(request), Roles.roles, "init") } - override def inInitAuthContext[A](block: AuthContext ⇒ Future[A]): Future[A] = { + override def inInitAuthContext[A](block: AuthContext => Future[A]): Future[A] = { val authContext = AuthContextImpl("init", "", Instance.getInternalId, Roles.roles, "init") eventSrv.publish(StreamActor.Initialize(authContext.requestId)) block(authContext).andThen { - case _ ⇒ eventSrv.publish(StreamActor.Commit(authContext.requestId)) + case _ => eventSrv.publish(StreamActor.Commit(authContext.requestId)) } } def create(fields: Fields)(implicit authContext: AuthContext): Future[User] = fields.getString("password") match { - case None ⇒ createSrv[UserModel, User](userModel, fields) - case Some(password) ⇒ - createSrv[UserModel, User](userModel, fields.unset("password")).flatMap { user ⇒ - authSrv.get.setPassword(user.userId(), password).map(_ ⇒ user) + case None => createSrv[UserModel, User](userModel, fields) + case Some(password) => + createSrv[UserModel, User](userModel, fields.unset("password")).flatMap { user => + authSrv.get.setPassword(user.userId(), password).map(_ => user) } } @@ -157,13 +157,13 @@ class UserSrv( def findForUser(userId: String, queryDef: QueryDef, range: Option[String], sortBy: Seq[String]): (Source[User, NotUsed], Future[Long]) = { val users = (for { - user ← get(userId) + user <- get(userId) organizationId = user.organization() } yield findForOrganization(organizationId, queryDef, range, sortBy)) - .recover { case NotFoundError("user init not found") ⇒ Source.empty → Future.successful(0L) } + .recover { case NotFoundError("user init not found") => Source.empty -> Future.successful(0L) } - val userSource = Source.fromFutureSource(users.map(_._1)).mapMaterializedValue(_ ⇒ NotUsed) + val userSource = Source.futureSource(users.map(_._1)).mapMaterializedValue(_ => NotUsed) val userTotal = users.flatMap(_._2) - userSource → userTotal + userSource -> userTotal } } diff --git a/app/org/thp/cortex/services/WorkerConfigSrv.scala b/app/org/thp/cortex/services/WorkerConfigSrv.scala index 02208b50a..7cbdfe087 100644 --- a/app/org/thp/cortex/services/WorkerConfigSrv.scala +++ b/app/org/thp/cortex/services/WorkerConfigSrv.scala @@ -34,33 +34,33 @@ trait WorkerConfigSrv { protected def buildDefinitionMap(definitionSource: Source[WorkerDefinition, NotUsed]): Future[Map[String, BaseConfig]] = definitionSource .filter(_.baseConfiguration.isDefined) - .map(d ⇒ d.copy(configurationItems = d.configurationItems.map(_.copy(required = false)))) + .map(d => d.copy(configurationItems = d.configurationItems.map(_.copy(required = false)))) .groupBy(200, 
_.baseConfiguration.get) // TODO replace groupBy by fold to prevent "too many streams" error - .map(d ⇒ BaseConfig(d.baseConfiguration.get, Seq(d.name), d.configurationItems, None)) + .map(d => BaseConfig(d.baseConfiguration.get, Seq(d.name), d.configurationItems, None)) .reduce(_ + _) .filterNot(_.items.isEmpty) .mergeSubstreams - .mapMaterializedValue(_ ⇒ NotUsed) + .mapMaterializedValue(_ => NotUsed) .runWith(Sink.seq) - .map { baseConfigs ⇒ + .map { baseConfigs => (BaseConfig.global(workerType, configuration) +: baseConfigs) - .map(c ⇒ c.name → c) + .map(c => c.name -> c) .toMap } def getForUser(userId: String, configName: String): Future[BaseConfig] = userSrv .getOrganizationId(userId) - .flatMap(organizationId ⇒ getForOrganization(organizationId, configName)) + .flatMap(organizationId => getForOrganization(organizationId, configName)) def getForOrganization(organizationId: String, configName: String): Future[BaseConfig] = { import org.elastic4play.services.QueryDSL._ for { - workerConfig ← findForOrganization(organizationId, "name" ~= configName, Some("0-1"), Nil) + workerConfig <- findForOrganization(organizationId, "name" ~= configName, Some("0-1"), Nil) ._1 .runWith(Sink.headOption) - d ← definitions - baseConfig ← d.get(configName).fold[Future[BaseConfig]](Future.failed(NotFoundError(s"config $configName not found")))(Future.successful) + d <- definitions + baseConfig <- d.get(configName).fold[Future[BaseConfig]](Future.failed(NotFoundError(s"config $configName not found")))(Future.successful) } yield baseConfig.copy(config = workerConfig) } @@ -72,34 +72,34 @@ trait WorkerConfigSrv { def updateOrCreate(userId: String, workerConfigName: String, config: JsObject)(implicit authContext: AuthContext): Future[BaseConfig] = for { - organizationId ← userSrv.getOrganizationId(userId) - organization ← organizationSrv.get(organizationId) - baseConfig ← getForOrganization(organizationId, workerConfigName) - validatedConfig ← baseConfig + organizationId <- userSrv.getOrganizationId(userId) + organization <- organizationSrv.get(organizationId) + baseConfig <- getForOrganization(organizationId, workerConfigName) + validatedConfig <- baseConfig .items .validatedBy(_.read(config)) .map(_.filterNot(_._2 == JsNull)) .fold( - c ⇒ Future.successful(Fields.empty.set("config", JsObject(c).toString).set("name", workerConfigName)), - errors ⇒ Future.failed(AttributeCheckingError("workerConfig", errors.toSeq)) + c => Future.successful(Fields.empty.set("config", JsObject(c).toString).set("name", workerConfigName)), + errors => Future.failed(AttributeCheckingError("workerConfig", errors.toSeq)) ) - newWorkerConfig ← baseConfig.config.fold(create(organization, validatedConfig))(workerConfig ⇒ update(workerConfig, validatedConfig)) + newWorkerConfig <- baseConfig.config.fold(create(organization, validatedConfig))(workerConfig => update(workerConfig, validatedConfig)) } yield baseConfig.copy(config = Some(newWorkerConfig)) private def updateDefinitionConfig(definitionConfig: Map[String, BaseConfig], workerConfig: WorkerConfig): Map[String, BaseConfig] = definitionConfig .get(workerConfig.name()) - .fold(definitionConfig) { baseConfig ⇒ - definitionConfig + (workerConfig.name() → baseConfig.copy(config = Some(workerConfig))) + .fold(definitionConfig) { baseConfig => + definitionConfig + (workerConfig.name() -> baseConfig.copy(config = Some(workerConfig))) } def listConfigForUser(userId: String): Future[Seq[BaseConfig]] = { import org.elastic4play.services.QueryDSL._ for { - configItems ← definitions - 
workerConfigs ← findForUser(userId, any, Some("all"), Nil) + configItems <- definitions + workerConfigs <- findForUser(userId, any, Some("all"), Nil) ._1 - .runFold(configItems) { (definitionConfig, workerConfig) ⇒ + .runFold(configItems) { (definitionConfig, workerConfig) => updateDefinitionConfig(definitionConfig, workerConfig) } } yield workerConfigs.values.toSeq @@ -108,10 +108,10 @@ trait WorkerConfigSrv { def findForUser(userId: String, queryDef: QueryDef, range: Option[String], sortBy: Seq[String]): (Source[WorkerConfig, NotUsed], Future[Long]) = { val configs = userSrv .getOrganizationId(userId) - .map(organizationId ⇒ findForOrganization(organizationId, queryDef, range, sortBy)) - val configSource = Source.fromFutureSource(configs.map(_._1)).mapMaterializedValue(_ ⇒ NotUsed) + .map(organizationId => findForOrganization(organizationId, queryDef, range, sortBy)) + val configSource = Source.futureSource(configs.map(_._1)).mapMaterializedValue(_ => NotUsed) val configTotal = configs.flatMap(_._2) - configSource → configTotal + configSource -> configTotal } def findForOrganization( diff --git a/app/org/thp/cortex/services/WorkerSrv.scala b/app/org/thp/cortex/services/WorkerSrv.scala index 719d22375..b76fb9b51 100644 --- a/app/org/thp/cortex/services/WorkerSrv.scala +++ b/app/org/thp/cortex/services/WorkerSrv.scala @@ -26,7 +26,7 @@ import org.elastic4play.database.ModifyConfig import org.elastic4play.services._ @Singleton -class WorkerSrv @Inject()( +class WorkerSrv @Inject() ( config: Configuration, workerModel: WorkerModel, organizationSrv: OrganizationSrv, @@ -52,20 +52,20 @@ class WorkerSrv @Inject()( rescan() def getDefinition(workerId: String): Try[WorkerDefinition] = workerMap.get(workerId) match { - case Some(worker) ⇒ Success(worker) - case None ⇒ Failure(NotFoundError(s"Worker $workerId not found")) + case Some(worker) => Success(worker) + case None => Failure(NotFoundError(s"Worker $workerId not found")) } // def listDefinitions: (Source[WorkerDefinition, NotUsed], Future[Long]) = Source(workerMap.values.toList) → Future.successful(workerMap.size.toLong) def listAnalyzerDefinitions: (Source[WorkerDefinition, NotUsed], Future[Long]) = { val analyzerDefinitions = workerMap.values.filter(_.tpe == WorkerType.analyzer) - Source(analyzerDefinitions.toList) → Future.successful(analyzerDefinitions.size.toLong) + Source(analyzerDefinitions.toList) -> Future.successful(analyzerDefinitions.size.toLong) } def listResponderDefinitions: (Source[WorkerDefinition, NotUsed], Future[Long]) = { val responderDefinitions = workerMap.values.filter(_.tpe == WorkerType.responder) - Source(responderDefinitions.toList) → Future.successful(responderDefinitions.size.toLong) + Source(responderDefinitions.toList) -> Future.successful(responderDefinitions.size.toLong) } def get(workerId: String): Future[Worker] = getSrv[WorkerModel, Worker](workerModel, workerId) @@ -73,7 +73,7 @@ class WorkerSrv @Inject()( def getForUser(userId: String, workerId: String): Future[Worker] = userSrv .getOrganizationId(userId) - .flatMap(organization ⇒ getForOrganization(organization, workerId)) + .flatMap(organization => getForOrganization(organization, workerId)) def getForOrganization(organizationId: String, workerId: String): Future[Worker] = { import org.elastic4play.services.QueryDSL._ @@ -91,12 +91,12 @@ class WorkerSrv @Inject()( ): (Source[Worker, NotUsed], Future[Long]) = { import org.elastic4play.services.QueryDSL._ val analyzers = for { - user ← userSrv.get(userId) + user <- userSrv.get(userId) organizationId = 
user.organization() } yield findForOrganization(organizationId, and(queryDef, "type" ~= WorkerType.analyzer), range, sortBy) - val analyserSource = Source.fromFutureSource(analyzers.map(_._1)).mapMaterializedValue(_ ⇒ NotUsed) + val analyserSource = Source.futureSource(analyzers.map(_._1)).mapMaterializedValue(_ => NotUsed) val analyserTotal = analyzers.flatMap(_._2) - analyserSource → analyserTotal + analyserSource -> analyserTotal } def findRespondersForUser( @@ -107,12 +107,12 @@ class WorkerSrv @Inject()( ): (Source[Worker, NotUsed], Future[Long]) = { import org.elastic4play.services.QueryDSL._ val responders = for { - user ← userSrv.get(userId) + user <- userSrv.get(userId) organizationId = user.organization() } yield findForOrganization(organizationId, and(queryDef, "type" ~= WorkerType.responder), range, sortBy) - val analyserSource = Source.fromFutureSource(responders.map(_._1)).mapMaterializedValue(_ ⇒ NotUsed) + val analyserSource = Source.futureSource(responders.map(_._1)).mapMaterializedValue(_ => NotUsed) val analyserTotal = responders.flatMap(_._2) - analyserSource → analyserTotal + analyserSource -> analyserTotal } private def findForOrganization( @@ -131,14 +131,14 @@ class WorkerSrv @Inject()( def rescan(): Unit = { import org.elastic4play.services.QueryDSL._ scan( - analyzersURLs.map(_ → WorkerType.analyzer) ++ - respondersURLs.map(_ → WorkerType.responder) - ).onComplete { _ ⇒ - userSrv.inInitAuthContext { implicit authContext ⇒ - find(any, Some("all"), Nil)._1.runForeach { worker ⇒ + analyzersURLs.map(_ -> WorkerType.analyzer) ++ + respondersURLs.map(_ -> WorkerType.responder) + ).onComplete { _ => + userSrv.inInitAuthContext { implicit authContext => + find(any, Some("all"), Nil)._1.runForeach { worker => workerMap.get(worker.workerDefinitionId()) match { - case Some(wd) ⇒ update(worker, Fields.empty.set("dataTypeList", Json.toJson(wd.dataTypeList))) - case None ⇒ update(worker, Fields.empty.set("dataTypeList", JsArray.empty)) + case Some(wd) => update(worker, Fields.empty.set("dataTypeList", Json.toJson(wd.dataTypeList))) + case None => update(worker, Fields.empty.set("dataTypeList", JsArray.empty)) } } } @@ -148,12 +148,12 @@ class WorkerSrv @Inject()( def scan(workerUrls: Seq[(String, WorkerType.Type)]): Future[Unit] = { def readUrl(url: URL, workerType: WorkerType.Type): Future[Seq[WorkerDefinition]] = url.getProtocol match { - case "file" ⇒ Future.successful(readFile(Paths.get(url.toURI), workerType)) - case "http" | "https" ⇒ + case "file" => Future.successful(readFile(Paths.get(url.toURI), workerType)) + case "http" | "https" => val reads = WorkerDefinition.reads(workerType) ws.url(url.toString) .get() - .map(response ⇒ response.json.as(reads)) + .map(response => response.json.as(reads)) .map(_.filterNot(_.command.isDefined)) } @@ -163,18 +163,18 @@ class WorkerSrv @Inject()( lazy val basePath = path.getParent.getParent val workerDefinitions = for { - w ← Try(source.mkString).map(Json.parse(_).as(reads)).getOrElse { + w <- Try(source.mkString).map(Json.parse(_).as(reads)).getOrElse { logger.error(s"File $path has invalid format") Nil } - command = w.command.map(cmd ⇒ basePath.resolve(cmd)) + command = w.command.map(cmd => basePath.resolve(cmd)) if command.isEmpty || command.exists(_.normalize().startsWith(basePath)) } yield w.copy(command = command) source.close() workerDefinitions.filter { - case w if w.command.isDefined && jobRunnerSrv.processRunnerIsEnable ⇒ true - case w if w.dockerImage.isDefined && jobRunnerSrv.dockerRunnerIsEnable ⇒ true - case w ⇒ + case 
w if w.command.isDefined && jobRunnerSrv.processRunnerIsEnable => true + case w if w.dockerImage.isDefined && jobRunnerSrv.dockerRunnerIsEnable => true + case w => val reason = if (w.command.isDefined) "process runner is disabled" else if (w.dockerImage.isDefined) "Docker runner is disabled" @@ -187,19 +187,19 @@ class WorkerSrv @Inject()( def readDirectory(path: Path, workerType: WorkerType.Type): Seq[WorkerDefinition] = for { - workerDir ← Files.newDirectoryStream(path).asScala.toSeq + workerDir <- Files.newDirectoryStream(path).asScala.toSeq if Files.isDirectory(workerDir) - infoFile ← Files.newDirectoryStream(workerDir, "*.json").asScala - workerDefinition ← readFile(infoFile, workerType) + infoFile <- Files.newDirectoryStream(workerDir, "*.json").asScala + workerDefinition <- readFile(infoFile, workerType) } yield workerDefinition Future .traverse(workerUrls) { - case (workerUrl, workerType) ⇒ + case (workerUrl, workerType) => Future(new URL(workerUrl)) .flatMap(readUrl(_, workerType)) .recover { - case _ ⇒ + case _ => val path = Paths.get(workerUrl) if (Files.isRegularFile(path)) readFile(path, workerType) else if (Files.isDirectory(path)) readDirectory(path, workerType) @@ -209,10 +209,10 @@ class WorkerSrv @Inject()( } } } - .map { worker ⇒ - val wmap = worker.flatten.map(w ⇒ w.id → w).toMap + .map { worker => + val wmap = worker.flatten.map(w => w.id -> w).toMap workerMapLock.synchronized(workerMap = wmap) - logger.info(s"New worker list:\n\n\t${workerMap.values.map(a ⇒ s"${a.name} ${a.version}").mkString("\n\t")}\n") + logger.info(s"New worker list:\n\n\t${workerMap.values.map(a => s"${a.name} ${a.version}").mkString("\n\t")}\n") } } @@ -230,13 +230,13 @@ class WorkerSrv @Inject()( val unknownConfigItems = (rawConfig.value.keySet -- configItems.map(_.name)) .foldLeft[Unit Or Every[AttributeError]](Good(())) { - case (Good(_), ci) ⇒ Bad(One(UnknownAttributeError("worker.config", JsString(ci)))) - case (Bad(e), ci) ⇒ Bad(UnknownAttributeError("worker.config", JsString(ci)) +: e) + case (Good(_), ci) => Bad(One(UnknownAttributeError("worker.config", JsString(ci)))) + case (Bad(e), ci) => Bad(UnknownAttributeError("worker.config", JsString(ci)) +: e) } - withGood(configOrErrors, unknownConfigItems)((c, _) ⇒ c) + withGood(configOrErrors, unknownConfigItems)((c, _) => c) .fold( - cfg ⇒ { + cfg => createSrv[WorkerModel, Worker, Organization]( workerModel, organization, @@ -246,26 +246,24 @@ class WorkerSrv @Inject()( .set("author", workerDefinition.author) .set("version", workerDefinition.version) .set("dockerImage", workerDefinition.dockerImage.map(JsString)) - .set("command", workerDefinition.command.map(p ⇒ JsString(p.toString))) + .set("command", workerDefinition.command.map(p => JsString(p.toString))) .set("url", workerDefinition.url) .set("license", workerDefinition.license) .set("baseConfig", workerDefinition.baseConfiguration.map(JsString.apply)) .set("configuration", cfg.toString) .set("type", workerDefinition.tpe.toString) .addIfAbsent("dataTypeList", StringInputValue(workerDefinition.dataTypeList)) - ) - - }, { - case One(e) ⇒ Future.failed(e) - case Every(es @ _*) ⇒ Future.failed(AttributeCheckingError(s"worker(${workerDefinition.name}).configuration", es)) + ), { + case One(e) => Future.failed(e) + case Every(es @ _*) => Future.failed(AttributeCheckingError(s"worker(${workerDefinition.name}).configuration", es)) } ) } def create(organizationId: String, workerDefinition: WorkerDefinition, workerFields: Fields)(implicit authContext: AuthContext): Future[Worker] = for { - 
organization ← organizationSrv.get(organizationId) - worker ← create(organization, workerDefinition, workerFields) + organization <- organizationSrv.get(organizationId) + worker <- create(organization, workerDefinition, workerFields) } yield worker def delete(worker: Worker)(implicit authContext: AuthContext): Future[Unit] = @@ -277,12 +275,12 @@ class WorkerSrv @Inject()( def update(worker: Worker, fields: Fields)(implicit authContext: AuthContext): Future[Worker] = update(worker, fields, ModifyConfig.default) def update(worker: Worker, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext): Future[Worker] = { - val workerFields = fields.getValue("configuration").fold(fields)(cfg ⇒ fields.set("configuration", cfg.toString)) + val workerFields = fields.getValue("configuration").fold(fields)(cfg => fields.set("configuration", cfg.toString)) updateSrv(worker, workerFields, modifyConfig) } def update(workerId: String, fields: Fields)(implicit authContext: AuthContext): Future[Worker] = update(workerId, fields, ModifyConfig.default) def update(workerId: String, fields: Fields, modifyConfig: ModifyConfig)(implicit authContext: AuthContext): Future[Worker] = - get(workerId).flatMap(worker ⇒ update(worker, fields, modifyConfig)) + get(workerId).flatMap(worker => update(worker, fields, modifyConfig)) } diff --git a/app/org/thp/cortex/services/mappers/GroupUserMapper.scala b/app/org/thp/cortex/services/mappers/GroupUserMapper.scala index 7dcd91698..993f64834 100644 --- a/app/org/thp/cortex/services/mappers/GroupUserMapper.scala +++ b/app/org/thp/cortex/services/mappers/GroupUserMapper.scala @@ -27,8 +27,8 @@ class GroupUserMapper( @Inject() def this(configuration: Configuration, ws: WSClient, ec: ExecutionContext) = this( - configuration.getOptional[String]("auth.sso.attributes.login").getOrElse("name"), - configuration.getOptional[String]("auth.sso.attributes.name").getOrElse("username"), + configuration.getOptional[String]("auth.sso.attributes.login").getOrElse("login"), + configuration.getOptional[String]("auth.sso.attributes.name").getOrElse("name"), configuration.getOptional[String]("auth.sso.attributes.roles"), configuration.getOptional[String]("auth.sso.attributes.groups").getOrElse(""), configuration.getOptional[String]("auth.sso.attributes.organization"), @@ -44,23 +44,23 @@ class GroupUserMapper( override def getUserFields(jsValue: JsValue, authHeader: Option[(String, String)]): Future[Fields] = { - val apiCall = authHeader.fold(ws.url(groupsUrl))(headers ⇒ ws.url(groupsUrl).addHttpHeaders(headers)) - apiCall.get.flatMap { r ⇒ + val apiCall = authHeader.fold(ws.url(groupsUrl))(headers => ws.url(groupsUrl).addHttpHeaders(headers)) + apiCall.get.flatMap { r => val jsonGroups = (r.json \ groupAttrName).as[Seq[String]] val mappedRoles = jsonGroups.flatMap(mappings.get).maxBy(_.length) val roles = if (mappedRoles.nonEmpty) mappedRoles else defaultRoles val fields = for { - login ← (jsValue \ loginAttrName).validate[String] - name ← (jsValue \ nameAttrName).validate[String] - organization ← organizationAttrName - .flatMap(o ⇒ (jsValue \ o).asOpt[String]) + login <- (jsValue \ loginAttrName).validate[String] + name <- (jsValue \ nameAttrName).validate[String] + organization <- organizationAttrName + .flatMap(o => (jsValue \ o).asOpt[String]) .orElse(defaultOrganization) - .fold[JsResult[String]](JsError())(o ⇒ JsSuccess(o)) - } yield Fields(Json.obj("login" → login, "name" → name, "roles" → roles, "organization" → organization)) + .fold[JsResult[String]](JsError())(o => 
JsSuccess(o)) + } yield Fields(Json.obj("login" -> login, "name" -> name, "roles" -> roles, "organization" -> organization)) fields match { - case JsSuccess(f, _) ⇒ Future.successful(f) - case JsError(errors) ⇒ Future.failed(AuthenticationError(s"User info fails: ${errors.map(_._1).mkString}")) + case JsSuccess(f, _) => Future.successful(f) + case JsError(errors) => Future.failed(AuthenticationError(s"User info fails: ${errors.map(_._1).mkString}")) } } } diff --git a/app/org/thp/cortex/services/mappers/MultiUserMapperSrv.scala b/app/org/thp/cortex/services/mappers/MultiUserMapperSrv.scala index f574e0e07..60806e250 100644 --- a/app/org/thp/cortex/services/mappers/MultiUserMapperSrv.scala +++ b/app/org/thp/cortex/services/mappers/MultiUserMapperSrv.scala @@ -19,7 +19,7 @@ object MultiUserMapperSrv { } @Singleton -class MultiUserMapperSrv @Inject()(configuration: Configuration, ssoMapperModules: immutable.Set[UserMapper]) extends UserMapper { +class MultiUserMapperSrv @Inject() (configuration: Configuration, ssoMapperModules: immutable.Set[UserMapper]) extends UserMapper { override val name: String = "usermapper" private lazy val mapper: UserMapper = MultiUserMapperSrv.getMapper(configuration, ssoMapperModules) diff --git a/app/org/thp/cortex/services/mappers/SimpleUserMapper.scala b/app/org/thp/cortex/services/mappers/SimpleUserMapper.scala index bd3bd54c3..9e1665e8a 100644 --- a/app/org/thp/cortex/services/mappers/SimpleUserMapper.scala +++ b/app/org/thp/cortex/services/mappers/SimpleUserMapper.scala @@ -22,8 +22,8 @@ class SimpleUserMapper( @Inject() def this(configuration: Configuration, ec: ExecutionContext) = this( - configuration.getOptional[String]("auth.sso.attributes.login").getOrElse("name"), - configuration.getOptional[String]("auth.sso.attributes.name").getOrElse("username"), + configuration.getOptional[String]("auth.sso.attributes.login").getOrElse("login"), + configuration.getOptional[String]("auth.sso.attributes.name").getOrElse("name"), configuration.getOptional[String]("auth.sso.attributes.roles"), configuration.getOptional[String]("auth.sso.attributes.organization"), configuration.getOptional[Seq[String]]("auth.sso.defaultRoles").getOrElse(Seq()), @@ -35,17 +35,17 @@ class SimpleUserMapper( override def getUserFields(jsValue: JsValue, authHeader: Option[(String, String)]): Future[Fields] = { val fields = for { - login ← (jsValue \ loginAttrName).validate[String] - name ← (jsValue \ nameAttrName).validate[String] - roles = rolesAttrName.fold(defaultRoles)(r ⇒ (jsValue \ r).asOpt[Seq[String]].getOrElse(defaultRoles)) - organization ← organizationAttrName - .flatMap(o ⇒ (jsValue \ o).asOpt[String]) + login <- (jsValue \ loginAttrName).validate[String] + name <- (jsValue \ nameAttrName).validate[String] + roles = rolesAttrName.fold(defaultRoles)(r => (jsValue \ r).asOpt[Seq[String]].getOrElse(defaultRoles)) + organization <- organizationAttrName + .flatMap(o => (jsValue \ o).asOpt[String]) .orElse(defaultOrganization) - .fold[JsResult[String]](JsError())(o ⇒ JsSuccess(o)) - } yield Fields(Json.obj("login" → login, "name" → name, "roles" → roles, "organization" → organization)) + .fold[JsResult[String]](JsError())(o => JsSuccess(o)) + } yield Fields(Json.obj("login" -> login, "name" -> name, "roles" -> roles, "organization" -> organization)) fields match { - case JsSuccess(f, _) ⇒ Future.successful(f) - case JsError(errors) ⇒ Future.failed(AuthenticationError(s"User info fails: ${errors.map(_._1).mkString}")) + case JsSuccess(f, _) => Future.successful(f) + case 
JsError(errors) => Future.failed(AuthenticationError(s"User info fails: ${errors.map(_._1).mkString}")) } } } diff --git a/app/org/thp/cortex/util/JsonConfig.scala b/app/org/thp/cortex/util/JsonConfig.scala index d30a5637a..33bce0f5b 100644 --- a/app/org/thp/cortex/util/JsonConfig.scala +++ b/app/org/thp/cortex/util/JsonConfig.scala @@ -8,19 +8,18 @@ import play.api.libs.json._ import scala.collection.JavaConverters._ object JsonConfig { - implicit val configValueWrites: Writes[ConfigValue] = Writes( - (value: ConfigValue) ⇒ - value match { - case v: ConfigObject ⇒ configWrites.writes(Configuration(v.toConfig)) - case v: ConfigList ⇒ JsArray(v.asScala.map(x ⇒ configValueWrites.writes(x))) - case v if v.valueType == NUMBER ⇒ JsNumber(BigDecimal(v.unwrapped.asInstanceOf[Number].toString)) - case v if v.valueType == BOOLEAN ⇒ JsBoolean(v.unwrapped.asInstanceOf[Boolean]) - case v if v.valueType == NULL ⇒ JsNull - case v if v.valueType == STRING ⇒ JsString(v.unwrapped.asInstanceOf[String]) - } + implicit val configValueWrites: Writes[ConfigValue] = Writes((value: ConfigValue) => + value match { + case v: ConfigObject => configWrites.writes(Configuration(v.toConfig)) + case v: ConfigList => JsArray(v.asScala.map(x => configValueWrites.writes(x))) + case v if v.valueType == NUMBER => JsNumber(BigDecimal(v.unwrapped.asInstanceOf[Number].toString)) + case v if v.valueType == BOOLEAN => JsBoolean(v.unwrapped.asInstanceOf[Boolean]) + case v if v.valueType == NULL => JsNull + case v if v.valueType == STRING => JsString(v.unwrapped.asInstanceOf[String]) + } ) - implicit def configWrites = OWrites { (cfg: Configuration) ⇒ - JsObject(cfg.subKeys.map(key ⇒ key → configValueWrites.writes(cfg.underlying.getValue(key))).toSeq) + implicit def configWrites = OWrites { (cfg: Configuration) => + JsObject(cfg.subKeys.map(key => key -> configValueWrites.writes(cfg.underlying.getValue(key))).toSeq) } } diff --git a/build.sbt b/build.sbt index bd6fc016e..b314eb7a7 100644 --- a/build.sbt +++ b/build.sbt @@ -2,7 +2,6 @@ import Common._ lazy val cortex = (project in file(".")) .enablePlugins(PlayScala) - .enablePlugins(Bintray) .settings(projectSettings) libraryDependencies ++= Seq( @@ -24,7 +23,7 @@ resolvers += "scalaz-bintray" at "http://dl.bintray.com/scalaz/releases" resolvers += "elasticsearch-releases" at "https://artifacts.elastic.co/maven" publishArtifact in (Compile, packageDoc) := false publishArtifact in packageDoc := false -sources in (Compile,doc) := Seq.empty +sources in (Compile, doc) := Seq.empty // Front-end // mappings in packageBin in Assets ++= frontendFiles.value @@ -34,14 +33,3 @@ packageBin := { (packageBin in Rpm).value (packageBin in Universal).value } - -// Bintray // -bintrayOrganization := Some("thehive-project") -bintrayRepository := "cortex" -publish := { - (publish in Docker).value - publishRelease.value - publishLatest.value - publishRpm.value - publishDebian.value -} diff --git a/conf/application.sample b/conf/application.sample index 407ab6387..023bee3a2 100644 --- a/conf/application.sample +++ b/conf/application.sample @@ -65,6 +65,7 @@ auth { # the "ad" section below. # - ldap : use LDAP to authenticate users. The associated configuration shall be done in the # "ldap" section below. + # - oauth2 : use OAuth/OIDC to authenticate users. Configuration is under "auth.oauth2" and "auth.sso" keys provider = [local] ad { @@ -107,6 +108,64 @@ auth { # If 'true', use SSL to connect to the LDAP directory server. 
#useSSL = true } + + oauth2 { + # URL of the authorization server + #clientId = "client-id" + #clientSecret = "client-secret" + #redirectUri = "https://my-thehive-instance.example/index.html#!/login" + #responseType = "code" + #grantType = "authorization_code" + + # URL from where to get the access token + #authorizationUrl = "https://auth-site.com/OAuth/Authorize" + #tokenUrl = "https://auth-site.com/OAuth/Token" + + # The endpoint from which to obtain user details using the OAuth token, after successful login + #userUrl = "https://auth-site.com/api/User" + #scope = "openid profile" + # Type of authorization header + #authorizationHeader = "Bearer" # or token + } + + # Single-Sign On + sso { + # Autocreate user in database? + #autocreate = false + + # Autoupdate its profile and roles? + #autoupdate = false + + # Autologin user using SSO? + #autologin = false + + # Attributes mappings + #attributes { + # login = "login" + # name = "name" + # groups = "groups" + # roles = "roles" # list of roles, separated with comma + # organisation = "org" + #} + + # Name of mapping class from user resource to backend user ('simple' or 'group') + #mapper = group + # Default roles for users with no groups mapped ("read", "analyze", "orgadmin") + #defaultRoles = [] + # Default organization + #defaultOrganization = "MyOrga" + + #groups { + # # URL to retreive groups (leave empty if you are using OIDC) + # #url = "https://auth-site.com/api/Groups" + # # Group mappings, you can have multiple roles for each group: they are merged + # mappings { + # admin-profile-name = ["admin"] + # editor-profile-name = ["write"] + # reader-profile-name = ["read"] + # } + #} + } } ## ANALYZERS diff --git a/conf/routes b/conf/routes index 300dd360d..8389c614d 100644 --- a/conf/routes +++ b/conf/routes @@ -7,6 +7,7 @@ GET / org.thp.cort GET /api/health org.thp.cortex.controllers.StatusCtrl.health GET /api/logout org.thp.cortex.controllers.AuthenticationCtrl.logout() POST /api/login org.thp.cortex.controllers.AuthenticationCtrl.login() +GET /api/ssoLogin org.thp.cortex.controllers.AuthenticationCtrl.ssoLogin() POST /api/ssoLogin org.thp.cortex.controllers.AuthenticationCtrl.ssoLogin() ################### @@ -106,4 +107,4 @@ POST /api/organization org.thp.cort PATCH /api/organization/:id org.thp.cortex.controllers.OrganizationCtrl.update(id) DELETE /api/organization/:id org.thp.cortex.controllers.OrganizationCtrl.delete(id) -GET /*file org.thp.cortex.controllers.AssetCtrl.get(file) \ No newline at end of file +GET /*file org.thp.cortex.controllers.AssetCtrl.get(file) diff --git a/debian.sbt b/debian.sbt index c4d194d5b..f9d804d14 100644 --- a/debian.sbt +++ b/debian.sbt @@ -1,14 +1,16 @@ -import Common.{stableVersion, betaVersion, snapshotVersion} +import Common.{betaVersion, snapshotVersion, stableVersion, versionUsage} linuxPackageMappings in Debian += packageMapping(file("LICENSE") -> "/usr/share/doc/cortex/copyright").withPerms("644") version in Debian := { version.value match { - case stableVersion(_, _) => version.value - case betaVersion(v1, v2) => v1 + "-0.1RC" + v2 - case snapshotVersion(_, _) => version.value + "-SNAPSHOT" - case _ => sys.error("Invalid version: " + version.value) + case stableVersion(_, _) => version.value + case betaVersion(v1, v2, v3) => v1 + "-0." + v3 + "RC" + v2 + case snapshotVersion(stableVersion(v1, v2)) => v1 + "-" + v2 + "-SNAPSHOT" + case snapshotVersion(betaVersion(v1, v2, v3)) => v1 + "-0." 
+ v3 + "RC" + v2 + "-SNAPSHOT" + case _ => versionUsage(version.value) } } + debianPackageRecommends := Seq("elasticsearch") debianPackageDependencies += "java8-runtime | java8-runtime-headless" maintainerScripts in Debian := maintainerScriptsFromDirectory( diff --git a/docker.sbt b/docker.sbt index fbb12a50c..2349e11ba 100644 --- a/docker.sbt +++ b/docker.sbt @@ -1,12 +1,13 @@ -import Common.{betaVersion, snapshotVersion, stableVersion} +import Common.{betaVersion, snapshotVersion, stableVersion, versionUsage} import com.typesafe.sbt.packager.docker.{Cmd, ExecCmd} version in Docker := { version.value match { - case stableVersion(_, _) => version.value - case betaVersion(v1, v2) => v1 + "-0.1RC" + v2 - case snapshotVersion(_, _) => version.value + "-SNAPSHOT" - case _ => sys.error("Invalid version: " + version.value) + case stableVersion(_, _) => version.value + case betaVersion(v1, v2, v3) => v1 + "-0." + v3 + "RC" + v2 + case snapshotVersion(stableVersion(v1, v2)) => v1 + "-" + v2 + "-SNAPSHOT" + case snapshotVersion(betaVersion(v1, v2, v3)) => v1 + "-0." + v3 + "RC" + v2 + "-SNAPSHOT" + case _ => versionUsage(version.value) } } defaultLinuxInstallLocation in Docker := "/opt/cortex" @@ -16,44 +17,38 @@ dockerEntrypoint := Seq("/opt/cortex/entrypoint") dockerExposedPorts := Seq(9001) mappings in Docker ++= Seq( file("package/docker/entrypoint") -> "/opt/cortex/entrypoint", - file("package/logback.xml") -> "/etc/cortex/logback.xml", - file("package/empty") -> "/var/log/cortex/application.log") + file("package/logback.xml") -> "/etc/cortex/logback.xml", + file("package/empty") -> "/var/log/cortex/application.log" +) mappings in Docker ~= (_.filterNot { case (_, filepath) => filepath == "/opt/cortex/conf/application.conf" }) -dockerCommands ~= { dc => - val (dockerInitCmds, dockerTailCmds) = dc - .flatMap { - case ExecCmd("RUN", "chown", _*) => Some(ExecCmd("RUN", "chown", "-R", "daemon:root", ".")) - case Cmd("USER", _) => None - case other => Some(other) - } - .splitAt(4) - dockerInitCmds ++ - Seq( - Cmd("USER", "root"), - ExecCmd("RUN", "bash", "-c", - "wget -q -O - https://download.docker.com/linux/static/stable/x86_64/docker-18.09.0.tgz | " + - "tar -xzC /usr/local/bin/ --strip-components 1 && " + - "addgroup --system dockremap && " + - "adduser --system --ingroup dockremap dockremap && " + - "addgroup --system docker && " + - "usermod --append --groups docker daemon &&" + - "echo 'dockremap:165536:65536' >> /etc/subuid && " + - "echo 'dockremap:165536:65536' >> /etc/subgid && " + - "apt-get update && " + - "apt-get upgrade -y && " + - "apt-get install -y --no-install-recommends python-pip python2.7-dev python3-pip python3-dev ssdeep libfuzzy-dev libfuzzy2 libimage-exiftool-perl libmagic1 build-essential git libssl-dev dnsutils iptables && " + - "pip2 install -U pip setuptools && " + - "pip3 install -U pip setuptools && " + - "hash -r && " + - "cd /opt && " + - "git clone https://github.com/TheHive-Project/Cortex-Analyzers.git && " + - "for I in $(find Cortex-Analyzers -name 'requirements.txt'); do pip2 install -r $I; done && " + - "for I in $(find Cortex-Analyzers -name 'requirements.txt'); do pip3 install -r $I || true; done"), - Cmd("ADD", "var", "/var"), - Cmd("ADD", "etc", "/etc"), - ExecCmd("RUN", "chown", "-R", "daemon:root", "/var/log/cortex"), - ExecCmd("RUN", "chmod", "+x", "/opt/cortex/bin/cortex", "/opt/cortex/entrypoint")) ++ - dockerTailCmds -} +dockerCommands := Seq( + Cmd("FROM", "openjdk:8"), + Cmd("LABEL", "MAINTAINER=\"TheHive Project \"", 
"repository=\"https://github.com/TheHive-Project/TheHive\""), + Cmd("WORKDIR", "/opt/cortex"), + // format: off + Cmd("RUN", + "apt", "update", "&&", + "apt", "upgrade", "-y", "&&", + "apt", "autoclean", "-y", "-q", "&&", + "apt", "autoremove", "-y", "-q", "&&", + "rm", "-rf", "/var/lib/apt/lists/*", "&&", + "(", "type", "groupadd", "1>/dev/null", "2>&1", "&&", + "groupadd", "-g", "1000", "cortex", "||", + "addgroup", "-g", "1000", "-S", "cortex", + ")", "&&", + "(", "type", "useradd", "1>/dev/null", "2>&1", "&&", + "useradd", "--system", "--uid", "1000", "--gid", "1000", "cortex", "||", + "adduser", "-S", "-u", "1000", "-G", "cortex", "cortex", + ")"), + //format: on + Cmd("ADD", "--chown=root:root", "opt", "/opt"), + Cmd("ADD", "--chown=cortex:cortex", "var", "/var"), + Cmd("ADD", "--chown=cortex:cortex", "etc", "/etc"), + ExecCmd("RUN", "chmod", "+x", "/opt/cortex/bin/cortex", "/opt/cortex/entrypoint"), + Cmd("EXPOSE", "9001"), + Cmd("USER", "thehive"), + ExecCmd("ENTRYPOINT", "/opt/cortex/entrypoint"), + ExecCmd("CMD") +) diff --git a/docker/cortex/docker-compose.yml b/docker/cortex/docker-compose.yml index d1f79fa40..f9a628502 100644 --- a/docker/cortex/docker-compose.yml +++ b/docker/cortex/docker-compose.yml @@ -1,14 +1,16 @@ version: "2" services: elasticsearch: - image: elasticsearch:6.8.0 + image: elasticsearch:7.8.1 environment: - http.host=0.0.0.0 - - thread_pool.index.queue_size=100000 + - discovery.type=single-node + - script.allowed_types=inline - thread_pool.search.queue_size=100000 - - thread_pool.bulk.queue_size=100000 + - thread_pool.write.queue_size=10000 +path.repo: backup cortex: - image: thehiveproject/cortex:3.0.0-RC4 + image: thehiveproject/cortex:latest depends_on: - elasticsearch ports: diff --git a/package.sbt b/package.sbt index c93261385..5ca968275 100644 --- a/package.sbt +++ b/package.sbt @@ -1,50 +1,51 @@ // Add information in manifest import Package.ManifestAttributes import java.util.jar.Attributes.Name._ -packageOptions ++= Seq( - ManifestAttributes(IMPLEMENTATION_TITLE -> name.value), +packageOptions ++= Seq( + ManifestAttributes(IMPLEMENTATION_TITLE -> name.value), ManifestAttributes(IMPLEMENTATION_VERSION -> version.value), - ManifestAttributes(SPECIFICATION_VENDOR -> "TheHive Project"), - ManifestAttributes(SPECIFICATION_TITLE -> name.value), - ManifestAttributes(SPECIFICATION_VERSION -> "TheHive Project") + ManifestAttributes(SPECIFICATION_VENDOR -> "TheHive Project"), + ManifestAttributes(SPECIFICATION_TITLE -> name.value), + ManifestAttributes(SPECIFICATION_VERSION -> "TheHive Project") ) // Install files // mappings in Universal ~= { _.flatMap { - case (_, "conf/application.conf") => Nil + case (_, "conf/application.conf") => Nil case (file, "conf/apllication.sample") => Seq(file -> "conf/application.conf") - case (_, "conf/logback.xml") => Nil - case other => Seq(other) + case (_, "conf/logback.xml") => Nil + case other => Seq(other) } ++ Seq( file("package/cortex.service") -> "package/cortex.service", - file("package/cortex.conf") -> "package/cortex.conf", - file("package/cortex") -> "package/cortex", - file("package/logback.xml") -> "conf/logback.xml" + file("package/cortex.conf") -> "package/cortex.conf", + file("package/cortex") -> "package/cortex", + file("package/logback.xml") -> "conf/logback.xml" ) } maintainer := "TheHive Project " packageSummary := "Powerful Observable Analysis Engine" packageDescription := """Cortex tries to solve a common problem frequently encountered by SOCs, CSIRTs and security - | researchers in the course of 
threat intelligence, digital forensics and incident response: how to analyze - | observables they have collected, at scale, by querying a single tool instead of several? - | Cortex, an open source and free software, has been created by TheHive Project for this very purpose. Observables, - | such as IP and email addresses, URLs, domain names, files or hashes, can be analyzed one by one or in bulk mode - | using a Web interface. Analysts can also automate these operations thanks to the Cortex REST API. """.stripMargin + | researchers in the course of threat intelligence, digital forensics and incident response: how to analyze + | observables they have collected, at scale, by querying a single tool instead of several? + | Cortex, an open source and free software, has been created by TheHive Project for this very purpose. Observables, + | such as IP and email addresses, URLs, domain names, files or hashes, can be analyzed one by one or in bulk mode + | using a Web interface. Analysts can also automate these operations thanks to the Cortex REST API. """.stripMargin defaultLinuxInstallLocation := "/opt" -linuxPackageMappings ~= { _.map { pm => - val mappings = pm.mappings.filterNot { - case (_, path) => path.startsWith("/opt/cortex/package") || (path.startsWith("/opt/cortex/conf") && path != "/opt/cortex/conf/reference.conf") - } - com.typesafe.sbt.packager.linux.LinuxPackageMapping(mappings, pm.fileData).withConfig() -} :+ packageMapping( - file("package/cortex.service") -> "/etc/systemd/system/cortex.service", - file("package/cortex.conf") -> "/etc/init/cortex.conf", - file("package/cortex") -> "/etc/init.d/cortex", - file("conf/application.sample") -> "/etc/cortex/application.conf", - file("package/logback.xml") -> "/etc/cortex/logback.xml" -).withConfig() +linuxPackageMappings ~= { + _.map { pm => + val mappings = pm.mappings.filterNot { + case (_, path) => path.startsWith("/opt/cortex/package") || (path.startsWith("/opt/cortex/conf") && path != "/opt/cortex/conf/reference.conf") + } + com.typesafe.sbt.packager.linux.LinuxPackageMapping(mappings, pm.fileData).withConfig() + } :+ packageMapping( + file("package/cortex.service") -> "/etc/systemd/system/cortex.service", + file("package/cortex.conf") -> "/etc/init/cortex.conf", + file("package/cortex") -> "/etc/init.d/cortex", + file("conf/application.sample") -> "/etc/cortex/application.conf", + file("package/logback.xml") -> "/etc/cortex/logback.xml" + ).withConfig() } packageBin := { diff --git a/project/Bintray.scala b/project/Bintray.scala deleted file mode 100644 index b2a22b324..000000000 --- a/project/Bintray.scala +++ /dev/null @@ -1,167 +0,0 @@ -import scala.concurrent.Await -import scala.concurrent.ExecutionContext.Implicits.global -import scala.concurrent.duration.Duration - -import bintray.BintrayCredentials -import bintray.BintrayKeys.{ bintrayEnsureCredentials, bintrayOrganization, bintrayPackage } -import bintry.Client -import com.typesafe.sbt.packager.Keys._ -import com.typesafe.sbt.packager.debian.DebianPlugin.autoImport.Debian -import com.typesafe.sbt.packager.rpm.RpmPlugin.autoImport.Rpm -import com.typesafe.sbt.packager.universal.UniversalPlugin.autoImport.Universal -import dispatch.{ FunctionHandler, Http } -import sbt.Keys._ -import sbt._ - -object Bintray extends AutoPlugin { - - object autoImport { - val publishRelease: TaskKey[Unit] = taskKey[Unit]("Publish binary in bintray") - val publishLatest: TaskKey[Unit] = taskKey[Unit]("Publish latest binary in bintray") - val publishDebian: TaskKey[Unit] = 
taskKey[Unit]("publish debian package in Bintray") - val publishRpm: TaskKey[Unit] = taskKey[Unit]("publish rpm package in Bintray") - val rpmReleaseFile = taskKey[File]("The rpm release package file") - } - - import autoImport._ - - override lazy val projectSettings = Seq( - - publishRelease in ThisBuild := { - val file = (packageBin in Universal).value - btPublish(file.getName, - file, - bintrayEnsureCredentials.value, - bintrayOrganization.value, - "binary", - bintrayPackage.value, - (version in ThisBuild).value, - sLog.value) - }, - - publishLatest in ThisBuild := Def.taskDyn { - if ((version in ThisBuild).value.endsWith("-SNAPSHOT")) sys.error("Snapshot version can't be released") - val file = (packageBin in Universal).value - val latestVersion = if (version.value.contains('-')) "latest-beta" else "latest" - val latestName = file.getName.replace(version.value, latestVersion) - if (latestName == file.getName) - Def.task { - sLog.value.warn(s"Latest package name can't be built using package name [$latestName], publish aborted") - } - else Def.task { - removeVersion(bintrayEnsureCredentials.value, - bintrayOrganization.value, - "binary", - bintrayPackage.value, - latestVersion, - sLog.value) - btPublish(latestName, - file, - bintrayEnsureCredentials.value, - bintrayOrganization.value, - "binary", - bintrayPackage.value, - latestVersion, - sLog.value) - } - } - .value, - - publishDebian in ThisBuild := { - if ((version in ThisBuild).value.endsWith("-SNAPSHOT")) sys.error("Snapshot version can't be released") - val file = (debianSign in Debian).value - val bintrayCredentials = bintrayEnsureCredentials.value - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "debian-beta", - bintrayPackage.value, - version.value, - sLog.value, - "deb_distribution" → "any", - "deb_component" → "main", - "deb_architecture" → "all" - ) - if (!version.value.contains('-')) - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "debian-stable", - bintrayPackage.value, - version.value, - sLog.value, - "deb_distribution" → "any", - "deb_component" → "main", - "deb_architecture" → "all" - ) - }, - - publishRpm in ThisBuild := { - if ((version in ThisBuild).value.endsWith("-SNAPSHOT")) sys.error("Snapshot version can't be released") - val file = (packageBin in Rpm).value - val bintrayCredentials = bintrayEnsureCredentials.value - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "rpm-beta", - bintrayPackage.value, - (version in Rpm).value + '-' + (rpmRelease in Rpm).value, - sLog.value) - if (!version.value.contains('-')) - btPublish(file.getName, - file, - bintrayCredentials, - bintrayOrganization.value, - "rpm-stable", - bintrayPackage.value, - (version in Rpm).value + '-' + (rpmRelease in Rpm).value, - sLog.value) - } - ) - - private def asStatusAndBody = new FunctionHandler({ r => (r.getStatusCode, r.getResponseBody) }) - - def removeVersion(credential: BintrayCredentials, - org: Option[String], - repoName: String, - packageName: String, - version: String, - log: Logger): Unit = { - val BintrayCredentials(user, key) = credential - val client: Client = Client(user, key, new Http()) - val repo: Client#Repo = client.repo(org.getOrElse(user), repoName) - Await.result(repo.get(packageName).version(version).delete(asStatusAndBody), Duration.Inf) match { - case (status, body) => log.info(s"Delete version $packageName $version: $status ($body)") - } - } - - private def btPublish(filename: String, - file: File, - 
credential: BintrayCredentials, - org: Option[String], - repoName: String, - packageName: String, - version: String, - log: Logger, - additionalParams: (String, String)*): Unit = { - val BintrayCredentials(user, key) = credential - val owner: String = org.getOrElse(user) - val client: Client = Client(user, key, new Http()) - val repo: Client#Repo = client.repo(org.getOrElse(user), repoName) - - - val params = additionalParams - .map { case (k, v) => s"$k=$v" } - .mkString(";", ";", "") - val upload = repo.get(packageName).version(version).upload(filename + params, file) - - log.info(s"Uploading $file ... (${org.getOrElse(user)}/$repoName/$packageName/$version/$filename$params)") - Await.result(upload(asStatusAndBody), Duration.Inf) match { - case (201, _) => log.info(s"$file was uploaded to $owner/$packageName@$version") - case (_, fail) => sys.error(s"failed to upload $file to $owner/$packageName@$version: $fail") - } - } -} \ No newline at end of file diff --git a/project/Common.scala b/project/Common.scala index a382ef5ee..a0c28485d 100644 --- a/project/Common.scala +++ b/project/Common.scala @@ -8,44 +8,51 @@ object Common { val projectSettings = Seq( organizationName := "TheHive-Project", organization := "org.thehive-project", - licenses += "AGPL-V3" → url("https://www.gnu.org/licenses/agpl-3.0.html"), + licenses += "AGPL-V3" -> url("https://www.gnu.org/licenses/agpl-3.0.html"), organizationHomepage := Some(url("http://thehive-project.org/")), resolvers += Resolver.bintrayRepo("thehive-project", "maven"), resolvers += "elasticsearch-releases" at "https://artifacts.elastic.co/maven", scalaVersion := Dependencies.scalaVersion, scalacOptions ++= Seq( "-deprecation", // Emit warning and location for usages of deprecated APIs. - "-feature", // Emit warning and location for usages of features that should be imported explicitly. - "-unchecked", // Enable additional warnings where generated code depends on assumptions. + "-feature", // Emit warning and location for usages of features that should be imported explicitly. + "-unchecked", // Enable additional warnings where generated code depends on assumptions. //"-Xfatal-warnings", // Fail the compilation if there are any warnings. - "-Xlint", // Enable recommended additional warnings. - "-Ywarn-adapted-args", // Warn if an argument list is modified to match the receiver. - "-Ywarn-dead-code", // Warn when dead code is identified. - "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures. + "-Xlint", // Enable recommended additional warnings. + "-Ywarn-adapted-args", // Warn if an argument list is modified to match the receiver. + "-Ywarn-dead-code", // Warn when dead code is identified. + "-Ywarn-inaccessible", // Warn about inaccessible types in method signatures. "-Ywarn-nullary-override", // Warn when non-nullary overrides nullary, e.g. def foo() over def foo. - "-Ywarn-numeric-widen" // Warn when numerics are widened. + "-Ywarn-numeric-widen" // Warn when numerics are widened. ), - scalacOptions in Test ~= { options ⇒ + scalacOptions in Test ~= { options => options filterNot (_ == "-Ywarn-dead-code") // Allow dead code in tests (to support using mockito). 
}, parallelExecution in Test := false, fork in Test := true, javaOptions += "-Xmx1G", - // Redirect logs from ElasticSearch (which uses log4j2) to slf4j libraryDependencies += "org.apache.logging.log4j" % "log4j-to-slf4j" % "2.9.1", excludeDependencies += "org.apache.logging.log4j" % "log4j-core" ) val stableVersion: Regex = "(\\d+\\.\\d+\\.\\d+)-(\\d+)".r - val betaVersion: Regex = "(\\d+\\.\\d+\\.\\d+)-[Rr][Cc](\\d+)".r + val betaVersion: Regex = "(\\d+\\.\\d+\\.\\d+)-[Rr][Cc](\\d+)-(\\d+)".r + object snapshotVersion { - def unapplySeq(version: String): Option[List[String]] = { - if (version.endsWith("-SNAPSHOT")) { - val v = version.dropRight(9) - stableVersion.unapplySeq(v) orElse betaVersion.unapplySeq(v) - } + + def unapply(version: String): Option[String] = + if (version.endsWith("-SNAPSHOT")) Some(version.dropRight(9)) else None - } } -} \ No newline at end of file + + def versionUsage(version: String): Nothing = + sys.error( + s"Invalid version: $version\n" + + "The accepted formats for version are:\n" + + " - 1.2.3-4\n" + + " - 1.2.3-RC4-5\n" + + " - 1.2.3-4-SNAPSHOT\n" + + " - 1.2.3-RC4-5-SNAPSHOT" + ) +} diff --git a/project/Dependencies.scala b/project/Dependencies.scala index 92a9f4ad3..5f4a361e3 100644 --- a/project/Dependencies.scala +++ b/project/Dependencies.scala @@ -5,21 +5,20 @@ object Dependencies { object Play { val version = play.core.PlayVersion.current - val ws = "com.typesafe.play" %% "play-ws" % version - val ahc = "com.typesafe.play" %% "play-ahc-ws" % version - val cache = "com.typesafe.play" %% "play-ehcache" % version - val test = "com.typesafe.play" %% "play-test" % version - val specs2 = "com.typesafe.play" %% "play-specs2" % version + val ws = "com.typesafe.play" %% "play-ws" % version + val ahc = "com.typesafe.play" %% "play-ahc-ws" % version + val cache = "com.typesafe.play" %% "play-ehcache" % version + val test = "com.typesafe.play" %% "play-test" % version + val specs2 = "com.typesafe.play" %% "play-specs2" % version val filters = "com.typesafe.play" %% "filters-helpers" % version - val guice = "com.typesafe.play" %% "play-guice" % version + val guice = "com.typesafe.play" %% "play-guice" % version } val scalaGuice = "net.codingwell" %% "scala-guice" % "4.1.0" - val reflections = "org.reflections" % "reflections" % "0.9.11" - val zip4j = "net.lingala.zip4j" % "zip4j" % "1.3.2" - val elastic4play = "org.thehive-project" %% "elastic4play" % "1.11.5" - val dockerClient = "com.spotify" % "docker-client" % "8.14.4" - val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % "2.5.21" + val reflections = "org.reflections" % "reflections" % "0.9.11" + val zip4j = "net.lingala.zip4j" % "zip4j" % "1.3.2" + val elastic4play = "org.thehive-project" %% "elastic4play" % "1.12.1" + val dockerClient = "com.spotify" % "docker-client" % "8.14.4" + val akkaCluster = "com.typesafe.akka" %% "akka-cluster" % play.core.PlayVersion.akkaVersion } - diff --git a/project/FrontEnd.scala b/project/FrontEnd.scala index c7a6f12cc..de418c40f 100644 --- a/project/FrontEnd.scala +++ b/project/FrontEnd.scala @@ -13,8 +13,8 @@ object FrontEnd extends AutoPlugin { override def trigger = allRequirements - override def projectSettings = Seq[Setting[_]]( - frontendFiles := { + override def projectSettings = + Seq[Setting[_]](frontendFiles := { val s = streams.value s.log.info("Building front-end ...") s.log.info("npm install") @@ -24,4 +24,4 @@ object FrontEnd extends AutoPlugin { val dir = baseDirectory.value / "www" / "dist" dir.**(AllPassFilter) pair rebase(dir, "www") }) -} \ No 
newline at end of file +} diff --git a/project/build.properties b/project/build.properties index 72f902892..a919a9b5f 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=1.2.7 +sbt.version=1.3.8 diff --git a/project/plugins.sbt b/project/plugins.sbt index 838f26ac7..531c594a0 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,6 +2,6 @@ logLevel := Level.Info // The Play plugin -addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.6.23") -addSbtPlugin("org.foundweekends" % "sbt-bintray" % "0.5.1") -addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.0.0") \ No newline at end of file +addSbtPlugin("com.typesafe.play" % "sbt-plugin" % "2.8.2") +addSbtPlugin("org.scalameta" % "sbt-scalafmt" % "2.4.2") +addSbtPlugin("org.thehive-project" % "sbt-github-changelog" % "0.3.0") diff --git a/rpm.sbt b/rpm.sbt index 0f5ba9ba9..754782ab8 100644 --- a/rpm.sbt +++ b/rpm.sbt @@ -1,21 +1,24 @@ -import Common.{stableVersion, betaVersion, snapshotVersion} +import Common.{betaVersion, snapshotVersion, stableVersion, versionUsage} version in Rpm := { version.value match { - case stableVersion(v1, v2) => v1 - case betaVersion(v1, v2) => v1 - case snapshotVersion(v1, v2) => v1 - case _ => sys.error("Invalid version: " + version.value) + case stableVersion(v1, _) => v1 + case betaVersion(v1, _, _) => v1 + case snapshotVersion(stableVersion(v1, _)) => v1 + case snapshotVersion(betaVersion(v1, _, _)) => v1 + case _ => versionUsage(version.value) } } rpmRelease := { version.value match { - case stableVersion(_, v2) => v2 - case betaVersion(v1, v2) => "0.1RC" + v2 - case snapshotVersion(v1, v2) => v2 + "-SNAPSHOT" - case _ => sys.error("Invalid version: " + version.value) + case stableVersion(_, v2) => v2 + case betaVersion(_, v2, v3) => "0." + v3 + "RC" + v2 + case snapshotVersion(stableVersion(_, v2)) => v2 + "-SNAPSHOT" + case snapshotVersion(betaVersion(_, v2, v3)) => "0." + v3 + "RC" + v2 + "-SNAPSHOT" + case _ => versionUsage(version.value) } } + rpmVendor := organizationName.value rpmUrl := organizationHomepage.value.map(_.toString) rpmLicense := Some("AGPL") @@ -35,12 +38,14 @@ linuxPackageMappings in Rpm := configWithNoReplace((linuxPackageMappings in Rpm) packageBin in Rpm := { import scala.sys.process._ val rpmFile = (packageBin in Rpm).value - Process("rpm" :: - "--define" :: "_gpg_name TheHive Project" :: - "--define" :: "_signature gpg" :: - "--define" :: "__gpg_check_password_cmd /bin/true" :: - "--define" :: "__gpg_sign_cmd %{__gpg} gpg --batch --no-verbose --no-armor --use-agent --no-secmem-warning -u \"%{_gpg_name}\" -sbo %{__signature_filename} %{__plaintext_filename}" :: - "--addsign" :: rpmFile.toString :: - Nil).!! + Process( + "rpm" :: + "--define" :: "_gpg_name TheHive Project" :: + "--define" :: "_signature gpg" :: + "--define" :: "__gpg_check_password_cmd /bin/true" :: + "--define" :: "__gpg_sign_cmd %{__gpg} gpg --batch --no-verbose --no-armor --use-agent --no-secmem-warning -u \"%{_gpg_name}\" -sbo %{__signature_filename} %{__plaintext_filename}" :: + "--addsign" :: rpmFile.toString :: + Nil + ).!! 
rpmFile } diff --git a/version.sbt b/version.sbt index 4cdd74dab..869c6fb5b 100644 --- a/version.sbt +++ b/version.sbt @@ -1 +1 @@ -version in ThisBuild := "3.0.1-1" +version in ThisBuild := "3.1.0-RC1-1" diff --git a/www/package.json b/www/package.json index 117b1ebbe..9a3753b2c 100755 --- a/www/package.json +++ b/www/package.json @@ -1,6 +1,6 @@ { "name": "cortex", - "version": "3.0.1", + "version": "3.1.0-RC1", "description": "A powerfull observable analysis engine", "license": "AGPL-3.0-or-later", "homepage": "https://github.com/TheHive-Project/Cortex", @@ -75,4 +75,4 @@ "webpack": "^3.5.0", "webpack-dev-server": "^2.2.0" } -} \ No newline at end of file +} diff --git a/www/src/app/pages/login/login.page.html b/www/src/app/pages/login/login.page.html index 90a3af416..403d860c8 100644 --- a/www/src/app/pages/login/login.page.html +++ b/www/src/app/pages/login/login.page.html @@ -2,7 +2,7 @@
-
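
Note on the version scheme used above: project/Common.scala now recognises stable versions (1.2.3-4), release candidates (1.2.3-RC4-5) and their -SNAPSHOT variants, and debian.sbt, rpm.sbt and docker.sbt all derive their package versions from these matchers. The standalone Scala sketch below is not part of the patch (the object and method names are illustrative only); it simply mirrors the matchers from Common.scala and the Debian/RPM renderings above so the mapping can be checked in isolation, for example against the 3.1.0-RC1-1 version set in version.sbt.

object VersionSchemeSketch extends App {
  import scala.util.matching.Regex

  // Same patterns as the ones introduced in project/Common.scala
  val stableVersion: Regex = "(\\d+\\.\\d+\\.\\d+)-(\\d+)".r
  val betaVersion: Regex   = "(\\d+\\.\\d+\\.\\d+)-[Rr][Cc](\\d+)-(\\d+)".r

  object snapshotVersion {
    def unapply(version: String): Option[String] =
      if (version.endsWith("-SNAPSHOT")) Some(version.dropRight(9)) else None
  }

  // Debian package version, as computed in debian.sbt
  def debianVersion(v: String): String = v match {
    case stableVersion(_, _)                      => v
    case betaVersion(v1, v2, v3)                  => v1 + "-0." + v3 + "RC" + v2
    case snapshotVersion(stableVersion(v1, v2))   => v1 + "-" + v2 + "-SNAPSHOT"
    case snapshotVersion(betaVersion(v1, v2, v3)) => v1 + "-0." + v3 + "RC" + v2 + "-SNAPSHOT"
    case _                                        => sys.error(s"Invalid version: $v") // the build calls Common.versionUsage here
  }

  // RPM (version, release) pair, as computed in rpm.sbt
  def rpmVersion(v: String): (String, String) = v match {
    case stableVersion(v1, v2)                    => (v1, v2)
    case betaVersion(v1, v2, v3)                  => (v1, "0." + v3 + "RC" + v2)
    case snapshotVersion(stableVersion(v1, v2))   => (v1, v2 + "-SNAPSHOT")
    case snapshotVersion(betaVersion(v1, v2, v3)) => (v1, "0." + v3 + "RC" + v2 + "-SNAPSHOT")
    case _                                        => sys.error(s"Invalid version: $v")
  }

  println(debianVersion("3.1.0-RC1-1")) // 3.1.0-0.1RC1
  println(rpmVersion("3.1.0-RC1-1"))    // (3.1.0,0.1RC1)
  println(debianVersion("3.0.1-1"))     // 3.0.1-1
}

With this scheme the 3.1.0-RC1-1 tag is published as Debian package version 3.1.0-0.1RC1 and as RPM version 3.1.0 with release 0.1RC1, so release-candidate packages sort before the corresponding stable 3.1.0-1 packages.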