diff --git a/.github/workflows/build-docs.yml b/.github/workflows/build-docs.yml
index 02c97851e..41c56b0cf 100644
--- a/.github/workflows/build-docs.yml
+++ b/.github/workflows/build-docs.yml
@@ -40,7 +40,7 @@ jobs:
           ./scripts/generate_datafed.sh
       - name: Install documentation build dependencies
         run: |
-          sudo ./scripts/install_docs_dependencies.sh
+          sudo ./external/DataFedDependencies/scripts/install_docs_dependencies.sh
       - name: Build documentation
         run: |
           cmake -S. -B build -DBUILD_AUTHZ=OFF -DBUILD_CORE_SERVER=OFF -DBUILD_COMMON=OFF -DBUILD_DOCS=ON -DBUILD_FOXX=OFF -DBUILD_REPO_SERVER=OFF -DBUILD_PYTHON_CLIENT=ON -DBUILD_TESTS=OFF -DBUILD_WEB_SERVER=OFF -DENABLE_UNIT_TESTS=OFF
diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml
index fdf076171..b59915ac1 100644
--- a/.github/workflows/unit-tests.yml
+++ b/.github/workflows/unit-tests.yml
@@ -10,9 +10,12 @@ jobs:
       - name: Update debian
         run: apt update
       - name: Install dependencies
+        with:
+          submodules: recursive
+          fetch-depth: 0
         run: |
           ./scripts/generate_datafed.sh
-          ./scripts/install_core_dependencies.sh
+          ./external/DataFedDependencies/scripts/install_core_dependencies.sh
       - name: Build
         run: |
           /opt/datafed/dependencies/bin/cmake -S. -B build -DCMAKE_BUILD_TYPE=Debug -DBUILD_WEB_SERVER=OFF
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 29e1410b6..d3c5504aa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -58,3 +58,4 @@ variables:
   REGISTRY: "camden.ornl.gov"
   DATAFED_DEPENDENCIES_INSTALL_PATH: "/shared/install"
   DOCKER_TLS_CERTDIR: "" # Required for running docker in docker
+  GIT_SUBMODULE_STRATEGY: recursive
diff --git a/.gitlab/build/build_gcs_base_image.yml b/.gitlab/build/build_gcs_base_image.yml
index 5661df305..95946a418 100644
--- a/.gitlab/build/build_gcs_base_image.yml
+++ b/.gitlab/build/build_gcs_base_image.yml
@@ -10,7 +10,6 @@ build-gcs-base:
   variables:
     PROJECT: "datafed"
     COMPONENT: "gcs-base"
-    GIT_SUBMODULE_STRATEGY: recursive
     GIT_STRATEGY: clone
     DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count
     BUILD_INTERMEDIATE: "FALSE"
diff --git a/.gitlab/build/force_build_gcs_base_image.yml b/.gitlab/build/force_build_gcs_base_image.yml
index 06448779a..f3931f80b 100644
--- a/.gitlab/build/force_build_gcs_base_image.yml
+++ b/.gitlab/build/force_build_gcs_base_image.yml
@@ -7,7 +7,6 @@ build-gcs-base:
   variables:
     PROJECT: "datafed"
     COMPONENT: "gcs-base"
-    GIT_SUBMODULE_STRATEGY: recursive
    GIT_STRATEGY: clone
     DATAFED_HARBOR_REGISTRY: "$REGISTRY" # needed by c_harbor_artifact_count
     GCS_BASE_IMAGE_DISTRO: "debian-12"
diff --git a/.gitlab/stage_provision_client.yml b/.gitlab/stage_provision_client.yml
index 3e2404429..21f3c6549 100644
--- a/.gitlab/stage_provision_client.yml
+++ b/.gitlab/stage_provision_client.yml
@@ -7,13 +7,15 @@ provision-client:
   variables:
     GIT_STRATEGY: clone
     DATAFED_PYTHON_DEPENDENCIES_DIR: "${CI_PROJECT_DIR}/dependencies/python"
+    DATAFED_DEPENDENCIES_INSTALL_PATH: "/opt/datafed/dependencies/"
   stage: provision-client
   tags:
     - ci-datafed-client
   before_script:
-    - export PATH=/opt/datafed/dependencies/bin:$PATH
-    - rm -rf $DATAFED_PYTHON_DEPENDENCIES_DIR
+    - sudo chown -R gitlab-runner:gitlab-runner "$DATAFED_DEPENDENCIES_INSTALL_PATH"
+    - rm -rf "$DATAFED_PYTHON_DEPENDENCIES_DIR"
   script:
-    - ./scripts/generate_datafed.sh
-    - ./scripts/install_client_dependencies.sh
-    - ./scripts/install_end_to_end_test_dependencies.sh
+    - export PATH="/opt/datafed/dependencies/bin:$DATAFED_PYTHON_DEPENDENCIES_DIR/bin:$PATH"
+    - ./external/DataFedDependencies/scripts/generate_dependencies_config.sh
+    - ./external/DataFedDependencies/scripts/install_client_dependencies.sh
+    - ./external/DataFedDependencies/scripts/install_end_to_end_test_dependencies.sh
diff --git a/.gitmodules b/.gitmodules
index bd31a9653..2f55771cf 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,9 +1,6 @@
 [submodule "external/globus-connect-server-deploy"]
 	path = external/globus-connect-server-deploy
 	url = https://github.com/globus/globus-connect-server-deploy.git
-[submodule "external/protobuf"]
-	path = external/protobuf
-	url = https://github.com/protocolbuffers/protobuf.git
 [submodule "external/DataFedDependencies"]
 	path = external/DataFedDependencies
 	url = https://github.com/ORNL/DataFedDependencies.git
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 72c004182..1654f1eb6 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -52,7 +52,14 @@ if(NOT EXISTS ${DATAFED_CONFIG_SH})
     "${PROJECT_SOURCE_DIR}/scripts/generate_datafed.sh")
 endif()
 
-file(READ "${PROJECT_SOURCE_DIR}/scripts/dependency_versions.sh" DEPENDENCY_VERSIONS)
+set(DATAFED_DEPENDENCIES_SH "${DataFed_SOURCE_DIR}/external/DataFedDependencies/config/dependencies.sh")
+if(NOT EXISTS ${DATAFED_DEPENDENCIES_SH})
+  message(FATAL_ERROR "Error: File '${DATAFED_DEPENDENCIES_SH}' does not exist. "
+    "Please run generate_dependencies.sh first to populate defaults."
+    "${PROJECT_SOURCE_DIR}/external/DataFedDependencies/scripts/generate_datafed.sh")
+endif()
+
+file(READ "${PROJECT_SOURCE_DIR}/external/DataFedDependencies/scripts/dependency_versions.sh" DEPENDENCY_VERSIONS)
 
 get_version_from_script(${DEPENDENCY_VERSIONS} "DATAFED_DYNAMIC_LIBRARY_PROTOBUF_VERSION" PROTOBUF_LIBRARY_VERSION)
 get_version_from_script(${DEPENDENCY_VERSIONS} "DATAFED_PROTOBUF_VERSION" PROTOBUF_COMPILER_VERSION)
@@ -76,7 +83,7 @@ if(NOT DEFINED DATAFED_DOMAIN)
 endif()
 
 if(NOT DEFINED DATAFED_DEPENDENCIES_INSTALL_PATH)
-  get_value_from_datafed_sh("DATAFED_DEPENDENCIES_INSTALL_PATH" DEPENDENCY_INSTALL_PATH)
+  get_value_from_dependencies_sh("DATAFED_DEPENDENCIES_INSTALL_PATH" DEPENDENCY_INSTALL_PATH)
 endif()
 
 set(CMAKE_PREFIX_PATH "${DEPENDENCY_INSTALL_PATH}")
@@ -187,6 +194,7 @@ endif()
 
 if( BUILD_PYTHON_CLIENT )
   # make target = pydatafed
+  file(COPY ${PROJECT_SOURCE_DIR}/external/DataFedDependencies/python/datafed_pkg/requirements.txt DESTINATION ${PROJECT_SOURCE_DIR}/python/datafed_pkg/requirements.txt)
   add_subdirectory( python EXCLUDE_FROM_ALL )
 endif()
 
diff --git a/cmake/Utils.cmake b/cmake/Utils.cmake
index 9ae7fabab..e4f8e6819 100644
--- a/cmake/Utils.cmake
+++ b/cmake/Utils.cmake
@@ -45,3 +45,24 @@ function(get_value_from_datafed_sh INPUT_KEY OUTPUT_VALUE)
   set(${OUTPUT_VALUE} "${OUTPUT_VAR}" PARENT_SCOPE)
 endfunction()
 
+# Function will get exported value from a shell script
+#
+# i.e. if dependencies.sh has
+#
+# dependencies.sh
+# export MY_NAME="Barry"
+#
+# set(DATAFED_CONFIG_SH "external/DataFedDependencies/config/dependencies.sh")
+# get_value_from_dependencies_sh "MY_NAME" name)
+# message("$name")
+#
+# Will output "Barry"
+function(get_value_from_dependencies_sh INPUT_KEY OUTPUT_VALUE)
+  execute_process(
+    COMMAND bash "-c" "source ${DATAFED_CONFIG_SH} && echo \$${INPUT_KEY}"
+    OUTPUT_VARIABLE OUTPUT_VAR
+    OUTPUT_STRIP_TRAILING_WHITESPACE
+  )
+  set(${OUTPUT_VALUE} "${OUTPUT_VAR}" PARENT_SCOPE)
+endfunction()
+
diff --git a/core/database/tests/test_fixture_setup.sh b/core/database/tests/test_fixture_setup.sh
index b88b2c521..1f54992f2 100755
--- a/core/database/tests/test_fixture_setup.sh
+++ b/core/database/tests/test_fixture_setup.sh
@@ -4,10 +4,10 @@ set -uef -o pipefail
 
 SCRIPT=$(realpath "$BASH_SOURCE[0]")
 SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/../../../")
-source "${PROJECT_ROOT}/config/datafed.sh"
-source "${PROJECT_ROOT}/scripts/dependency_versions.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
+DATAFED_PROJECT_ROOT=$(realpath "${SOURCE}/../../../")
+source "${DATAFED_PROJECT_ROOT}/config/datafed.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_install_functions.sh"
 
 Help() {
   echo "$(basename $0) Will initialize fixtures for Foxx tests"
@@ -42,7 +42,7 @@ else
 fi
 
 if [ -z "${FOXX_MAJOR_API_VERSION:-}" ]; then
-  local_FOXX_MAJOR_API_VERSION=$(cat ${PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
+  local_FOXX_MAJOR_API_VERSION=$(cat ${DATAFED_PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
 else
   local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION)
 fi
diff --git a/core/database/tests/test_foxx.sh b/core/database/tests/test_foxx.sh
index 5265d6100..9d91b4318 100755
--- a/core/database/tests/test_foxx.sh
+++ b/core/database/tests/test_foxx.sh
@@ -12,10 +12,10 @@ set -euf -o pipefail
 
 SCRIPT=$(realpath "$BASH_SOURCE[0]")
 SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/../../../)
-source ${PROJECT_ROOT}/config/datafed.sh
-source "${PROJECT_ROOT}/scripts/dependency_versions.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
+DATAFED_PROJECT_ROOT=$(realpath ${SOURCE}/../../../)
+source "${DATAFED_PROJECT_ROOT}/config/datafed.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_install_functions.sh"
 
 Help() {
   echo "$(basename $0) Will run a Foxx unit test"
@@ -46,7 +46,7 @@ else
 fi
 
 if [ -z "${FOXX_MAJOR_API_VERSION:-}" ]; then
-  local_FOXX_MAJOR_API_VERSION=$(cat ${PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
+  local_FOXX_MAJOR_API_VERSION=$(cat ${DATAFED_PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
 else
   local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION)
 fi
diff --git a/core/database/tests/test_setup.sh b/core/database/tests/test_setup.sh
index 33b542bf6..93195eb04 100755
--- a/core/database/tests/test_setup.sh
+++ b/core/database/tests/test_setup.sh
@@ -13,10 +13,10 @@ set -uef -o pipefail
 
 SCRIPT=$(realpath "$BASH_SOURCE[0]")
 SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/../../../")
-source "${PROJECT_ROOT}/config/datafed.sh"
-source "${PROJECT_ROOT}/scripts/dependency_versions.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
+DATAFED_PROJECT_ROOT=$(realpath "${SOURCE}/../../../")
+source "${DATAFED_PROJECT_ROOT}/config/datafed.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_install_functions.sh"
 
 Help() {
   echo "$(basename $0) Will set up a configuration file for the core server"
@@ -52,7 +52,7 @@ else
 fi
 
 if [ -z "${FOXX_MAJOR_API_VERSION:-}" ]; then
-  local_FOXX_MAJOR_API_VERSION=$(cat ${PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
+  local_FOXX_MAJOR_API_VERSION=$(cat ${DATAFED_PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
 else
   local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION)
 fi
@@ -115,7 +115,7 @@ if [[ "$output" =~ .*"sdms".* ]]; then
   echo "SDMS already exists do nothing"
 else
   echo "Creating SDMS"
-  arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:8529" --server.password "${local_DATAFED_DATABASE_PASSWORD}" --server.username "${local_DATABASE_USER}" --javascript.execute "${PROJECT_ROOT}/core/database/foxx/db_create.js"
+  arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:8529" --server.password "${local_DATAFED_DATABASE_PASSWORD}" --server.username "${local_DATABASE_USER}" --javascript.execute "${DATAFED_PROJECT_ROOT}/core/database/foxx/db_create.js"
   # Give time for the database to be created
   sleep 2
   arangosh --server.endpoint "tcp://${local_DATAFED_DATABASE_HOST}:8529" --server.password "${local_DATAFED_DATABASE_PASSWORD}" --server.username "${local_DATABASE_USER}" --javascript.execute-string 'db._useDatabase("sdms"); db.config.insert({"_key": "msg_daily", "msg" : "DataFed servers will be off-line for regular maintenance every Sunday night from 11:45 pm until 12:15 am EST Monday morning."}, {overwrite: true});'
diff --git a/core/database/tests/test_teardown.sh b/core/database/tests/test_teardown.sh
index b7a1652a6..9ff83e0cc 100755
--- a/core/database/tests/test_teardown.sh
+++ b/core/database/tests/test_teardown.sh
@@ -12,8 +12,8 @@ set -euf -o pipefail
 
 SCRIPT=$(realpath "$0")
 SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/../../../")
-source "${PROJECT_ROOT}/config/datafed.sh"
+DATAFED_PROJECT_ROOT=$(realpath "${SOURCE}/../../../")
+source "${DATAFED_PROJECT_ROOT}/config/datafed.sh"
 
 PATH_TO_PASSWD_FILE="${SOURCE}/database_temp.password"
 rm "${PATH_TO_PASSWD_FILE}"
diff --git a/core/docker/Dockerfile b/core/docker/Dockerfile
index d8e1b1123..d88ad42cb 100644
--- a/core/docker/Dockerfile
+++ b/core/docker/Dockerfile
@@ -2,15 +2,16 @@
 # cd ${PROJECT_ROOT} or cd DataFed
 # docker build -f core/docker/Dockerfile .
 
-ARG BUILD_BASE="debian:bookworm-slim"
-ARG DEPENDENCIES="dependencies"
-ARG RUNTIME="runtime"
-ARG DATAFED_DIR="/datafed"
-ARG DATAFED_INSTALL_PATH="/opt/datafed"
+ARG BUILD_BASE="debian:bookworm-slim"
+ARG DEPENDENCIES="dependencies"
+ARG RUNTIME="runtime"
+ARG DATAFED_DIR="/datafed"
+ARG DATAFED_INSTALL_PATH="/opt/datafed"
 ARG DATAFED_DEPENDENCIES_INSTALL_PATH="/opt/datafed/dependencies"
-ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal"
-ARG BUILD_DIR="$DATAFED_DIR/source"
-ARG LIB_DIR="/usr/local/lib"
+ARG BUILD_DIR="$DATAFED_DIR/source"
+ARG DATAFED_DEPENDENCIES_ROOT="$BUILD_DIR/external/DataFedDependencies"
+ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal"
+ARG LIB_DIR="/usr/local/lib"
 
 FROM ${DEPENDENCIES} AS core-build
 
@@ -20,6 +21,7 @@ ARG DATAFED_DIR
 ARG BUILD_DIR
 ARG DATAFED_INSTALL_PATH
 ARG DATAFED_DEPENDENCIES_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
 ENV DATAFED_INSTALL_PATH="${DATAFED_INSTALL_PATH}"
 
 # For communicating with repo server
@@ -27,10 +29,13 @@ EXPOSE 7512
 # For listening to web server
 EXPOSE 7513
 
+RUN mkdir -p ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+    mv ./scripts/dependency_versions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+    mv ./scripts/generate_dependencies_config.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/
+
 COPY ./common ${BUILD_DIR}/common
 COPY ./core/CMakeLists.txt ${BUILD_DIR}/core/CMakeLists.txt
 COPY ./CMakeLists.txt ${BUILD_DIR}
-COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
 COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/
 COPY ./scripts/generate_core_config.sh ${BUILD_DIR}/scripts/
 COPY ./scripts/install_core.sh ${BUILD_DIR}/scripts/
@@ -38,15 +43,16 @@ COPY ./cmake ${BUILD_DIR}/cmake
 COPY ./core/docker/entrypoint.sh ${BUILD_DIR}/core/docker/
 COPY ./core/server ${BUILD_DIR}/core/server
 
-RUN ${BUILD_DIR}/scripts/generate_datafed.sh && \
-    ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
-    -DBUILD_REPO_SERVER=False \
-    -DBUILD_AUTHZ=False \
-    -DBUILD_CORE_SERVER=True \
-    -DBUILD_WEB_SERVER=False \
-    -DBUILD_DOCS=False \
-    -DBUILD_PYTHON_CLIENT=False \
-    -DBUILD_FOXX=False
+RUN ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh && \
+    ${BUILD_DIR}/scripts/generate_datafed.sh && \
+    ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
+    -DBUILD_REPO_SERVER=False \
+    -DBUILD_AUTHZ=False \
+    -DBUILD_CORE_SERVER=True \
+    -DBUILD_WEB_SERVER=False \
+    -DBUILD_DOCS=False \
+    -DBUILD_PYTHON_CLIENT=False \
+    -DBUILD_FOXX=False
 
 RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build -j 8
 RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build --target install
@@ -56,6 +62,7 @@ SHELL ["/bin/bash", "-c"]
 
 ARG DATAFED_DIR
 ARG DATAFED_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
 ARG DATAFED_DEPENDENCIES_INSTALL_PATH
 ARG BUILD_DIR
 ARG LIB_DIR
@@ -63,6 +70,7 @@ ARG LIB_DIR
 # The above should also be available at runtime
 ENV DATAFED_INSTALL_PATH="$DATAFED_INSTALL_PATH"
 ENV DATAFED_DEPENDENCIES_INSTALL_PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
+ENV DATAFED_DEPENDENCIES_ROOT="${DATAFED_DEPENDENCIES_ROOT}"
 ENV DATAFED_DIR="$DATAFED_DIR"
 ENV BUILD_DIR="$BUILD_DIR"
 ENV LIB_DIR="$LIB_DIR"
@@ -77,18 +85,21 @@ COPY --chown=datafed:root ./scripts/generate_core_config.sh ${BUILD_DIR}/scripts
 COPY --chown=datafed:root ./scripts/install_core.sh ${BUILD_DIR}/scripts/install_core.sh
 COPY --chown=datafed:root ./cmake/Version.cmake ${BUILD_DIR}/cmake/Version.cmake
 COPY --from=core-build --chown=datafed:root ${BUILD_DIR}/core/docker/entrypoint.sh ${BUILD_DIR}/core/docker/entrypoint.sh
+COPY --from=core-build --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts ${DATAFED_DEPENDENCIES_ROOT}/scripts
 COPY --from=core-build --chown=datafed:root ${DATAFED_INSTALL_PATH}/core/datafed-core ${DATAFED_INSTALL_PATH}/core/datafed-core
 
 USER root
 
-RUN chown -R datafed:root ${DATAFED_DIR} /opt /home/datafed && \
+RUN chown -R datafed:root ${DATAFED_DIR} /opt /home/datafed \
+    ${DATAFED_DEPENDENCIES_ROOT} && \
     chmod 774 ${DATAFED_DIR} ${BUILD_DIR} ${BUILD_DIR}/core/docker/entrypoint.sh \
     ${DATAFED_INSTALL_PATH}/core/datafed-core \
     ${BUILD_DIR}/scripts/generate_datafed.sh \
     ${BUILD_DIR}/scripts/generate_core_config.sh \
     ${BUILD_DIR}/scripts/install_core.sh \
     ${DATAFED_INSTALL_PATH}/core && \
+    find ${DATAFED_DEPENDENCIES_ROOT} -type d -exec chmod 0774 {} + && \
     chmod 664 ${BUILD_DIR}/cmake/Version.cmake && \
     chmod +t ${DATAFED_DIR} ${DATAFED_INSTALL_PATH}
diff --git a/doc_source/source/admin/install_bare_metal.rst b/doc_source/source/admin/install_bare_metal.rst
index 44171394d..960eed40e 100644
--- a/doc_source/source/admin/install_bare_metal.rst
+++ b/doc_source/source/admin/install_bare_metal.rst
@@ -71,9 +71,9 @@ The npm packages needed primarily by the web server are:
 
 This can be done with a helper scripts these scripts are for ubuntu::
 
-  ./DataFed/scripts/install_core_dependencies.sh
-  ./DataFed/scripts/install_repo_dependencies.sh
-  ./DataFed/scripts/install_ws_dependencies.sh
+  ./DataFed/external/DataFedDependencies/scripts/install_core_dependencies.sh
+  ./DataFed/external/DataFedDependencies/scripts/install_repo_dependencies.sh
+  ./DataFed/external/DataFedDependencies/scripts/install_ws_dependencies.sh
 
 The next step is to enter configuration options that are listed in ./config/datafed.sh. To
 generate a template for this file you will first need to run::
diff --git a/docker/Dockerfile.foxx b/docker/Dockerfile.foxx
index 4fdfa5df6..a94405282 100644
--- a/docker/Dockerfile.foxx
+++ b/docker/Dockerfile.foxx
@@ -8,6 +8,7 @@ ARG DATAFED_DIR="/datafed"
 ARG DATAFED_INSTALL_PATH="/opt/datafed"
 ARG DATAFED_DEPENDENCIES_INSTALL_PATH="/opt/datafed/dependencies"
 ARG BUILD_DIR="$DATAFED_DIR/source"
+ARG DATAFED_DEPENDENCIES_ROOT="$BUILD_DIR/external/DataFedDependencies"
 ARG NVM_DIR="$DATAFED_DIR/.nvm"
 ARG NVM_INC="$DATAFED_DIR/.nvm/versions/node/v20.18.2/include/node"
 ARG NVM_BIN="$DATAFED_DIR/.nvm/versions/node/v20.18.2/bin"
@@ -19,6 +20,7 @@ ARG DATAFED_DIR
 ARG BUILD_DIR
 ARG DATAFED_INSTALL_PATH
 ARG DATAFED_DEPENDENCIES_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
 
 # WARNING
 #
@@ -30,14 +32,20 @@ ENV BUILD_DIR="${BUILD_DIR}"
 ENV HOME="${BUILD_DIR}"
 ENV DATAFED_DIR="${DATAFED_DIR}"
 ENV DATAFED_DEPENDENCIES_INSTALL_PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
+ENV DATAFED_DEPENDENCIES_ROOT="${DATAFED_DEPENDENCIES_ROOT}"
 ENV DATAFED_INSTALL_PATH="$DATAFED_INSTALL_PATH"
 ENV DATAFED_DEFAULT_LOG_PATH="$DATAFED_INSTALL_PATH/logs"
 # Set to false by default to avoid wiping the database
 ENV ENABLE_FOXX_TESTS="FALSE"
 
+RUN mkdir -p ${BUILD_DIR}/external/DataFedDependencies/scripts/ && \
+    mv ./scripts/dependency_versions.sh ${BUILD_DIR}/external/DataFedDependencies/scripts/ && \
+    mv ./scripts/dependency_install_functions.sh ${BUILD_DIR}/external/DataFedDependencies/scripts/ && \
+    mv ./scripts/generate_dependencies_config.sh ${BUILD_DIR}/external/DataFedDependencies/scripts/ && \
+    mv ./scripts/utils.sh ${BUILD_DIR}/external/DataFedDependencies/scripts/
+
 COPY ./core/CMakeLists.txt ${BUILD_DIR}/core/CMakeLists.txt
 COPY ./CMakeLists.txt ${BUILD_DIR}
-COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
 COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/
 COPY ./cmake ${BUILD_DIR}/cmake
 COPY ./docker/entrypoint_foxx.sh ${BUILD_DIR}/docker/entrypoint_foxx.sh
@@ -72,13 +80,15 @@ RUN chown -R datafed:root /home/datafed \
     ${BUILD_DIR}/scripts \
     ${BUILD_DIR}/core/ \
     ${BUILD_DIR}/common/ \
-    ${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm && \
+    ${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm \
+    ${BUILD_DIR}/external/DataFedDependencies/ && \
     chown datafed:root ${BUILD_DIR} ${DATAFED_DEPENDENCIES_INSTALL_PATH} && \
     find ${BUILD_DIR}/config -maxdepth 1 -type f -exec chmod 0664 {} + && \
     find ${BUILD_DIR}/core/database -type f -exec chmod 0664 {} + && \
     find ${BUILD_DIR}/core -type d -exec chmod 0774 {} + && \
     find ${BUILD_DIR}/config -type d -exec chmod 0774 {} + && \
     find ${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm -type d -exec chmod 0774 {} + && \
+    find ${DATAFED_DEPENDENCIES_ROOT} -type d -exec chmod 0774 {} + && \
     chmod 774 ${BUILD_DIR} \
     ${BUILD_DIR}/scripts \
     ${DATAFED_DEPENDENCIES_INSTALL_PATH} \
diff --git a/docs/_sources/admin/general.rst.txt b/docs/_sources/admin/general.rst.txt
index 4003f9ee3..4c402fbf5 100644
--- a/docs/_sources/admin/general.rst.txt
+++ b/docs/_sources/admin/general.rst.txt
@@ -74,9 +74,9 @@ The npm packages needed primarily by the web server are:
 
 This can be done with a helper scripts these scripts are for ubuntu::
 
-  ./DataFed/scripts/install_core_dependencies.sh
-  ./DataFed/scripts/install_repo_dependencies.sh
-  ./DataFed/scripts/install_ws_dependencies.sh
+  ./DataFed/external/DataFedDependencies/scripts/install_core_dependencies.sh
+  ./DataFed/external/DataFedDependencies/scripts/install_repo_dependencies.sh
+  ./DataFed/external/DataFedDependencies/scripts/install_ws_dependencies.sh
 
 The next step is to enter configuration options that are listed in ./config/datafed.sh. To
 generate a template for this file you will first need to run::
diff --git a/docs/admin/general.html b/docs/admin/general.html
index ef58285ea..23cbd285e 100644
--- a/docs/admin/general.html
+++ b/docs/admin/general.html
@@ -178,9 +178,9 @@
client-oauth2
This can be done with a helper scripts these scripts are for ubuntu:
-./DataFed/scripts/install_core_dependencies.sh
-./DataFed/scripts/install_repo_dependencies.sh
-./DataFed/scripts/install_ws_dependencies.sh
+./DataFed/external/DataFedDependencies/scripts/install_core_dependencies.sh
+./DataFed/external/DataFedDependencies/scripts/install_repo_dependencies.sh
+./DataFed/external/DataFedDependencies/scripts/install_ws_dependencies.sh
The next step is to enter configuration options that are listed in ./config/datafed.sh. To
diff --git a/external/DataFedDependencies b/external/DataFedDependencies
index eee5cfb8d..b3528638d 160000
--- a/external/DataFedDependencies
+++ b/external/DataFedDependencies
@@ -1 +1 @@
-Subproject commit eee5cfb8dac44b806f2a78526603cb363bc2a002
+Subproject commit b3528638dc7e78633b60b607fd0227aa5862bf53
diff --git a/external/protobuf b/external/protobuf
deleted file mode 160000
index a9b006bdd..000000000
--- a/external/protobuf
+++ /dev/null
@@ -1 +0,0 @@
-Subproject commit a9b006bddd52e289029f16aa77b77e8e0033d9ee
diff --git a/python/datafed_pkg/requirements.txt b/python/datafed_pkg/requirements.txt
deleted file mode 100644
index ab83b039c..000000000
--- a/python/datafed_pkg/requirements.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-protobuf>=5.27.1
-pyzmq>=16
-wget>=3
-requests>=2
-click>=7
-prompt_toolkit>=2
diff --git a/python/docker/Dockerfile.python-client-base.ubuntu b/python/docker/Dockerfile.python-client-base.ubuntu
index 53102ac43..be84235cd 100644
--- a/python/docker/Dockerfile.python-client-base.ubuntu
+++ b/python/docker/Dockerfile.python-client-base.ubuntu
@@ -14,7 +14,7 @@ COPY ./scripts/dependency_install_functions.sh ${BUILD_DIR}/scripts/
COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/
COPY ./scripts/utils.sh ${BUILD_DIR}/scripts/
-COPY ./scripts/install_python_client_dependencies.sh ${BUILD_DIR}/scripts/
+COPY ./external/DataFedDependencies/scripts/install_python_client_dependencies.sh ${BUILD_DIR}/scripts/
RUN echo "#!/bin/bash\n\$@" > /usr/bin/sudo && chmod +x /usr/bin/sudo
RUN ${BUILD_DIR}/scripts/generate_datafed.sh
diff --git a/repository/docker/Dockerfile b/repository/docker/Dockerfile
index a013691c8..9fa693142 100644
--- a/repository/docker/Dockerfile
+++ b/repository/docker/Dockerfile
@@ -2,15 +2,16 @@
# cd ${PROJECT_ROOT} or cd DataFed
# docker build -f repository/docker/Dockerfile .
-ARG BUILD_BASE="debian:bookworm-slim"
-ARG DEPENDENCIES="dependencies"
-ARG RUNTIME="runtime"
-ARG DATAFED_DIR="/datafed"
-ARG DATAFED_INSTALL_PATH="/opt/datafed"
+ARG BUILD_BASE="debian:bookworm-slim"
+ARG DEPENDENCIES="dependencies"
+ARG RUNTIME="runtime"
+ARG DATAFED_DIR="/datafed"
+ARG DATAFED_INSTALL_PATH="/opt/datafed"
ARG DATAFED_DEPENDENCIES_INSTALL_PATH="/opt/datafed/dependencies"
-ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal"
-ARG BUILD_DIR="$DATAFED_DIR/source"
-ARG LIB_DIR="/usr/local/lib"
+ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal"
+ARG BUILD_DIR="$DATAFED_DIR/source"
+ARG LIB_DIR="/usr/local/lib"
+ARG DATAFED_DEPENDENCIES_ROOT="$BUILD_DIR/external/DataFedDependencies"
FROM ${DEPENDENCIES} AS repo-build
@@ -18,35 +19,41 @@ SHELL ["/bin/bash", "-c"]
ARG DATAFED_DIR
ARG BUILD_DIR
ARG DATAFED_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
ENV DATAFED_INSTALL_PATH="$DATAFED_INSTALL_PATH"
# This port is needed to communicate with the DataFed core server
EXPOSE 7512
# Not quite sure what 9000 is doing that 7512 isn't, difference between egress
+
# and ingress?
EXPOSE 9000
+RUN mkdir -p ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+ mv ./scripts/dependency_versions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+ mv ./scripts/dependency_install_functions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+ mv ./scripts/generate_dependencies_config.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/
+
COPY ./common ${BUILD_DIR}/common
COPY ./repository/CMakeLists.txt ${BUILD_DIR}/repository/CMakeLists.txt
COPY ./CMakeLists.txt ${BUILD_DIR}
-COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
-COPY ./scripts/dependency_install_functions.sh ${BUILD_DIR}/scripts/
COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/
COPY ./scripts/generate_repo_config.sh ${BUILD_DIR}/scripts/
COPY ./scripts/install_repo.sh ${BUILD_DIR}/scripts/
COPY ./cmake ${BUILD_DIR}/cmake
COPY ./repository/server ${BUILD_DIR}/repository/server
-RUN ${BUILD_DIR}/scripts/generate_datafed.sh && \
- ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
- -DBUILD_REPO_SERVER=True \
- -DBUILD_AUTHZ=False \
- -DBUILD_CORE_SERVER=False \
- -DBUILD_WEB_SERVER=False \
- -DBUILD_DOCS=False \
- -DBUILD_PYTHON_CLIENT=False \
- -DBUILD_FOXX=False
+RUN ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh && \
+ ${BUILD_DIR}/scripts/generate_datafed.sh && \
+ ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
+ -DBUILD_REPO_SERVER=True \
+ -DBUILD_AUTHZ=False \
+ -DBUILD_CORE_SERVER=False \
+ -DBUILD_WEB_SERVER=False \
+ -DBUILD_DOCS=False \
+ -DBUILD_PYTHON_CLIENT=False \
+ -DBUILD_FOXX=False
RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build -j 8
RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build --target install
@@ -58,19 +65,22 @@ ARG DATAFED_INSTALL_PATH
ARG BUILD_DIR
ARG LIB_DIR
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
# The above should also be available at runtime
ENV DATAFED_INSTALL_PATH="$DATAFED_INSTALL_PATH"
ENV DATAFED_DEPENDENCIES_INSTALL_PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
ENV DATAFED_GCS_COLLECTION_BASE_PATH="/mnt/datafed"
ENV DATAFED_GCS_COLLECTION_ROOT_PATH="/mnt/datafed"
+ENV DATAFED_DEPENDENCIES_ROOT="$DATAFED_DEPENDENCIES_ROOT"
ENV DATAFED_REPO_ID_AND_DIR="home"
ENV DATAFED_DIR="$DATAFED_DIR"
ENV BUILD_DIR="$BUILD_DIR"
ENV LIB_DIR="$LIB_DIR"
-WORKDIR /datafed
+WORKDIR ${BUILD_DIR}
+COPY --from=repo-build --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts ${DATAFED_DEPENDENCIES_ROOT}/scripts
COPY --from=repo-build /usr/lib/x86_64-linux-gnu/libboost_program_options.so /usr/lib/x86_64-linux-gnu/libboost_program_options.so
COPY --from=repo-build /usr/lib/x86_64-linux-gnu/libboost_filesystem.so /usr/lib/x86_64-linux-gnu/libboost_filesystem.so
@@ -78,16 +88,17 @@ RUN ldconfig
USER datafed
-COPY --chown=datafed:root ./scripts/generate_datafed.sh ${DATAFED_DIR}/scripts/generate_datafed.sh
-COPY --chown=datafed:root ./scripts/generate_repo_config.sh ${DATAFED_DIR}/scripts/generate_repo_config.sh
-COPY --chown=datafed:root ./scripts/install_repo.sh ${DATAFED_DIR}/scripts/install_repo.sh
-COPY --chown=datafed:root ./cmake/Version.cmake ${DATAFED_DIR}/cmake/Version.cmake
-COPY --chown=datafed:root ./repository/docker/entrypoint_repo.sh ${BUILD_DIR}/repository/entrypoint.sh
+COPY --chown=datafed:root ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/generate_datafed.sh
+COPY --chown=datafed:root ./scripts/generate_repo_config.sh ${BUILD_DIR}/scripts/generate_repo_config.sh
+COPY --chown=datafed:root ./scripts/install_repo.sh ${BUILD_DIR}/scripts/install_repo.sh
+COPY --chown=datafed:root ./cmake/Version.cmake ${BUILD_DIR}/cmake/Version.cmake
+COPY --chown=datafed:root ./repository/docker/entrypoint_repo.sh ${BUILD_DIR}/repository/docker/entrypoint.sh
COPY --from=repo-build --chown=datafed:root ${DATAFED_INSTALL_PATH}/repo/datafed-repo ${DATAFED_INSTALL_PATH}/repo/datafed-repo
USER root
-RUN chown -R datafed:root /datafed
-RUN chown -R datafed:root /opt
+RUN chown -R datafed:root /datafed /opt \
+ ${DATAFED_DEPENDENCIES_ROOT} && \
+ find ${DATAFED_DEPENDENCIES_ROOT} -type d -exec chmod 0774 {} +
-ENTRYPOINT [ "/datafed/source/repository/entrypoint.sh", "/opt/datafed/repo/datafed-repo","--cfg","/opt/datafed/repo/datafed-repo.cfg"]
+ENTRYPOINT [ "/datafed/source/repository/docker/entrypoint.sh", "/opt/datafed/repo/datafed-repo","--cfg","/opt/datafed/repo/datafed-repo.cfg"]
diff --git a/repository/docker/Dockerfile.gcs b/repository/docker/Dockerfile.gcs
index 06ba36cb4..80c0a8726 100644
--- a/repository/docker/Dockerfile.gcs
+++ b/repository/docker/Dockerfile.gcs
@@ -11,6 +11,7 @@ ARG NVM_DIR="$DATAFED_DIR/.nvm"
ARG NVM_INC="$DATAFED_DIR/.nvm/versions/node/v13.14.0/include/node"
ARG NVM_BIN="$DATAFED_DIR/.nvm/versions/node/v13.14.0/bin"
ARG LIB_DIR="/usr/local/lib"
+ARG DATAFED_DEPENDENCIES_ROOT="$BUILD_DIR/external/DataFedDependencies"
FROM ${DEPENDENCIES} AS dependencies
@@ -18,6 +19,7 @@ ARG DATAFED_DIR
ARG BUILD_DIR
ARG DATAFED_INSTALL_PATH
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
ARG LIB_DIR
ENV DATAFED_DEPENDENCIES_INSTALL_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH"
@@ -30,10 +32,14 @@ ENV BUILD_DIR="$BUILD_DIR"
ENV LIB_DIR="$LIB_DIR"
ENV DATAFED_GLOBUS_REPO_USER="datafed"
ENV DATAFED_DEFAULT_LOG_PATH="$DATAFED_INSTALL_PATH/logs"
+ENV DATAFED_DEPENDENCIES_ROOT="$DATAFED_DEPENDENCIES_ROOT"
+
+RUN mkdir -p ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+ mv ./scripts/dependency_versions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+ mv ./scripts/utils.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/ && \
+ mv ./scripts/generate_dependencies_config.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/
-COPY --chown=datafed:root ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
COPY --chown=datafed:root ./scripts/generate_authz_config.sh ${BUILD_DIR}/scripts/generate_authz_config.sh
-COPY --chown=datafed:root ./scripts/utils.sh ${BUILD_DIR}/scripts/utils.sh
COPY --chown=datafed:root ./CMakeLists.txt ${BUILD_DIR}
COPY --chown=datafed:root ./cmake ${BUILD_DIR}/cmake
COPY --chown=datafed:root ./repository/CMakeLists.txt ${BUILD_DIR}/repository/CMakeLists.txt
@@ -53,7 +59,8 @@ COPY --chown=datafed:root ./scripts/generate_datafed.sh ${BUILD_DI
RUN ${BUILD_DIR}/scripts/generate_datafed.sh
RUN ${BUILD_DIR}/scripts/generate_gsi-authz_config.sh
# Don't build with syslog the container does not seem to support syslog
-RUN ${BUILD_DIR}/scripts/generate_authz_config.sh && \
+RUN ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh && \
+ ${BUILD_DIR}/scripts/generate_authz_config.sh && \
${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
-DBUILD_REPO_SERVER=False \
-DBUILD_AUTHZ_TESTS=True \
@@ -64,7 +71,7 @@ RUN ${BUILD_DIR}/scripts/generate_authz_config.sh && \
-DBUILD_DOCS=False \
-DBUILD_PYTHON_CLIENT=False \
-DBUILD_FOXX=False
-RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build -j 8
+RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build -j 8
RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build --target install
FROM ${GCS_IMAGE}
@@ -73,6 +80,7 @@ ARG DATAFED_DIR
ARG BUILD_DIR
ARG DATAFED_INSTALL_PATH
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
ARG LIB_DIR
ENV DATAFED_DEPENDENCIES_INSTALL_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH"
@@ -85,6 +93,7 @@ ENV BUILD_DIR="$BUILD_DIR"
ENV LIB_DIR="$LIB_DIR"
ENV DATAFED_GLOBUS_REPO_USER="datafed"
ENV DATAFED_DEFAULT_LOG_PATH="$DATAFED_INSTALL_PATH/logs"
+ENV DATAFED_DEPENDENCIES_ROOT="$DATAFED_DEPENDENCIES_ROOT"
# Value needed so tput command doesn't crash
ENV TERM="xterm"
ENV DATAFED_GCS_IP=""
@@ -104,6 +113,7 @@ COPY --from=dependencies ${DATAFED_DEPENDENCIES_INSTALL_PATH}/lib64/libcrypto.so
COPY --from=dependencies ${DATAFED_DEPENDENCIES_INSTALL_PATH}/lib64/libcrypto.so.4 ${DATAFED_DEPENDENCIES_INSTALL_PATH}/lib64/libcrypto.so.4
COPY --from=dependencies ${DATAFED_DEPENDENCIES_INSTALL_PATH}/python ${DATAFED_DEPENDENCIES_INSTALL_PATH}/python
COPY --from=dependencies ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/python3.9 ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/python3.9
+COPY --from=dependencies --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts/ ${DATAFED_DEPENDENCIES_ROOT}/scripts/
# Needed for tests
COPY --from=dependencies ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake
@@ -121,12 +131,7 @@ RUN adduser --disabled-password --gecos "" datafed
RUN echo "#!/bin/bash\n\$@" > /usr/bin/sudo && chmod +x /usr/bin/sudo
-COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
-COPY ./scripts/utils.sh ${BUILD_DIR}/scripts/utils.sh
-
-COPY --chown=datafed:root ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
COPY --chown=datafed:root ./scripts/generate_authz_config.sh ${BUILD_DIR}/scripts/generate_authz_config.sh
-COPY --chown=datafed:root ./scripts/utils.sh ${BUILD_DIR}/scripts/utils.sh
COPY --chown=datafed:root ./CMakeLists.txt ${BUILD_DIR}
COPY --chown=datafed:root ./cmake ${BUILD_DIR}/cmake
COPY --chown=datafed:root ./repository/CMakeLists.txt ${BUILD_DIR}/repository/CMakeLists.txt
diff --git a/repository/docker/entrypoint_authz.sh b/repository/docker/entrypoint_authz.sh
index 18f4d1dbb..aace86497 100755
--- a/repository/docker/entrypoint_authz.sh
+++ b/repository/docker/entrypoint_authz.sh
@@ -194,7 +194,7 @@ fi
su -m -c "${BUILD_DIR}/scripts/globus/setup_globus.sh" datafed
source "${DATAFED_PYTHON_ENV}/bin/activate"
-source "${BUILD_DIR}/scripts/dependency_versions.sh"
+source "${BUILD_DIR}/external/DataFedDependencies/scripts/dependency_versions.sh"
# Must be passed in directly
GCS_CLI_ENDPOINT_ID="$GCS_CLI_ENDPOINT_ID" \
diff --git a/scripts/compose_build_images.sh b/scripts/compose_build_images.sh
index 43ab49fa0..1536367b7 100755
--- a/scripts/compose_build_images.sh
+++ b/scripts/compose_build_images.sh
@@ -101,7 +101,7 @@ if [[ "$BUILD_METADATA" == "TRUE" ]]; then
fi
if [[ "$BUILD_REPO" == "TRUE" ]]; then
- source "${PROJECT_ROOT}/scripts/dependency_versions.sh"
+ source "${PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
cd "${PROJECT_ROOT}/external/globus-connect-server-deploy/docker"
git checkout "$DATAFED_GCS_SUBMODULE_VERSION"
docker build --progress plain --tag "gcs-ubuntu-base:latest" - <"./docker-files/Dockerfile.debian-12"
diff --git a/scripts/compose_cleanup_globus_files.sh b/scripts/compose_cleanup_globus_files.sh
index ae8c3e105..30b35956a 100755
--- a/scripts/compose_cleanup_globus_files.sh
+++ b/scripts/compose_cleanup_globus_files.sh
@@ -3,7 +3,7 @@ SCRIPT=$(realpath "$0")
SOURCE=$(dirname "$SCRIPT")
PROJECT_ROOT=$(realpath "${SOURCE}/..")
-source "${SOURCE}/dependency_versions.sh"
+source "${PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
# This script should be run after generating the .env file as it will pull
# values from the .env file
diff --git a/scripts/compose_generate_globus_files.sh b/scripts/compose_generate_globus_files.sh
index 9af8903bb..85d8fafd6 100755
--- a/scripts/compose_generate_globus_files.sh
+++ b/scripts/compose_generate_globus_files.sh
@@ -3,7 +3,7 @@ SCRIPT=$(realpath "$0")
SOURCE=$(dirname "$SCRIPT")
PROJECT_ROOT=$(realpath "${SOURCE}/..")
-source "${SOURCE}/dependency_versions.sh"
+source "${PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
# This script should be run after generating the .env file as it will pull
# values from the .env file
diff --git a/scripts/dependency_install_functions.sh b/scripts/dependency_install_functions.sh
deleted file mode 100644
index 0d75e4c11..000000000
--- a/scripts/dependency_install_functions.sh
+++ /dev/null
@@ -1,727 +0,0 @@
-#!/bin/bash
-SCRIPT=$(realpath "$BASH_SOURCE[0]")
-SOURCE=$(dirname "$SCRIPT")
-source "${SOURCE}/dependency_versions.sh"
-PROJECT_ROOT=$(realpath "${SOURCE}/..")
-source "${SOURCE}/utils.sh"
-
-# Ensures the shell returns the exit code of the first failed command in a pipeline
-set -o pipefail
-
-sudo_command
-# these are the dependencies to be installed by apt
-export apt_file_path="${PROJECT_ROOT}/tmp/apt_deps"
-export pip_file_path="${PROJECT_ROOT}/tmp/pip_deps"
-# these are the dependencies to be installed and built via cmake
-export ext_file_path="${PROJECT_ROOT}/tmp/ext_deps"
-
-if [ ! -d "${PROJECT_ROOT}/tmp" ]; then
- mkdir -p "${PROJECT_ROOT}/tmp"
-fi
-
-if [ ! -e "${PROJECT_ROOT}/config/datafed.sh" ]; then
- echo "Please run generate_datafed.sh before installing dependencies"
- exit 1
-fi
-
-source "${PROJECT_ROOT}/config/datafed.sh"
-
-if [ ! -e "$DATAFED_DEPENDENCIES_INSTALL_PATH" ] || [ ! -d "$DATAFED_DEPENDENCIES_INSTALL_PATH" ]; then
- parent_dir=$(dirname "${DATAFED_DEPENDENCIES_INSTALL_PATH}")
- if [ -w "${parent_dir}" ]; then
- mkdir -p "$DATAFED_DEPENDENCIES_INSTALL_PATH"
- else
- echo "Sudo command $SUDO_CMD"
- "$SUDO_CMD" mkdir -p "$DATAFED_DEPENDENCIES_INSTALL_PATH"
- user=$(whoami)
- "$SUDO_CMD" chown "$user" "$DATAFED_DEPENDENCIES_INSTALL_PATH"
- fi
-fi
-
-# NOTE - LD_LIBRARY_PATH must not be a variable for this to work. You cannot
-# replace ! -v LD_LIBRARY_PATH with ! -v ${LD_LIBRARY_PATH} because this is
-# checking if the variable even exists.
-if [[ ! -v LD_LIBRARY_PATH ]]; then
- LD_LIBRARY_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/lib"
-else
- if [[ -n "$LD_LIBRARY_PATH" ]]; then
- LD_LIBRARY_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/lib:$LD_LIBRARY_PATH"
- else
- LD_LIBRARY_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/lib"
- fi
-fi
-
-# This if statement is to make sure PKG_CONFIG_PATH is defined for cmake, and
-# that it contains the necessary paths from the datafed depedencies install path
-# to compile other dependencies
-if [[ ! -v PKG_CONFIG_PATH ]]; then
- PKG_CONFIG_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/lib/pkgconfig"
-else
- if [[ -n "$PKG_CONFIG_PATH" ]]; then
- PKG_CONFIG_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/lib/pkgconfig:$PKG_CONFIG_PATH"
- else
- PKG_CONFIG_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/lib/pkgconfig"
- fi
-fi
-
-# WARNING: overwriting PATH can be very dangerous
-# In Docker builds this must follow the pattern:
-# PATH=":$PATH"
-# Curly braces around PATH, like ${PATH} may pull from the host's PATH
-# Please see StackOverflow answer: https://stackoverflow.com/a/38742545
-if [[ ! -v PATH ]]; then
- PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/bin"
-else
- if [[ -n "$PATH" ]]; then
- PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/bin:$PATH"
- else
- PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH/bin"
- fi
-fi
-
-# Function to clean up multiple installation flag files with a given prefix
-clean_install_flags() {
- local install_path="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- local prefix="$1" # The first argument is now the prefix
-
- # Validate that a prefix was provided
- if [ -z "$prefix" ]; then
- echo "Error: No prefix provided for clean_install_flags." >&2
- return 1 # Indicate an error
- fi
-
- # Count files matching the pattern
- local count=$(find "${install_path}" -maxdepth 1 -type f -name "${prefix}*" 2>/dev/null | wc -l)
-
- if [ "${count}" -gt 1 ]; then
- echo "Warning: Found ${count} installation flag files with prefix '${prefix}'. Cleaning up..."
- # Remove all files matching the pattern
- find "${install_path}" -maxdepth 1 -type f -name "${prefix}*" -delete
- echo "Removed all existing installation flag files with prefix '${prefix}'."
- fi
-}
-
-install_python() {
- local original_dir=$(pwd)
-
- local PYTHON_FLAG_PREFIX=".python_installed-"
- clean_install_flags "$PYTHON_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${PYTHON_FLAG_PREFIX}${DATAFED_PYTHON_VERSION}" ]; then
- local original_dir=$(pwd)
-
- # Check if openssl is already installed, otherwise error since openssl is required
- local OPENSSL_FLAG_PREFIX=".openssl_installed-"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${OPENSSL_FLAG_PREFIX}${DATAFED_OPENSSL}" ]; then
- echo "You must first install openssl before installing python"
- exit 1
- fi
-
- cd "${PROJECT_ROOT}"
- "$SUDO_CMD" apt update
- "$SUDO_CMD" apt install -y build-essential libreadline-dev zlib1g-dev libffi-dev wget libsqlite3-dev
-
- wget "https://www.python.org/ftp/python/${DATAFED_PYTHON_VERSION_FULL}/Python-${DATAFED_PYTHON_VERSION_FULL}.tgz"
- tar -xf "Python-${DATAFED_PYTHON_VERSION_FULL}.tgz"
- cd "Python-${DATAFED_PYTHON_VERSION_FULL}"
-
- export CPPFLAGS="-I${DATAFED_DEPENDENCIES_INSTALL_PATH}/include $CPPFLAGS"
- export LDFLAGS="-L${DATAFED_DEPENDENCIES_INSTALL_PATH}/lib -Wl,-rpath,${DATAFED_DEPENDENCIES_INSTALL_PATH}/lib $LDFLAGS"
- ./configure --prefix="${DATAFED_PYTHON_DEPENDENCIES_DIR}" --with-openssl="${DATAFED_DEPENDENCIES_INSTALL_PATH}" --with-openssl-rpath=auto --enable-loadable-sqlite-extensions
- make -j$(nproc)
- make altinstall
-
- mkdir -p "${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin"
- # Delete link if it exists
- rm -rf "${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/python${DATAFED_PYTHON_VERSION}"
- ln -s "${DATAFED_PYTHON_DEPENDENCIES_DIR}/bin/python${DATAFED_PYTHON_VERSION}" "${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/python${DATAFED_PYTHON_VERSION}"
- export PYTHON="${DATAFED_PYTHON_DEPENDENCIES_DIR}/bin/python${DATAFED_PYTHON_VERSION}"
-
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${PYTHON_FLAG_PREFIX}${DATAFED_PYTHON_VERSION}"
- cd "$original_dir"
- else
- echo "Python already installed, skipping..."
- fi
-}
-
-init_python() {
-
- if [[ ! -v DATAFED_PYTHON_DEPENDENCIES_DIR ]]; then
- echo "DATAFED_PYTHON_DEPENDENCIES_DIR is not defined please make sure it is defined in the ${PROJECT_ROOT}/config/datafed.sh file."
- exit 1
- else
- if [[ -z "$DATAFED_PYTHON_DEPENDENCIES_DIR" ]]; then
- echo "DATAFED_PYTHON_DEPENDENCIES_DIR is defined but is empty please make sure it is defined in ${PROJECT_ROOT}/config/datafed.sh file."
- exit 1
- fi
- fi
-
- if [ ! -e "$DATAFED_DEPENDENCIES_INSTALL_PATH" ] || [ ! -d "$DATAFED_PYTHON_DEPENDENCIES_DIR" ]; then
- mkdir -p "$DATAFED_PYTHON_DEPENDENCIES_DIR"
- fi
-
- "python${DATAFED_PYTHON_VERSION}" -m venv "${DATAFED_PYTHON_ENV}"
- # Make sure that pip is installed and upgraded
- "python${DATAFED_PYTHON_VERSION}" -m ensurepip --upgrade
-}
-
-install_cmake() {
-
- local CMAKE_FLAG_PREFIX=".cmake_installed-"
- clean_install_flags "$CMAKE_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${CMAKE_FLAG_PREFIX}${DATAFED_CMAKE_VERSION}" ]; then
- # Version 3.20 of cmake and onwards starting using all lower case in the package names, previos versions use a
- # a capital L in the name.
- wget https://github.com/Kitware/CMake/releases/download/v${DATAFED_CMAKE_VERSION}/cmake-${DATAFED_CMAKE_VERSION}-linux-x86_64.tar.gz
- tar -xzvf "cmake-${DATAFED_CMAKE_VERSION}-linux-x86_64.tar.gz" >/dev/null 2>&1
- cp -r "cmake-${DATAFED_CMAKE_VERSION}-linux-x86_64/bin" "${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cp -r "cmake-${DATAFED_CMAKE_VERSION}-linux-x86_64/share" "${DATAFED_DEPENDENCIES_INSTALL_PATH}"
-
- # Cleanup
- rm -rf "cmake-${DATAFED_CMAKE_VERSION}-linux-x86_64"
- rm -rf "cmake-${DATAFED_CMAKE_VERSION}-linux-x86_64.tar.gz"
-
- # Mark cmake as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${CMAKE_FLAG_PREFIX}${DATAFED_CMAKE_VERSION}"
- fi
- # WARNING: overwriting PATH can be very dangerous
- # In Docker builds this must follow the pattern:
- # PATH=":$PATH"
- # Curly braces around PATH, like ${PATH} may pull from the host's PATH
- # Please see StackOverflow answer: https://stackoverflow.com/a/38742545
- export PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin:$PATH"
-}
-
-install_protobuf() {
- local PROTOBUF_FLAG_PREFIX=".protobuf_installed-"
- clean_install_flags "$PROTOBUF_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${PROTOBUF_FLAG_PREFIX}${DATAFED_PROTOBUF_VERSION}" ]; then
- local original_dir=$(pwd)
- cd "${PROJECT_ROOT}"
- if [ -d "${PROJECT_ROOT}/external/protobuf" ]; then
- # sudo required because of egg file
- "$SUDO_CMD" rm -rf "${PROJECT_ROOT}/external/protobuf"
- fi
- # Here we are using clone instead of submodule update, because submodule
- # requires the .git folder exist and the current folder be considered a repo
- # this creates problems in docker because each time a commit is made the
- # .git folder contents are changed causing a fresh rebuild of all containers
- git clone "https://github.com/protocolbuffers/protobuf.git" \
- "${PROJECT_ROOT}/external/protobuf"
-
- cd "${PROJECT_ROOT}/external/protobuf"
- git checkout "v${DATAFED_PROTOBUF_VERSION}"
- git submodule update --init --recursive
- # Build static library, cannot build shared library at same time apparently
- # there cannot be a shared libsodium file in the
- # DATAFED_DEPENDENCIES_INSTALL_PREFIX if you want to have everything static
- # libzmq picks up any shared file regardless of whether you have told it to
- # only use static libraries or not.
- # NOTE - static libraries must be built first
- cmake -S . -B build \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DBUILD_SHARED_LIBS=OFF \
- -Dprotobuf_BUILD_TESTS=OFF \
- -DABSL_PROPAGATE_CXX_STD=ON \
- -DCMAKE_INSTALL_PREFIX="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cmake --build build -j 8
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- cmake --build build --target install
- else
- "$SUDO_CMD" cmake --build build --target install
- fi
- # Build Shared library
- # Don't build shared, it messes up the static library linking because the
- # cmake file installed are not compatible
- # WARNING - static library will break if build with shared options on
-
- cd python
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH" PATH="$PATH" python${DATAFED_PYTHON_VERSION} -m pip install numpy tzdata
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH" PATH="$PATH" python${DATAFED_PYTHON_VERSION} setup.py build
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH" PATH="$PATH" python${DATAFED_PYTHON_VERSION} setup.py test
- # Because we have activaited a venv we don't want to use the --user flag
- # with the install command
- LD_LIBRARY_PATH="$LD_LIBRARY_PATH" PATH="$PATH" "python${DATAFED_PYTHON_VERSION}" setup.py install
- cd ../
- # Cleanup build file with root ownership
- if [ -f build/install_manifest.txt ]; then
- "$SUDO_CMD" rm build/install_manifest.txt
- fi
- cd "${PROJECT_ROOT}"
-
- # Mark protobuf as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${PROTOBUF_FLAG_PREFIX}${DATAFED_PROTOBUF_VERSION}"
- cd "$original_dir"
- fi
-}
-
-install_libsodium() {
- local LIBSODIUM_FLAG_PREFIX=".libsodium_installed-"
- clean_install_flags "$LIBSODIUM_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${LIBSODIUM_FLAG_PREFIX}${DATAFED_LIBSODIUM_VERSION}" ]; then
- local original_dir=$(pwd)
- if [ -d "${PROJECT_ROOT}/external/libsodium" ]; then
- # sudo required because of egg file
- "$SUDO_CMD" rm -rf "${PROJECT_ROOT}/external/libsodium"
- fi
- # Official documentation for libsodium indicates this is the preferred way to build libsodium.
- # Using the git repo directly results in build instability because of additional network calls when running
- # autogen.sh.
- wget "https://download.libsodium.org/libsodium/releases/libsodium-${DATAFED_LIBSODIUM_VERSION}.tar.gz" -P "${PROJECT_ROOT}/external"
- tar -xvzf "${PROJECT_ROOT}/external/libsodium-${DATAFED_LIBSODIUM_VERSION}.tar.gz" -C "${PROJECT_ROOT}/external/"
- cd "${PROJECT_ROOT}/external/libsodium-${DATAFED_LIBSODIUM_VERSION}"
- # Build static ONLY!!!!
- # Note if zmq detects a shared sodium library it will grab it no matter what
- # --enable-shared=no must be set here
- SODIUM_STATIC=1 ./configure --enable-static=yes --enable-shared=no --with-pic=yes --prefix="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- make -j 8
- make check
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- make install
- else
- "$SUDO_CMD" make install
- fi
-
- # Mark libsodium as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${LIBSODIUM_FLAG_PREFIX}${DATAFED_LIBSODIUM_VERSION}"
- cd "$original_dir"
- fi
-}
-
-install_libzmq() {
- local LIBZMQ_FLAG_PREFIX=".libzmq_installed-"
- clean_install_flags "$LIBZMQ_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${LIBZMQ_FLAG_PREFIX}${DATAFED_LIBZMQ_VERSION}" ]; then
- local original_dir=$(pwd)
- if [ -d "${PROJECT_ROOT}/external/libzmq" ]; then
- "$SUDO_CMD" rm -rf "${PROJECT_ROOT}/external/libzmq"
- fi
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.libsodium_installed-${DATAFED_LIBSODIUM_VERSION}" ]; then
- echo "You must first install libsodium before installing libzmq"
- exit 1
- fi
- # Here we are using clone instead of submodule update, because submodule
- # requires the .git folder exist and the current folder be considered a repo
- # this creates problems in docker because each time a commit is made the
- # .git folder contents are changed causing a fresh rebuild of all containers
- git clone https://github.com/zeromq/libzmq.git "${PROJECT_ROOT}/external/libzmq"
- cd "${PROJECT_ROOT}/external/libzmq"
- git checkout "v${DATAFED_LIBZMQ_VERSION}"
- # Build static only
- cmake -S. -B build \
- -DBUILD_STATIC=ON \
- -DBUILD_SHARED_LIBS=OFF \
- -DBUILD_SHARED=OFF \
- -DWITH_LIBSODIUM_STATIC=ON \
- -DBUILD_TESTS=OFF \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DCMAKE_PREFIX_PATH="${DATAFED_DEPENDENCIES_INSTALL_PATH}/lib" \
- -DCMAKE_INSTALL_PREFIX="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cmake --build build -j 8
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- cmake --build build --target install
- else
- "$SUDO_CMD" cmake --build build --target install
- fi
-
- if [ -d "${PROJECT_ROOT}/external/cppzmq" ]; then
- # sudo required because of egg file
- "$SUDO_CMD" rm -rf "${PROJECT_ROOT}/external/cppzmq"
- fi
- git clone https://github.com/zeromq/cppzmq.git "${PROJECT_ROOT}/external/cppzmq"
- cd "${PROJECT_ROOT}/external/cppzmq"
- git checkout v"${DATAFED_LIB_ZMQCPP_VERSION}"
- # Will will not build the unit tests because there are not enough controls
- # to link to the correct static library.
- # NOTE - static libraries must be built first
- cmake -S. -B build \
- -DBUILD_SHARED_LIBS=OFF \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DCPPZMQ_BUILD_TESTS=OFF \
- -DCMAKE_INSTALL_PREFIX="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cmake --build build -j 8
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- cmake --build build --target install
- else
- "$SUDO_CMD" cmake --build build --target install
- fi
-
- cd "$original_dir"
- # Mark libzmq as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${LIBZMQ_FLAG_PREFIX}${DATAFED_LIBZMQ_VERSION}"
- fi
-}
-
-install_nlohmann_json() {
- local NLOHMANN_FLAG_PREFIX=".nlohmann_json_installed-"
- clean_install_flags "$NLOHMANN_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NLOHMANN_FLAG_PREFIX}${DATAFED_NLOHMANN_JSON_VERSION}" ]; then
- local original_dir=$(pwd)
- if [ -d "${PROJECT_ROOT}/external/json" ]; then
- "$SUDO_CMD" rm -rf "${PROJECT_ROOT}/external/json"
- fi
- git clone https://github.com/nlohmann/json.git "${PROJECT_ROOT}/external/json"
- cd "${PROJECT_ROOT}/external/json"
- git checkout v${DATAFED_NLOHMANN_JSON_VERSION}
- echo "FILE STRUCTURE $(ls)"
- # Build static
- cmake -S . -B build \
- -DBUILD_SHARED_LIBS=OFF \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DCMAKE_INSTALL_PREFIX="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cmake --build build -j 8
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- cmake --build build --target install
- else
- "$SUDO_CMD" cmake --build build --target install
- fi
- # Build shared
- cmake -S . -B build \
- -DBUILD_SHARED_LIBS=ON \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DCMAKE_INSTALL_PREFIX="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cmake --build build -j 8
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- cmake --build build --target install
- else
- "$SUDO_CMD" cmake --build build --target install
- fi
-
- # Mark nlohmann_json as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NLOHMANN_FLAG_PREFIX}${DATAFED_NLOHMANN_JSON_VERSION}"
- cd "$original_dir"
- fi
-}
-
-install_json_schema_validator() {
- local NLOHMANN_SCHEMA_FLAG_PREFIX=".nlohmann_schema_validator_installed-"
- clean_install_flags "$NLOHMANN_SCHEMA_FLAG_PREFIX"
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NLOHMANN_SCHEMA_FLAG_PREFIX}${DATAFED_JSON_SCHEMA_VALIDATOR_VERSION}" ]; then
- local original_dir=$(pwd)
- if [ -d "${PROJECT_ROOT}/external/json-schema-validator" ]; then
- "$SUDO_CMD" rm -rf "${PROJECT_ROOT}/external/json-schema-validator"
- fi
- git clone https://github.com/pboettch/json-schema-validator "${PROJECT_ROOT}/external/json-schema-validator"
- cd "${PROJECT_ROOT}/external/json-schema-validator"
- git checkout ${DATAFED_JSON_SCHEMA_VALIDATOR_VERSION}
- # Build static
- cmake -S . -B build \
- -DBUILD_SHARED_LIBS=OFF \
- -DCMAKE_POSITION_INDEPENDENT_CODE=ON \
- -DCMAKE_INSTALL_PREFIX="${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- cmake --build build -j 8
- if [ -w "${DATAFED_DEPENDENCIES_INSTALL_PATH}" ]; then
- cmake --build build --target install
- else
- "$SUDO_CMD" cmake --build build --target install
- fi
- # WARNING building shared library will overwrite cmake file for static
- # library, does not appear to support both targets at the same time, similar
- # to protobuf
- # Mark json-schema-validator as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NLOHMANN_SCHEMA_FLAG_PREFIX}${DATAFED_JSON_SCHEMA_VALIDATOR_VERSION}"
- cd "$original_dir"
- fi
-}
-
-install_gcs() {
- local GCS_FLAG_PREFIX=".gcs_installed-"
- clean_install_flags "$GCS_FLAG_PREFIX"
- if [ ! -e "${GCS_FLAG_PREFIX}${DATAFED_GLOBUS_VERSION}" ]; then
- "$SUDO_CMD" apt update
- "$SUDO_CMD" apt install -y curl git gnupg
- curl -LOs \
- "https://downloads.globus.org/globus-connect-server/stable/installers/repo/deb/globus-repo_${DATAFED_GLOBUS_VERSION}_all.deb"
- "$SUDO_CMD" dpkg -i "globus-repo_${DATAFED_GLOBUS_VERSION}_all.deb"
- "$SUDO_CMD" apt-key add /usr/share/globus-repo/RPM-GPG-KEY-Globus
- # Need a second update command after adding the globus GPG key
- "$SUDO_CMD" apt update
- "$SUDO_CMD" apt-get install globus-connect-server54 -y
-
- # Mark gcs as installed
- touch "${GCS_FLAG_PREFIX}${DATAFED_GLOBUS_VERSION}"
- fi
-}
-
-install_nvm() {
- local NVM_FLAG_PREFIX=".nvm_installed-"
- clean_install_flags "$NVM_FLAG_PREFIX"
- # By default this will place NVM in $HOME/.nvm
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NVM_FLAG_PREFIX}${DATAFED_NVM_VERSION}" ]; then
- # By setting NVM_DIR beforehand when the scirpt is run it
- # will use it to set the install path
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
- mkdir -p "${NVM_DIR}"
- # --fail makes curl return a non-zero exit code for HTTP errors like 404 or 500.
- curl --fail -o- "https://raw.githubusercontent.com/nvm-sh/nvm/${DATAFED_NVM_VERSION}/install.sh" | bash
- # Mark nvm as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NVM_FLAG_PREFIX}${DATAFED_NVM_VERSION}"
- else
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
- fi
-}
-
-install_ws_node_packages() {
-
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.nvm_installed-${DATAFED_NVM_VERSION}" ]; then
- echo "You must first install nvm before installing ws node packages."
- exit 1
- fi
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.node_installed-${DATAFED_NODE_VERSION}" ]; then
- echo "You must first install node before installing ws node packages"
- exit 1
- fi
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.cmake_installed-${DATAFED_CMAKE_VERSION}" ]; then
- echo "You must first install cmake before installing ws node packages"
- exit 1
- fi
-
- # Configure the package.json.in file -> package.json
- cmake -P "${PROJECT_ROOT}/cmake/Web.cmake"
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
- export NODE_VERSION="$DATAFED_NODE_VERSION"
- "$NVM_DIR/nvm-exec" npm --prefix "${PROJECT_ROOT}/web" install "${PROJECT_ROOT}/web"
-}
-
-install_node() {
- local NODE_FLAG_PREFIX=".node_installed-"
- clean_install_flags "$NODE_FLAG_PREFIX"
- # Installs node with nvm into $NVM_DIR
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NODE_FLAG_PREFIX}${DATAFED_NODE_VERSION}" ]; then
- local original_dir=$(pwd)
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.nvm_installed-${DATAFED_NVM_VERSION}" ]; then
- echo "You must first install nvm before installing node."
- exit 1
- fi
-
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
-
- [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" # This loads nvm
- nvm install "$DATAFED_NODE_VERSION"
- nvm use "$DATAFED_NODE_VERSION"
- # Mark node as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${NODE_FLAG_PREFIX}${DATAFED_NODE_VERSION}"
- cd "$original_dir"
- else
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
- # Used by nvm
- export NODE_VERSION="$DATAFED_NODE_VERSION"
- [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" # This loads nvm
- nvm use "$DATAFED_NODE_VERSION"
- fi
- echo "NODE VERSION USED/INSTALLED $DATAFED_NODE_VERSION"
-}
-
-install_foxx_cli() {
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.nvm_installed-${DATAFED_NVM_VERSION}" ]; then
- echo "You must first install nvm before installing foxx_cli."
- exit 1
- fi
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/.node_installed-${DATAFED_NODE_VERSION}" ]; then
- echo "You must first install node before installing foxx_cli"
- exit 1
- fi
- local FOXX_FLAG_PREFIX=".foxx_cli_installed-"
- clean_install_flags "$FOXX_FLAG_PREFIX"
- # Installs foxx-cli globally under the dependencies npm prefix
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${FOXX_FLAG_PREFIX}" ]; then
- local original_dir=$(pwd)
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
- [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" # This loads nvm
- export NODE_VERSION="$DATAFED_NODE_VERSION"
- "$NVM_DIR/nvm-exec" npm install --global foxx-cli --prefix "${DATAFED_DEPENDENCIES_INSTALL_PATH}/npm"
- # Mark foxx_cli as installed
- touch "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${FOXX_FLAG_PREFIX}"
- cd "$original_dir"
- else
- export NVM_DIR="${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm"
- [ -s "$NVM_DIR/nvm.sh" ] && . "$NVM_DIR/nvm.sh" # This loads nvm
- export NODE_VERSION="$DATAFED_NODE_VERSION"
-
- # check that foxx can be found
- if [ ! -d "${DATAFED_DEPENDENCIES_INSTALL_PATH}/npm" ]; then
- echo "Something went wrong Foxx is supposed to be installed i.e. "
- echo "(${DATAFED_DEPENDENCIES_INSTALL_PATH}/.foxx_cli_installed) "
- echo "exists. But there is no npm folder in: ${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- exit 1
- fi
- if [ ! -e "${DATAFED_DEPENDENCIES_INSTALL_PATH}/npm/bin/foxx" ]; then
- echo "Something went wrong Foxx is supposed to be installed i.e. "
- echo "(${DATAFED_DEPENDENCIES_INSTALL_PATH}/.foxx_cli_installed) "
- echo "exists. But there is no foxx binary here: ${DATAFED_DEPENDENCIES_INSTALL_PATH}/npm/bin/foxx"
- exit 1
- fi
- fi
-}
-
-install_arangodb() {
- curl -OL https://download.arangodb.com/arangodb312/DEBIAN/Release.key
- "$SUDO_CMD" apt-key add - >"$apt_file_path"
- echo -n "${pip_packages[@]} " >>"$pip_file_path"
- echo -n "${externals[@]} " >>"$ext_file_path"
- local_UNIFY=true
- ;;
- *)
- echo "Invalid Argument"
- ;;
- esac
-fi
-
-if [[ $local_UNIFY = false ]]; then
- sudo_command
- "$SUDO_CMD" apt-get update
- "$SUDO_CMD" dpkg --configure -a
- "$SUDO_CMD" apt-get install -y "${packages[@]}"
-
- for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
- done
-
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- "python${DATAFED_PYTHON_VERSION}" -m pip install "${pip_packages[@]}"
-fi
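
All of the install functions removed above share one idempotency pattern: after a successful install they touch a hidden flag file named after the dependency and its pinned version under DATAFED_DEPENDENCIES_INSTALL_PATH, and stale flags from other versions are cleaned first so that bumping a version forces a reinstall. A minimal sketch of that pattern follows; install_example_dep and EXAMPLE_DEP_VERSION are illustrative names, not part of the project.

    # Sketch only; assumes DATAFED_DEPENDENCIES_INSTALL_PATH and EXAMPLE_DEP_VERSION are set.
    install_example_dep() {
      local FLAG_PREFIX=".example_dep_installed-"
      local FLAG="${DATAFED_DEPENDENCIES_INSTALL_PATH}/${FLAG_PREFIX}${EXAMPLE_DEP_VERSION}"
      # Drop flags belonging to other versions so a changed pin triggers a reinstall.
      for old in "${DATAFED_DEPENDENCIES_INSTALL_PATH}/${FLAG_PREFIX}"*; do
        if [ -e "$old" ] && [ "$old" != "$FLAG" ]; then rm -f "$old"; fi
      done
      if [ ! -e "$FLAG" ]; then
        # ... fetch, build, and install the dependency here ...
        touch "$FLAG" # mark this exact version as installed
      fi
    }
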
diff --git a/scripts/install_client_dependencies.sh b/scripts/install_client_dependencies.sh
deleted file mode 100755
index aa3fdc033..000000000
--- a/scripts/install_client_dependencies.sh
+++ /dev/null
@@ -1,32 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/..")
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-packages=("pkg-config")
-pip_packages=("setuptools")
-externals=("cmake" "libopenssl" "python" "protobuf")
-
-sudo_command
-# This script will install all of the dependencies needed by DataFed 1.0
-"$SUDO_CMD" apt-get update
-"$SUDO_CMD" dpkg --configure -a
-"$SUDO_CMD" apt-get install -y "${packages[@]}"
-
-for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
-done
-
-init_python
-source "${DATAFED_PYTHON_ENV}/bin/activate"
-"python${DATAFED_PYTHON_VERSION}" -m pip install "${pip_packages[@]}"
-"python${DATAFED_PYTHON_VERSION}" -m pip install -r "${PROJECT_ROOT}/python/datafed_pkg/requirements.txt"
-
-cd ~
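
Like the other component scripts, the client script installs its pip packages into the dedicated virtual environment created by init_python (DATAFED_PYTHON_ENV), not into the system Python, so anything that later needs those packages has to activate that environment first. Roughly, assuming the generated DataFed config has defined both variables:

    # DATAFED_PYTHON_ENV and DATAFED_PYTHON_VERSION come from the generated DataFed config.
    source "${DATAFED_PYTHON_ENV}/bin/activate"
    "python${DATAFED_PYTHON_VERSION}" -m pip list   # setuptools plus the client requirements
    deactivate
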
diff --git a/scripts/install_core_dependencies.sh b/scripts/install_core_dependencies.sh
deleted file mode 100755
index 039123b02..000000000
--- a/scripts/install_core_dependencies.sh
+++ /dev/null
@@ -1,54 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-packages=("libtool" "build-essential" "g++" "gcc" "make" "libboost-all-dev" "libboost-program-options-dev" "pkg-config" "autoconf" "automake" "unzip" "wget" "rapidjson-dev" "libkrb5-dev" "git")
-pip_packages=("setuptools")
-# NOTE the order matters here
-externals=("cmake" "libopenssl" "python" "nlohmann_json" "json_schema_validator" "protobuf"
- "libsodium" "libzmq" "zlib" "libcurl")
-
-local_UNIFY=false
-
-if [ $# -eq 1 ]; then
- case "$1" in
- -h | --help)
- # If -h or --help is provided, print help
- echo "Usage: $0 [-h|--help] [unify]"
- ;;
- unify)
- # If 'unify' is provided, print the packages
- # The extra space is necessary to not conflict with the other install scripts
- echo -n "${packages[@]} " >>"$apt_file_path"
- echo -n "${externals[@]} " >>"$ext_file_path"
- echo -n "${pip_packages[@]} " >>"$pip_file_path"
- local_UNIFY=true
- ;;
- *)
- echo "Invalid Argument"
- ;;
- esac
-fi
-
-if [[ $local_UNIFY = false ]]; then
- sudo_command
- "$SUDO_CMD" apt-get update
- "$SUDO_CMD" dpkg --configure -a
- "$SUDO_CMD" apt-get install -y "${packages[@]}"
-
- for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
- done
-
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- "python${DATAFED_PYTHON_VERSION}" -m pip install "${pip_packages[@]}"
-fi
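
The "unify" branch above installed nothing; it only appended this component's apt, pip, and external package names to three accumulator files that install_dependencies.sh (deleted below) later read, deduplicated, and installed in one pass. Roughly how the removed scripts were driven in unify mode; the /tmp paths here are placeholders for whatever the sourced setup scripts actually exported as apt_file_path, ext_file_path, and pip_file_path:

    # Illustration only; the real accumulator paths came from the sourced setup scripts.
    export apt_file_path=/tmp/datafed_apt_deps
    export ext_file_path=/tmp/datafed_ext_deps
    export pip_file_path=/tmp/datafed_pip_deps
    touch "$apt_file_path" "$ext_file_path" "$pip_file_path"

    ./scripts/install_core_dependencies.sh unify   # appends its lists, installs nothing
    ./scripts/install_repo_dependencies.sh unify

    cat "$apt_file_path"   # one space-separated line of apt packages from both components
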
diff --git a/scripts/install_dependencies.sh b/scripts/install_dependencies.sh
deleted file mode 100755
index 7c2fbf511..000000000
--- a/scripts/install_dependencies.sh
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-source "${SOURCE}/dependency_versions.sh"
-
-Help() {
- echo $(basename "$0")" Will install all datafed dependencies"
- echo
- echo "Syntax: "$(basename "$0")" [-h|a|w|c|r]"
- echo "options:"
- echo "-h, --help Print this help message"
- echo "-a, --disable-arango-deps-install Don't install arango"
- echo "-w, --disable-web-deps-install Don't install web deps"
- echo "-c, --disable-core-deps-install Don't install core deps"
- echo "-r, --disable-repo-deps-install Don't install repo deps"
- echo "-z, --disable-authz-deps-install Don't install authz deps"
-}
-
-local_INSTALL_ARANGO="TRUE"
-local_INSTALL_WEB="TRUE"
-local_INSTALL_CORE="TRUE"
-local_INSTALL_REPO="TRUE"
-local_INSTALL_AUTHZ="TRUE"
-
-VALID_ARGS=$(getopt -o hawcrz --long 'help',disable-arango-deps-install,disable-web-deps-install,disable-core-deps-install,disable-repo-deps-install,disable-authz-deps-install -- "$@")
-if [[ $? -ne 0 ]]; then
- exit 1
-fi
-eval set -- "$VALID_ARGS"
-while [ : ]; do
- case "$1" in
- -h | --help)
- Help
- exit 0
- ;;
- -a | --disable-arango-deps-install)
- local_INSTALL_ARANGO="FALSE"
- shift
- ;;
- -w | --disable-web-deps-install)
- local_INSTALL_WEB="FALSE"
- shift
- ;;
- -c | --disable-core-deps-install)
- local_INSTALL_CORE="FALSE"
- shift
- ;;
- -r | --disable-repo-deps-install)
- local_INSTALL_REPO="FALSE"
- shift
- ;;
- -z | --disable-authz-deps-install)
- local_INSTALL_AUTHZ="FALSE"
- shift
- ;;
- --)
- shift
- break
- ;;
- \?) # incorrect option
- echo "Error: Invalid option"
- exit
- ;;
- esac
-done
-
-sudo_command
-
-touch "$apt_file_path"
-touch "$ext_file_path"
-touch "$pip_file_path"
-
-# Defines SUDO_CMD which is empty if root
-# sudo path if exists
-# throws error otherwise
-
-"$SUDO_CMD" apt-get update
-"$SUDO_CMD" apt install -y wget git curl
-
-# This script will install all of the dependencies needed by DataFed 1.0
-"$SUDO_CMD" dpkg --configure -a
-
-if [ "$local_INSTALL_CORE" == "TRUE" ]; then
- "$SUDO_CMD" "$SOURCE/install_core_dependencies.sh" unify
-fi
-if [ "$local_INSTALL_REPO" == "TRUE" ]; then
- "$SUDO_CMD" "$SOURCE/install_repo_dependencies.sh" unify
-fi
-if [ "$local_INSTALL_WEB" == "TRUE" ]; then
- "$SUDO_CMD" "$SOURCE/install_ws_dependencies.sh" unify
-fi
-if [ "$local_INSTALL_AUTHZ" == "TRUE" ]; then
- "$SUDO_CMD" "$SOURCE/install_authz_dependencies.sh" unify
-fi
-"$SUDO_CMD" "$SOURCE/install_docs_dependencies.sh" unify
-
-all_packages=$(cat "$apt_file_path")
-IFS=' ' read -r -a all_packages_array <<<"$all_packages"
-deduplicated_packages_array=($(printf "%s\n" "${all_packages_array[@]}" | sort -u))
-echo "DEPENDENCIES (${deduplicated_packages_array[@]})"
-"$SUDO_CMD" apt-get install -y "${deduplicated_packages_array[@]}"
-
-all_pip_packages=$(cat "$pip_file_path")
-IFS=' ' read -ra all_pip_packages_array <<<"$all_pip_packages"
-if [ ${#all_pip_packages_array[@]} -gt 0 ]; then
- echo "DEPENDENCIES (${all_pip_packages_array[@]})"
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- "python${DATAFED_PYTHON_VERSION}" -m pip install "${all_pip_packages_array[@]}"
-fi
-
-all_externals=$(cat "$ext_file_path")
-IFS=' ' read -r -a all_externals_array <<<"$all_externals"
-# Deduplication must preserve order
-deduplicated_externals_array=($(echo "${all_externals_array[@]}" | awk '{ for (i=1;i<=NF;i++) if (!seen[$i]++) printf("%s ", $i) }'))
-echo "DEPENDENCIES (${deduplicated_externals_array[@]})"
-for ext in "${deduplicated_externals_array[@]}"; do
- echo "===== INSTALLING $ext ======"
- install_dep_by_name "$ext"
-done
-
-rm "$apt_file_path"
-rm "$ext_file_path"
-rm "$pip_file_path"
-
-if [ "$local_INSTALL_ARANGO" == "TRUE" ]; then
- install_arangodb
-fi
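
install_dependencies.sh deduplicated the two aggregated lists differently: apt package order is irrelevant, so sort -u is fine, but the externals had to keep their first-seen order because later entries (e.g. node, foxx) are built with earlier ones (e.g. cmake, nvm). The awk filter it used for that is worth seeing in isolation:

    # Order-preserving deduplication, as used for the externals list above.
    printf '%s\n' "cmake libopenssl python protobuf cmake python nvm node nvm" \
      | awk '{ for (i=1;i<=NF;i++) if (!seen[$i]++) printf("%s ", $i) }'
    # prints: cmake libopenssl python protobuf nvm node
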
diff --git a/scripts/install_docker_dependencies.sh b/scripts/install_docker_dependencies.sh
deleted file mode 100755
index c8713d7d2..000000000
--- a/scripts/install_docker_dependencies.sh
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-# This script will install all of the dependencies needed by DataFed 1.0
-sudo apt-get update
-sudo dpkg --configure -a
-
-sudo apt-get install \
- ca-certificates \
- curl \
- gnupg \
- lsb-release
-
-sudo mkdir -p /etc/apt/keyrings
-curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg
-
-echo \
- "deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.gpg] https://download.docker.com/linux/ubuntu \
- $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list >/dev/null
-
-sudo apt-get update
-sudo apt-get install docker-ce docker-ce-cli containerd.io docker-compose-plugin
diff --git a/scripts/install_docs_dependencies.sh b/scripts/install_docs_dependencies.sh
deleted file mode 100755
index ad27d83ce..000000000
--- a/scripts/install_docs_dependencies.sh
+++ /dev/null
@@ -1,53 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-packages=("g++" "gcc" "make" "pkg-config")
-pip_packages=("setuptools" "sphinx" "sphinx-rtd-theme" "sphinx-autoapi")
-externals=("cmake" "libopenssl" "python" "protobuf")
-
-local_UNIFY=false
-
-if [ $# -eq 1 ]; then
- case "$1" in
- -h | --help)
- # If -h or --help is provided, print help
- echo "Usage: $0 [-h|--help] [unify]"
- ;;
- unify)
- # If 'unify' is provided, print the packages
- # The extra space is necessary to not conflict with the other install scripts
- echo -n "${packages[@]} " >>"$apt_file_path"
- echo -n "${pip_packages[@]} " >>"$pip_file_path"
- echo -n "${externals[@]} " >>"$ext_file_path"
- local_UNIFY=true
- ;;
- *)
- echo "Invalid Argument"
- ;;
- esac
-fi
-
-if [[ $local_UNIFY = false ]]; then
- sudo_command
- "$SUDO_CMD" apt-get update
- "$SUDO_CMD" dpkg --configure -a
- "$SUDO_CMD" apt-get install -y "${packages[@]}"
-
- for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
- done
-
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- "python${DATAFED_PYTHON_VERSION}" -m pip install --upgrade pip
- "python${DATAFED_PYTHON_VERSION}" -m pip install "${pip_packages[@]}"
-fi
diff --git a/scripts/install_end_to_end_test_dependencies.sh b/scripts/install_end_to_end_test_dependencies.sh
deleted file mode 100755
index ff4d8f661..000000000
--- a/scripts/install_end_to_end_test_dependencies.sh
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-packages=("libtool" "build-essential" "g++" "gcc" "make" "libboost-all-dev" "pkg-config" "autoconf" "automake" "unzip" "libcurl4-openssl-dev" "wget"
- "rapidjson-dev" "libkrb5-dev" "git" "libssl-dev")
-
-pip_packages=("setuptools")
-# NOTE the order matters here
-externals=("cmake" "libopenssl" "python" "protobuf" "nvm" "node" "foxx")
-
-local_UNIFY=false
-
-if [ $# -eq 1 ]; then
- case "$1" in
- -h | --help)
- # If -h or --help is provided, print help
- echo "Usage: $0 [-h|--help] [unify]"
- ;;
- unify)
- # If 'unify' is provided, print the packages
- # The extra space is necessary to not conflict with the other install scripts
- echo -n "${packages[@]} " >>"$apt_file_path"
- echo -n "${externals[@]} " >>"$ext_file_path"
- echo -n "${pip_packages[@]} " >>"$pip_file_path"
- local_UNIFY=true
- ;;
- *)
- echo "Invalid Argument"
- ;;
- esac
-fi
-
-if [[ $local_UNIFY = false ]]; then
- sudo_command
- "$SUDO_CMD" apt-get update
- "$SUDO_CMD" dpkg --configure -a
- "$SUDO_CMD" apt-get install -y "${packages[@]}"
-
- for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
- done
-
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- "python${DATAFED_PYTHON_VERSION}" -m pip install --upgrade pip
- "python${DATAFED_PYTHON_VERSION}" -m pip install "${pip_packages[@]}"
-fi
diff --git a/scripts/install_foxx.sh b/scripts/install_foxx.sh
index edc98e6fb..f5790d1de 100755
--- a/scripts/install_foxx.sh
+++ b/scripts/install_foxx.sh
@@ -15,10 +15,10 @@ set -ef -o pipefail
SCRIPT=$(realpath "$0")
SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
-source "${PROJECT_ROOT}/config/datafed.sh"
-source "${SOURCE}/dependency_versions.sh"
-source "${SOURCE}/dependency_install_functions.sh"
+DATAFED_PROJECT_ROOT=$(realpath ${SOURCE}/..)
+source "${DATAFED_PROJECT_ROOT}/config/datafed.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
+source "${DATAFED_PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_install_functions.sh"
Help() {
echo "$(basename $0) Will set up a configuration file for the core server"
@@ -65,7 +65,7 @@ else
fi
if [ -z "${FOXX_MAJOR_API_VERSION}" ]; then
- local_FOXX_MAJOR_API_VERSION=$(cat ${PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
+ local_FOXX_MAJOR_API_VERSION=$(cat ${DATAFED_PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
else
local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION)
fi
@@ -177,7 +177,7 @@ else
"${local_ARANGOSH_SERVER_ENDPOINT_SCHEME}://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \
--server.password "${local_DATAFED_DATABASE_PASSWORD}" \
--server.username "${local_DATABASE_USER}" \
- --javascript.execute "${PROJECT_ROOT}/core/database/foxx/db_create.js"
+ --javascript.execute "${DATAFED_PROJECT_ROOT}/core/database/foxx/db_create.js"
# Give time for the database to be created
sleep 2
arangosh --server.endpoint "${local_ARANGOSH_SERVER_ENDPOINT_SCHEME}://${local_DATAFED_DATABASE_HOST}:${local_DATABASE_PORT}" \
@@ -258,7 +258,7 @@ echo "$local_DATAFED_DATABASE_PASSWORD" >"${PATH_TO_PASSWD_FILE}"
-p "${PATH_TO_PASSWD_FILE}" \
--database "${local_DATABASE_NAME}" \
"/api/${local_FOXX_MAJOR_API_VERSION}" \
- "${PROJECT_ROOT}/core/database/foxx/"
+ "${DATAFED_PROJECT_ROOT}/core/database/foxx/"
else
echo "DataFed Foxx Services have already been uploaded, replacing to ensure consisency"
# WARNING Foxx and arangosh arguments differ --server is used for Foxx not --server.endpoint
@@ -267,8 +267,8 @@ echo "$local_DATAFED_DATABASE_PASSWORD" >"${PATH_TO_PASSWD_FILE}"
-u "${local_DATABASE_USER}" \
-p "${PATH_TO_PASSWD_FILE}" \
--database "${local_DATABASE_NAME}" \
- "/api/${local_FOXX_MAJOR_API_VERSION}" "${PROJECT_ROOT}/core/database/foxx/"
- echo "foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${PROJECT_ROOT}/core/database/foxx"
+ "/api/${local_FOXX_MAJOR_API_VERSION}" "${DATAFED_PROJECT_ROOT}/core/database/foxx/"
+ echo "foxx replace -u ${local_DATABASE_USER} -p ${PATH_TO_PASSWD_FILE} --database ${local_DATABASE_NAME} /api/${local_FOXX_MAJOR_API_VERSION} ${DATAFED_PROJECT_ROOT}/core/database/foxx"
fi
rm "${PATH_TO_PASSWD_FILE}"
} || { # catch
diff --git a/scripts/install_gcs.sh b/scripts/install_gcs.sh
deleted file mode 100755
index dea94d8c7..000000000
--- a/scripts/install_gcs.sh
+++ /dev/null
@@ -1,10 +0,0 @@
-#!/bin/bash
-
-set -euf -o pipefail
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/..")
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-install_gcs
diff --git a/scripts/install_lego_and_certificates.sh b/scripts/install_lego_and_certificates.sh
index ed9b28fb3..44ed61c8d 100755
--- a/scripts/install_lego_and_certificates.sh
+++ b/scripts/install_lego_and_certificates.sh
@@ -6,7 +6,7 @@ SCRIPT=$(realpath "$0")
SOURCE=$(dirname "$SCRIPT")
PROJECT_ROOT=$(realpath "${SOURCE}/..")
source "${PROJECT_ROOT}/config/datafed.sh"
-source "${PROJECT_ROOT}/scripts/utils.sh"
+source "${PROJECT_ROOT}/external/DataFedDependencies/scripts/utils.sh"
Help() {
echo "$(basename $0) Will install lego and use Let's Encrypt to create certificates."
diff --git a/scripts/install_python_client_dependencies.sh b/scripts/install_python_client_dependencies.sh
deleted file mode 100755
index 0d2c2cd3c..000000000
--- a/scripts/install_python_client_dependencies.sh
+++ /dev/null
@@ -1,35 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/..")
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-sudo_command
-
-# This script will install all of the dependencies needed by DataFed 1.0
-"$SUDO_CMD" apt-get update
-"$SUDO_CMD" dpkg --configure -a
-"$SUDO_CMD" apt-get install -y libtool build-essential g++ gcc make libboost-all-dev \
- pkg-config autoconf automake unzip libcurl4-openssl-dev wget \
- rapidjson-dev libkrb5-dev git libssl-dev
-
-cd ~
-install_python
-install_cmake
-cd ~
-
-# Install cmake 3.17
-
-init_python
-source "${DATAFED_PYTHON_ENV}/bin/activate"
-"python${DATAFED_PYTHON_VERSION}" -m pip install --upgrade pip
-"python${DATAFED_PYTHON_VERSION}" -m pip install setuptools
-
-install_protobuf
-cd ~
diff --git a/scripts/install_repo_dependencies.sh b/scripts/install_repo_dependencies.sh
deleted file mode 100755
index e14acff4b..000000000
--- a/scripts/install_repo_dependencies.sh
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/..)
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-packages=("libtool" "wget" "build-essential" "g++" "gcc" "libboost-all-dev" "pkg-config" "autoconf" "automake" "make" "unzip" "git")
-pip_packages=("setuptools")
-externals=("cmake" "libopenssl" "python" "protobuf" "libsodium" "libzmq")
-
-local_UNIFY=false
-
-if [ $# -eq 1 ]; then
- case "$1" in
- -h | --help)
- # If -h or --help is provided, print help
- echo "Usage: $0 [-h|--help] [unify]"
- ;;
- unify)
- # If 'unify' is provided, print the packages
- # The extra space is necessary to not conflict with the other install scripts
- echo -n "${packages[@]} " >>"$apt_file_path"
- echo -n "${externals[@]} " >>"$ext_file_path"
- echo -n "${pip_packages[@]} " >>"$pip_file_path"
- local_UNIFY=true
- ;;
- *)
- # If any other argument is provided, install the packages
- echo "Invalid Argument"
- ;;
- esac
-fi
-
-sudo_command
-
-if [[ $local_UNIFY = false ]]; then
- "$SUDO_CMD" apt-get update
- "$SUDO_CMD" dpkg --configure -a
- "$SUDO_CMD" apt-get install -y "${packages[@]}"
-
- for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
- done
-
- init_python
- source "${DATAFED_PYTHON_ENV}/bin/activate"
- "python${DATAFED_PYTHON_VERSION}" -m pip install --upgrade pip
- "python${DATAFED_PYTHON_VERSION}" -m pip install "${pip_packages[@]}"
-fi
diff --git a/scripts/install_ws.sh b/scripts/install_ws.sh
index 247e26bb2..f5a7750fc 100755
--- a/scripts/install_ws.sh
+++ b/scripts/install_ws.sh
@@ -6,8 +6,8 @@ SCRIPT=$(realpath "$0")
SOURCE=$(dirname "$SCRIPT")
PROJECT_ROOT=$(realpath "${SOURCE}/..")
source "${PROJECT_ROOT}/config/datafed.sh"
-source "${SOURCE}/dependency_versions.sh"
-source "${SOURCE}/dependency_install_functions.sh"
+source "${PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_versions.sh"
+source "${PROJECT_ROOT}/external/DataFedDependencies/scripts/dependency_install_functions.sh"
#NVM_DIR=/home/cades/.nvm
#[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"
diff --git a/scripts/install_ws_dependencies.sh b/scripts/install_ws_dependencies.sh
deleted file mode 100755
index 3c0d2ae77..000000000
--- a/scripts/install_ws_dependencies.sh
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/bin/bash
-
-# Exit on error
-set -e
-
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath "${SOURCE}/..")
-
-source "${PROJECT_ROOT}/scripts/utils.sh"
-source "${SOURCE}/dependency_versions.sh"
-source "${PROJECT_ROOT}/scripts/dependency_install_functions.sh"
-
-packages=("curl" "g++" "make" "wget")
-externals=("cmake" "libopenssl" "python" "nvm" "node" "ws_node_packages")
-
-Help() {
- echo "$(basename $0) install web dependencies."
- echo
- echo "Syntax: $(basename $0) [-h|n]"
- echo "options:"
- echo "-h, --help Print this help message"
- echo "-n, --node_install_dir Install directory, defaults to"
- echo " whatever is defined in the datafed.sh file"
- echo " DATAFED_DEPENDENCIES_INSTALL_PATH"
- echo " ${DATAFED_DEPENDENCIES_INSTALL_PATH}"
- echo "-u, --unify Unifies install scripts to be used in docker builds"
-}
-
-# Equivalent to the .nvm directory
-local_NODE_INSTALL="$DATAFED_DEPENDENCIES_INSTALL_PATH"
-local_UNIFY=false
-
-VALID_ARGS=$(getopt -o hn: --long 'help',node_install_dir: -- "$@")
-if [[ $? -ne 0 ]]; then
- exit 1
-fi
-eval set -- "$VALID_ARGS"
-while [ : ]; do
- case "$1" in
- -h | --help)
- Help
- exit 0
- ;;
- -n | --node_install_dir)
- local_NODE_INSTALL=$2
- shift 2
- ;;
- unify)
- # The extra space is necessary to not conflict with the other install scripts
- echo -n "${packages[@]} " >>"$apt_file_path"
- echo -n "${externals[@]} " >>"$ext_file_path"
- local_UNIFY=true
- shift
- ;;
- --)
- shift
- break
- ;;
- \?) # incorrect option
- echo "Error: Invalid option"
- exit
- ;;
- esac
-done
-
-sudo_command
-
-if [[ $local_UNIFY = false ]]; then
- "$SUDO_CMD" apt-get update
- "$SUDO_CMD" dpkg --configure -a
- "$SUDO_CMD" apt-get install -y "${packages[@]}"
-
- for ext in "${externals[@]}"; do
- install_dep_by_name "$ext"
- done
-fi
diff --git a/scripts/utils.sh b/scripts/utils.sh
deleted file mode 100755
index 61d273c24..000000000
--- a/scripts/utils.sh
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/bin/bash
-SCRIPT=$(realpath "$0")
-SOURCE=$(dirname "$SCRIPT")
-
-if [ -z "${PROJECT_ROOT}" ]; then
- PROJECT_ROOT=$(realpath "${SOURCE}/..")
-fi
-
-echo "PROJECT ROOT $PROJECT_ROOT"
-
-export_dependency_version_numbers() {
- # Get the content of the function and remove comments
- variables=$(cat "${PROJECT_ROOT}/scripts/dependency_versions.sh")
-
- local content="$(echo "${variables}" | sed '/^$/d;/^#/d')"
-
- # Extract variable assignments from the content
- local assignments=$(echo "$content" | grep -Eo '\b[a-zA-Z_][a-zA-Z_0-9]*="[^\"]*"')
-
- echo "Variables are $variables"
- echo "Content is $content"
- echo "Assignments is $assignments"
- # Loop through each assignment, export the variable
- # Note: This may override existing variables
- for assignment in $assignments; do
- echo "export $assignment"
- export "$assignment"
- done
-}
-
-empty_command() {
- "$@"
-}
-
-# The purpose of this function is to detect the sudo command
-# if it exists use it, if we are running as root set SUDO_CMD to empty_command
-# empty_command is needed so that I can do this where sudo doesn't exist
-#
-# "$SUDO_CMD" apt install curl
-#
-# If running as root this will expand to
-#
-# empty_command apt install curl
-#
-# which expands to
-#
-# apt install curl
-#
-# If I left SUDO_CMD blank i.e. "" apt install curl bash would complain
-sudo_command() {
- if [ "$(id -u)" -eq 0 ]; then
- export SUDO_CMD="empty_command" # Ignore sudo running as root
- else
- # Check if sudo is available
- if command -v sudo &>/dev/null; then
- export SUDO_CMD=$(command -v sudo)
- return 0
- else
- echo "Error: This script requires sudo but sudo is not installed." >&2
- echo "You are not running as root!" >&2
- exit 1
- fi
- exit $? # Exit with the same status as the sudo command
- fi
-}
-
-# Only recognized x.x.x format where all "x" are integers
-# Returns true if first version is greater or equal to second version
-#
-# semantic_version_compatible "1.2.3" "1.1.8"
-# echo $?
-# Should print 1
-#
-# semantic_version_compatible "1.2.3" "1.2.8"
-# echo $?
-# Should print 0
-#
-#semantic_version_compatible "1.1.1" "1.1.1"
-#echo "Should return true 1.1.1 >= 1.1.1"
-#
-#semantic_version_compatible "1.2.1" "1.1.1"
-#echo "Should return true 1.2.1 >= 1.1.1"
-#
-#semantic_version_compatible "1.2.1" "3.1.1"
-#echo "Should return false 1.2.1 >= 3.1.1"
-#
-#semantic_version_compatible "v1.2.1" "v3.1.1"
-#echo "Should return false v1.2.1 >= v3.1.1"
-#
-#semantic_version_compatible "v1.2.1" "1.1.1"
-#echo "Should return true v1.2.1 >= 1.1.1"
-
-semantic_version_compatible() {
- local VER1="$1"
- local VER2="$2"
-
- # Remove any preceding v from version i.e. v1.1.2
- VER1=$(echo "$VER1" | sed 's/v//g')
- VER2=$(echo "$VER2" | sed 's/v//g')
-
- maj_1=$(echo "$VER1" | sed 's/\./ /g' | awk '{print $1}')
- min_1=$(echo "$VER1" | sed 's/\./ /g' | awk '{print $2}')
- patch_1=$(echo "$VER1" | sed 's/\./ /g' | awk '{print $3}')
- maj_2=$(echo "$VER2" | sed 's/\./ /g' | awk '{print $1}')
- min_2=$(echo "$VER2" | sed 's/\./ /g' | awk '{print $2}')
- patch_2=$(echo "$VER2" | sed 's/\./ /g' | awk '{print $3}')
-
- if [ "$maj_1" -gt "$maj_2" ]; then
- return 1
- elif [ "$maj_1" -lt "$maj_2" ]; then
- return 0
- fi
-
- if [ "$min_1" -gt "$min_2" ]; then
- return 1
- elif [ "$min_1" -lt "$min_2" ]; then
- return 0
- fi
-
- if [ "$patch_1" -gt "$patch_2" ]; then
- return 1
- elif [ "$patch_1" -lt "$patch_2" ]; then
- return 0
- fi
- return 1
-}
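
Two conventions in the removed utils.sh are easy to trip over when reading its callers above: sudo_command sets SUDO_CMD to a no-op wrapper (empty_command) when already running as root, so "$SUDO_CMD" cmd works in both cases, and semantic_version_compatible reports "first version >= second" with return code 1, the opposite of normal shell truthiness. A short usage sketch, assuming the old utils.sh is sourced:

    source ./scripts/utils.sh            # path as it existed before this change
    sudo_command                         # exports SUDO_CMD=sudo, or empty_command as root
    "$SUDO_CMD" apt-get update           # expands to a plain command when running as root

    if semantic_version_compatible "1.2.3" "1.1.8"; then
      echo "not reached"                 # return code 1 lands in the else branch
    else
      echo "1.2.3 >= 1.1.8"
    fi
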
diff --git a/tests/end-to-end/setup.sh b/tests/end-to-end/setup.sh
index 1ed7d0bdc..c117ec984 100755
--- a/tests/end-to-end/setup.sh
+++ b/tests/end-to-end/setup.sh
@@ -92,13 +92,13 @@ if [ -z "${DATAFED_USER99_GLOBUS_UUID}" ]; then
exit 1
fi
-SCRIPT=$(realpath "$0")
+SCRIPT=$(realpath "${BASH_SOURCE[0]}")
SOURCE=$(dirname "$SCRIPT")
-PROJECT_ROOT=$(realpath ${SOURCE}/../../)
-source ${PROJECT_ROOT}/config/datafed.sh
+DATAFED_PROJECT_ROOT=$(realpath ${SOURCE}/../../)
+source ${DATAFED_PROJECT_ROOT}/config/datafed.sh
if [ -z "${FOXX_MAJOR_API_VERSION}" ]; then
- local_FOXX_MAJOR_API_VERSION=$(cat ${PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
+ local_FOXX_MAJOR_API_VERSION=$(cat ${DATAFED_PROJECT_ROOT}/cmake/Version.cmake | grep -o -P "(?<=FOXX_API_MAJOR).*(?=\))" | xargs)
else
local_FOXX_MAJOR_API_VERSION=$(printenv FOXX_MAJOR_API_VERSION)
fi
@@ -119,11 +119,11 @@ fi
# First step is to clear the database
echo "Clearing old database"
-${PROJECT_ROOT}/scripts/clear_db.sh
+${DATAFED_PROJECT_ROOT}/scripts/clear_db.sh
# Second install foxx
echo "Installing foxx services and API"
-${PROJECT_ROOT}/scripts/install_foxx.sh
+${DATAFED_PROJECT_ROOT}/scripts/install_foxx.sh
echo "Completed"
if [ -z "${DATAFED_DATABASE_HOST}" ]; then
diff --git a/web/docker/Dockerfile b/web/docker/Dockerfile
index 6af086e1e..9fa6cfa3a 100644
--- a/web/docker/Dockerfile
+++ b/web/docker/Dockerfile
@@ -1,21 +1,23 @@
# NOTE this image must be built with respect to the base of the project i.e.
# cd ${PROJECT_ROOT} or cd DataFed
# docker build -f web/docker/Dockerfile .
-ARG BUILD_BASE="debian:bookworm-slim"
-ARG DEPENDENCIES="dependencies"
-ARG RUNTIME="runtime"
-ARG DATAFED_DIR="/datafed"
-ARG DATAFED_INSTALL_PATH="/opt/datafed"
+ARG BUILD_BASE="debian:bookworm-slim"
+ARG DEPENDENCIES="dependencies"
+ARG RUNTIME="runtime"
+ARG DATAFED_DIR="/datafed"
+ARG DATAFED_INSTALL_PATH="/opt/datafed"
ARG DATAFED_DEPENDENCIES_INSTALL_PATH="/opt/datafed/dependencies"
-ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal"
-ARG BUILD_DIR="$DATAFED_DIR/source"
-ARG LIB_DIR="/usr/local/lib"
+ARG GCS_IMAGE="code.ornl.gov:4567/dlsw/datafed/gcs-ubuntu-focal"
+ARG BUILD_DIR="$DATAFED_DIR/source"
+ARG LIB_DIR="/usr/local/lib"
+ARG DATAFED_DEPENDENCIES_ROOT="$BUILD_DIR/external/DataFedDependencies"
FROM ${DEPENDENCIES} AS ws-build
ARG DATAFED_DIR
ARG BUILD_DIR
ARG DATAFED_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
ENV DATAFED_INSTALL_PATH="${DATAFED_INSTALL_PATH}"
@@ -24,6 +26,12 @@ EXPOSE 7513
# For communication with the public
EXPOSE 443
+RUN mkdir -p ${DATAFED_DEPENDENCIES_ROOT}/scripts && \
+ mv ./scripts/utils.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/utils.sh && \
+ mv ./scripts/dependency_install_functions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/dependency_install_functions.sh && \
+ mv ./scripts/dependency_versions.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/dependency_versions.sh && \
+ mv ./scripts/generate_dependencies_config.sh ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh
+
COPY ./CMakeLists.txt ${BUILD_DIR}
COPY ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/
COPY ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/
@@ -34,16 +42,17 @@ COPY ./cmake ${BUILD_DIR}/cmake
COPY ./common/proto ${BUILD_DIR}/common/proto
COPY ./web ${BUILD_DIR}/web
-RUN ${BUILD_DIR}/scripts/generate_datafed.sh && \
- ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
- -DBUILD_REPO_SERVER=False \
- -DBUILD_AUTHZ=False \
- -DBUILD_CORE_SERVER=False \
- -DBUILD_WEB_SERVER=True \
- -DBUILD_DOCS=False \
- -DBUILD_PYTHON_CLIENT=False \
- -DBUILD_FOXX=False \
- -DBUILD_COMMON=False
+RUN ${DATAFED_DEPENDENCIES_ROOT}/scripts/generate_dependencies_config.sh && \
+ ${BUILD_DIR}/scripts/generate_datafed.sh && \
+ ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake -S. -B build \
+ -DBUILD_REPO_SERVER=False \
+ -DBUILD_AUTHZ=False \
+ -DBUILD_CORE_SERVER=False \
+ -DBUILD_WEB_SERVER=True \
+ -DBUILD_DOCS=False \
+ -DBUILD_PYTHON_CLIENT=False \
+ -DBUILD_FOXX=False \
+ -DBUILD_COMMON=False
RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build
RUN ${DATAFED_DEPENDENCIES_INSTALL_PATH}/bin/cmake --build build --target install
# Only part of the final install can be done here
@@ -57,10 +66,12 @@ ARG DATAFED_DIR
ARG DATAFED_INSTALL_PATH
ARG BUILD_DIR
ARG DATAFED_DEPENDENCIES_INSTALL_PATH
+ARG DATAFED_DEPENDENCIES_ROOT
# The above should also be available at runtime
ENV DATAFED_INSTALL_PATH="$DATAFED_INSTALL_PATH"
ENV DATAFED_DEPENDENCIES_INSTALL_PATH="$DATAFED_DEPENDENCIES_INSTALL_PATH"
+ENV DATAFED_DEPENDENCIES_ROOT="$DATAFED_DEPENDENCIES_ROOT"
ENV DATAFED_DIR="$DATAFED_DIR"
ENV BUILD_DIR="$BUILD_DIR"
ENV DATAFED_DEFAULT_LOG_PATH="$DATAFED_INSTALL_PATH/logs"
@@ -71,16 +82,17 @@ WORKDIR ${DATAFED_DIR}
USER datafed
-COPY --chown=datafed:root ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/generate_datafed.sh
-COPY --chown=datafed:root ./scripts/dependency_versions.sh ${BUILD_DIR}/scripts/dependency_versions.sh
-COPY --chown=datafed:root ./scripts/dependency_install_functions.sh ${BUILD_DIR}/scripts/dependency_install_functions.sh
-COPY --chown=datafed:root ./scripts/generate_ws_config.sh ${BUILD_DIR}/scripts/generate_ws_config.sh
-COPY --chown=datafed:root ./scripts/install_ws.sh ${BUILD_DIR}/scripts/install_ws.sh
-COPY --chown=datafed:root ./cmake/Version.cmake ${BUILD_DIR}/cmake/Version.cmake
-COPY --chown=datafed:root ./scripts/utils.sh ${BUILD_DIR}/scripts/utils.sh
-COPY --chown=datafed:root ./scripts/export_dependency_version.sh ${BUILD_DIR}/scripts/export_dependency_version.sh
+COPY --from=ws-build --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts/ ${DATAFED_DEPENDENCIES_ROOT}/scripts/
+
+COPY --chown=datafed:root ./scripts/generate_datafed.sh ${BUILD_DIR}/scripts/generate_datafed.sh
+COPY --chown=datafed:root ./scripts/export_dependency_version.sh ${BUILD_DIR}/scripts/export_dependency_version.sh
+COPY --chown=datafed:root ./scripts/generate_ws_config.sh ${BUILD_DIR}/scripts/generate_ws_config.sh
+COPY --chown=datafed:root ./scripts/install_ws.sh ${BUILD_DIR}/scripts/install_ws.sh
+COPY --chown=datafed:root ./cmake/Version.cmake ${BUILD_DIR}/cmake/Version.cmake
+
COPY --from=ws-build --chown=datafed:root ${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm ${DATAFED_DEPENDENCIES_INSTALL_PATH}/nvm
COPY --from=ws-build --chown=datafed:root ${BUILD_DIR}/web ${BUILD_DIR}/web
+COPY --from=ws-build --chown=datafed:root ${DATAFED_DEPENDENCIES_ROOT}/scripts ${DATAFED_DEPENDENCIES_ROOT}/scripts
COPY --from=ws-build --chown=datafed:root ${DATAFED_INSTALL_PATH}/web ${DATAFED_INSTALL_PATH}/web
COPY --from=ws-build --chown=datafed:root /usr/bin/curl /usr/bin/curl
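
As the note at the top of this Dockerfile says, the image has to be built from the project root so that the COPY paths (./scripts, ./cmake, ./web, and now the DataFedDependencies scripts) resolve against the build context. A typical invocation, assuming the submodules have been checked out and an image satisfying the DEPENDENCIES build arg (default "dependencies") already exists locally; the tag name is illustrative:

    cd DataFed                                   # repository root, cloned with --recurse-submodules
    docker build -f web/docker/Dockerfile -t datafed-web:local .
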