diff --git a/core/database/foxx/tests/repo.test.js b/core/database/foxx/tests/repo.test.js
index 017f3aaa3..fd8645c08 100644
--- a/core/database/foxx/tests/repo.test.js
+++ b/core/database/foxx/tests/repo.test.js
@@ -6,6 +6,17 @@ const { Repo, PathType } = require("../api/repo");
 const g_db = require("@arangodb").db;
 const g_lib = require("../api/support");
 const arangodb = require("@arangodb");
+const { RepositoryType, ExecutionMethod } = require("../api/repository/types");
+const { createRepositoryByType } = require("../api/repository/factory");
+
+const { RepositoryOps } = require("../api/repository/operations");
+
+const { runParameterizedTest } = require("./test-fixtures");
+
+const { cleanupDatabase, setupTestUsers, RepositoryBuilder } = require("./test-helpers");
+
+const { createAPIAdapter } = require("./helpers/api-adapters");
+const { createTestSetup } = require("./helpers/test-setup");
 
 describe("Testing Repo class", () => {
     beforeEach(() => {
@@ -235,3 +246,232 @@ describe("Testing Repo class", () => {
         );
     });
 });
+
+describe("Repository Tests", () => {
+    describe("Legacy Repo Class", () => {
+        beforeEach(() => {
+            ["d", "alloc", "loc", "repo"].forEach((coll) => {
+                if (g_db._collection(coll)) g_db[coll].truncate();
+            });
+        });
+
+        it("should throw an error if the repo does not exist", () => {
+            const repo = new Repo("invalidKey");
+            expect(repo.exists()).to.be.false;
+            expect(repo.key()).to.equal("invalidKey");
+            expect(repo.error()).to.equal(g_lib.ERR_NOT_FOUND);
+        });
+
+        it("should integrate with new repository type system", () => {
+            const createResult = createRepositoryByType({
+                id: "legacy_compat",
+                type: RepositoryType.METADATA_ONLY,
+                title: "Legacy Compatible",
+                capacity: 1000000,
+                admins: ["u/admin"],
+            });
+
+            RepositoryOps.save(createResult.value);
+
+            const legacyRepo = new Repo("legacy_compat");
+            expect(legacyRepo.exists()).to.be.true;
+            expect(legacyRepo.key()).to.equal("legacy_compat");
+            expect(legacyRepo.id()).to.equal("repo/legacy_compat");
+
+            const newRepo = legacyRepo.getRepository();
+            expect(newRepo).to.not.be.null;
+            expect(newRepo.type).to.equal(RepositoryType.METADATA_ONLY);
+        });
+
+        describe("Path Type Detection", () => {
+            beforeEach(() => {
+                g_db.repo.save({
+                    _key: "path_test",
+                    path: "/mnt/repo_root",
+                });
+            });
+
+            const pathTests = [
+                { path: "/mnt/repo_root", expected: PathType.REPO_ROOT_PATH },
+                { path: "/mnt/repo_root/", expected: PathType.REPO_ROOT_PATH },
+                { path: "/mnt", expected: PathType.REPO_BASE_PATH },
+                { path: "/mnt/", expected: PathType.REPO_BASE_PATH },
+                { path: "/", expected: PathType.REPO_BASE_PATH },
+                { path: "/mnt/repo_root/project/bam", expected: PathType.PROJECT_PATH },
+                { path: "/mnt/repo_root/user/george", expected: PathType.USER_PATH },
+                { path: "/mnt/repo_root/project/bam/4243", expected: PathType.PROJECT_RECORD_PATH },
+                { path: "/mnt/repo_root/user/jane/4243", expected: PathType.USER_RECORD_PATH },
+                { path: "/invalid_path", expected: PathType.UNKNOWN },
+                { path: "/mnt/re", expected: PathType.UNKNOWN },
+                { path: "/m", expected: PathType.UNKNOWN },
+            ];
+
+            runParameterizedTest(pathTests, (test) => {
+                const repo = new Repo("path_test");
+                expect(repo.pathType(test.path)).to.equal(test.expected);
+            });
+        });
+    });
+
+    describe("Repository API Routers", () => {
+        describe("Legacy-Specific Endpoints", () => {
+            let setup, apiAdapter;
+
+            beforeEach(() => {
+                setup = createTestSetup();
+                apiAdapter = createAPIAdapter("legacy");
+            });
+
+            it("lists allocations by repository", () => {
+                g_db.alloc.save({
+                    _from: setup.users.regular._id,
+                    _to: setup.repos.globus._id,
+                    data_limit: 1000000000,
+                    data_size: 0,
+                    rec_limit: 1000,
+                    rec_count: 0,
+                });
+
+                const response = apiAdapter.listAllocationsByRepo(
+                    setup.repos.globus._id,
+                    setup.users.admin._key,
+                );
+
+                expect(response.status).to.equal(200);
+                expect(response.json).to.be.an("array");
+            });
+
+            it("calculates repository size", () => {
+                const dataset = g_db.d.save({
+                    _key: "size_test",
+                    size: 1000000,
+                });
+
+                g_db.loc.save({
+                    _from: dataset._id,
+                    _to: setup.repos.globus._id,
+                    uid: setup.users.admin._id,
+                });
+
+                const response = apiAdapter.calculateSize(
+                    dataset._id,
+                    false,
+                    setup.users.admin._key,
+                );
+
+                expect(response.status).to.equal(200);
+                expect(response.json).to.be.an("array");
+            });
+
+            it("deletes repository with dependency checks", () => {
+                const response = apiAdapter.deleteRepository(
+                    setup.repos.metadata._id,
+                    setup.users.admin._key,
+                );
+
+                expect(response.status).to.equal(204);
+            });
+        });
+
+        describe("New API-Specific Features", () => {
+            let setup, apiAdapter;
+
+            beforeEach(() => {
+                setup = createTestSetup();
+                apiAdapter = createAPIAdapter("new");
+            });
+
+            it("creates repository with comprehensive validation", () => {
+                const globusRepo = RepositoryBuilder.globus().withId("new_api_repo").build();
+
+                const response = apiAdapter.createRepository(setup.users.admin._key, globusRepo);
+
+                expect(response.status).to.equal(200);
+                expect(response.json.type).to.equal(RepositoryType.GLOBUS);
+            });
+
+            it("validates repository type constraints", () => {
+                const invalidRepo = {
+                    ...RepositoryBuilder.metadata().build(),
+                    pub_key: "should-not-be-here",
+                };
+
+                const response = apiAdapter.createRepository(setup.users.admin._key, invalidRepo);
+
+                expect(response.status).to.equal(400);
+                expect(response.json.errorMessage).to.include("should not have");
+            });
+        });
+    });
+
+    describe("Integration Scenarios", () => {
+        beforeEach(() => {
+            cleanupDatabase();
+            setupTestUsers();
+        });
+
+        it("completes full repository lifecycle", () => {
+            const createResult = createRepositoryByType({
+                id: "lifecycle_test",
+                type: RepositoryType.GLOBUS,
+                title: "Lifecycle Test",
+                capacity: 5000000000,
+                admins: ["u/test_admin"],
+                pub_key: "ssh-rsa lifecycle...",
+                address: "lifecycle.test.org",
+                endpoint: "lifecycle-ep",
+                path: "/data/lifecycle_test",
+                domain: "test.org",
+            });
+
+            expect(createResult.ok).to.be.true;
+
+            const saveResult = RepositoryOps.save(createResult.value);
+            expect(saveResult.ok).to.be.true;
+
+            const findResult = RepositoryOps.find("lifecycle_test");
+            expect(findResult.ok).to.be.true;
+
+            const legacyRepo = new Repo("lifecycle_test");
+            expect(legacyRepo.exists()).to.be.true;
+            expect(legacyRepo.pathType("/data/lifecycle_test")).to.equal(PathType.REPO_ROOT_PATH);
+
+            const allocResult = RepositoryOps.createAllocation(findResult.value, {
+                subject: "d/lifecycle_dataset",
+                size: 1000000000,
+                path: "/data/lifecycle_test/dataset1",
+            });
+
+            expect(allocResult.ok).to.be.true;
+            expect(allocResult.value.execution_method).to.equal(ExecutionMethod.TASK);
+        });
+
+        it("handles type migration scenarios", () => {
+            const createResult = createRepositoryByType({
+                id: "migration_test",
+                type: RepositoryType.METADATA_ONLY,
+                title: "Migration Test",
+                capacity: 1000000,
+                admins: ["u/test_admin"],
+            });
+
+            RepositoryOps.save(createResult.value);
+
+            g_db.repo.update("migration_test", {
+                type: RepositoryType.GLOBUS,
+                endpoint: "migrated-ep",
+                pub_key: "migrated-key",
+                address: "migrated.server",
+                path: "/data/migration_test",
+                domain: "migrated.org",
+            });
+
+            const findResult = RepositoryOps.find("migration_test");
+            expect(findResult.ok).to.be.true;
+            expect(findResult.value.type).to.equal(RepositoryType.GLOBUS);
+
+            const dataOpsResult = RepositoryOps.supportsDataOperations(findResult.value);
+            expect(dataOpsResult.value).to.be.true;
+        });
+    });
+});