273 changes: 273 additions & 0 deletions src/common/utils/history.util.spec.ts
@@ -0,0 +1,273 @@
import { HistoryClass } from "src/datasets/schemas/history.schema";
import {
convertGenericHistoriesToObsoleteHistories,
convertGenericHistoryToObsoleteHistory,
convertObsoleteHistoryToGenericHistory,
} from "./history.util";
import {
DatasetClass,
DatasetDocument,
} from "src/datasets/schemas/dataset.schema";
import { GenericHistory } from "../schemas/generic-history.schema";

describe("History Utility Functions", () => {
it("should convert obsolete history to generic history", () => {
const obsoleteHistory: HistoryClass = {
updatedAt: new Date("2023-10-01T12:00:00Z"),
updatedBy: "user123",
isPublished: {
currentValue: true,
previousValue: false,
},
datasetlifecycle: {
currentValue: {
publishedOn: new Date("2023-10-01T12:00:00Z"),
archivable: true,
retrievable: true,
},
previousValue: {
archivable: false,
retrievable: true,
publishable: false,
archiveRetentionTime: new Date("2031-10-01T12:00:00Z"),
dateOfPublishing: new Date("2024-10-01T12:00:00Z"),
isOnCentralDisk: true,
archiveStatusMessage: "datasetOnArchiveDisk",
retrieveStatusMessage: "",
retrieveIntegrityCheck: false,
},
},
_id: "",
};
const documentId = "pid123";
const genericHistory = convertObsoleteHistoryToGenericHistory(
obsoleteHistory,
documentId,
);

expect(genericHistory).toEqual({
subsystem: "Dataset",
documentId: "pid123",
user: "user123",
operation: "update",
timestamp: new Date("2023-10-01T12:00:00Z"),
before: {
isPublished: false,
datasetlifecycle: {
publishedOn: undefined,
archivable: false,
},
},
after: {
datasetlifecycle: {
publishedOn: new Date("2023-10-01T12:00:00Z"),
archivable: true,
},
isPublished: true,
},
});
});

it("should convert generic history to obsolete history", () => {
const genericHistory: GenericHistory = {
subsystem: "Dataset",
documentId: "pid123",
user: "user123",
operation: "update",
timestamp: new Date("2023-10-01T12:00:00Z"),
before: {
isPublished: false,
datasetlifecycle: {
publishedOn: undefined,
archivable: false,
},
},
after: {
datasetlifecycle: {
publishedOn: new Date("2023-10-01T12:00:00Z"),
archivable: true,
},
isPublished: true,
},
};

const currentDataset: Partial<DatasetClass> = {
isPublished: true,
datasetlifecycle: {
publishedOn: new Date("2023-10-01T12:00:00Z"),
archivable: true,
retrievable: true,
publishable: false,
archiveRetentionTime: new Date("2031-10-01T12:00:00Z"),
dateOfPublishing: new Date("2024-10-01T12:00:00Z"),
isOnCentralDisk: true,
archiveStatusMessage: "datasetOnArchiveDisk",
retrieveStatusMessage: "",
retrieveIntegrityCheck: false,
},
};
const obsoleteHistory = convertGenericHistoryToObsoleteHistory(
genericHistory,
currentDataset,
);

expect(obsoleteHistory).toEqual({
updatedAt: new Date("2023-10-01T12:00:00Z"),
updatedBy: "user123",
isPublished: {
previousValue: false,
currentValue: true,
},
datasetlifecycle: {
previousValue: {
publishedOn: undefined,
archivable: false,
retrievable: true,
publishable: false,
archiveRetentionTime: new Date("2031-10-01T12:00:00Z").toISOString(),
dateOfPublishing: new Date("2024-10-01T12:00:00Z").toISOString(),
isOnCentralDisk: true,
archiveStatusMessage: "datasetOnArchiveDisk",
retrieveStatusMessage: "",
retrieveIntegrityCheck: false,
},
currentValue: {
publishedOn: new Date("2023-10-01T12:00:00Z").toISOString(),
archivable: true,
},
},
_id: "",
});
});

it("should convert history list to obsolete histories", () => {
const genericHistories: GenericHistory[] = [
{
subsystem: "Dataset",
documentId: "pid123",
user: "user123",
operation: "update",
timestamp: new Date("2023-10-02T12:00:00Z"),
before: {
isPublished: false,
datasetlifecycle: {
publishedOn: undefined,
archivable: false,
},
},
after: {
datasetlifecycle: {
publishedOn: new Date("2023-10-02T12:00:00Z"),
archivable: true,
},
isPublished: true,
},
},
{
subsystem: "Dataset",
documentId: "pid123",
user: "user456",
operation: "update",
timestamp: new Date("2023-10-01T12:00:00Z"),
before: {
isPublished: false,
datasetlifecycle: {
publishedOn: undefined,
archivable: false,
},
},
after: {
datasetlifecycle: {
publishedOn: new Date("2023-10-01T12:00:00Z"),
archivable: false,
},
isPublished: false,
},
},
];

const currentDataset: Partial<DatasetDocument> = {
isPublished: true,
datasetlifecycle: {
publishedOn: new Date("2023-10-02T12:00:00Z"),
archivable: true,
retrievable: true,
publishable: false,
archiveRetentionTime: new Date("2031-10-01T12:00:00Z"),
dateOfPublishing: new Date("2024-10-01T12:00:00Z"),
isOnCentralDisk: true,
archiveStatusMessage: "datasetOnArchiveDisk",
retrieveStatusMessage: "",
retrieveIntegrityCheck: false,
},
};

currentDataset.$clone = () => currentDataset as DatasetDocument; // Mock the $clone method

const obsoleteHistories = convertGenericHistoriesToObsoleteHistories(
genericHistories,
currentDataset as DatasetDocument,
);

expect(obsoleteHistories).toEqual([
{
updatedAt: new Date("2023-10-02T12:00:00Z"),
updatedBy: "user123",
isPublished: {
previousValue: false,
currentValue: true,
},
datasetlifecycle: {
previousValue: {
publishedOn: undefined,
archivable: false,
retrievable: true,
publishable: false,
archiveRetentionTime: new Date(
"2031-10-01T12:00:00Z",
).toISOString(),
dateOfPublishing: new Date("2024-10-01T12:00:00Z").toISOString(),
isOnCentralDisk: true,
archiveStatusMessage: "datasetOnArchiveDisk",
retrieveStatusMessage: "",
retrieveIntegrityCheck: false,
},
currentValue: {
publishedOn: new Date("2023-10-02T12:00:00Z").toISOString(),
archivable: true,
},
},
_id: "",
},
{
updatedAt: new Date("2023-10-01T12:00:00Z"),
updatedBy: "user456",
isPublished: {
previousValue: false,
currentValue: false,
},
datasetlifecycle: {
previousValue: {
publishedOn: undefined,
archivable: false,
retrievable: true,
publishable: false,
archiveRetentionTime: new Date(
"2031-10-01T12:00:00Z",
).toISOString(),
dateOfPublishing: new Date("2024-10-01T12:00:00Z").toISOString(),
isOnCentralDisk: true,
archiveStatusMessage: "datasetOnArchiveDisk",
retrieveStatusMessage: "",
retrieveIntegrityCheck: false,
},
currentValue: {
publishedOn: new Date("2023-10-01T12:00:00Z").toISOString(),
archivable: false,
},
},
_id: "",
},
]);
});
});
112 changes: 112 additions & 0 deletions src/common/utils/history.util.ts
@@ -0,0 +1,112 @@
import { HistoryClass } from "src/datasets/schemas/history.schema";
import { GenericHistory } from "../schemas/generic-history.schema";
import {
DatasetClass,
DatasetDocument,
} from "src/datasets/schemas/dataset.schema";

const IGNORE_FIELDS = ["updatedAt", "updatedBy", "_id"];

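// Example (values assumed, not taken from this PR): an obsolete entry such as
//   { updatedAt, updatedBy: "user1", isPublished: { previousValue: false, currentValue: true }, _id: "" }
// converts to a generic entry with flat before/after maps:
//   { subsystem: "Dataset", documentId, user: "user1", operation: "update",
//     timestamp: updatedAt, before: { isPublished: false }, after: { isPublished: true } }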
export function convertObsoleteHistoryToGenericHistory(
history: HistoryClass,
documentId: string,
): GenericHistory {
const result: GenericHistory = {
subsystem: "Dataset",
documentId: documentId,
user: history.updatedBy,
operation: "update",
timestamp: history.updatedAt,
before: {},
after: {},
};
const changeList = Object.keys(history).filter(
(key) => !IGNORE_FIELDS.includes(key),
);
for (const key of changeList) {
if (
!history[key] ||
!history[key].hasOwnProperty("previousValue") ||
!history[key].hasOwnProperty("currentValue")
) {
continue;
}
const fieldChange = history[key] as {
previousValue: unknown;
currentValue: unknown;
};
if (key === "datasetlifecycle") {
const currentValue = fieldChange.currentValue as Record<string, unknown>;
const previousValue = fieldChange.previousValue as Record<
string,
unknown
>;
// Retain only the keys of currentValue whose value differs from previousValue; drop all
// others. Note: strict equality means two distinct Date instances always count as changed.
const prunedPreviousValue: Record<string, unknown> = {};
const prunedCurrentValue: Record<string, unknown> = {};
for (const subKey of Object.keys(currentValue)) {
if (currentValue[subKey] !== previousValue[subKey]) {
prunedPreviousValue[subKey] = previousValue[subKey];
prunedCurrentValue[subKey] = currentValue[subKey];
}
}
fieldChange.previousValue = prunedPreviousValue;
fieldChange.currentValue = prunedCurrentValue;
}
result.before![key] = fieldChange.previousValue;
result.after![key] = fieldChange.currentValue;
}
return result;
}

// Given a dataset snapshot and a history entry, reconstruct the obsolete history entry
export function convertGenericHistoryToObsoleteHistory(
history: GenericHistory,
datasetSnapshot: Partial<DatasetClass>,
): HistoryClass {
const result: HistoryClass = {
updatedAt: history.timestamp,
updatedBy: history.user ?? "",
_id: "",
};
for (const field in history.before) {
if (IGNORE_FIELDS.includes(field)) {
continue;
}
if (field === "datasetlifecycle") {
history.before[field] = {
...JSON.parse(JSON.stringify(datasetSnapshot.datasetlifecycle)),
...(history.before[field] as Record<string, unknown>),
};
history.after![field] = JSON.parse(
JSON.stringify(history.after![field] as Record<string, unknown>),
);
}
result[field] = {
previousValue: history.before[field],
currentValue: history.after?.[field],
};
}
return result;
}
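// Worked example (values assumed): with before = { datasetlifecycle: { archivable: false } }
// and a snapshot whose lifecycle also has retrievable, publishable, etc., the reconstructed
// previousValue is the full snapshot lifecycle overlaid with { archivable: false }, while
// currentValue keeps only the changed keys; Date fields come out as ISO strings because of
// the JSON round trip above.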

// Starting from the latest dataset, walk the history entries (newest first) and un-apply
// each entry's `before` state to reconstruct the obsolete history entries.
export function convertGenericHistoriesToObsoleteHistories(
histories: GenericHistory[],
currentDataset: DatasetDocument,
): HistoryClass[] {
currentDataset = currentDataset.$clone();
const result: HistoryClass[] = [];
for (const history of histories) {
const obsoleteHistory = convertGenericHistoryToObsoleteHistory(
history,
currentDataset,
);
for (const key of Object.keys(history.before || {})) {
(currentDataset as unknown as Record<string, unknown>)[key] =
history.before?.[key];
}
result.push(obsoleteHistory);
}
return result;
}
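
A minimal usage sketch, under assumptions that are not part of this diff: fetchGenericHistories and loadDataset are hypothetical stand-ins for however the service loads its data, and the histories are assumed to arrive sorted newest first, which is the order the replay expects.

import { GenericHistory } from "src/common/schemas/generic-history.schema";
import { DatasetDocument } from "src/datasets/schemas/dataset.schema";
import { convertGenericHistoriesToObsoleteHistories } from "src/common/utils/history.util";

// Hypothetical loaders; any source of GenericHistory[] (sorted newest first) works.
declare function fetchGenericHistories(pid: string): Promise<GenericHistory[]>;
declare function loadDataset(pid: string): Promise<DatasetDocument>;

async function legacyHistoryFor(pid: string) {
  const histories = await fetchGenericHistories(pid);
  const dataset = await loadDataset(pid);
  // Clones the document internally and un-applies each entry's `before` state,
  // yielding previousValue/currentValue pairs in the obsolete HistoryClass shape.
  return convertGenericHistoriesToObsoleteHistories(histories, dataset);
}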