From 8ddea7f2161a5cd9b81bac72f236f4635ab485d9 Mon Sep 17 00:00:00 2001
From: ElanaC
Date: Wed, 20 Aug 2025 17:16:25 -0400
Subject: [PATCH 1/4] Complete patients.csv generation (the project's main
 Step 1 only); the other two backend steps are still in progress

---
 apps/backend/package.json               |  6 +++-
 apps/backend/scripts/make_patientcsv.ts | 48 +++++++++++++++++++++++++
 apps/backend/scripts/patients.csv       | 23 ++++++++++++
 apps/backend/scripts/previewpatients.ts | 28 +++++++++++++++
 apps/backend/src/index.ts               |  5 +++
 apps/backend/src/models/Metadata.ts     |  1 +
 apps/backend/src/test.ts                | 12 +++++++
 7 files changed, 122 insertions(+), 1 deletion(-)
 create mode 100644 apps/backend/scripts/make_patientcsv.ts
 create mode 100644 apps/backend/scripts/patients.csv
 create mode 100644 apps/backend/scripts/previewpatients.ts
 create mode 100644 apps/backend/src/test.ts

diff --git a/apps/backend/package.json b/apps/backend/package.json
index a1b420a2..f393bcfb 100644
--- a/apps/backend/package.json
+++ b/apps/backend/package.json
@@ -8,6 +8,8 @@
     "axios": "^1.6.0",
     "body-parser": "^1.19.0",
     "cors": "^2.8.5",
+    "csv-writer": "^1.6.0",
+    "dotenv": "^17.2.1",
     "express": "^4.17.1",
     "express-async-errors": "^3.1.1",
     "express-fileupload": "^1.2.0",
@@ -16,7 +18,7 @@
     "lodash": "^4.17.21",
     "loglevel": "^1.7.1",
     "mongodb-memory-server": "^7.4.0",
-    "mongoose": "^6.0.6",
+    "mongoose": "^6.13.8",
     "mongoose-encryption": "^2.1.0",
     "node-2fa": "^2.0.2",
     "omit-deep-lodash": "^1.1.5",
@@ -41,6 +43,7 @@
     "node-loader": "^2.1.0",
     "rimraf": "^5.0.5",
     "ts-loader": "^9.5.1",
+    "tsx": "^4.20.4",
     "typescript": "^5.3.3",
     "webpack": "^5.89.0",
     "webpack-cli": "^5.1.4"
@@ -61,6 +64,7 @@
     "lint": "eslint --fix src/**/*.ts",
     "lint:check": "eslint src/**/*.ts",
     "start": "rm -rf ./dist && tsc && doppler run -- node ./dist/src/index.js",
+    "dev": "rm -rf ./dist && tsc && doppler run -- node ./dist/src/test.js",
     "test": "cross-env S3_BUCKET_NAME=test jest --runInBand --forceExit"
   }
 }
diff --git a/apps/backend/scripts/make_patientcsv.ts b/apps/backend/scripts/make_patientcsv.ts
new file mode 100644
index 00000000..81eb6d09
--- /dev/null
+++ b/apps/backend/scripts/make_patientcsv.ts
@@ -0,0 +1,48 @@
+import fs from 'fs';
+import path from 'path';
+import { createObjectCsvWriter } from 'csv-writer';
+import mongoose from 'mongoose';
+import { PatientModel } from '../src/models/Patient';
+import { initDB } from "../src/utils/initDb";
+
+export async function main() {
+    await initDB();
+    //console.log('URI from env:', process.env.MONGO_URI);
+    console.log("db initialized");
+
+    const patients = await PatientModel.find();
+    const patientRecords = patients.map(p => {
+        const obj = p.toObject();
+
+        return {
+            ...obj,
+            dateCreated: obj.dateCreated?.toISOString().slice(0, 10),
+            lastEdited: obj.lastEdited?.toISOString().slice(0, 10),
+        };
+    });
+
+    const csvWriter = createObjectCsvWriter({
+        path: path.join(__dirname, 'patients.csv'),
+        header: [
+            { id: 'dateCreated', title: 'Date Created' },
+            { id: 'orderId', title: 'Order ID' },
+            { id: 'lastEdited', title: 'Last Edited' },
+            { id: 'lastEditedBy', title: 'Last Edited By' },
+            { id: 'status', title: 'Status' },
+            { id: 'phoneNumber', title: 'Phone Number' },
+            { id: 'orderYear', title: 'Order Year' },
+            { id: 'firstName', title: 'First Name' },
+            { id: 'fathersName', title: 'Father\'s Name' },
+            { id: 'grandfathersName', title: 'Grandfather\'s Name' },
+            { id: 'familyName', title: 'Family Name' },
+        ]
+    });
+
+    await csvWriter.writeRecords(patientRecords);
+    console.log('patients.csv generated!');
+    await mongoose.disconnect();
+}
+
+main().catch(err => {
+    console.error('error generating CSV:', err);
+});
diff --git a/apps/backend/scripts/patients.csv b/apps/backend/scripts/patients.csv
new file mode 100644
index 00000000..5e588327
--- /dev/null
+++ b/apps/backend/scripts/patients.csv
@@ -0,0 +1,23 @@
+Date Created,Order ID,Last Edited,Last Edited By,Status,Phone Number,Order Year,First Name,Father's Name,Grandfather's Name,Family Name
+2024-01-14,B00001,2024-01-14,Matthew Walowski,Active,,2024,Matthew,Logan,,Walowski
+2024-01-14,B00002,2024-01-14,Matthew Walowski,Active,,2024,Matthew2,,,
+2024-01-14,B00003,2024-01-14,Matthew Walowski,Active,,2024,Matthew3,,,W
+2024-01-14,B00004,2024-01-14,Matthew Walowski,Active,,2024,Matthew4,,,W
+2024-01-14,B00005,2024-01-14,Matthew Walowski,Active,,2024,Matthew5,,,W
+2024-01-14,B00006,2025-03-02,Matthew Walowski,Active,,2024,Matthew5,,,W
+2024-01-14,B00007,2024-02-04,Matthew Walowski,Active,,2024,Matthew7,,,W
+2024-01-14,B00008,2024-01-14,Matthew Walowski,Archived,,2024,Matthew8,,,W
+2024-01-14,B00009,2025-03-02,Matthew Walowski,Active,,2024,Matthew9,,,W
+2024-01-14,B00010,2024-01-14,Matthew Walowski,Active,,2024,Matthew10,,,W
+2024-01-14,B00011,2024-02-04,Matthew Walowski,Active,,2024,Matthew11,,,W
+2024-01-14,B00012,2025-03-01,Matthew Walowski,Archived,,2024,Matthew12,,,W
+2024-01-14,B00013,2024-04-13,Matthew Walowski,Active,,2024,Matthew13,,,W
+2024-01-14,B00014,2025-03-02,ISJHH000 None,Active,,2024,I CHANGE IT!,,,W
+2024-01-14,B00015,2024-01-15,Matthew Walowski,Active,,2024,Matthew15,,,W
+2024-01-15,B00016,2024-01-15,Matthew Walowski,Active,,2024,Test,test,asdf,asdf
+2024-01-28,B00017,2025-01-04,Matthew Walowski,Active,,2024,Tester,,,Patient
+2024-04-06,B00018,2024-12-26,Matthew Walowski,Active,,2024,Tester,,,Patient
+2025-02-02,C00001,2025-03-01,Matthew Walowski,Archived,,2025,NewOne,,,No
+2025-03-02,C00002,2025-03-02,ISJHH000 None,Active,,2025,Should Have Gaza,,,f
+2025-03-02,C00003,2025-06-01,Matthew Walowski,Active,,2025,NO TAGS,rrgda,,
+2025-06-01,C00004,2025-06-01,Matthew Walowski,Active,,2025,New Patient,,,I TEST
diff --git a/apps/backend/scripts/previewpatients.ts b/apps/backend/scripts/previewpatients.ts
new file mode 100644
index 00000000..33316ac2
--- /dev/null
+++ b/apps/backend/scripts/previewpatients.ts
@@ -0,0 +1,28 @@
+// reads directly from PatientModel to double check that make_patientcsv is getting all the patients
+
+import { initDB } from '../src/utils/initDb';
+import { PatientModel } from '../src/models/Patient';
+
+async function main() {
+    await initDB(); // uses Doppler-managed MONGO_URI
+    console.log('✅ DB connected');
+
+    const patients = await PatientModel.find(); // no `.lean()` so encryption is handled
+    console.log('🧾 Found patients:', patients.length);
+
+    for (const p of patients) {
+        console.log('👤', {
+            orderId: p.orderId,
+            name: `${p.firstName} ${p.fathersName} ${p.familyName}`,
+            phone: p.phoneNumber,
+            status: p.status,
+        });
+    }
+
+    // Optional: show full raw object of first patient
+    console.log('\n🔍 Full data of first patient:\n', patients[0].toObject());
+}
+
+main().catch(err => {
+    console.error('❌ Error:', err);
+});
diff --git a/apps/backend/src/index.ts b/apps/backend/src/index.ts
index b3d19e11..7cf090c2 100644
--- a/apps/backend/src/index.ts
+++ b/apps/backend/src/index.ts
@@ -1,3 +1,8 @@
+import { initDB } from './utils/initDb';
+import { main } from './test';
+
+initDB(main)
+
 /**
  * Module dependencies.
diff --git a/apps/backend/src/models/Metadata.ts b/apps/backend/src/models/Metadata.ts index 68affe3d..3ddc7198 100644 --- a/apps/backend/src/models/Metadata.ts +++ b/apps/backend/src/models/Metadata.ts @@ -143,6 +143,7 @@ const stepSchema = new mongoose.Schema({ }, }, isHidden: { type: Boolean, required: false, default: false }, + isDeleted: { type: Boolean, required: false, default: false }, }) diff --git a/apps/backend/src/test.ts b/apps/backend/src/test.ts new file mode 100644 index 00000000..9a6c3813 --- /dev/null +++ b/apps/backend/src/test.ts @@ -0,0 +1,12 @@ +import { initDB } from "utils/initDb"; +import { PatientModel } from "./models/Patient"; + + +export async function main() { + console.log("db initialized"); + + const patients = await PatientModel.find().lean(); + console.log(patients); +} + + From 411c79e8b07a2228fbed09037d8d59c72ac4d474 Mon Sep 17 00:00:00 2001 From: ElanaC Date: Sat, 6 Sep 2025 04:46:40 -0400 Subject: [PATCH 2/4] backend work for exporting complete --- apps/backend/scripts/dataextraction.ts | 584 +++++++++++++++++++++++++ 1 file changed, 584 insertions(+) create mode 100644 apps/backend/scripts/dataextraction.ts diff --git a/apps/backend/scripts/dataextraction.ts b/apps/backend/scripts/dataextraction.ts new file mode 100644 index 00000000..5faddf39 --- /dev/null +++ b/apps/backend/scripts/dataextraction.ts @@ -0,0 +1,584 @@ +/* + * Combined Export Script - Steps 1, 2, and 3 + * STEP 1: Generate CSV of all basic patient info + * STEP 2: Generate CSV of all steps with string-convertible data: String, MultilineString, + * Number, Date, Phone, RadioButton, MultiSelect, Tags + * STEP 3: Export media files (File, Audio, Photo, Signature) from S3 to local filesystem + * STEP 4: Package everything into a ZIP file + * + * String CSV excludes: Header, Divider, File, Audio, Photo, Signature, Map + */ + +import fs from 'fs'; +import path from 'path'; +import mongoose from 'mongoose'; +import { createObjectCsvWriter } from 'csv-writer'; +import { initDB } from '../src/utils/initDb'; +import { PatientModel } from '../src/models/Patient'; +import { StepModel } from '../src/models/Metadata'; +import { FieldType } from '@3dp4me/types'; +import { downloadFile } from '../src/utils/aws/awsS3Helpers'; +import archiver from 'archiver'; + +mongoose.set('strictQuery', false); + +const EXPORT_DIR = path.join(__dirname, 'step_exports'); +const MEDIA_EXPORT_DIR = path.join(__dirname, 'patients'); +const ZIP_OUTPUT_DIR = path.join(__dirname, 'exports'); + +const INCLUDED_TYPES = [ + FieldType.STRING, + FieldType.MULTILINE_STRING, + FieldType.NUMBER, + FieldType.DATE, + FieldType.PHONE, + FieldType.RADIO_BUTTON, + FieldType.MULTI_SELECT, + FieldType.TAGS, +]; + +const IGNORED_FIELD_TYPES = [ + FieldType.FILE, + FieldType.AUDIO, + FieldType.PHOTO, + FieldType.SIGNATURE, + FieldType.MAP, + FieldType.DIVIDER, + FieldType.HEADER, +]; + +const MEDIA_FIELD_TYPES = [ + FieldType.FILE, + FieldType.AUDIO, + FieldType.PHOTO, + FieldType.SIGNATURE, +]; + +// What to export? 
+interface ExportOptions {
+    includeDeleted?: boolean;
+    includeHidden?: boolean;
+    zipFilename?: string;
+}
+
+// Function to check if a file exists in S3 (file data, not a string, so it can't be exported to CSV)
+async function fileExistsInS3(s3Key: string): Promise<boolean> {
+    try {
+        await downloadFile(s3Key);
+        return true;
+    } catch (error) {
+        return false;
+    }
+}
+
+// Function to download and save a file from S3 with type detection (uses the file-type package to detect the type, defaults to .png if no type is detected)
+async function downloadAndSaveFileWithTypeDetection(
+    s3Key: string,
+    localPath: string,
+    originalFilename: string
+): Promise<boolean> {
+    try {
+        const s3Stream = await downloadFile(s3Key);
+
+        const chunks: Buffer[] = [];
+
+        return new Promise<boolean>((resolve, reject) => {
+            s3Stream
+                .on('data', (chunk) => {
+                    chunks.push(chunk);
+                })
+                .on('end', async () => {
+                    try {
+                        const buffer = Buffer.concat(chunks);
+                        const detectedType = await detectFileTypeFromBuffer(buffer);
+
+                        const properFilename = addProperExtension(originalFilename, detectedType);
+                        const properLocalPath = path.join(path.dirname(localPath), sanitizeFilename(properFilename));
+
+                        fs.writeFileSync(properLocalPath, buffer);
+
+                        if (detectedType) {
+                            console.log(`Downloaded with detected type '${detectedType}': ${properLocalPath}`);
+                        } else {
+                            console.log(`No type detected, defaulted to PNG: ${properLocalPath}`);
+                        }
+
+                        // Delete original file (only if it's a different file and exists)
+                        if (
+                            path.resolve(properLocalPath) !== path.resolve(localPath) &&
+                            fs.existsSync(localPath)
+                        ) {
+                            fs.unlinkSync(localPath);
+                            console.log(`Deleted duplicate: ${localPath}`);
+                        }
+
+                        resolve(true);
+                    } catch (error) {
+                        console.error(`Error processing file ${s3Key}:`, error);
+                        resolve(false);
+                    }
+                })
+                .on('error', (error) => {
+                    console.error(`Error downloading ${s3Key}:`, error);
+                    resolve(false);
+                });
+        });
+    } catch (error) {
+        console.error(`Error downloading ${s3Key}:`, error);
+        return false;
+    }
+}
+
+
+function sanitizeFilename(filename: string): string {
+    const ext = path.extname(filename);
+    const name = path.basename(filename, ext);
+    const sanitizedName = name.replace(/[^a-z0-9.-]/gi, '_').toLowerCase();
+    return sanitizedName + ext;
+}
+
+// Function to create ZIP archive
+async function createZipArchive(zipFilename: string): Promise<string> {
+    console.log('\n=== STEP 4: Creating ZIP Archive ===');
+
+    if (!fs.existsSync(ZIP_OUTPUT_DIR)) {
+        fs.mkdirSync(ZIP_OUTPUT_DIR, { recursive: true });
+    }
+
+    const zipPath = path.join(ZIP_OUTPUT_DIR, zipFilename);
+    const output = fs.createWriteStream(zipPath);
+    const archive = archiver('zip', {
+        zlib: { level: 9 } // Maximum compression
+    });
+
+    return new Promise<string>((resolve, reject) => {
+        output.on('close', () => {
+            const sizeInMB = (archive.pointer() / 1024 / 1024).toFixed(2);
+            console.log(`ZIP archive created: ${zipPath}`);
+            console.log(`Archive size: ${sizeInMB} MB`);
+            resolve(zipPath);
+        });
+
+        archive.on('error', (err) => {
+            console.error('Error creating ZIP archive:', err);
+            reject(err);
+        });
+
+        archive.pipe(output);
+
+        // Add patients.csv if it exists
+        const patientsCsvPath = path.join(__dirname, 'patients.csv');
+        if (fs.existsSync(patientsCsvPath)) {
+            archive.file(patientsCsvPath, { name: 'patients.csv' });
+            console.log('Added patients.csv to archive');
+        }
+
+        // Add step CSV files if directory exists
+        if (fs.existsSync(EXPORT_DIR)) {
+            archive.directory(EXPORT_DIR, 'step_csvs');
+            console.log('Added step CSV files to archive');
+        }
+
+        // Add media files if directory exists
+        
if (fs.existsSync(MEDIA_EXPORT_DIR)) { + archive.directory(MEDIA_EXPORT_DIR, 'patients'); + console.log('Added media files to archive'); + } + + archive.finalize(); + }); +} + +// STEP 1: Generate patient CSV (makes patients.csv) +async function generatePatientCSV() { + console.log('\n=== STEP 1: Generating Patient CSV ==='); + + const patients = await PatientModel.find(); + const patientRecords = patients.map(p => { + const obj = p.toObject(); + + return { + ...obj, + dateCreated: obj.dateCreated?.toISOString().slice(0, 10), + lastEdited: obj.lastEdited?.toISOString().slice(0, 10), + }; + }); + + const csvWriter = createObjectCsvWriter({ + path: path.join(__dirname, 'patients.csv'), + header: [ + { id: 'dateCreated', title: 'Date Created' }, + { id: 'orderId', title: 'Order ID' }, + { id: 'lastEdited', title: 'Last Edited' }, + { id: 'lastEditedBy', title: 'Last Edited By' }, + { id: 'status', title: 'Status' }, + { id: 'phoneNumber', title: 'Phone Number' }, + { id: 'orderYear', title: 'Order Year' }, + { id: 'firstName', title: 'First Name' }, + { id: 'fathersName', title: 'Father\'s Name' }, + { id: 'grandfathersName', title: 'Grandfather\'s Name' }, + { id: 'familyName', title: 'Family Name' }, + ] + }); + + await csvWriter.writeRecords(patientRecords); + console.log(`Generated patients.csv with ${patientRecords.length} records`); +} + +// STEP 2: Generate step CSVs (makes step_csvs/*.csv) +async function generateStepCSVs(options: ExportOptions = {}) { + const { includeDeleted = false, includeHidden = false } = options; // default not include hidden or deleted, can be changed + + console.log('\n=== STEP 2: Generating Step CSVs ==='); + console.log(`Options: includeDeleted=${includeDeleted}, includeHidden=${includeHidden}`); + + if (!fs.existsSync(EXPORT_DIR)) fs.mkdirSync(EXPORT_DIR); + + // Build query for step definitions based on options + const stepQuery: any = {}; + if (!includeDeleted) { + stepQuery.isDeleted = { $ne: true }; + } + if (!includeHidden) { + stepQuery.isHidden = { $ne: true }; + } + + // Get all step definitions from the database + const stepDefinitions = await StepModel.find(stepQuery); + console.log(`Found ${stepDefinitions.length} step definitions`); + + const patients = await PatientModel.find(); + console.log(`Found ${patients.length} patients`); + + for (const stepDef of stepDefinitions) { + const stepKey = stepDef.key; + console.log(`Processing step: ${stepKey}`); + + // Get the mongoose model for this step + let StepDataModel; + try { + StepDataModel = mongoose.model(stepKey); + } catch (error) { + console.log(`โš ๏ธ No model found for step ${stepKey}, skipping`); + continue; + } + + const records = []; + + for (const patient of patients) { + const stepDoc = await StepDataModel.findOne({ patientId: patient._id }); + if (!stepDoc) continue; + + const row: Record = { + patientId: patient.orderId, + }; + + // Process each field in the step definition + for (const field of stepDef.fields) { + // Skip hidden fields if not including them + if (!includeHidden && field.isHidden) continue; + + if (IGNORED_FIELD_TYPES.includes(field.fieldType)) continue; + + if (field.fieldType === FieldType.FIELD_GROUP && Array.isArray(field.subFields)) { + // Handle field groups (nested fields) + for (const subField of field.subFields) { + // Skip hidden subfields if not including them + if (!includeHidden && subField.isHidden) continue; + + if (INCLUDED_TYPES.includes(subField.fieldType)) { + row[subField.key] = formatField(stepDoc[subField.key], subField.fieldType); + } + } + } 
else if (INCLUDED_TYPES.includes(field.fieldType)) { + // Handle regular fields + row[field.key] = formatField(stepDoc[field.key], field.fieldType); + } + } + + records.push(row); + } + + if (records.length > 0) { + const csvWriter = createObjectCsvWriter({ + path: path.join(EXPORT_DIR, `${stepKey}.csv`), + header: Object.keys(records[0]).map(key => ({ id: key, title: key })), + }); + + await csvWriter.writeRecords(records); + console.log(`Wrote ${records.length} records to ${stepKey}.csv`); + } else { + console.log(`No records found for step ${stepKey}`); + } + } +} + +// STEP 3: Export media files (makes patients/*/*.jpg) +async function exportStepMedia(options: ExportOptions = {}) { + const { includeDeleted = false, includeHidden = false } = options; + + console.log('\n=== STEP 3: Exporting Media Files ==='); + console.log(`Options: includeDeleted=${includeDeleted}, includeHidden=${includeHidden}`); + + if (!fs.existsSync(MEDIA_EXPORT_DIR)) fs.mkdirSync(MEDIA_EXPORT_DIR); + + // Build query filter based on options + const stepFilter: any = {}; + if (!includeDeleted) { + stepFilter.isDeleted = { $ne: true }; + } + if (!includeHidden) { + stepFilter.isHidden = { $ne: true }; + } + + // Get step definitions based on filter + const stepDefinitions = await StepModel.find(stepFilter); + console.log(`Found ${stepDefinitions.length} step definitions`); + + const patients = await PatientModel.find(); + console.log(`Found ${patients.length} patients`); + + let totalFilesDownloaded = 0; + + for (const stepDef of stepDefinitions) { + const stepKey = stepDef.key; + console.log(`Processing step: ${stepKey}`); + + // Get the mongoose model for this step + let StepDataModel; + try { + StepDataModel = mongoose.model(stepKey); + } catch (error) { + console.log(`No model found for step ${stepKey}, skip`); + continue; + } + + for (const patient of patients) { + const stepDoc = await StepDataModel.findOne({ patientId: patient._id }); + if (!stepDoc) continue; + + const patientDir = path.join(MEDIA_EXPORT_DIR, patient.orderId); + const stepDir = path.join(patientDir, stepKey); + + // Track if we actually downloaded any files for this step + let hasDownloadedFiles = false; + + // Process regular fields + for (const field of stepDef.fields) { + // Check if field should be included based on options + if (!includeHidden && field.isHidden) continue; + + if (MEDIA_FIELD_TYPES.includes(field.fieldType)) { + const fileData = stepDoc[field.key]; + if (Array.isArray(fileData)) { + // Handle array of files + for (const file of fileData) { + if (file && file.filename) { + const s3Key = `${patient._id}/${stepKey}/${field.key}/${file.filename}`; + const fileExists = await fileExistsInS3(s3Key); + + if (fileExists) { + // Create directory only when we have actual files + if (!hasDownloadedFiles) { + if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); + if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); + hasDownloadedFiles = true; + } + + const sanitizedFilename = sanitizeFilename(file.filename); + const localPath = path.join(stepDir, sanitizedFilename); + const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, file.filename); + + if (success) { + console.log(`Downloaded: ${localPath}`); + totalFilesDownloaded++; + } + } + } + } + } else if (fileData && fileData.filename) { + // Handle single file + const s3Key = `${patient._id}/${stepKey}/${field.key}/${fileData.filename}`; + const fileExists = await fileExistsInS3(s3Key); + + if (fileExists) { + // 
Create directory only when we have actual files + if (!hasDownloadedFiles) { + if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); + if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); + hasDownloadedFiles = true; + } + + const sanitizedFilename = sanitizeFilename(fileData.filename); + const localPath = path.join(stepDir, sanitizedFilename); + const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, fileData.filename); + + if (success) { + console.log(`Downloaded: ${localPath}`); + totalFilesDownloaded++; + } + } + } + } + + // Handle field groups + if (field.fieldType === FieldType.FIELD_GROUP && Array.isArray(field.subFields)) { + for (const subField of field.subFields) { + // Check if subfield should be included based on options + if (!includeHidden && subField.isHidden) continue; + + if (MEDIA_FIELD_TYPES.includes(subField.fieldType)) { + const fileData = stepDoc[subField.key]; + if (Array.isArray(fileData)) { + // Handle array of files in subfield + for (const file of fileData) { + if (file && file.filename) { + const s3Key = `${patient._id}/${stepKey}/${subField.key}/${file.filename}`; + const fileExists = await fileExistsInS3(s3Key); + + if (fileExists) { + // Create directory only when we have actual files + if (!hasDownloadedFiles) { + if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); + if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); + hasDownloadedFiles = true; + } + + const sanitizedFilename = sanitizeFilename(file.filename); + const localPath = path.join(stepDir, sanitizedFilename); + const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, file.filename); + + if (success) { + console.log(`Downloaded: ${localPath}`); + totalFilesDownloaded++; + } + } + } + } + } else if (fileData && fileData.filename) { + // Handle single file in subfield + const s3Key = `${patient._id}/${stepKey}/${subField.key}/${fileData.filename}`; + const fileExists = await fileExistsInS3(s3Key); + + if (fileExists) { + // Create directory only when we have actual files + if (!hasDownloadedFiles) { + if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); + if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); + hasDownloadedFiles = true; + } + + const sanitizedFilename = sanitizeFilename(fileData.filename); + const localPath = path.join(stepDir, sanitizedFilename); + const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, fileData.filename); + + if (success) { + console.log(`Downloaded: ${localPath}`); + totalFilesDownloaded++; + } + } + } + } + } + } + } + } + } + + console.log(`Media export completed. Downloaded ${totalFilesDownloaded} files.`); +} + +// Function to format field values based on type +function formatField(value: any, type: FieldType): any { + if (!value) return ''; + if (type === FieldType.DATE) return new Date(value).toISOString().slice(0, 10); + if (type === FieldType.MULTI_SELECT || type === FieldType.TAGS) { + return Array.isArray(value) ? 
value.join(', ') : value; + } + return value; +} + +// Combined export function +async function runCombinedExport(options: ExportOptions = {}) { + const { + includeDeleted = false, + includeHidden = false, + zipFilename = `3dp4me_export_${new Date().toISOString().slice(0, 19).replace(/[:-]/g, '')}.zip` // zipFilename = `3dp4me_export_${new Date().toISOString().slice(0, 10).replace(/-/g, '')}.zip` <== this will name the zip without the min/sec + } = options; + + await initDB(); + console.log('Connected to DB'); + console.log('Export Configuration:', { includeDeleted, includeHidden, zipFilename }); + + try { + await generatePatientCSV(); + await generateStepCSVs({ includeDeleted, includeHidden }); + await exportStepMedia({ includeDeleted, includeHidden }); + + const zipPath = await createZipArchive(zipFilename); + + console.log('\nAll exports completed successfully'); + console.log(`ZIP archive: ${zipPath}`); + + return { zipPath, success: true }; + } catch (error) { + console.error('Error during export process:', error); + throw error; + } finally { + await mongoose.disconnect(); + console.log('Disconnected from DB'); + } +} + +// Main function for command line usage +async function main() { + const includeDeleted = process.argv.includes('--include-deleted'); + const includeHidden = process.argv.includes('--include-hidden'); + + const zipFilenameArg = process.argv.find(arg => arg.startsWith('--zip-filename=')); + const customZipFilename = zipFilenameArg ? zipFilenameArg.split('=')[1] : undefined; + + await runCombinedExport({ + includeDeleted, + includeHidden, + zipFilename: customZipFilename + }); +} + +// File type detection using file-type package +async function detectFileTypeFromBuffer(buffer: Buffer): Promise { + try { + const { fileTypeFromBuffer } = await import('file-type'); + const result = await fileTypeFromBuffer(buffer); + return result?.ext || null; + } catch (error) { + console.error('Error detecting file type:', error); // defaults to .png + return null; + } +} + +// Function to add the proper extension to a filename, default to .png if no type is detected +function addProperExtension(originalFilename: string, detectedType: string | null): string { + // If file already has an extension, keep it + const hasExtension = path.extname(originalFilename).length > 0; + if (hasExtension) { + return originalFilename; + } + + // If we detected a type, add the extension + if (detectedType) { + return `${originalFilename}.${detectedType}`; + } + + // Default fallback - most files are images + return `${originalFilename}.png`; +} + + +// Run if called directly +if (require.main === module) { + main().catch(err => { + console.error('Error in main function:', err); + process.exit(1); + }); +} \ No newline at end of file From 9646dff67dbfad822ce347a7affac9f21f79447c Mon Sep 17 00:00:00 2001 From: ElanaC Date: Sat, 6 Sep 2025 04:54:15 -0400 Subject: [PATCH 3/4] backend data export implementations completed --- apps/backend/scripts/make_patientcsv.ts | 48 ------------------------- apps/backend/scripts/patients.csv | 23 ------------ apps/backend/scripts/previewpatients.ts | 28 --------------- 3 files changed, 99 deletions(-) delete mode 100644 apps/backend/scripts/make_patientcsv.ts delete mode 100644 apps/backend/scripts/patients.csv delete mode 100644 apps/backend/scripts/previewpatients.ts diff --git a/apps/backend/scripts/make_patientcsv.ts b/apps/backend/scripts/make_patientcsv.ts deleted file mode 100644 index 81eb6d09..00000000 --- a/apps/backend/scripts/make_patientcsv.ts +++ /dev/null 
@@ -1,48 +0,0 @@ -import fs from 'fs'; -import path from 'path'; -import { createObjectCsvWriter } from 'csv-writer'; -import mongoose from 'mongoose'; -import { PatientModel } from '../src/models/Patient'; -import { initDB } from "../src/utils/initDb"; - -export async function main() { - await initDB(); - //console.log('URI from env:', process.env.MONGO_URI); - console.log("db initialized"); - - const patients = await PatientModel.find(); - const patientRecords = patients.map(p => { - const obj = p.toObject(); - - return { - ...obj, - dateCreated: obj.dateCreated?.toISOString().slice(0, 10), - lastEdited: obj.lastEdited?.toISOString().slice(0, 10), - }; - }); - - const csvWriter = createObjectCsvWriter({ - path: path.join(__dirname, 'patients.csv'), - header: [ - { id: 'dateCreated', title: 'Date Created' }, - { id: 'orderId', title: 'Order ID' }, - { id: 'lastEdited', title: 'Last Edited' }, - { id: 'lastEditedBy', title: 'Last Edited By' }, - { id: 'status', title: 'Status' }, - { id: 'phoneNumber', title: 'Phone Number' }, - { id: 'orderYear', title: 'Order Year' }, - { id: 'firstName', title: 'First Name' }, - { id: 'fathersName', title: 'Father\'s Name' }, - { id: 'grandfathersName', title: 'Grandfather\'s Name' }, - { id: 'familyName', title: 'Family Name' }, - ] - }); - - await csvWriter.writeRecords(patientRecords); - console.log('patients.csv generated!'); - await mongoose.disconnect(); - } - - main().catch(err => { - console.error('error generating CSV:', err); - }); diff --git a/apps/backend/scripts/patients.csv b/apps/backend/scripts/patients.csv deleted file mode 100644 index 5e588327..00000000 --- a/apps/backend/scripts/patients.csv +++ /dev/null @@ -1,23 +0,0 @@ -Date Created,Order ID,Last Edited,Last Edited By,Status,Phone Number,Order Year,First Name,Father's Name,Grandfather's Name,Family Name -2024-01-14,B00001,2024-01-14,Matthew Walowski,Active,,2024,Matthew,Logan,,Walowski -2024-01-14,B00002,2024-01-14,Matthew Walowski,Active,,2024,Matthew2,,, -2024-01-14,B00003,2024-01-14,Matthew Walowski,Active,,2024,Matthew3,,,W -2024-01-14,B00004,2024-01-14,Matthew Walowski,Active,,2024,Matthew4,,,W -2024-01-14,B00005,2024-01-14,Matthew Walowski,Active,,2024,Matthew5,,,W -2024-01-14,B00006,2025-03-02,Matthew Walowski,Active,,2024,Matthew5,,,W -2024-01-14,B00007,2024-02-04,Matthew Walowski,Active,,2024,Matthew7,,,W -2024-01-14,B00008,2024-01-14,Matthew Walowski,Archived,,2024,Matthew8,,,W -2024-01-14,B00009,2025-03-02,Matthew Walowski,Active,,2024,Matthew9,,,W -2024-01-14,B00010,2024-01-14,Matthew Walowski,Active,,2024,Matthew10,,,W -2024-01-14,B00011,2024-02-04,Matthew Walowski,Active,,2024,Matthew11,,,W -2024-01-14,B00012,2025-03-01,Matthew Walowski,Archived,,2024,Matthew12,,,W -2024-01-14,B00013,2024-04-13,Matthew Walowski,Active,,2024,Matthew13,,,W -2024-01-14,B00014,2025-03-02,ISJHH000 None,Active,,2024,I CHANGE IT!,,,W -2024-01-14,B00015,2024-01-15,Matthew Walowski,Active,,2024,Matthew15,,,W -2024-01-15,B00016,2024-01-15,Matthew Walowski,Active,,2024,Test,test,asdf,asdf -2024-01-28,B00017,2025-01-04,Matthew Walowski,Active,,2024,Tester,,,Patient -2024-04-06,B00018,2024-12-26,Matthew Walowski,Active,,2024,Tester,,,Patient -2025-02-02,C00001,2025-03-01,Matthew Walowski,Archived,,2025,NewOne,,,No -2025-03-02,C00002,2025-03-02,ISJHH000 None,Active,,2025,Should Have Gaza,,,f -2025-03-02,C00003,2025-06-01,Matthew Walowski,Active,,2025,NO TAGS,rrgda,, -2025-06-01,C00004,2025-06-01,Matthew Walowski,Active,,2025,New Patient,,,I TEST diff --git a/apps/backend/scripts/previewpatients.ts 
b/apps/backend/scripts/previewpatients.ts deleted file mode 100644 index 33316ac2..00000000 --- a/apps/backend/scripts/previewpatients.ts +++ /dev/null @@ -1,28 +0,0 @@ -// gets directly from PatientModel from Patient to double check that make_patientcsv is getting all the patientss - -import { initDB } from '../src/utils/initDb'; -import { PatientModel } from '../src/models/Patient'; - -async function main() { - await initDB(); // uses Doppler-managed MONGO_URI - console.log('โœ… DB connected'); - - const patients = await PatientModel.find(); // no `.lean()` so encryption is handled - console.log('๐Ÿงพ Found patients:', patients.length); - - for (const p of patients) { - console.log('๐Ÿ‘ค', { - orderId: p.orderId, - name: `${p.firstName} ${p.fathersName} ${p.familyName}`, - phone: p.phoneNumber, - status: p.status, - }); - } - - // Optional: show full raw object of first patient - console.log('\n๐Ÿ” Full data of first patient:\n', patients[0].toObject()); -} - -main().catch(err => { - console.error('โŒ Error:', err); -}); From 2b9aec24a9cd893af3298f8246c19bf96bdef9ad Mon Sep 17 00:00:00 2001 From: ElanaC Date: Wed, 10 Sep 2025 22:10:26 -0400 Subject: [PATCH 4/4] backend fixes and frontend tries --- apps/backend/package.json | 6 +- apps/backend/scripts/dataextraction.ts | 339 +++++++--------- apps/backend/src/index.ts | 3 - apps/backend/src/routes/api/export.ts | 44 ++ apps/backend/src/routes/api/index.ts | 1 + apps/backend/src/test.ts | 12 - apps/backend/src/utils/aws/awsS3Helpers.ts | 137 ++++++- apps/backend/src/utils/initDb.ts | 18 +- apps/frontend/src/api/api.ts | 375 +++--------------- .../components/ExportButton/ExportButton.tsx | 105 +++++ .../frontend/src/components/Navbar/Navbar.tsx | 7 + 11 files changed, 501 insertions(+), 546 deletions(-) create mode 100644 apps/backend/src/routes/api/export.ts delete mode 100644 apps/backend/src/test.ts create mode 100644 apps/frontend/src/components/ExportButton/ExportButton.tsx diff --git a/apps/backend/package.json b/apps/backend/package.json index f393bcfb..88257e5c 100644 --- a/apps/backend/package.json +++ b/apps/backend/package.json @@ -4,15 +4,16 @@ "dependencies": { "@aws-sdk/client-cognito-identity-provider": "^3.496.0", "@aws-sdk/client-s3": "^3.496.0", + "archiver": "^7.0.0", "aws-sdk-mock": "^5.1.0", "axios": "^1.6.0", "body-parser": "^1.19.0", "cors": "^2.8.5", "csv-writer": "^1.6.0", - "dotenv": "^17.2.1", "express": "^4.17.1", "express-async-errors": "^3.1.1", "express-fileupload": "^1.2.0", + "file-type": "^21.0.0", "helmet": "^7.1.0", "join-images": "^1.1.5", "lodash": "^4.17.21", @@ -30,6 +31,7 @@ "devDependencies": { "@3dp4me/types": "workspace:*", "@smithy/types": "^4.1.0", + "@types/archiver": "^6.0.0", "@types/body-parser": "^1.19.5", "@types/cors": "^2.8.17", "@types/express": "^4.17.21", @@ -43,7 +45,6 @@ "node-loader": "^2.1.0", "rimraf": "^5.0.5", "ts-loader": "^9.5.1", - "tsx": "^4.20.4", "typescript": "^5.3.3", "webpack": "^5.89.0", "webpack-cli": "^5.1.4" @@ -64,7 +65,6 @@ "lint": "eslint --fix src/**/*.ts", "lint:check": "eslint src/**/*.ts", "start": "rm -rf ./dist && tsc && doppler run -- node ./dist/src/index.js", - "dev": "rm -rf ./dist && tsc && doppler run -- node ./dist/src/test.js", "test": "cross-env S3_BUCKET_NAME=test jest --runInBand --forceExit" } } diff --git a/apps/backend/scripts/dataextraction.ts b/apps/backend/scripts/dataextraction.ts index 5faddf39..970d6bd6 100644 --- a/apps/backend/scripts/dataextraction.ts +++ b/apps/backend/scripts/dataextraction.ts @@ -3,7 +3,7 @@ * STEP 1: Generate 
CSV of all basic patient info * STEP 2: Generate CSV of all steps with string-convertible data: String, MultilineString, * Number, Date, Phone, RadioButton, MultiSelect, Tags - * STEP 3: Export media files (File, Audio, Photo, Signature) from S3 to local filesystem + * STEP 3: Export media files (File, Audio, Photo, Signature) from S3 to local filesystem (functions related to S3 are in awsS3Helpers.ts) * STEP 4: Package everything into a ZIP file * * String CSV excludes: Header, Divider, File, Audio, Photo, Signature, Map @@ -17,8 +17,9 @@ import { initDB } from '../src/utils/initDb'; import { PatientModel } from '../src/models/Patient'; import { StepModel } from '../src/models/Metadata'; import { FieldType } from '@3dp4me/types'; -import { downloadFile } from '../src/utils/aws/awsS3Helpers'; +import { downloadFile, fileExistsInS3, downloadAndSaveFileWithTypeDetection, sanitizeFilename } from '../src/utils/aws/awsS3Helpers'; import archiver from 'archiver'; +import { fileTypeFromBuffer } from 'file-type'; mongoose.set('strictQuery', false); @@ -51,7 +52,7 @@ const MEDIA_FIELD_TYPES = [ FieldType.FILE, FieldType.AUDIO, FieldType.PHOTO, - FieldType.SIGNATURE, + // FieldType.SIGNATURE, (not media, stored as an array of points on a canvas in mongo. generate an image of this signature and save it) ]; // What to export? @@ -61,83 +62,6 @@ interface ExportOptions { zipFilename?: string; } -// Function to check if a file exists in Step3 (not a string, unable to export to CSV) -async function fileExistsInS3(s3Key: string): Promise { - try { - await downloadFile(s3Key); - return true; - } catch (error) { - return false; - } -} - -// Function to download and save a file from Step3 with type detection (uses package to detect type, defaults to .png if no type is detected) -async function downloadAndSaveFileWithTypeDetection( - s3Key: string, - localPath: string, - originalFilename: string -): Promise { - try { - const s3Stream = await downloadFile(s3Key); - - const chunks: Buffer[] = []; - - return new Promise((resolve, reject) => { - s3Stream - .on('data', (chunk) => { - chunks.push(chunk); - }) - .on('end', async () => { - try { - const buffer = Buffer.concat(chunks); - const detectedType = await detectFileTypeFromBuffer(buffer); - - const properFilename = addProperExtension(originalFilename, detectedType); - const properLocalPath = path.join(path.dirname(localPath), sanitizeFilename(properFilename)); - - fs.writeFileSync(properLocalPath, buffer); - - if (detectedType) { - console.log(`Downloaded with detected type '${detectedType}': ${properLocalPath}`); - } else { - console.log(`No type detected, defaulted to PNG: ${properLocalPath}`); - } - - // Delete original file (only if it's a different file and exists) - if ( - path.resolve(properLocalPath) !== path.resolve(localPath) && - fs.existsSync(localPath) - ) { - fs.unlinkSync(localPath); - console.log(`Deleted duplicate: ${localPath}`); - } - - resolve(true); - } catch (error) { - console.error(`Error processing file ${s3Key}:`, error); - resolve(false); - } - }) - .on('error', (error) => { - console.error(`Error downloading ${s3Key}:`, error); - resolve(false); - }); - }); - } catch (error) { - console.error(`Error downloading ${s3Key}:`, error); - return false; - } -} - - -function sanitizeFilename(filename: string): string { - const ext = path.extname(filename); - const name = path.basename(filename, ext); - const sanitizedName = name.replace(/[^a-z0-9.-]/gi, '_').toLowerCase(); - return sanitizedName + ext; -} - -// Function to create ZIP 
archive async function createZipArchive(zipFilename: string): Promise { console.log('\n=== STEP 4: Creating ZIP Archive ==='); @@ -159,6 +83,18 @@ async function createZipArchive(zipFilename: string): Promise { resolve(zipPath); }); + // Good practice to catch warnings (ie stat failures and other non-blocking errors) + archive.on('warning', (err) => { + if (err.code === 'ENOENT') { + // Log warning for missing files but don't fail + console.warn('Archive warning - file not found:', err.message); + } else { + // Reject promise for other types of warnings as they indicate real issues + console.error('Archive warning (treating as error):', err); + reject(err); + } + }); + archive.on('error', (err) => { console.error('Error creating ZIP archive:', err); reject(err); @@ -199,8 +135,8 @@ async function generatePatientCSV() { return { ...obj, - dateCreated: obj.dateCreated?.toISOString().slice(0, 10), - lastEdited: obj.lastEdited?.toISOString().slice(0, 10), + dateCreated: obj.dateCreated?.toISOString(), + lastEdited: obj.lastEdited?.toISOString(), }; }); @@ -226,6 +162,26 @@ async function generatePatientCSV() { } // STEP 2: Generate step CSVs (makes step_csvs/*.csv) +// Helper function to get filtered step definitions (moved to global scope) +async function getSteps(options: ExportOptions): Promise { + const { includeDeleted = false, includeHidden = false } = options; + + // Build query filter based on options + const stepFilter: any = {}; + if (!includeDeleted) { + stepFilter.isDeleted = { $ne: true }; + } + if (!includeHidden) { + stepFilter.isHidden = { $ne: true }; + } + + // Get step definitions based on filter + const stepDefinitions = await StepModel.find(stepFilter).lean(); + console.log(`Found ${stepDefinitions.length} step definitions`); + + return stepDefinitions; +} + async function generateStepCSVs(options: ExportOptions = {}) { const { includeDeleted = false, includeHidden = false } = options; // default not include hidden or deleted, can be changed @@ -234,20 +190,10 @@ async function generateStepCSVs(options: ExportOptions = {}) { if (!fs.existsSync(EXPORT_DIR)) fs.mkdirSync(EXPORT_DIR); - // Build query for step definitions based on options - const stepQuery: any = {}; - if (!includeDeleted) { - stepQuery.isDeleted = { $ne: true }; - } - if (!includeHidden) { - stepQuery.isHidden = { $ne: true }; - } - - // Get all step definitions from the database - const stepDefinitions = await StepModel.find(stepQuery); - console.log(`Found ${stepDefinitions.length} step definitions`); + // Get step definitions using the global getSteps function + const stepDefinitions = await getSteps(options); - const patients = await PatientModel.find(); + const patients = await PatientModel.find().lean(); console.log(`Found ${patients.length} patients`); for (const stepDef of stepDefinitions) { @@ -259,7 +205,7 @@ async function generateStepCSVs(options: ExportOptions = {}) { try { StepDataModel = mongoose.model(stepKey); } catch (error) { - console.log(`โš ๏ธ No model found for step ${stepKey}, skipping`); + console.log(`No model found for step ${stepKey}, skipping`); continue; } @@ -275,16 +221,18 @@ async function generateStepCSVs(options: ExportOptions = {}) { // Process each field in the step definition for (const field of stepDef.fields) { - // Skip hidden fields if not including them + // Skip hidden or deleted fields if not including them if (!includeHidden && field.isHidden) continue; + if (!includeDeleted && field.isDeleted) continue; if (IGNORED_FIELD_TYPES.includes(field.fieldType)) 
continue; if (field.fieldType === FieldType.FIELD_GROUP && Array.isArray(field.subFields)) { // Handle field groups (nested fields) for (const subField of field.subFields) { - // Skip hidden subfields if not including them + // Skip hidden or deleted subfields if not including them if (!includeHidden && subField.isHidden) continue; + if (!includeDeleted && subField.isDeleted) continue; if (INCLUDED_TYPES.includes(subField.fieldType)) { row[subField.key] = formatField(stepDoc[subField.key], subField.fieldType); @@ -300,15 +248,42 @@ async function generateStepCSVs(options: ExportOptions = {}) { } if (records.length > 0) { + // Create a mapping of field keys to their display names + const fieldDisplayNames = new Map(); + // Add patient ID display name + fieldDisplayNames.set('patientId', 'Patient ID'); + + // Map field keys to display names from step definition + for (const field of stepDef.fields) { + if (!includeHidden && field.isHidden) continue; + if (!includeDeleted && field.isDeleted) continue; + if (IGNORED_FIELD_TYPES.includes(field.fieldType)) continue; + + if (field.fieldType === FieldType.FIELD_GROUP && Array.isArray(field.subFields)) { + for (const subField of field.subFields) { + if (!includeHidden && subField.isHidden) continue; + if (!includeDeleted && subField.isDeleted) continue; + if (INCLUDED_TYPES.includes(subField.fieldType)) { + fieldDisplayNames.set(subField.key, subField.displayName?.EN || subField.key); + } + } + } else if (INCLUDED_TYPES.includes(field.fieldType)) { + fieldDisplayNames.set(field.key, field.displayName?.EN || field.key); + } + } + const csvWriter = createObjectCsvWriter({ path: path.join(EXPORT_DIR, `${stepKey}.csv`), - header: Object.keys(records[0]).map(key => ({ id: key, title: key })), + header: Object.keys(records[0]).map(key => ({ + id: key, + title: fieldDisplayNames.get(key) || key + })), }); await csvWriter.writeRecords(records); console.log(`Wrote ${records.length} records to ${stepKey}.csv`); } else { - console.log(`No records found for step ${stepKey}`); + console.log(`No records found for step ${stepKey}, skip`); } } } @@ -322,18 +297,7 @@ async function exportStepMedia(options: ExportOptions = {}) { if (!fs.existsSync(MEDIA_EXPORT_DIR)) fs.mkdirSync(MEDIA_EXPORT_DIR); - // Build query filter based on options - const stepFilter: any = {}; - if (!includeDeleted) { - stepFilter.isDeleted = { $ne: true }; - } - if (!includeHidden) { - stepFilter.isHidden = { $ne: true }; - } - - // Get step definitions based on filter - const stepDefinitions = await StepModel.find(stepFilter); - console.log(`Found ${stepDefinitions.length} step definitions`); + const stepDefinitions = await getSteps(options); const patients = await PatientModel.find(); console.log(`Found ${patients.length} patients`); @@ -398,89 +362,23 @@ async function exportStepMedia(options: ExportOptions = {}) { } } else if (fileData && fileData.filename) { // Handle single file - const s3Key = `${patient._id}/${stepKey}/${field.key}/${fileData.filename}`; - const fileExists = await fileExistsInS3(s3Key); + const result = await downloadSingleFile( + patient, + stepKey, + field.key, + fileData, + patientDir, + stepDir, + hasDownloadedFiles + ); - if (fileExists) { - // Create directory only when we have actual files - if (!hasDownloadedFiles) { - if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); - if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); - hasDownloadedFiles = true; - } - - const sanitizedFilename = 
sanitizeFilename(fileData.filename); - const localPath = path.join(stepDir, sanitizedFilename); - const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, fileData.filename); - - if (success) { - console.log(`Downloaded: ${localPath}`); - totalFilesDownloaded++; - } + if (result.success) { + totalFilesDownloaded++; } + hasDownloadedFiles = result.hasDownloadedFiles; } } - // Handle field groups - if (field.fieldType === FieldType.FIELD_GROUP && Array.isArray(field.subFields)) { - for (const subField of field.subFields) { - // Check if subfield should be included based on options - if (!includeHidden && subField.isHidden) continue; - - if (MEDIA_FIELD_TYPES.includes(subField.fieldType)) { - const fileData = stepDoc[subField.key]; - if (Array.isArray(fileData)) { - // Handle array of files in subfield - for (const file of fileData) { - if (file && file.filename) { - const s3Key = `${patient._id}/${stepKey}/${subField.key}/${file.filename}`; - const fileExists = await fileExistsInS3(s3Key); - - if (fileExists) { - // Create directory only when we have actual files - if (!hasDownloadedFiles) { - if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); - if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); - hasDownloadedFiles = true; - } - - const sanitizedFilename = sanitizeFilename(file.filename); - const localPath = path.join(stepDir, sanitizedFilename); - const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, file.filename); - - if (success) { - console.log(`Downloaded: ${localPath}`); - totalFilesDownloaded++; - } - } - } - } - } else if (fileData && fileData.filename) { - // Handle single file in subfield - const s3Key = `${patient._id}/${stepKey}/${subField.key}/${fileData.filename}`; - const fileExists = await fileExistsInS3(s3Key); - - if (fileExists) { - // Create directory only when we have actual files - if (!hasDownloadedFiles) { - if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); - if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); - hasDownloadedFiles = true; - } - - const sanitizedFilename = sanitizeFilename(fileData.filename); - const localPath = path.join(stepDir, sanitizedFilename); - const success = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, fileData.filename); - - if (success) { - console.log(`Downloaded: ${localPath}`); - totalFilesDownloaded++; - } - } - } - } - } - } } } } @@ -491,19 +389,69 @@ async function exportStepMedia(options: ExportOptions = {}) { // Function to format field values based on type function formatField(value: any, type: FieldType): any { if (!value) return ''; - if (type === FieldType.DATE) return new Date(value).toISOString().slice(0, 10); + if (type === FieldType.DATE) return new Date(value).toISOString(); if (type === FieldType.MULTI_SELECT || type === FieldType.TAGS) { return Array.isArray(value) ? 
value.join(', ') : value; } + if (type === FieldType.MAP) { + // Format MAP data as "lat,lng" + if (value && typeof value === 'object') { + const lat = value.lat || value.latitude; + const lng = value.lng || value.longitude; + if (lat !== undefined && lng !== undefined) { + return `${lat},${lng}`; + } + } + return value; + } return value; } +// Add this helper function near the top of the file +async function downloadSingleFile( + patient: any, + stepKey: string, + fieldKey: string, + fileData: any, + patientDir: string, + stepDir: string, + hasDownloadedFiles: boolean +): Promise<{ success: boolean; hasDownloadedFiles: boolean }> { + if (!fileData || !fileData.filename) { + return { success: false, hasDownloadedFiles }; + } + + const s3Key = `${patient._id}/${stepKey}/${fieldKey}/${fileData.filename}`; + const fileExists = await fileExistsInS3(s3Key); + + if (!fileExists) { + return { success: false, hasDownloadedFiles }; + } + + // Create directory only when we have actual files + if (!hasDownloadedFiles) { + if (!fs.existsSync(patientDir)) fs.mkdirSync(patientDir, { recursive: true }); + if (!fs.existsSync(stepDir)) fs.mkdirSync(stepDir, { recursive: true }); + hasDownloadedFiles = true; + } + + const sanitizedFilename = sanitizeFilename(fileData.filename); + const localPath = path.join(stepDir, sanitizedFilename); + const downloadSuccess = await downloadAndSaveFileWithTypeDetection(s3Key, localPath, fileData.filename); + + if (downloadSuccess) { + console.log(`Downloaded: ${localPath}`); + } + + return { success: downloadSuccess, hasDownloadedFiles }; +} + // Combined export function -async function runCombinedExport(options: ExportOptions = {}) { +export async function runCombinedExport(options: ExportOptions = {}) { const { includeDeleted = false, includeHidden = false, - zipFilename = `3dp4me_export_${new Date().toISOString().slice(0, 19).replace(/[:-]/g, '')}.zip` // zipFilename = `3dp4me_export_${new Date().toISOString().slice(0, 10).replace(/-/g, '')}.zip` <== this will name the zip without the min/sec + zipFilename = `3dp4me_export_${new Date().toISOString().slice(0, 19).replace(/[:-]/g, '')}.zip` } = options; await initDB(); @@ -545,10 +493,11 @@ async function main() { }); } -// File type detection using file-type package + + +// Replace the detectFileTypeFromBuffer function async function detectFileTypeFromBuffer(buffer: Buffer): Promise { try { - const { fileTypeFromBuffer } = await import('file-type'); const result = await fileTypeFromBuffer(buffer); return result?.ext || null; } catch (error) { diff --git a/apps/backend/src/index.ts b/apps/backend/src/index.ts index 7cf090c2..e26d2cc4 100644 --- a/apps/backend/src/index.ts +++ b/apps/backend/src/index.ts @@ -1,7 +1,4 @@ import { initDB } from './utils/initDb'; -import { main } from './test'; - -initDB(main) /** diff --git a/apps/backend/src/routes/api/export.ts b/apps/backend/src/routes/api/export.ts new file mode 100644 index 00000000..04c540b1 --- /dev/null +++ b/apps/backend/src/routes/api/export.ts @@ -0,0 +1,44 @@ +import express, { Response } from 'express'; +import { AuthenticatedRequest } from '../../middleware/types'; +import { runCombinedExport } from '../../../scripts/dataextraction'; +import errorWrap from '../../utils/errorWrap'; +import path from 'path'; +import fs from 'fs'; + +export const router = express.Router(); + +router.get( + '/download', + errorWrap(async (req: AuthenticatedRequest, res: Response) => { + // Extract query parameters + const includeDeleted = req.query.includeDeleted === 'true'; + 
const includeHidden = req.query.includeHidden === 'true'; + + const { zipPath } = await runCombinedExport({ + includeDeleted, + includeHidden, + }); + + // Validate ZIP file exists and has content + await fs.promises.access(zipPath).catch(() => { + return res.status(500).send('ZIP file not found'); + }); + + const stats = await fs.promises.stat(zipPath); + if (stats.size === 0) { + return res.status(500).send('ZIP file is empty'); + } + + // Add a small delay to ensure file is fully written + await new Promise(resolve => setTimeout(resolve, 100)); + + res.download(zipPath, path.basename(zipPath), (err) => { + if (err) { + console.error('Error sending ZIP file:', err); + res.status(500).send('Export failed'); + } + }); + }), +); + +module.exports = router; \ No newline at end of file diff --git a/apps/backend/src/routes/api/index.ts b/apps/backend/src/routes/api/index.ts index 2737d2bb..296fa71f 100644 --- a/apps/backend/src/routes/api/index.ts +++ b/apps/backend/src/routes/api/index.ts @@ -9,6 +9,7 @@ router.use('/metadata', require('./metadata')); router.use('/users', require('./users')); router.use('/roles', require('./roles')); router.use('/public', require('./public')); +router.use('/export', require('./export')); // for export button // Disable the Twilio stuff for now // router.use('/messages', require('./messages')); diff --git a/apps/backend/src/test.ts b/apps/backend/src/test.ts deleted file mode 100644 index 9a6c3813..00000000 --- a/apps/backend/src/test.ts +++ /dev/null @@ -1,12 +0,0 @@ -import { initDB } from "utils/initDb"; -import { PatientModel } from "./models/Patient"; - - -export async function main() { - console.log("db initialized"); - - const patients = await PatientModel.find().lean(); - console.log(patients); -} - - diff --git a/apps/backend/src/utils/aws/awsS3Helpers.ts b/apps/backend/src/utils/aws/awsS3Helpers.ts index fb4e59fe..efc077b5 100644 --- a/apps/backend/src/utils/aws/awsS3Helpers.ts +++ b/apps/backend/src/utils/aws/awsS3Helpers.ts @@ -9,6 +9,9 @@ import { import { Readable } from 'stream'; import type { StreamingBlobPayloadInputTypes } from '@smithy/types'; import { BucketConfig } from './awsExports'; +import fs from 'fs'; +import path from 'path'; +import { fileTypeFromBuffer } from 'file-type'; // S3 Credential Object created with access id and secret key const S3_CREDENTIALS = { @@ -117,4 +120,136 @@ function getS3(credentials: typeof S3_CREDENTIALS, region: string) { }); return s3; -} \ No newline at end of file +} + +export const fileExistsInS3 = async (s3Key: string): Promise => { + try { + await downloadFile(s3Key); + return true; + } catch (error) { + return false; + } +}; + + +// Function to download and save a file from Step3 with type detection (uses package to detect type, defaults to .png if no type is detected) + +// Step 1: Download file from S3 to local temporary path using streaming +export const downloadFileToLocal = async ( + s3Key: string, + localPath: string +): Promise => { + const s3Stream = await downloadFile(s3Key); + const writeStream = fs.createWriteStream(localPath); + + return new Promise((resolve, reject) => { + s3Stream.pipe(writeStream) + .on('finish', () => { + console.log(`Downloaded to temporary location: ${localPath}`); + resolve(); + }) + .on('error', (error) => { + reject(error); + }); + }); +}; + +// Step 2: Determine file type from saved file on disk +const detectFileTypeFromFile = async (filePath: string): Promise => { + try { + const buffer = fs.readFileSync(filePath, { encoding: null }); // Read first 4KB for 
type detection + const fileTypeResult = await fileTypeFromBuffer(buffer); + return fileTypeResult?.ext || null; + } catch (error) { + console.error('Error detecting file type:', error); + return null; + } +}; + +// Step 3: Rename file with proper extension if needed +const renameFileWithProperExtension = async ( + currentPath: string, + originalFilename: string, + detectedType: string | null +): Promise => { + const properFilename = addProperExtension(originalFilename, detectedType); + const properLocalPath = path.join(path.dirname(currentPath), sanitizeFilename(properFilename)); + + // Only rename if the path is different + if (path.resolve(properLocalPath) !== path.resolve(currentPath)) { + fs.renameSync(currentPath, properLocalPath); + console.log(`Renamed file to: ${properLocalPath}`); + } + + return properLocalPath; +}; + +// Main function that orchestrates the three steps +export const downloadAndSaveFileWithTypeDetection = async ( + s3Key: string, + localPath: string, + originalFilename: string +): Promise => { + try { + // Create directory if it doesn't exist + const dir = path.dirname(localPath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + } + + // Step 1: Download to temporary location + const tempPath = `${localPath}.tmp`; + await downloadFileToLocal(s3Key, tempPath); + + // Step 2: Detect file type from saved file + const detectedType = await detectFileTypeFromFile(tempPath); + + // Step 3: Rename with proper extension + const finalPath = await renameFileWithProperExtension(tempPath, originalFilename, detectedType); + + if (detectedType) { + console.log(`Downloaded with detected type '${detectedType}': ${finalPath}`); + } else { + console.log(`No type detected, kept original name: ${finalPath}`); + } + + return true; + } catch (error) { + console.error('Error in downloadAndSaveFileWithTypeDetection:', error); + return false; + } +}; + +export const sanitizeFilename = (filename: string): string => { + const ext = path.extname(filename); + const name = path.basename(filename, ext); + const sanitizedName = name.replace(/[^a-z0-9.-]/gi, '_').toLowerCase(); + return sanitizedName + ext; +}; + +const detectFileTypeFromBuffer = async (buffer: Buffer): Promise => { + try { + const result = await fileTypeFromBuffer(buffer); + return result?.ext || null; + } catch (error) { + console.error('Error detecting file type:', error); // defaults to .png + return null; + } +}; + +// Function to add the proper extension to a filename, default to .png if no type is detected +const addProperExtension = (originalFilename: string, detectedType: string | null): string => { + // If file already has an extension, keep it + const hasExtension = path.extname(originalFilename).length > 0; + if (hasExtension) { + return originalFilename; + } + + // If we detected a type, add the extension + if (detectedType) { + return `${originalFilename}.${detectedType}`; + } + + // Default fallback - most files are images + return `${originalFilename}.png`; +}; \ No newline at end of file diff --git a/apps/backend/src/utils/initDb.ts b/apps/backend/src/utils/initDb.ts index 07a7c82f..8e55add6 100644 --- a/apps/backend/src/utils/initDb.ts +++ b/apps/backend/src/utils/initDb.ts @@ -2,6 +2,7 @@ import { Field, FieldType, PatientTagsField, + PatientTagSyria, ReservedStep, RootStep, RootStepFieldKeys, @@ -16,7 +17,6 @@ import encrypt from 'mongoose-encryption' import { StepModel } from '../models/Metadata' import { fileSchema } from '../schemas/fileSchema' import { signatureSchema } from 
'../schemas/signatureSchema' -import { PatientTagSyria } from '@3dp4me/types'; /** * Initalizes and connects to the DB. Should be called at app startup. @@ -47,17 +47,17 @@ const clearModels = async () => { // Migrations for root step const initReservedSteps = async () => { - log.info("Initializing the reserved step") + log.info('Initializing the reserved step') const rootStep = await StepModel.findOne({ key: ReservedStep.Root }).lean() if (!rootStep) { - log.info("Creating the reserved step") + log.info('Creating the reserved step') return StepModel.create(RootStep) } // Older version missing the tag field const tagField = rootStep.fields.find((f) => f.key === RootStepFieldKeys.Tags) if (!tagField) { - log.info("Tags is missing from reserved step, adding it") + log.info('Tags is missing from reserved step, adding it') return StepModel.updateOne( { key: ReservedStep.Root }, { $push: { fields: PatientTagsField } } @@ -67,17 +67,17 @@ const initReservedSteps = async () => { // Older version missing the syria option const syriaOption = tagField.options.find((o) => o.Question.EN === PatientTagSyria.Question.EN) if (!syriaOption) { - log.info("Syria is missing from tag options, adding it") + log.info('Syria is missing from tag options, adding it') return StepModel.updateOne( - { + { key: ReservedStep.Root, - "fields.key": RootStepFieldKeys.Tags + 'fields.key': RootStepFieldKeys.Tags, }, - { $push: { "fields.$.options": PatientTagSyria } } + { $push: { 'fields.$.options': PatientTagSyria } } ) } - log.info("Reserved step is up to date") + log.info('Reserved step is up to date') return null } diff --git a/apps/frontend/src/api/api.ts b/apps/frontend/src/api/api.ts index a394f431..4aa5b0b4 100644 --- a/apps/frontend/src/api/api.ts +++ b/apps/frontend/src/api/api.ts @@ -1,323 +1,52 @@ -import { BasePatient, Nullish, OmitDeep, Patient, Role, Step } from '@3dp4me/types' -import { CognitoIdentityServiceProvider } from 'aws-sdk' -import fileDownload from 'js-file-download' - -import instance from './axios-config' - -export type ApiResponse = { - success: boolean - message: string - result: T -} - -export type Paginated = { - data: T - count: number -} - -export const getPatientsCount = async (): Promise> => { - const requestString = '/patients/count' - const res = await instance.get(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getPatientsByPageNumberAndSearch = async ( - pageNumber: number, - nPerPage: number, - searchQuery = '' -): Promise>> => { - const requestString = `/patients?pageNumber=${pageNumber}&nPerPage=${nPerPage}&searchQuery=${searchQuery}` - const res = await instance.get(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getPatientsByStageAndPageNumberAndSearch = async ( - stage: string, - pageNumber: number, - nPerPage: number, - searchQuery = '' -): Promise>> => { - const requestString = `/stages/${stage}?pageNumber=${pageNumber}&nPerPage=${nPerPage}&searchQuery=${searchQuery}` - const res = await instance.get(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getPatientById = async (id: string): Promise> => { - const requestString = `/patients/${id}` - const res = await instance.get(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const postNewPatient = async ( - patientInfo: OmitDeep -): Promise> => { - const 
requestString = `/patients/` - const res = await instance.post(requestString, patientInfo) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const updateStage = async ( - patientId: string, - stage: string, - updatedStage: Record -): Promise> => { - const requestString = `/patients/${patientId}/${stage}` - const res = await instance.post(requestString, updatedStage) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const updatePatient = async ( - patientId: string, - updatedData: Partial -): Promise> => { - const requestString = `/patients/${patientId}` - const res = await instance.put(requestString, updatedData) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const deletePatientById = async (patientId: string): Promise> => { - const requestString = `/patients/${patientId}` - const res = await instance.delete(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getAllStepsMetadata = async ( - showHiddenFieldsAndSteps = false, - showReservedSteps = false -): Promise> => { - const requestString = `/metadata/steps?showHiddenFields=${showHiddenFieldsAndSteps}&showHiddenSteps=${showHiddenFieldsAndSteps}&showReservedSteps=${showReservedSteps}` - - /** - * In order to test this method and its steps, hardcode an entire object (subFields, displayName, etc.) from the database - * and push it to res.data.result[0].fields - */ - const res = await instance.get(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - return res.data -} - -export const updateMultipleSteps = async (updatedSteps: Step[]): Promise> => { - const requestString = '/metadata/steps' - - const res = await instance.put(requestString, updatedSteps) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const downloadBlobWithoutSaving = async ( - patientId: string, - stepKey: string, - fieldKey: string, - filename: string -): Promise> => { - const requestString = `/patients/${patientId}/files/${stepKey}/${fieldKey}/${filename}` - let res = null - - try { - res = await instance.get(requestString, { - responseType: 'blob', - }) - } catch (error) { - console.error(error) - return null - } - - if (!res) return null - - return res.data -} - -export const downloadFile = async ( - patientId: string, - stepKey: string, - fieldKey: string, - filename: string -) => { - const blob = await downloadBlobWithoutSaving(patientId, stepKey, fieldKey, filename) - - if (!blob) throw new Error('Could not download file') - - try { - await fileDownload(blob, filename) - } catch (error) { - throw new Error('Could not download file') - } -} - -export const uploadFile = async ( - patientId: string, - stepKey: string, - fieldKey: string, - filename: string, - filedata: File -) => { - const requestString = `/patients/${patientId}/files/${stepKey}/${fieldKey}/${filename}` - const formData = new FormData() - formData.append('uploadedFile', filedata) - formData.append('uploadedFileName', filename || filedata.name) - - const res = await instance.post(requestString, formData, { - headers: { - 'Content-Type': 'multipart/form-data', - }, - }) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const uploadSignatureDocument = async (filename: string, filedata: File) => { - const requestString = `/public/upload/signatureDocument` - const formData = new 
FormData() - formData.append('uploadedFile', filedata) - formData.append('uploadedFileName', filename || filedata.name) - - const res = await instance.post(requestString, formData, { - headers: { - 'Content-Type': 'multipart/form-data', - }, - }) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const deleteFile = async ( - patientId: string, - stepKey: string, - fieldKey: string, - filename: string -) => { - const requestString = `/patients/${patientId}/files/${stepKey}/${fieldKey}/${filename}` - const res = await instance.delete(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getAllRoles = async () => { - const requestString = `/roles` - const res = await instance.get(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const addRole = async (roleInfo: Partial) => { - const requestString = `/roles` - const res = await instance.post(requestString, roleInfo) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const deleteRole = async (userId: string) => { - const requestString = `/roles/${userId}` - const res = await instance.delete(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const editRole = async (userId: string, updatedRoleInfo: Partial) => { - const requestString = `/roles/${userId}` - const res = await instance.put(requestString, updatedRoleInfo) - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -// TODO: test endpoint or create issue for it -export const deleteUser = async (username: string) => { - const requestString = `/users/${username}` - const res = await instance.delete(requestString) - if (!res?.data?.success) throw new Error(res?.data?.message) - return res.data -} - -export const addUserRole = async (username: string, roleName: string) => { - const requestString = `/users/${username}/roles/${roleName}` - const res = await instance.put(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const removeUserRole = async (username: string, roleName: string) => { - const requestString = `/users/${username}/roles/${roleName}` - const res = await instance.delete(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getUsersByPageNumber = async ( - nPerPage: number -): Promise> => { - const requestString = `/users?nPerPage=${nPerPage}` - - const res = await instance.get(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getUsersByPageNumberAndToken = async ( - token: string, - nPerPage: number -): Promise> => { - const encodedToken = encodeURIComponent(token) - const requestString = `/users?token=${encodedToken}&nPerPage=${nPerPage}` - - const res = await instance.get(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const setUserAccess = async (username: string, access: string) => { - const requestString = `/users/${username}/access/${access}` - const res = await instance.put(requestString) - - if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} - -export const getSelf = async (): Promise> => { - const requestString = `/users/self` - const res = await instance.get(requestString) - 
- if (!res?.data?.success) throw new Error(res?.data?.message) - - return res.data -} + + + + + + + + + + + + + + + + 3DP4ME + + + +
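Note (not part of the patch): the replacement contents of apps/frontend/src/api/api.ts are not legible in this hunk, but ExportButton.tsx below imports triggerExportDownload from it. A minimal sketch of what that helper might look like, assuming the axios `instance` and js-file-download helpers the deleted code used, plus the GET /export route added earlier in this patch:

import fileDownload from 'js-file-download'

import instance from './axios-config'

export const triggerExportDownload = async (
    includeDeleted: boolean,
    includeHidden: boolean
): Promise<void> => {
    // The export route reads these flags from the query string as 'true'/'false'
    const requestString = `/export?includeDeleted=${includeDeleted}&includeHidden=${includeHidden}`
    const res = await instance.get(requestString, { responseType: 'blob' })
    fileDownload(res.data, 'export.zip') // 'export.zip' is an assumed filename
}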
+
+
diff --git a/apps/frontend/src/components/ExportButton/ExportButton.tsx b/apps/frontend/src/components/ExportButton/ExportButton.tsx
new file mode 100644
index 00000000..25ecc8a0
--- /dev/null
+++ b/apps/frontend/src/components/ExportButton/ExportButton.tsx
@@ -0,0 +1,105 @@
+import React, { useState } from 'react'
+import {
+    Dialog,
+    DialogTitle,
+    DialogContent,
+    DialogActions,
+    Button,
+    Checkbox,
+    FormControlLabel,
+    CircularProgress,
+} from '@mui/material'
+import { triggerExportDownload } from '../../api/api'
+
+interface ExportButtonProps {
+    buttonText?: string
+    onExportComplete?: () => void
+    onExportError?: (error: Error) => void
+}
+
+const ExportButton: React.FC<ExportButtonProps> = ({
+    buttonText = 'Export Data',
+    onExportComplete,
+    onExportError
+}) => {
+    const [open, setOpen] = useState(false)
+    const [includeDeleted, setIncludeDeleted] = useState(false)
+    const [includeHidden, setIncludeHidden] = useState(false)
+    const [loading, setLoading] = useState(false)
+
+    const handleDownload = async () => {
+        setLoading(true)
+        try {
+            await triggerExportDownload(includeDeleted, includeHidden)
+            setOpen(false)
+            onExportComplete?.()
+        } catch (error) {
+            console.error('Export failed:', error)
+            onExportError?.(error as Error)
+        } finally {
+            setLoading(false)
+        }
+    }
+
+    return (
+        <>
+            <Button onClick={() => setOpen(true)}>{buttonText}</Button>
+            <Dialog
+                open={open}
+                onClose={() => setOpen(false)}
+                aria-labelledby="export-dialog-title"
+            >
+                <DialogTitle id="export-dialog-title">Export Options</DialogTitle>
+                <DialogContent>
+                    <FormControlLabel
+                        control={
+                            <Checkbox
+                                checked={includeDeleted}
+                                onChange={(e) => setIncludeDeleted(e.target.checked)}
+                                disabled={loading}
+                            />
+                        }
+                        label="Include Deleted Steps"
+                    />
+                    <FormControlLabel
+                        control={
+                            <Checkbox
+                                checked={includeHidden}
+                                onChange={(e) => setIncludeHidden(e.target.checked)}
+                                disabled={loading}
+                            />
+                        }
+                        label="Include Hidden Fields"
+                    />
+                </DialogContent>
+                <DialogActions>
+                    <Button onClick={() => setOpen(false)} disabled={loading}>
+                        Cancel
+                    </Button>
+                    <Button onClick={handleDownload} disabled={loading}>
+                        {loading ? <CircularProgress size={20} /> : 'Download'}
+                    </Button>
+                </DialogActions>
+            </Dialog>
+        </>
+    )
+}
+
+export default ExportButton
diff --git a/apps/frontend/src/components/Navbar/Navbar.tsx b/apps/frontend/src/components/Navbar/Navbar.tsx
index c0030c56..b7f249f9 100644
--- a/apps/frontend/src/components/Navbar/Navbar.tsx
+++ b/apps/frontend/src/components/Navbar/Navbar.tsx
@@ -11,6 +11,7 @@ import { useTranslations } from '../../hooks/useTranslations'
 import { Context } from '../../store/Store'
 import { Routes } from '../../utils/constants'
 import AccountDropdown from '../AccountDropdown/AccountDropdown'
+import ExportButton from '../ExportButton/ExportButton'
 
 export interface NavbarProps {
     username: string
@@ -141,6 +142,12 @@ const Navbar = ({ username, userEmail }: NavbarProps) => {
                 {renderLinks()}
+                <ExportButton
+                    onExportComplete={() => alert('Export successful!')}
+                    onExportError={(err) => alert(`Export failed: ${err.message}`)}
+                />
+
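Note (not part of the patch): the export route added earlier calls runCombinedExport, but its implementation is outside this hunk. A minimal sketch of the shape that route assumes, namely an async helper that builds the export files and returns the path to a finished ZIP, using the archiver package; buildExportDirectory is a hypothetical stand-in for whatever assembles the CSV/file tree to export:

import archiver from 'archiver'
import fs from 'fs'
import os from 'os'
import path from 'path'

interface ExportOptions {
    includeDeleted: boolean
    includeHidden: boolean
}

// Hypothetical: assembles the files to export and returns the directory they were written to
declare function buildExportDirectory(options: ExportOptions): Promise<string>

export const runCombinedExport = async (options: ExportOptions): Promise<{ zipPath: string }> => {
    const exportDir = await buildExportDirectory(options)
    const zipPath = path.join(os.tmpdir(), `export-${Date.now()}.zip`)

    await new Promise<void>((resolve, reject) => {
        const output = fs.createWriteStream(zipPath)
        const archive = archiver('zip', { zlib: { level: 9 } })

        output.on('close', () => resolve()) // the ZIP is fully written once the stream closes
        archive.on('error', reject)

        archive.pipe(output)
        archive.directory(exportDir, false) // add the whole export directory at the ZIP root
        archive.finalize()
    })

    return { zipPath }
}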