diff --git a/fileUpdate.js b/fileUpdate.js new file mode 100644 index 000000000..6d7b80f26 --- /dev/null +++ b/fileUpdate.js @@ -0,0 +1,120 @@ +const fs = require('fs'); +const path = require('path'); +const { cliux, messageHandler } = require('@contentstack/cli-utilities'); +const isEmpty = (value) => value === null || value === undefined || + (typeof value === 'object' && Object.keys(value).length === 0) || + (typeof value === 'string' && value.trim().length === 0); +const config = { + plan: { + dropdown: { optionLimit: 100 } + }, + cmsType: null, + isLocalPath: true, + awsData: { + awsRegion: '', + awsAccessKeyId: '', + awsSecretAccessKey: '', + awsSessionToken: '', + bucketName: '', + bucketKey: '' + }, + localPath: null +}; + +const configFilePath = path.resolve(path?.join?.('upload-api', 'src', 'config', 'index.ts')); + +const ensureDirectoryExists = (filePath) => { + const dir = path.dirname(filePath); + if (!fs.existsSync(dir)) { + fs.mkdirSync(dir, { recursive: true }); + console.log('📂 Created missing directory:', dir); + } +}; + +const inquireRequireFieldValidation = (input) => { + if (isEmpty(input)) { + return messageHandler.parse('Please enter the path'); + } + if (!fs.existsSync(input)) { + return messageHandler.parse('The specified path does not exist. 
Please enter a valid path.'); + } + return true; +}; + +const typeSwitcher = async (type) => { + switch (type) { + case 'Aws S3': { + const awsData = { + awsRegion: await cliux.inquire({ + type: 'input', + message: 'Enter AWS Region', + name: 'awsRegion', + validate: inquireRequireFieldValidation + }), + awsAccessKeyId: await cliux.inquire({ + type: 'input', + message: 'Enter AWS Access Key Id', + name: 'awsAccessKeyId', + validate: inquireRequireFieldValidation + }), + awsSecretAccessKey: await cliux.inquire({ + type: 'input', + message: 'Enter AWS Secret Access Key', + name: 'awsSecretAccessKey', + validate: inquireRequireFieldValidation + }), + }; + const isSessionToken = await cliux.inquire({ + choices: ['yes', 'no'], + type: 'list', + name: 'isSessionToken', + message: 'Do you have a Session Token?' + }); + if (isSessionToken === 'yes') { + awsData.awsSessionToken = await cliux.inquire({ + type: 'input', + message: 'Enter AWS Session Token', + name: 'awsSessionToken', + validate: inquireRequireFieldValidation + }); + } + return awsData; + } + case 'Locale Path': { + return await cliux.inquire({ + type: 'input', + message: 'Enter file path', + name: 'filePath', + validate: inquireRequireFieldValidation + }); + } + default: + console.log('⚠️ Invalid type provided'); + return; + } +}; + +const XMLMigration = async () => { + const typeOfcms = await cliux.inquire({ + choices: ['sitecore', 'contentful'], + type: 'list', + name: 'value', + message: 'Choose the option to proceed with your legacy CMS:' + }); + + const data = await typeSwitcher('Locale Path'); + if (typeof typeOfcms === 'string') { + config.cmsType = typeOfcms; + } else { + console.log('⚠️ Error: Expected a string for typeOfcms but got an object.'); + } + if (typeof data === 'string') { + config.localPath = data; + } else { + console.log('⚠️ Error: Expected a string for localPath but got an object.'); + } + ensureDirectoryExists(configFilePath); + fs.writeFileSync(configFilePath, `export default 
${JSON.stringify(config, null, 2)};`, 'utf8'); +}; + +XMLMigration(); diff --git a/package.json b/package.json index 305fec691..f58d5feaa 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,9 @@ "api": "cd ./api && npm run dev", "upload": "cd ./upload-api && npm start", "ui": "cd ./ui && npm start", - "start": "node index.js" + "setup:file": "npm i && node fileUpdate.js", + "create:env": "node index.js", + "setup:mac": "bash setup.sh" }, "repository": { "type": "git", @@ -36,5 +38,8 @@ "validate-branch-name": { "pattern": "^(feature|bugfix|hotfix)/[a-z0-9-]{5,30}$", "errorMsg": "Please add valid branch name!" + }, + "dependencies": { + "@contentstack/cli-utilities": "^1.8.4" } -} +} \ No newline at end of file diff --git a/setup.sh b/setup.sh new file mode 100755 index 000000000..fb877e1b3 --- /dev/null +++ b/setup.sh @@ -0,0 +1,95 @@ +#!/bin/bash + +# Get the script's directory (ensures correct paths) +SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" +cd "$SCRIPT_DIR" || exit 1 + +# Install NVM if not installed +if ! command -v nvm &> /dev/null; then + echo "Installing NVM..." + curl -o- https://raw.githubusercontent.com/nvm-sh/nvm/v0.39.4/install.sh | bash + + export NVM_DIR="$HOME/.nvm" + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" + [ -s "$NVM_DIR/bash_completion" ] && \. "$NVM_DIR/bash_completion" +else + export NVM_DIR="$HOME/.nvm" + [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh" +fi + +# Ensure Node.js 21 is installed and used +NODE_VERSION=$(node -v 2>/dev/null) +if [[ "$NODE_VERSION" != v21.* ]]; then + echo "Installing and using Node.js 21..." + nvm install 21 +fi +nvm use 21 + +# Setup CLI +echo "Setting up CLI repo..." +cd "$SCRIPT_DIR/cli" || exit 1 + +# Check if current user can write to node_modules +if [ -w node_modules ] || [ ! -d node_modules ]; then + npm run setup-repo --force +else + echo "Permission issue detected. Trying with sudo..." 
+ sudo npm run setup-repo --force +fi + +# Return to script root +cd "$SCRIPT_DIR" || exit 1 + +# Fix npm cache permissions +echo "Fixing npm cache permissions..." +sudo chown -R $(id -u):$(id -g) "$HOME/.npm" + +# Start With Env File +echo "Creating .env file..." +npm run create:env + +echo "Updating config file..." +npm run setup:file + +# Start services in new terminals +echo "Starting services in new terminals..." + +if [[ "$OSTYPE" == "darwin"* ]]; then + # macOS + osascript -e "tell application \"Terminal\" to do script \" + source \$HOME/.nvm/nvm.sh && nvm use 21 && + cd '$SCRIPT_DIR/api' && + echo 'Cleaning API dependencies...' && + rm -rf node_modules package-lock.json && + npm install && + npm run dev + \"" + osascript -e "tell application \"Terminal\" to do script \" + source \$HOME/.nvm/nvm.sh && nvm use 21 && + cd '$SCRIPT_DIR/upload-api' && + echo 'Cleaning upload-api dependencies...' && + rm -rf node_modules package-lock.json && + rm -rf migration-sitecore/node_modules migration-sitecore/package-lock.json && + npm install && + npm run start + \"" + osascript -e "tell application \"Terminal\" to do script \" + source \$HOME/.nvm/nvm.sh && nvm use 21 && + cd '$SCRIPT_DIR/ui' && + echo 'Cleaning UI dependencies...' && + rm -rf node_modules package-lock.json && + npm install && + npm run start + \"" + +elif [[ "$OSTYPE" == "linux-gnu"* ]]; then + # Linux (GNOME Terminal) + gnome-terminal -- bash -c "source $HOME/.nvm/nvm.sh && nvm use 21 && cd '$SCRIPT_DIR/api' && npm install && npm run dev; exec bash" + gnome-terminal -- bash -c "source $HOME/.nvm/nvm.sh && nvm use 21 && cd '$SCRIPT_DIR/upload-api' && npm install && npm run start; exec bash" + gnome-terminal -- bash -c "source $HOME/.nvm/nvm.sh && nvm use 21 && cd '$SCRIPT_DIR/ui' && npm install && npm run start; exec bash" +else + echo "Unsupported OS: $OSTYPE" + exit 1 +fi + +echo "All services started!" 
\ No newline at end of file diff --git a/upload-api/src/config/index.ts b/upload-api/src/config/index.ts index a3e627499..ee1d411a1 100644 --- a/upload-api/src/config/index.ts +++ b/upload-api/src/config/index.ts @@ -9,8 +9,8 @@ export default { awsAccessKeyId: '', awsSecretAccessKey: '', awsSessionToken: '', - bucketName: 'migartion-test', - buketKey: 'project/package 45.zip' + bucketName: '', + bucketKey: '' }, // localPath: '/Users/sayali.joshi/Downloads/contentfulDummyEmbedData.json' //package 45.zip' localPath: '/Users/umesh.more/Documents/ui-migration/migration-v2-node-server/upload-api/extracted_files/package 45.zip' diff --git a/upload-api/src/models/types.ts b/upload-api/src/models/types.ts index c86ca5ca2..2492a9b94 100644 --- a/upload-api/src/models/types.ts +++ b/upload-api/src/models/types.ts @@ -12,7 +12,7 @@ export interface Config { awsSecretAccessKey: string; awsSessionToken: string; bucketName: string; - buketKey: string; + bucketKey: string; }; localPath: string; } diff --git a/upload-api/src/routes/index.ts b/upload-api/src/routes/index.ts index 2d6c91b80..8ef1cb7bb 100644 --- a/upload-api/src/routes/index.ts +++ b/upload-api/src/routes/index.ts @@ -136,7 +136,7 @@ router.get('/validator', express.json(), fileOperationLimiter, async function (r throw new Error('No data collected from the stream.'); } - const data = await handleFileProcessing(fileExt, xmlData, cmsType,name); + const data = await handleFileProcessing(fileExt, xmlData, cmsType, name); res.status(data?.status || 200).json(data); if (data?.status === 200) { const filePath = path.join(__dirname, '..', '..', 'extracted_files', `${name}.json`); @@ -144,7 +144,7 @@ router.get('/validator', express.json(), fileOperationLimiter, async function (r } }); } - else{ + else { // Create a writable stream to save the downloaded zip file let zipBuffer = Buffer.alloc(0); @@ -162,67 +162,67 @@ router.get('/validator', express.json(), fileOperationLimiter, async function (r if (!zipBuffer) { throw 
new Error('No data collected from the stream.'); } - const data = await handleFileProcessing(fileExt, zipBuffer, cmsType,name); + const data = await handleFileProcessing(fileExt, zipBuffer, cmsType, name); res.status(data?.status || 200).json(data); if (data?.status === 200) { const filePath = path.join(__dirname, '..', '..', 'extracted_files', name); createMapper(filePath, projectId, app_token, affix, config); } }); - } - } - }else { - const params = { - Bucket: config?.awsData?.bucketName, - Key: config?.awsData?.buketKey - }; - const getObjectCommand = new GetObjectCommand(params); - // Get the object from S3 - const s3File = await client.send(getObjectCommand); - //file Name From key - const fileName = params?.Key?.split?.('/')?.pop?.() ?? ''; - //file ext from fileName - const fileExt = fileName?.split?.('.')?.pop?.() ?? 'test'; - - if (!s3File?.Body) { - throw new Error('Empty response body from S3'); - } + } + } + } else { + const params = { + Bucket: config?.awsData?.bucketName, + Key: config?.awsData?.bucketKey + }; + const getObjectCommand = new GetObjectCommand(params); + // Get the object from S3 + const s3File = await client.send(getObjectCommand); + //file Name From key + const fileName = params?.Key?.split?.('/')?.pop?.() ?? ''; + //file ext from fileName + const fileExt = fileName?.split?.('.')?.pop?.() ?? 
'test'; + + if (!s3File?.Body) { + throw new Error('Empty response body from S3'); + } - const bodyStream: Readable = s3File?.Body as Readable; + const bodyStream: Readable = s3File?.Body as Readable; - // Create a writable stream to save the downloaded zip file - const zipFileStream = createWriteStream(`${fileName}`); + // Create a writable stream to save the downloaded zip file + const zipFileStream = createWriteStream(`${fileName}`); - // // Pipe the S3 object's body to the writable stream - bodyStream.pipe(zipFileStream); + // // Pipe the S3 object's body to the writable stream + bodyStream.pipe(zipFileStream); - // Create a writable stream to save the downloaded zip file - let zipBuffer: Buffer | null = null; + // Create a writable stream to save the downloaded zip file + let zipBuffer: Buffer | null = null; - // Collect the data from the stream into a buffer - bodyStream.on('data', (chunk) => { - if (zipBuffer === null) { - zipBuffer = chunk; - } else { - zipBuffer = Buffer.concat([zipBuffer, chunk]); - } - }); + // Collect the data from the stream into a buffer + bodyStream.on('data', (chunk) => { + if (zipBuffer === null) { + zipBuffer = chunk; + } else { + zipBuffer = Buffer.concat([zipBuffer, chunk]); + } + }); - //buffer fully stremd - bodyStream.on('end', async () => { - if (!zipBuffer) { - throw new Error('No data collected from the stream.'); - } + //buffer fully stremd + bodyStream.on('end', async () => { + if (!zipBuffer) { + throw new Error('No data collected from the stream.'); + } - const data = await handleFileProcessing(fileExt, zipBuffer, cmsType,fileName); - res.json(data); - res.send('file valited sucessfully.'); - const filePath = path.join(__dirname, '..', '..', 'extracted_files', fileName); - console.log("🚀 ~ bodyStream.on ~ filePath:", filePath) - createMapper(filePath, projectId, app_token, affix, config); - }); + const data = await handleFileProcessing(fileExt, zipBuffer, cmsType, fileName); + res.json(data); + res.send('file valited 
sucessfully.'); + const filePath = path.join(__dirname, '..', '..', 'extracted_files', fileName); + console.log("🚀 ~ bodyStream.on ~ filePath:", filePath) + createMapper(filePath, projectId, app_token, affix, config); + }); + } } -} catch (err: any) { console.error('🚀 ~ router.get ~ err:', err); }