diff --git a/templates/cli/lib/commands/generic.js.twig b/templates/cli/lib/commands/generic.js.twig
index 9a6ac64fd..5ca4212a7 100644
--- a/templates/cli/lib/commands/generic.js.twig
+++ b/templates/cli/lib/commands/generic.js.twig
@@ -188,7 +188,8 @@ const logout = new Command("logout")
         const sessions = globalConfig.getSessions();
         const current = globalConfig.getCurrentSession();
 
-        if (current === '') {
+        if (current === '' || !sessions.length) {
+            log('No active sessions found.');
             return;
         }
         if (sessions.length === 1) {
diff --git a/templates/cli/lib/commands/pull.js.twig b/templates/cli/lib/commands/pull.js.twig
index 5855af39e..50e901286 100644
--- a/templates/cli/lib/commands/pull.js.twig
+++ b/templates/cli/lib/commands/pull.js.twig
@@ -27,7 +27,7 @@ const pullResources = async () => {
     if (cliConfig.all) {
         for (let action of Object.values(actions)) {
             cliConfig.all = true;
-            await action();
+            await action({});
         }
     } else {
         const answers = await inquirer.prompt(questionsPullResources[0]);
@@ -92,7 +92,7 @@ const pullFunctions = async ({ code }) => {
         if (!fs.existsSync(func['path'])) {
             fs.mkdirSync(func['path'], { recursive: true });
         }
-        
+
         if(code === false) {
             warn("Source code download skipped.");
         } else if(!func['deployment']) {
@@ -114,14 +114,14 @@ const pullFunctions = async ({ code }) => {
             overrideForCli: true,
             parseOutput: false
         });
-        
+
         tar.extract({
             sync: true,
             cwd: func['path'],
             file: compressedFileName,
             strict: false,
         });
-        
+
         fs.rmSync(compressedFileName);
     }
 }
diff --git a/templates/cli/lib/commands/push.js.twig b/templates/cli/lib/commands/push.js.twig
index d516f8bf2..7a0072bbf 100644
--- a/templates/cli/lib/commands/push.js.twig
+++ b/templates/cli/lib/commands/push.js.twig
@@ -2,7 +2,7 @@ const chalk = require('chalk');
 const inquirer = require("inquirer");
 const JSONbig = require("json-bigint")({ storeAsString: false });
 const { Command } = require("commander");
-const { localConfig, globalConfig } = require("../config");
+const { localConfig, globalConfig, KeyAttributes } = require("../config");
 const { Spinner, SPINNER_ARC, SPINNER_DOTS } = require('../spinner');
 const { paginate } = require('../paginate');
 const { questionsPushBuckets, questionsPushTeams, questionsPushFunctions, questionsGetEntrypoint, questionsPushCollections, questionsConfirmPushCollections, questionsPushMessagingTopics, questionsPushResources } = require("../questions");
@@ -37,6 +37,7 @@ const {
     databasesUpdateRelationshipAttribute,
     databasesCreateRelationshipAttribute,
     databasesDeleteAttribute,
+    databasesDeleteIndex,
     databasesListAttributes,
     databasesListIndexes,
     databasesUpdateCollection
@@ -68,8 +69,9 @@ const { checkDeployConditions } = require('../utils');
 const STEP_SIZE = 100; // Resources
 const POLL_DEBOUNCE = 2000; // Milliseconds
 const POLL_MAX_DEBOUNCE = 1800; // Times of POLL_DEBOUNCE (1 hour)
+const POLL_DEFAULT_VALUE = 30;
 
-let pollMaxDebounces = 30;
+let pollMaxDebounces = POLL_DEFAULT_VALUE;
 
 const changeableKeys = ['status', 'required', 'xdefault', 'elements', 'min', 'max', 'default', 'error'];
 
@@ -90,11 +92,13 @@ const awaitPools = {
             return true;
         }
 
-        let steps = Math.max(1, Math.ceil(total / STEP_SIZE));
-        if (steps > 1 && iteration === 1) {
-            pollMaxDebounces *= steps;
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(total / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
 
-            log('Found a large number of attributes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+                log('Found a large number of attributes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
         }
 
         await new Promise(resolve => setTimeout(resolve, POLL_DEBOUNCE));
@@ -121,11 +125,13 @@ const awaitPools = {
             return true;
         }
 
-        let steps = Math.max(1, Math.ceil(total / STEP_SIZE));
-        if (steps > 1 && iteration === 1) {
-            pollMaxDebounces *= steps;
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(total / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
 
-            log('Found a large number of indexes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+                log('Found a large number of indexes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
         }
 
         await new Promise(resolve => setTimeout(resolve, POLL_DEBOUNCE));
@@ -151,11 +157,13 @@ const awaitPools = {
             return true;
         }
 
-        let steps = Math.max(1, Math.ceil(total / STEP_SIZE));
-        if (steps > 1 && iteration === 1) {
-            pollMaxDebounces *= steps;
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(total / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
 
-            log('Found a large number of variables, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+                log('Found a large number of variables, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
         }
 
         await new Promise(resolve => setTimeout(resolve, POLL_DEBOUNCE));
@@ -170,11 +178,13 @@ const awaitPools = {
             return false;
         }
 
-        let steps = Math.max(1, Math.ceil(attributeKeys.length / STEP_SIZE));
-        if (steps > 1 && iteration === 1) {
-            pollMaxDebounces *= steps;
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(attributeKeys.length / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
 
-            log('Found a large number of attributes to be deleted. Increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+                log('Found a large number of attributes to be deleted. Increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
         }
 
         const { attributes } = await paginate(databasesListAttributes, {
@@ -203,11 +213,13 @@ const awaitPools = {
             return false;
         }
 
-        let steps = Math.max(1, Math.ceil(attributeKeys.length / STEP_SIZE));
-        if (steps > 1 && iteration === 1) {
-            pollMaxDebounces *= steps;
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(attributeKeys.length / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
 
-            log('Creating a large number of attributes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+                log('Creating a large number of attributes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
         }
 
         const { attributes } = await paginate(databasesListAttributes, {
@@ -243,16 +255,53 @@ const awaitPools = {
             iteration + 1
         );
     },
+    deleteIndexes: async (databaseId, collectionId, indexesKeys, iteration = 1) => {
+        if (iteration > pollMaxDebounces) {
+            return false;
+        }
+
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(indexesKeys.length / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
+
+                log('Found a large number of indexes to be deleted. Increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
+        }
+
+        const { indexes } = await paginate(databasesListIndexes, {
+            databaseId,
+            collectionId,
+            parseOutput: false
+        }, 100, 'indexes');
+
+        const ready = indexesKeys.filter(index => indexes.includes(index.key));
+
+        if (ready.length === 0) {
+            return true;
+        }
+
+        await new Promise(resolve => setTimeout(resolve, POLL_DEBOUNCE));
+
+        return await awaitPools.expectIndexes(
+            databaseId,
+            collectionId,
+            indexesKeys,
+            iteration + 1
+        );
+    },
     expectIndexes: async (databaseId, collectionId, indexKeys, iteration = 1) => {
         if (iteration > pollMaxDebounces) {
             return false;
         }
 
-        let steps = Math.max(1, Math.ceil(indexKeys.length / STEP_SIZE));
-        if (steps > 1 && iteration === 1) {
-            pollMaxDebounces *= steps;
+        if (pollMaxDebounces === POLL_DEFAULT_VALUE) {
+            let steps = Math.max(1, Math.ceil(indexKeys.length / STEP_SIZE));
+            if (steps > 1 && iteration === 1) {
+                pollMaxDebounces *= steps;
 
-            log('Creating a large number of indexes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+                log('Creating a large number of indexes, increasing timeout to ' + (pollMaxDebounces * POLL_DEBOUNCE / 1000 / 60) + ' minutes')
+            }
         }
 
         const { indexes } = await paginate(databasesListIndexes, {
@@ -523,8 +572,18 @@ const updateAttribute = async (databaseId, collectionId, attribute) => {
         })
     }
 }
-const deleteAttribute = async (collection, attribute) => {
-    log(`Deleting attribute ${attribute.key} of ${collection.name} ( ${collection['$id']} )`);
+const deleteAttribute = async (collection, attribute, isIndex = false) => {
+    log(`Deleting ${isIndex ? 'index' : 'attribute'} ${attribute.key} of ${collection.name} ( ${collection['$id']} )`);
+
+    if (isIndex) {
+        await databasesDeleteIndex({
+            databaseId: collection['databaseId'],
+            collectionId: collection['$id'],
+            key: attribute.key,
+            parseOutput: false
+        });
+        return;
+    }
 
     await databasesDeleteAttribute({
         databaseId: collection['databaseId'],
@@ -555,6 +614,10 @@ const checkAttributeChanges = (remote, local, collection, recraeting = true) =>
     let attribute = remote;
 
     for (let key of Object.keys(remote)) {
+        if (!KeyAttributes.has(key)) {
+            continue;
+        }
+
         if (changeableKeys.includes(key)) {
             if (!recraeting) {
                 if (remote[key] !== local[key]) {
@@ -570,7 +633,12 @@ const checkAttributeChanges = (remote, local, collection, recraeting = true) =>
             continue;
         }
 
-        if (remote[key] !== local[key]) {
+        if (Array.isArray(remote[key]) && Array.isArray(local[key])) {
+            if (JSON.stringify(remote[key]) !== JSON.stringify(local[key])) {
+                const bol = reason === '' ? '' : '\n';
+                reason += `${bol}${key} changed from ${chalk.red(remote[key])} to ${chalk.green(local[key])}`;
+            }
+        } else if (remote[key] !== local[key]) {
             const bol = reason === '' ? '' : '\n';
             reason += `${bol}${key} changed from ${chalk.red(remote[key])} to ${chalk.green(local[key])}`;
         }
@@ -599,12 +667,9 @@ const generateChangesObject = (attribute, collection, isAdding) => {
 /**
  * Filter deleted and recreated attributes,
  * return list of attributes to create
- * @param remoteAttributes
- * @param localAttributes
- * @param collection
  * @returns {Promise<*|*[]>}
  */
-const attributesToCreate = async (remoteAttributes, localAttributes, collection) => {
+const attributesToCreate = async (remoteAttributes, localAttributes, collection, isIndex = false) => {
     const deleting = remoteAttributes.filter((attribute) => !attributesContains(attribute, localAttributes)).map((attr) => generateChangesObject(attr, collection, false));
     const adding = localAttributes.filter((attribute) => !attributesContains(attribute, remoteAttributes)).map((attr) => generateChangesObject(attr, collection, true));
 
@@ -626,10 +691,10 @@ const attributesToCreate = async (remoteAttributes, localAttributes, collection)
     }));
 
     if (!cliConfig.force) {
-        if (deleting.length > 0) {
+        if (deleting.length > 0 && !isIndex) {
             log(`Attribute deletion will cause ${chalk.red('loss of data')}`);
         }
-        if (conflicts.length > 0) {
+        if (conflicts.length > 0 && !isIndex) {
             log(`Attribute recreation will cause ${chalk.red('loss of data')}`);
         }
 
@@ -642,17 +707,17 @@ const attributesToCreate = async (remoteAttributes, localAttributes, collection)
 
     if (conflicts.length > 0) {
         changedAttributes = conflicts.map((change) => change.attribute);
-        await Promise.all(changedAttributes.map((changed) => deleteAttribute(collection, changed)));
+        await Promise.all(changedAttributes.map((changed) => deleteAttribute(collection, changed, isIndex)));
         remoteAttributes = remoteAttributes.filter((attribute) => !attributesContains(attribute, changedAttributes))
     }
 
     if (changes.length > 0) {
         changedAttributes = changes.map((change) => change.attribute);
-        await Promise.all(changedAttributes.map((changed) => updateAttribute(collection['databaseId'],collection['$id'], changed)));
+        await Promise.all(changedAttributes.map((changed) => updateAttribute(collection['databaseId'], collection['$id'], changed)));
     }
 
     const deletingAttributes = deleting.map((change) => change.attribute);
-    await Promise.all(deletingAttributes.map((attribute) => deleteAttribute(collection, attribute)));
+    await Promise.all(deletingAttributes.map((attribute) => deleteAttribute(collection, attribute, isIndex)));
     const attributeKeys = [...remoteAttributes.map(attribute => attribute.key), ...deletingAttributes.map(attribute => attribute.key)]
 
     if (attributeKeys.length) {
@@ -737,6 +802,8 @@ const pushResources = async () => {
 };
 
 const pushSettings = async () => {
+    checkDeployConditions(localConfig);
+
     try {
         log("Pushing project settings ...");
 
@@ -797,8 +864,6 @@ const pushSettings = async () => {
 }
 
 const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero: false }) => {
-    let response = {};
-
     const functionIds = [];
 
     if (functionId) {
@@ -852,6 +917,8 @@ const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero
     const failedDeployments = [];
 
     await Promise.all(functions.map(async (func) => {
+        let response = {};
+
         const ignore = func.ignore ? 'appwrite.json' : '.gitignore';
         let functionExists = false;
         let deploymentCreated = false;
@@ -859,7 +926,6 @@ const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero
         const updaterRow = new Spinner({ status: '', resource: func.name, id: func['$id'], end: `Ignoring using: ${ignore}` });
 
         updaterRow.update({ status: 'Getting' }).startSpinner(SPINNER_DOTS);
-
         try {
             response = await functionsGet({
                 functionId: func['$id'],
@@ -961,11 +1027,6 @@ const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero
             let pollChecks = 0;
 
             while (true) {
-                if (pollChecks >= POLL_MAX_DEBOUNCE) {
-                    updaterRow.update({ end: 'Deployment is taking too long. Please check the console for more details.' })
-                    break;
-                }
-
                 response = await functionsGetDeployment({
                     functionId: func['$id'],
                     deploymentId: deploymentId,
@@ -1004,7 +1065,7 @@ const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero
                 }
 
                 pollChecks++;
-                await new Promise(resolve => setTimeout(resolve, POLL_DEBOUNCE));
+                await new Promise(resolve => setTimeout(resolve, POLL_DEBOUNCE * 1.5));
             }
         } catch (e) {
             updaterRow.fail({ errorMessage: e.message ?? 'Unknown error occurred. Please try again' })
@@ -1018,15 +1079,15 @@ const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero
 
     failedDeployments.forEach((failed) => {
         const { name, deployment, $id } = failed;
-        const failUrl = `${globalConfig.getEndpoint().replace('/v1', '')}/console/project-${localConfig.getProject().projectId}/functions/function-${$id}/deployment-${deployment}`;
+        const failUrl = `${globalConfig.getEndpoint().slice(0, -3)}/console/project-${localConfig.getProject().projectId}/functions/function-${$id}/deployment-${deployment}`;
 
         error(`Deployment of ${name} has failed. Check at ${failUrl} for more details\n`);
     });
 
     if (!async) {
-        if(successfullyPushed === 0) {
+        if (successfullyPushed === 0) {
             error('No functions were pushed.');
-        } else if(successfullyDeployed != successfullyPushed) {
+        } else if (successfullyDeployed != successfullyPushed) {
             warn(`Successfully pushed ${successfullyDeployed} of ${successfullyPushed} functions`)
         } else {
             success(`Successfully pushed ${successfullyPushed} functions.`);
@@ -1036,9 +1097,13 @@ const pushFunction = async ({ functionId, async, returnOnZero } = { returnOnZero
     }
 }
 
-const pushCollection = async ({ returnOnZero } = { returnOnZero: false }) => {
+const pushCollection = async ({ returnOnZero, attempts } = { returnOnZero: false }) => {
     const collections = [];
 
+    if (attempts) {
+        pollMaxDebounces = attempts;
+    }
+
     if (cliConfig.all) {
         checkDeployConditions(localConfig);
         if (localConfig.getCollections().length === 0) {
@@ -1136,13 +1201,16 @@ const pushCollection = async ({ returnOnZero } = { returnOnZero: false }) => {
     // Serialize attribute actions
     for (let collection of collections) {
         let attributes = collection.attributes;
+        let indexes = collection.indexes;
 
         if (collection.isExisted) {
             attributes = await attributesToCreate(collection.remoteVersion.attributes, collection.attributes, collection);
+            indexes = await attributesToCreate(collection.remoteVersion.indexes, collection.indexes, collection, true);
 
-            if (Array.isArray(attributes) && attributes.length <= 0) {
+            if ((Array.isArray(attributes) && attributes.length <= 0) && (Array.isArray(indexes) && indexes.length <= 0)) {
                 continue;
             }
+
         }
 
         log(`Pushing collection ${collection.name} ( ${collection['databaseId']} - ${collection['$id']} ) attributes`)
@@ -1154,7 +1222,7 @@ const pushCollection = async ({ returnOnZero } = { returnOnZero: false }) => {
         }
 
         try {
-            await createIndexes(collection.indexes, collection);
+            await createIndexes(indexes, collection);
         } catch (e) {
             throw e;
         }
@@ -1410,6 +1478,7 @@ push
     .command("collection")
     .alias("collections")
     .description("Push collections in the current project.")
+    .option(`-a, --attempts `, `Max number of attempts before timing out. default: 30.`)
     .action(actionRunner(pushCollection));
 
 push
diff --git a/templates/cli/lib/config.js.twig b/templates/cli/lib/config.js.twig
index 2c43f5338..670c0ef15 100644
--- a/templates/cli/lib/config.js.twig
+++ b/templates/cli/lib/config.js.twig
@@ -17,7 +17,7 @@ const KeyAttributes = new Set([
     "size",
     "default",
     // integer and float
-    "min", 
+    "min",
     "max",
     // email, enum, URL, IP, and datetime
     "format",
@@ -29,7 +29,10 @@ const KeyAttributes = new Set([
     "twoWay",
     "twoWayKey",
     "onDelete",
-    "side"
+    "side",
+    // Indexes
+    "attributes",
+    "orders"
 ]);
 
 const KeyIndexes = new Set(["key", "type", "status", "attributes", "orders"]);
@@ -604,4 +607,5 @@ class Global extends Config {
 module.exports = {
     localConfig: new Local(),
     globalConfig: new Global(),
+    KeyAttributes,
 };
diff --git a/templates/cli/lib/spinner.js.twig b/templates/cli/lib/spinner.js.twig
index 2f5b3ad11..bcc7e6bf6 100644
--- a/templates/cli/lib/spinner.js.twig
+++ b/templates/cli/lib/spinner.js.twig
@@ -22,6 +22,7 @@ class Spinner {
             hideCursor,
             clearOnComplete,
             stopOnComplete: true,
+            linewrap: true,
             noTTYOutput: true
         });
     }