diff --git a/apps/cli/commands/ai/sessions/helpers.ts b/apps/cli/commands/ai/sessions/helpers.ts index 4533e68a8a..e190272512 100644 --- a/apps/cli/commands/ai/sessions/helpers.ts +++ b/apps/cli/commands/ai/sessions/helpers.ts @@ -167,7 +167,10 @@ async function pickSessionInteractively( loop: false, theme: { style: { - keysHelpTip: () => chalk.dim( __( '↑↓ navigate · ⏎ select · esc cancel' ) ), + keysHelpTip: () => + chalk.dim( + [ __( '↑↓ navigate' ), __( '⏎ select' ), __( 'esc cancel' ) ].join( ' · ' ) + ), }, }, }, diff --git a/apps/cli/commands/export.ts b/apps/cli/commands/export.ts new file mode 100644 index 0000000000..f602466f2b --- /dev/null +++ b/apps/cli/commands/export.ts @@ -0,0 +1,204 @@ +import path from 'path'; +import { DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { SiteCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; +import { __, _n, sprintf } from '@wordpress/i18n'; +import { getSiteByFolder } from 'cli/lib/cli-config/sites'; +import { connectToDaemon, disconnectFromDaemon } from 'cli/lib/daemon-client'; +import { ExportEvents } from 'cli/lib/import-export/export/events'; +import { exportBackup } from 'cli/lib/import-export/export/export-manager'; +import { BackupCreateProgressEventData, ExportOptions } from 'cli/lib/import-export/export/types'; +import { ImportExportEventData } from 'cli/lib/import-export/handle-events'; +import { keepSqliteIntegrationUpdated } from 'cli/lib/sqlite-integration'; +import { untildify } from 'cli/lib/utils'; +import { Logger, LoggerError } from 'cli/logger'; +import { StudioArgv } from 'cli/types'; + +const logger = new Logger< LoggerAction >(); + +export function exportEventHandler( { event, data }: ImportExportEventData ): void { + switch ( event ) { + case ExportEvents.EXPORT_START: + logger.reportStart( LoggerAction.EXPORT_SITE, __( 'Starting export…' ) ); + break; + + case ExportEvents.BACKUP_CREATE_START: + logger.reportStart( LoggerAction.CREATE_BACKUP, __( 
'Creating backup file…' ) ); + break; + + case ExportEvents.WP_CONTENT_EXPORT_START: + logger.reportStart( LoggerAction.EXPORT_WP_CONTENT, __( 'Traversing WordPress content…' ) ); + break; + case ExportEvents.WP_CONTENT_EXPORT_COMPLETE: + logger.reportSuccess( __( 'WordPress content traversed' ) ); + break; + + case ExportEvents.DATABASE_EXPORT_START: + logger.reportStart( LoggerAction.EXPORT_DATABASE, __( 'Exporting database…' ) ); + break; + case ExportEvents.DATABASE_EXPORT_COMPLETE: + logger.reportSuccess( __( 'Database exported' ) ); + break; + + case ExportEvents.BACKUP_CREATE_PROGRESS: { + const progressData = data as BackupCreateProgressEventData; + const processed = progressData?.progress?.entries?.processed; + + if ( processed != null ) { + logger.reportProgress( + sprintf( + _n( 'Backing up file… (%d processed)', 'Backing up files… (%d processed)', processed ), + processed + ) + ); + } + break; + } + case ExportEvents.BACKUP_CREATE_COMPLETE: + logger.reportSuccess( __( 'Backup file created' ) ); + break; + + case ExportEvents.CONFIG_EXPORT_START: + logger.reportStart( LoggerAction.EXPORT_CONFIG, __( 'Exporting configuration…' ) ); + break; + case ExportEvents.CONFIG_EXPORT_COMPLETE: + logger.reportSuccess( __( 'Configuration exported' ) ); + break; + + case ExportEvents.EXPORT_COMPLETE: + logger.reportSuccess( __( 'Site exported successfully' ) ); + break; + + case ExportEvents.EXPORT_ERROR: + throw new LoggerError( __( 'Export failed' ), data instanceof Error ? 
data : undefined ); + } +} + +export async function runCommand( + siteFolder: string, + exportPath: string, + mode?: 'full' | 'db' +): Promise< void > { + try { + logger.reportStart( LoggerAction.START_DAEMON, __( 'Starting process daemon…' ) ); + await connectToDaemon(); + logger.reportSuccess( __( 'Process daemon started' ) ); + + logger.reportStart( LoggerAction.LOAD_SITES, __( 'Loading site…' ) ); + const site = await getSiteByFolder( siteFolder ); + logger.reportSuccess( __( 'Site loaded' ) ); + + logger.reportStart( + LoggerAction.INSTALL_SQLITE, + __( 'Setting up SQLite integration, if needed…' ) + ); + await keepSqliteIntegrationUpdated( siteFolder ); + logger.reportSuccess( __( 'SQLite integration configured as needed' ) ); + + const includes: ExportOptions[ 'includes' ] = { database: true, wpContent: true }; + + if ( mode === 'db' ) { + includes.wpContent = false; + } + + const isExported = await exportBackup( + { + site, + backupFile: exportPath, + phpVersion: DEFAULT_PHP_VERSION, + includes, + }, + exportEventHandler + ); + + logger.reportSuccess( sprintf( __( '%s successfully exported' ), exportPath ) ); + + if ( ! isExported ) { + throw new LoggerError( __( 'No suitable exporter found for the provided backup file' ) ); + } + } finally { + await disconnectFromDaemon(); + } +} + +function getTimestamp( date = new Date() ): string { + return [ + date.getFullYear(), + date.getMonth() + 1, + date.getDate(), + date.getHours(), + date.getMinutes(), + date.getSeconds(), + ] + .map( ( part ) => String( part ).padStart( 2, '0' ) ) + .join( '-' ); +} + +export const registerCommand = ( yargs: StudioArgv ) => { + return yargs.command( { + command: 'export [export-file]', + describe: __( 'Export site to a backup file' ), + builder: ( yargs ) => { + return yargs + .positional( 'export-file', { + type: 'string', + normalize: true, + demandOption: false, + description: __( + 'Path to the export file. Full-site exports use .zip or .tar.gz. 
Database-only exports use .sql.' + ), + coerce: ( value ) => { + return path.resolve( untildify( value ) ); + }, + } ) + .option( 'mode', { + type: 'string', + choices: [ 'full', 'db' ] as const, + default: 'full' as const, + description: __( + 'Export the full site or just the database. Default exports full site.' + ), + } ); + }, + handler: async ( argv ) => { + try { + let exportFile: string; + const timestamp = getTimestamp(); + + if ( argv.exportFile ) { + exportFile = argv.exportFile; + } else if ( argv.mode === 'full' ) { + exportFile = path.join( process.cwd(), `studio-backup-${ timestamp }.zip` ); + } else { + exportFile = path.join( process.cwd(), `studio-backup-${ timestamp }.sql` ); + } + + if ( + argv.mode === 'full' && + ! exportFile.endsWith( '.zip' ) && + ! exportFile.endsWith( '.tar.gz' ) + ) { + throw new LoggerError( + __( + 'Invalid export file extension. Must be .zip or .tar.gz when exporting the full site.' + ) + ); + } + + if ( argv.mode === 'db' && ! exportFile.endsWith( '.sql' ) ) { + throw new LoggerError( + __( 'Invalid export file extension. Must be .sql when exporting database only.' 
) + ); + } + + await runCommand( argv.path, exportFile, argv.mode ); + } catch ( error ) { + if ( error instanceof LoggerError ) { + logger.reportError( error ); + } else { + const loggerError = new LoggerError( __( 'Failed to export site' ), error ); + logger.reportError( loggerError ); + } + } + }, + } ); +}; diff --git a/apps/cli/commands/import.ts b/apps/cli/commands/import.ts new file mode 100644 index 0000000000..c3c1ca66c9 --- /dev/null +++ b/apps/cli/commands/import.ts @@ -0,0 +1,298 @@ +import fs from 'fs'; +import path from 'path'; +import { isWordPressDirectory, recursiveCopyDirectory } from '@studio/common/lib/fs-utils'; +import { getServerFilesPath } from '@studio/common/lib/well-known-paths'; +import { SiteCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; +import { __, _n, sprintf } from '@wordpress/i18n'; +import { SiteData } from 'cli/lib/cli-config/core'; +import { clearSiteLatestCliPid, getSiteByFolder, getSiteUrl } from 'cli/lib/cli-config/sites'; +import { connectToDaemon, disconnectFromDaemon } from 'cli/lib/daemon-client'; +import { ImportExportEventData } from 'cli/lib/import-export/handle-events'; +import { + BackupExtractEvents, + ImporterEvents, + ValidatorEvents, +} from 'cli/lib/import-export/import/events'; +import { + DEFAULT_IMPORTER_OPTIONS, + importBackup, +} from 'cli/lib/import-export/import/import-manager'; +import { + BackupExtractProgressEventData, + ImportDatabaseProgressEventData, + ImportWpContentProgressEventData, +} from 'cli/lib/import-export/import/types'; +import { getBackupFileType } from 'cli/lib/import-export/utils'; +import { keepSqliteIntegrationUpdated } from 'cli/lib/sqlite-integration'; +import { untildify } from 'cli/lib/utils'; +import { + isServerRunning, + startWordPressServer, + stopWordPressServer, +} from 'cli/lib/wordpress-server-manager'; +import { Logger, LoggerError } from 'cli/logger'; +import { StudioArgv } from 'cli/types'; + +const logger = new Logger< LoggerAction >(); + 
+const WP_CONTENT_TYPE_LABELS: Record< string, string > = { + plugins: __( 'Importing plugins…' ), + themes: __( 'Importing themes…' ), + uploads: __( 'Importing media uploads…' ), + other: __( 'Importing other files…' ), +}; + +async function setupWordPressFilesOnly( sitePath: string ): Promise< void > { + const bundledWpPath = path.join( getServerFilesPath(), 'wordpress-versions', 'latest' ); + + if ( ! fs.existsSync( bundledWpPath ) ) { + throw new LoggerError( + __( + 'Cannot set up WordPress. Bundled WordPress files not found. Please connect to the internet or reinstall Studio.' + ) + ); + } + + await recursiveCopyDirectory( bundledWpPath, sitePath ); +} + +export function importEventHandler( { event, data }: ImportExportEventData ): void { + switch ( event ) { + case ValidatorEvents.IMPORT_VALIDATION_START: + logger.reportSuccess( sprintf( __( 'Started import…' ) ) ); + logger.reportStart( LoggerAction.VALIDATE, __( 'Validating backup…' ) ); + break; + case ValidatorEvents.IMPORT_VALIDATION_COMPLETE: + logger.reportSuccess( __( 'Backup validated' ) ); + break; + case ValidatorEvents.IMPORT_VALIDATION_ERROR: + throw new LoggerError( + __( 'Backup validation failed' ), + data instanceof Error ? 
data : undefined + ); + + case BackupExtractEvents.BACKUP_EXTRACT_START: + logger.reportStart( LoggerAction.EXTRACT_BACKUP, __( 'Extracting backup files…' ) ); + break; + case BackupExtractEvents.BACKUP_EXTRACT_PROGRESS: { + const progressData = data as BackupExtractProgressEventData; + if ( + progressData.processedFiles != null && + progressData.totalFiles != null && + progressData.totalFiles > 0 + ) { + logger.reportProgress( + sprintf( + _n( + 'Extracting backup file… (%1$d/%2$d)', + 'Extracting backup files… (%1$d/%2$d)', + progressData.totalFiles + ), + progressData.processedFiles, + progressData.totalFiles + ) + ); + } + break; + } + case BackupExtractEvents.BACKUP_EXTRACT_COMPLETE: + logger.reportSuccess( __( 'Backup extraction completed' ) ); + break; + case BackupExtractEvents.BACKUP_EXTRACT_WARNING: + logger.reportWarning( + typeof data === 'string' ? data : __( 'A warning occurred while extracting backup' ) + ); + break; + case BackupExtractEvents.BACKUP_EXTRACT_ERROR: + throw new LoggerError( + __( 'Failed to extract backup' ), + data instanceof Error ? 
data : undefined + ); + + case ImporterEvents.IMPORT_START: + logger.reportStart( LoggerAction.IMPORT_SITE, __( 'Importing backup…' ) ); + break; + case ImporterEvents.IMPORT_DATABASE_START: + logger.reportStart( LoggerAction.IMPORT_DATABASE, __( 'Importing database…' ) ); + break; + case ImporterEvents.IMPORT_DATABASE_PROGRESS: { + const progressData = data as ImportDatabaseProgressEventData; + if ( + progressData.processedFiles != null && + progressData.totalFiles != null && + progressData.totalFiles > 0 + ) { + logger.reportProgress( + sprintf( + _n( + 'Importing database file… (%1$d/%2$d)', + 'Importing database files… (%1$d/%2$d)', + progressData.totalFiles + ), + progressData.processedFiles, + progressData.totalFiles + ) + ); + } + break; + } + case ImporterEvents.IMPORT_DATABASE_COMPLETE: + logger.reportSuccess( __( 'Database import completed' ) ); + break; + + case ImporterEvents.IMPORT_WP_CONTENT_START: + logger.reportStart( LoggerAction.IMPORT_WP_CONTENT, __( 'Importing WordPress content…' ) ); + break; + case ImporterEvents.IMPORT_WP_CONTENT_PROGRESS: { + const progressData = data as ImportWpContentProgressEventData; + if ( + progressData.processedItems != null && + progressData.totalItems != null && + progressData.totalItems > 0 + ) { + const baseMessage = + WP_CONTENT_TYPE_LABELS[ progressData.type || 'other' ] || + __( 'Importing WordPress content…' ); + logger.reportProgress( + sprintf( + /* translators: %1$s is a content type label, %2$d is processed items, %3$d is total items */ + __( '%1$s (%2$d/%3$d)' ), + baseMessage, + progressData.processedItems, + progressData.totalItems + ) + ); + } + break; + } + case ImporterEvents.IMPORT_WP_CONTENT_COMPLETE: + logger.reportSuccess( __( 'WordPress content import completed' ) ); + break; + + case ImporterEvents.IMPORT_META_START: + logger.reportStart( LoggerAction.IMPORT_META, __( 'Importing metadata…' ) ); + break; + case ImporterEvents.IMPORT_META_COMPLETE: + logger.reportSuccess( __( 'Metadata import 
completed' ) ); + break; + case ImporterEvents.IMPORT_COMPLETE: + logger.reportSuccess( __( 'Site imported successfully' ) ); + break; + + case ImporterEvents.IMPORT_ERROR: + throw new LoggerError( __( 'Import failed' ), data instanceof Error ? data : undefined ); + } +} + +export async function runCommand( siteFolder: string, importFile: string ): Promise< void > { + let site: SiteData | undefined; + let wasServerRunning = false; + let importError: unknown; + let restartSiteError: unknown; + + try { + logger.reportStart( LoggerAction.START_DAEMON, __( 'Starting process daemon…' ) ); + await connectToDaemon(); + logger.reportSuccess( __( 'Process daemon started' ) ); + + logger.reportStart( LoggerAction.LOAD_SITES, __( 'Loading site…' ) ); + site = await getSiteByFolder( siteFolder ); + logger.reportSuccess( __( 'Site loaded' ) ); + + if ( ! fs.existsSync( importFile ) ) { + throw new LoggerError( sprintf( __( 'Import file not found: %s' ), importFile ) ); + } + + wasServerRunning = !! ( await isServerRunning( site.id ) ); + + if ( wasServerRunning ) { + logger.reportStart( LoggerAction.STOP_SITE, __( 'Stopping WordPress server…' ) ); + await stopWordPressServer( site.id ); + await clearSiteLatestCliPid( site.id ); + logger.reportSuccess( __( 'WordPress server stopped' ) ); + } + + if ( ! isWordPressDirectory( site.path ) ) { + logger.reportStart( LoggerAction.SETUP_WORDPRESS, __( 'Copying bundled WordPress…' ) ); + await setupWordPressFilesOnly( site.path ); + logger.reportSuccess( __( 'WordPress files copied' ) ); + } + + logger.reportStart( LoggerAction.IMPORT_SITE, __( 'Starting import…' ) ); + + await importBackup( + { path: importFile, type: getBackupFileType( importFile ) }, + site, + importEventHandler, + DEFAULT_IMPORTER_OPTIONS + ); + + // Something in Playground makes it so the front-end of the site sometimes returns an error page + // on the first request. Send that first request from here to hide the error from the user. 
+ const siteUrl = getSiteUrl( site ); + await fetch( siteUrl ).catch( () => {} ); + } catch ( error ) { + importError = error; + } finally { + try { + if ( site && wasServerRunning ) { + logger.reportStart( + LoggerAction.INSTALL_SQLITE, + __( 'Setting up SQLite integration, if needed…' ) + ); + await keepSqliteIntegrationUpdated( siteFolder ); + logger.reportSuccess( __( 'SQLite integration configured as needed' ) ); + + logger.reportStart( LoggerAction.START_SITE, __( 'Starting WordPress server…' ) ); + await startWordPressServer( site, logger ); + logger.reportSuccess( __( 'WordPress server started' ) ); + } + } catch ( error ) { + restartSiteError = error; + } finally { + await disconnectFromDaemon(); + } + } + + if ( importError instanceof LoggerError && restartSiteError instanceof Error ) { + importError.previousError = restartSiteError; + } + + if ( importError instanceof Error ) { + throw importError; + } + + if ( restartSiteError instanceof Error ) { + throw restartSiteError; + } +} + +export const registerCommand = ( yargs: StudioArgv ) => { + return yargs.command( { + command: 'import ', + describe: __( 'Import a backup file to site' ), + builder: ( yargs ) => { + return yargs.positional( 'import-file', { + type: 'string', + normalize: true, + demandOption: true, + description: __( 'Path to the import file' ), + coerce: ( value ) => { + return path.resolve( untildify( value ) ); + }, + } ); + }, + handler: async ( argv ) => { + try { + await runCommand( argv.path, argv.importFile ); + } catch ( error ) { + if ( error instanceof LoggerError ) { + logger.reportError( error ); + } else { + const loggerError = new LoggerError( __( 'Failed to import site' ), error ); + logger.reportError( loggerError ); + } + } + }, + } ); +}; diff --git a/apps/cli/commands/pull.ts b/apps/cli/commands/pull.ts new file mode 100644 index 0000000000..58401c2f6f --- /dev/null +++ b/apps/cli/commands/pull.ts @@ -0,0 +1,270 @@ +import fs from 'fs'; +import os from 'os'; +import 
path from 'path'; +import { confirm } from '@inquirer/prompts'; +import { readAuthToken } from '@studio/common/lib/shared-config'; +import { + SYNC_MAX_STALLED_ATTEMPTS, + SYNC_POLL_INTERVAL_MS, + SYNC_PUSH_SIZE_LIMIT_BYTES, + SYNC_PUSH_SIZE_LIMIT_GB, +} from '@studio/common/lib/sync/constants'; +import { SyncCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; +import { __, sprintf } from '@wordpress/i18n'; +import { SiteData } from 'cli/lib/cli-config/core'; +import { clearSiteLatestCliPid, getSiteByFolder, getSiteUrl } from 'cli/lib/cli-config/sites'; +import { connectToDaemon, disconnectFromDaemon } from 'cli/lib/daemon-client'; +import { + DEFAULT_IMPORTER_OPTIONS, + importBackup, +} from 'cli/lib/import-export/import/import-manager'; +import { keepSqliteIntegrationUpdated } from 'cli/lib/sqlite-integration'; +import { + checkBackupSize, + fetchSyncableSites, + initiateBackup, + parseSyncOptions, + pollBackupStatus, + downloadBackup, +} from 'cli/lib/sync-api'; +import { fetchPullTree, selectSyncItemsForPull } from 'cli/lib/sync-selector'; +import { findSyncSiteByIdentifier, pickSyncSite } from 'cli/lib/sync-site-picker'; +import { + isServerRunning, + startWordPressServer, + stopWordPressServer, +} from 'cli/lib/wordpress-server-manager'; +import { Logger, LoggerError } from 'cli/logger'; +import { StudioArgv } from 'cli/types'; +import { importEventHandler } from './import'; +import type { SyncOption } from '@studio/common/types/sync'; + +const logger = new Logger< LoggerAction >(); + +export async function runCommand( + siteFolder: string, + syncOptions?: SyncOption[], + siteIdentifier?: string +): Promise< void > { + let site: SiteData | undefined; + let wasServerRunning = false; + let pullError: unknown; + let restartSiteError: unknown; + + try { + const token = await readAuthToken(); + if ( ! token ) { + throw new LoggerError( + __( 'Authentication required. Please log in with `studio auth login`.' 
) + ); + } + + logger.reportStart( LoggerAction.START_DAEMON, __( 'Starting process daemon…' ) ); + await connectToDaemon(); + logger.reportSuccess( __( 'Process daemon started' ) ); + + logger.reportStart( LoggerAction.LOAD_SITES, __( 'Loading site…' ) ); + site = await getSiteByFolder( siteFolder ); + logger.reportSuccess( __( 'Site loaded' ) ); + + logger.reportStart( LoggerAction.FETCH_REMOTE_SITES, __( 'Fetching WordPress.com sites…' ) ); + const remoteSites = await fetchSyncableSites( token.accessToken ); + logger.spinner.stop(); + logger.reportSuccess( sprintf( __( 'Found %d sites' ), remoteSites.length ), true ); + + let remoteSite; + if ( siteIdentifier ) { + remoteSite = findSyncSiteByIdentifier( remoteSites, siteIdentifier ); + } else { + remoteSite = await pickSyncSite( remoteSites, __( 'Select a site to pull from' ) ); + if ( ! remoteSite ) { + return; + } + } + + let optionsToSync: SyncOption[]; + let includePathList: string[] | undefined; + + if ( syncOptions ) { + optionsToSync = syncOptions; + } else { + logger.reportStart( LoggerAction.FETCH_REMOTE_SITES, __( 'Fetching file tree…' ) ); + const { tree } = await fetchPullTree( token.accessToken, remoteSite.id ); + logger.spinner.stop(); + + const selection = await selectSyncItemsForPull( token.accessToken, remoteSite.id, tree ); + if ( ! 
selection ) { + return; + } + optionsToSync = selection.optionsToSync; + includePathList = selection.includePathList; + } + + // Pull progress: Backup (0-50%) → Download (50-80%) → Import (80-100%) + logger.reportStart( + LoggerAction.INITIATE_BACKUP, + sprintf( __( 'Initializing remote backup… (%d%%)' ), 0 ) + ); + const backupId = await initiateBackup( token.accessToken, remoteSite.id, { + optionsToSync, + includePathList, + } ); + + let downloadUrl: string | null = null; + let lastPercent = -1; + let stalledAttempts = 0; + + while ( stalledAttempts < SYNC_MAX_STALLED_ATTEMPTS ) { + const status = await pollBackupStatus( token.accessToken, remoteSite.id, backupId ); + + if ( status.status === 'failed' ) { + throw new LoggerError( __( 'Remote backup failed' ) ); + } + + if ( status.status === 'finished' && status.downloadUrl ) { + downloadUrl = status.downloadUrl; + break; + } + + const currentPercent = Math.round( status.percent ); + if ( currentPercent !== lastPercent ) { + stalledAttempts = 0; + lastPercent = currentPercent; + } else { + stalledAttempts++; + } + + // Backup phase: 0-50% + const backupProgress = Math.round( status.percent * 0.5 ); + logger.spinner.text = sprintf( __( 'Creating remote backup… (%d%%)' ), backupProgress ); + + await new Promise( ( resolve ) => setTimeout( resolve, SYNC_POLL_INTERVAL_MS ) ); + } + + if ( ! downloadUrl ) { + throw new LoggerError( __( 'Backup timed out — no progress detected' ) ); + } + + // Check backup size before downloading + const backupFileSize = await checkBackupSize( downloadUrl ); + if ( backupFileSize > SYNC_PUSH_SIZE_LIMIT_BYTES ) { + logger.spinner.stop(); + const shouldContinue = await confirm( { + message: sprintf( + __( + "Your site's backup exceeds %d GB. Pulling it will prevent you from pushing the site back. Do you want to continue?" + ), + SYNC_PUSH_SIZE_LIMIT_GB + ), + default: true, + } ); + if ( ! 
shouldContinue ) { + return; + } + } + + // Download phase: 50-80% + logger.reportProgress( sprintf( __( 'Downloading backup… (%d%%)' ), 50 ) ); + const tempDir = await fs.promises.mkdtemp( path.join( os.tmpdir(), 'studio-sync' ) ); + + try { + fs.mkdirSync( tempDir, { recursive: true } ); + const destPath = path.join( tempDir, `pull-${ remoteSite.id }-${ Date.now() }.tar.gz` ); + await downloadBackup( downloadUrl, destPath ); + + wasServerRunning = !! ( await isServerRunning( site.id ) ); + + if ( wasServerRunning ) { + logger.reportStart( LoggerAction.STOP_SITE, __( 'Stopping WordPress server…' ) ); + await stopWordPressServer( site.id ); + await clearSiteLatestCliPid( site.id ); + logger.reportSuccess( __( 'WordPress server stopped' ) ); + } + + await importBackup( + { path: destPath, type: 'application/gzip' }, + site, + importEventHandler, + DEFAULT_IMPORTER_OPTIONS + ); + + // Something in Playground makes it so the front-end of the site sometimes returns an error page + // on the first request. Send that first request from here to hide the error from the user. 
+ const siteUrl = getSiteUrl( site ); + await fetch( siteUrl ).catch( () => {} ); + + logger.reportSuccess( + sprintf( __( 'Pulled from %s (%s)' ), remoteSite.name, remoteSite.url ) + ); + } finally { + fs.rmSync( tempDir, { recursive: true, force: true } ); + } + } catch ( error ) { + pullError = error; + } finally { + try { + if ( site && wasServerRunning ) { + logger.reportStart( + LoggerAction.INSTALL_SQLITE, + __( 'Setting up SQLite integration, if needed…' ) + ); + await keepSqliteIntegrationUpdated( siteFolder ); + logger.reportSuccess( __( 'SQLite integration configured as needed' ) ); + + logger.reportStart( LoggerAction.START_SITE, __( 'Starting WordPress server…' ) ); + await startWordPressServer( site, logger ); + logger.reportSuccess( __( 'WordPress server started' ) ); + } + } catch ( error ) { + restartSiteError = error; + } finally { + await disconnectFromDaemon(); + } + } + + if ( pullError instanceof LoggerError && restartSiteError instanceof Error ) { + pullError.previousError = restartSiteError; + } + + if ( pullError instanceof Error ) { + throw pullError; + } + + if ( restartSiteError instanceof Error ) { + throw restartSiteError; + } +} + +export const registerCommand = ( yargs: StudioArgv ) => { + return yargs.command( { + command: 'pull', + describe: __( 'Pull a WordPress.com site to your local site' ), + builder: ( yargs ) => { + return yargs + .option( 'options', { + type: 'string', + description: __( + 'Comma-separated sync options: all, sqls, uploads, plugins, themes, contents' + ), + coerce: ( val: string | undefined ) => + val !== undefined ? 
parseSyncOptions( val ) : undefined, + } ) + .option( 'remote-site', { + type: 'string', + description: __( 'Remote site URL or ID' ), + } ); + }, + handler: async ( argv ) => { + try { + await runCommand( argv.path, argv.options as SyncOption[] | undefined, argv.remoteSite ); + } catch ( error ) { + if ( error instanceof LoggerError ) { + logger.reportError( error ); + } else { + const loggerError = new LoggerError( __( 'Pull failed' ), error ); + logger.reportError( loggerError ); + } + } + }, + } ); +}; diff --git a/apps/cli/commands/push.ts b/apps/cli/commands/push.ts new file mode 100644 index 0000000000..e3c614e2c2 --- /dev/null +++ b/apps/cli/commands/push.ts @@ -0,0 +1,291 @@ +import fs from 'fs'; +import os from 'os'; +import path from 'path'; +import { DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { readAuthToken } from '@studio/common/lib/shared-config'; +import { + SYNC_MAX_STALLED_ATTEMPTS, + SYNC_POLL_INTERVAL_MS, + SYNC_PUSH_SIZE_LIMIT_BYTES, + SYNC_PUSH_SIZE_LIMIT_GB, +} from '@studio/common/lib/sync/constants'; +import { createTusUpload } from '@studio/common/lib/sync/tus-upload'; +import { SyncCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; +import { SyncOption } from '@studio/common/types/sync'; +import { __, sprintf } from '@wordpress/i18n'; +import { getSiteByFolder } from 'cli/lib/cli-config/sites'; +import { exportBackup } from 'cli/lib/import-export/export/export-manager'; +import { ExportOptions } from 'cli/lib/import-export/export/types'; +import { keepSqliteIntegrationUpdated } from 'cli/lib/sqlite-integration'; +import { + fetchSyncableSites, + initiateImport, + parseSyncOptions, + pollImportStatus, +} from 'cli/lib/sync-api'; +import { selectSyncItemsForPush } from 'cli/lib/sync-selector'; +import { findSyncSiteByIdentifier, pickSyncSite } from 'cli/lib/sync-site-picker'; +import { Logger, LoggerError } from 'cli/logger'; +import { StudioArgv } from 'cli/types'; +import { exportEventHandler } 
from './export'; + +const logger = new Logger< LoggerAction >(); + +export async function runCommand( + siteFolder: string, + syncOptions?: SyncOption[], + remoteSiteIdentifier?: string +): Promise< void > { + const token = await readAuthToken(); + if ( ! token ) { + throw new LoggerError( + __( 'Authentication required. Please log in with `studio auth login`.' ) + ); + } + + logger.reportStart( LoggerAction.LOAD_SITES, __( 'Loading site…' ) ); + const site = await getSiteByFolder( siteFolder ); + logger.reportSuccess( __( 'Site loaded' ) ); + + logger.reportStart( + LoggerAction.INSTALL_SQLITE, + __( 'Setting up SQLite integration, if needed…' ) + ); + await keepSqliteIntegrationUpdated( siteFolder ); + logger.reportSuccess( __( 'SQLite integration configured as needed' ) ); + + logger.reportStart( LoggerAction.FETCH_REMOTE_SITES, __( 'Fetching WordPress.com sites…' ) ); + const remoteSites = await fetchSyncableSites( token.accessToken ); + logger.spinner.stop(); + logger.reportSuccess( sprintf( __( 'Found %d sites' ), remoteSites.length ), true ); + + let remoteSite; + if ( remoteSiteIdentifier ) { + remoteSite = findSyncSiteByIdentifier( remoteSites, remoteSiteIdentifier ); + } else { + remoteSite = await pickSyncSite( remoteSites, __( 'Select a site to push to' ) ); + if ( ! remoteSite ) { + return; + } + } + + let optionsToSync: SyncOption[]; + let specificSelectionPaths: string[] | undefined; + + if ( syncOptions ) { + optionsToSync = syncOptions; + } else { + const selection = await selectSyncItemsForPush( site.path ); + if ( ! 
selection ) { + return; + } + optionsToSync = selection.optionsToSync; + specificSelectionPaths = selection.specificSelectionPaths; + } + + const tempDir = await fs.promises.mkdtemp( path.join( os.tmpdir(), 'studio-sync' ) ); + + try { + fs.mkdirSync( tempDir, { recursive: true } ); + const archivePath = path.join( tempDir, `push-${ site.id }-${ Date.now() }.tar.gz` ); + + let includes: ExportOptions[ 'includes' ]; + + if ( optionsToSync.includes( 'all' ) ) { + includes = { + database: true, + wpContent: true, + }; + } else { + includes = { + database: optionsToSync.includes( 'sqls' ), + wpContent: + optionsToSync.includes( 'uploads' ) || + optionsToSync.includes( 'plugins' ) || + optionsToSync.includes( 'themes' ) || + optionsToSync.includes( 'contents' ), + }; + } + + const isExported = await exportBackup( + { + site, + backupFile: archivePath, + includes, + phpVersion: DEFAULT_PHP_VERSION, + splitDatabaseDumpByTable: true, + specificSelectionPaths, + }, + exportEventHandler + ); + + if ( ! isExported ) { + throw new LoggerError( __( 'No suitable exporter found for the provided backup file' ) ); + } + + const archiveSize = fs.statSync( archivePath ).size; + if ( archiveSize > SYNC_PUSH_SIZE_LIMIT_BYTES ) { + throw new LoggerError( + sprintf( + __( + 'The archive exceeds the %d GB size limit. Please reduce the size of your site and try again.' 
+ ), + SYNC_PUSH_SIZE_LIMIT_GB + ) + ); + } + + // Push progress: Export (0-20%) → Upload (20-40%) → Remote backup (40-60%) → Import (60-99%) → Done (100%) + // Export phase skipped when using --archive, so upload starts at 20% + + // Suppress DEP0169 warning from tus-js-client's internal use of url.parse() + const originalEmit = process.emit.bind( process ); + // @ts-expect-error Overriding process.emit to filter deprecation warnings + process.emit = ( event: string, ...args: unknown[] ) => { + if ( event === 'warning' && ( args[ 0 ] as { code?: string } )?.code === 'DEP0169' ) { + return false; + } + return ( originalEmit as ( ...a: any[] ) => boolean )( event, ...args ); + }; + + logger.reportStart( LoggerAction.UPLOAD, sprintf( __( 'Uploading archive… (%d%%)' ), 20 ) ); + const { promise: uploadPromise, abort: abortUpload } = createTusUpload( { + token: token.accessToken, + remoteSiteId: remoteSite.id, + archivePath, + onProgress: ( percent ) => { + // Upload phase: 20-40% + const progress = Math.round( 20 + percent * 0.2 ); + logger.spinner.text = sprintf( __( 'Uploading archive… (%d%%)' ), progress ); + }, + } ); + + let cancelCount = 0; + const onSigint = () => { + cancelCount++; + if ( cancelCount === 1 ) { + console.error( + __( 'Press Ctrl+C again to cancel. The upload cannot be safely cancelled mid-transfer.' 
) + ); + } else { + abortUpload(); + logger.reportError( new LoggerError( __( 'Upload cancelled' ) ) ); + } + }; + process.on( 'SIGINT', onSigint ); + + let attachmentId: string; + try { + attachmentId = await uploadPromise; + } finally { + process.removeListener( 'SIGINT', onSigint ); + process.emit = originalEmit; + } + + // Initiate import: 40% + logger.spinner.text = sprintf( __( 'Initiating import… (%d%%)' ), 40 ); + await initiateImport( token.accessToken, remoteSite.id, attachmentId, { + optionsToSync, + specificSelectionPaths, + } ); + + // Poll import with stale-progress detection + let lastProgress = -1; + let stalledAttempts = 0; + let importFinished = false; + + while ( stalledAttempts < SYNC_MAX_STALLED_ATTEMPTS ) { + const status = await pollImportStatus( token.accessToken, remoteSite.id ); + + if ( status.status === 'failed' ) { + throw new LoggerError( sprintf( __( 'Import failed on %s' ), remoteSite.name ) ); + } + + if ( status.status === 'finished' ) { + importFinished = true; + break; + } + + let statusMessage: string; + let progress: number; + + switch ( status.status ) { + case 'started': + case 'initial_backup_started': + case 'initial_backup_finished': + statusMessage = __( 'Backing up remote site…' ); + progress = 40 + ( ( status.backup_progress ?? 0 ) / 100 ) * 20; + break; + case 'archive_import_started': + statusMessage = __( 'Applying changes…' ); + progress = 60 + ( ( status.import_progress ?? 
0 ) / 100 ) * 35; + break; + case 'archive_import_finished': + statusMessage = __( 'Almost there…' ); + progress = 99; + break; + default: + statusMessage = __( 'Applying changes…' ); + progress = 50; + } + + const roundedProgress = Math.round( progress ); + if ( roundedProgress !== lastProgress ) { + stalledAttempts = 0; + lastProgress = roundedProgress; + } else { + stalledAttempts++; + } + + logger.spinner.text = sprintf( '%s (%d%%)', statusMessage, roundedProgress ); + + await new Promise( ( resolve ) => setTimeout( resolve, SYNC_POLL_INTERVAL_MS ) ); + } + + if ( ! importFinished ) { + throw new LoggerError( + sprintf( __( 'Import timed out on %s — no progress detected' ), remoteSite.name ) + ); + } + + logger.reportSuccess( + sprintf( __( 'Successfully pushed to %s (%s)' ), remoteSite.name, remoteSite.url ) + ); + } finally { + fs.rmSync( tempDir, { recursive: true, force: true } ); + } +} + +export const registerCommand = ( yargs: StudioArgv ) => { + return yargs.command( { + command: 'push', + describe: __( 'Push your local site to a WordPress.com site' ), + builder: ( yargs ) => { + return yargs + .option( 'options', { + type: 'string', + description: __( + 'Comma-separated sync options: all, sqls, uploads, plugins, themes, contents' + ), + coerce: ( val: string | undefined ) => + val !== undefined ? 
parseSyncOptions( val ) : undefined, + } ) + .option( 'remote-site', { + type: 'string', + description: __( 'Remote site URL or ID' ), + } ); + }, + handler: async ( argv ) => { + try { + await runCommand( argv.path, argv.options, argv.remoteSite ); + } catch ( error ) { + if ( error instanceof LoggerError ) { + logger.reportError( error ); + } else { + const loggerError = new LoggerError( __( 'Push failed' ), error ); + logger.reportError( loggerError ); + } + } + }, + } ); +}; diff --git a/apps/cli/commands/site/delete.ts b/apps/cli/commands/site/delete.ts index 2a9200f8db..0aa36c3c8a 100644 --- a/apps/cli/commands/site/delete.ts +++ b/apps/cli/commands/site/delete.ts @@ -4,6 +4,7 @@ import { arePathsEqual } from '@studio/common/lib/fs-utils'; import { readAuthToken, type StoredAuthToken } from '@studio/common/lib/shared-config'; import { SiteCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; import { __, _n, sprintf } from '@wordpress/i18n'; +import trash from 'trash'; import { deleteSnapshot } from 'cli/lib/api'; import { deleteSiteCertificate } from 'cli/lib/certificate-manager'; import { @@ -118,10 +119,6 @@ export async function runCommand( if ( deleteFiles ) { if ( fs.existsSync( siteFolder ) ) { logger.reportStart( LoggerAction.DELETE_FILES, __( 'Moving site files to trash…' ) ); - // We configure `trash` as an external module, since it includes a native macOS binary that Vite - // inlines as a base64 string, which produces a runtime error. 
Since `trash` is also an ESM-only - // module, we need to import it dynamically (since Rollup doesn't get a chance to process it) - const trash = ( await import( 'trash' ) ).default; await trash( siteFolder ); logger.reportSuccess( __( 'Site files moved to trash' ) ); } else { diff --git a/apps/cli/commands/site/set.ts b/apps/cli/commands/site/set.ts index 8fd35941fd..faecbd3ddf 100644 --- a/apps/cli/commands/site/set.ts +++ b/apps/cli/commands/site/set.ts @@ -274,7 +274,7 @@ export async function runCommand( sitePath: string, options: SetCommandOptions ) const phpVersion = validatePhpVersion( site.phpVersion ); const zipUrl = getWordPressVersionUrl( wp ); - const [ response, exitPhp ] = await runWpCliCommand( sitePath, phpVersion, [ + await using command = await runWpCliCommand( sitePath, phpVersion, [ 'core', 'update', zipUrl, @@ -283,9 +283,8 @@ export async function runCommand( sitePath: string, options: SetCommandOptions ) '--skip-themes', ] ); - const exitCode = await response.exitCode; + const exitCode = await command.response.exitCode; if ( exitCode !== 0 ) { - exitPhp(); throw new LoggerError( sprintf( __( 'Failed to update WordPress version to %s' ), wp ) ); } logger.reportSuccess( __( 'WordPress version updated' ) ); @@ -302,8 +301,6 @@ export async function runCommand( sitePath: string, options: SetCommandOptions ) } finally { await unlockCliConfig(); } - - exitPhp(); } if ( needsRestart && wasRunning ) { diff --git a/apps/cli/commands/site/stop.ts b/apps/cli/commands/site/stop.ts index 2f9ab82b0e..2c8a716cb3 100644 --- a/apps/cli/commands/site/stop.ts +++ b/apps/cli/commands/site/stop.ts @@ -55,7 +55,7 @@ export async function runCommand( return; } - logger.reportStart( LoggerAction.STOP_SITE, __( 'Stopping WordPress servers…' ) ); + logger.reportStart( LoggerAction.STOP_SITE, __( 'Stopping WordPress server…' ) ); try { await stopWordPressServer( site.id ); diff --git a/apps/cli/commands/site/tests/export.test.ts 
b/apps/cli/commands/site/tests/export.test.ts new file mode 100644 index 0000000000..479454c6c7 --- /dev/null +++ b/apps/cli/commands/site/tests/export.test.ts @@ -0,0 +1,168 @@ +import { SiteCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; +import { vi } from 'vitest'; +import yargs from 'yargs'; +import { getSiteByFolder } from 'cli/lib/cli-config/sites'; +import { connectToDaemon, disconnectFromDaemon } from 'cli/lib/daemon-client'; +import { ExportEvents } from 'cli/lib/import-export/export/events'; +import { exportBackup } from 'cli/lib/import-export/export/export-manager'; +import { keepSqliteIntegrationUpdated } from 'cli/lib/sqlite-integration'; +import { Logger, LoggerError } from 'cli/logger'; +import { registerCommand, runCommand } from '../../export'; +import type { SiteData } from 'cli/lib/cli-config/core'; + +function getYargsArgvMock() { + return yargs().option( 'path', { + type: 'string', + normalize: true, + default: process.cwd(), + } ); +} + +vi.mock( 'cli/lib/cli-config/sites', () => ( { + getSiteByFolder: vi.fn(), +} ) ); +vi.mock( 'cli/lib/daemon-client' ); +vi.mock( 'cli/lib/sqlite-integration' ); +vi.mock( 'cli/lib/import-export/export/export-manager', () => ( { + exportBackup: vi.fn(), +} ) ); + +describe( 'CLI: studio export', () => { + const testSitePath = '/test/site'; + const testExportPath = '/tmp/site-backup.zip'; + const testSite: SiteData = { + id: 'site-1', + name: 'Test Site', + path: testSitePath, + port: 8080, + phpVersion: '8.0', + adminUsername: 'admin', + adminPassword: 'password123', + }; + + beforeEach( () => { + vi.clearAllMocks(); + + vi.mocked( connectToDaemon ).mockResolvedValue( undefined ); + vi.mocked( disconnectFromDaemon ).mockResolvedValue( undefined ); + vi.mocked( getSiteByFolder ).mockResolvedValue( testSite ); + vi.mocked( keepSqliteIntegrationUpdated ).mockResolvedValue( false ); + vi.mocked( exportBackup ).mockResolvedValue( true ); + } ); + + afterEach( () => { + 
vi.restoreAllMocks(); + } ); + + it( 'loads site and exports backup', async () => { + await runCommand( testSitePath, testExportPath ); + + expect( connectToDaemon ).toHaveBeenCalled(); + expect( getSiteByFolder ).toHaveBeenCalledWith( testSitePath ); + expect( keepSqliteIntegrationUpdated ).toHaveBeenCalledWith( testSitePath ); + expect( exportBackup ).toHaveBeenCalledWith( + { + site: testSite, + backupFile: testExportPath, + phpVersion: '8.3', + includes: { + wpContent: true, + database: true, + }, + }, + expect.any( Function ) + ); + expect( disconnectFromDaemon ).toHaveBeenCalled(); + } ); + + it( 'maps export events to logger actions', async () => { + const reportStartSpy = vi.spyOn( Logger.prototype, 'reportStart' ); + const reportProgressSpy = vi.spyOn( Logger.prototype, 'reportProgress' ); + const reportSuccessSpy = vi.spyOn( Logger.prototype, 'reportSuccess' ); + + vi.mocked( exportBackup ).mockImplementation( async ( _options, onEvent ) => { + onEvent( { event: ExportEvents.EXPORT_START, data: undefined } ); + onEvent( { event: ExportEvents.BACKUP_CREATE_START, data: undefined } ); + onEvent( { + event: ExportEvents.BACKUP_CREATE_PROGRESS, + data: { progress: { entries: { processed: 1, total: 2 } } }, + } ); + onEvent( { event: ExportEvents.EXPORT_COMPLETE, data: undefined } ); + return true; + } ); + + await runCommand( testSitePath, testExportPath ); + + expect( reportStartSpy ).toHaveBeenCalledWith( LoggerAction.EXPORT_SITE, 'Starting export…' ); + expect( reportStartSpy ).toHaveBeenCalledWith( + LoggerAction.CREATE_BACKUP, + 'Creating backup file…' + ); + expect( reportProgressSpy ).toHaveBeenCalledWith( 'Backing up file… (1 processed)' ); + expect( reportSuccessSpy ).toHaveBeenCalledWith( 'Site exported successfully' ); + } ); + + it( 'throws when no suitable exporter is found', async () => { + vi.mocked( exportBackup ).mockResolvedValue( false ); + + const command = runCommand( testSitePath, testExportPath ); + await expect( command 
).rejects.toThrow( LoggerError ); + await expect( command ).rejects.toThrow( + 'No suitable exporter found for the provided backup file' + ); + } ); + + it( 'allows .sql exports when --mode db is used', async () => { + const reportErrorSpy = vi.spyOn( Logger.prototype, 'reportError' ); + + const argv = getYargsArgvMock(); + registerCommand( argv ); + + await argv.parse( [ 'export', 'site-backup.sql', '--path', testSitePath, '--mode', 'db' ] ); + + expect( exportBackup ).toHaveBeenCalledWith( + expect.objectContaining( { + backupFile: expect.stringContaining( 'site-backup.sql' ), + includes: { + database: true, + wpContent: false, + }, + } ), + expect.any( Function ) + ); + expect( reportErrorSpy ).not.toHaveBeenCalled(); + } ); + + it( 'rejects non-.sql file paths when --mode db is used', async () => { + const reportErrorSpy = vi.spyOn( Logger.prototype, 'reportError' ); + + const argv = getYargsArgvMock(); + registerCommand( argv ); + + await argv.parse( [ 'export', 'site-backup.zip', '--path', testSitePath, '--mode', 'db' ] ); + + expect( exportBackup ).not.toHaveBeenCalled(); + expect( reportErrorSpy ).toHaveBeenCalledWith( + expect.objectContaining( { + message: 'Invalid export file extension. Must be .sql when exporting database only.', + } ) + ); + } ); + + it( 'rejects .sql exports with --mode full', async () => { + const reportErrorSpy = vi.spyOn( Logger.prototype, 'reportError' ); + + const argv = getYargsArgvMock(); + registerCommand( argv ); + + await argv.parse( [ 'export', 'site-backup.sql', '--path', testSitePath, '--mode', 'full' ] ); + + expect( exportBackup ).not.toHaveBeenCalled(); + expect( reportErrorSpy ).toHaveBeenCalledWith( + expect.objectContaining( { + message: + 'Invalid export file extension. 
Must be .zip or .tar.gz when exporting the full site.', + } ) + ); + } ); +} ); diff --git a/apps/cli/commands/site/tests/import.test.ts b/apps/cli/commands/site/tests/import.test.ts new file mode 100644 index 0000000000..6d47c49177 --- /dev/null +++ b/apps/cli/commands/site/tests/import.test.ts @@ -0,0 +1,175 @@ +import fs from 'fs'; +import path from 'path'; +import { isWordPressDirectory, recursiveCopyDirectory } from '@studio/common/lib/fs-utils'; +import { getServerFilesPath } from '@studio/common/lib/well-known-paths'; +import { SiteCommandLoggerAction as LoggerAction } from '@studio/common/logger-actions'; +import { vi } from 'vitest'; +import { getSiteByFolder } from 'cli/lib/cli-config/sites'; +import { connectToDaemon, disconnectFromDaemon } from 'cli/lib/daemon-client'; +import { ImporterEvents, ValidatorEvents } from 'cli/lib/import-export/import/events'; +import { + DEFAULT_IMPORTER_OPTIONS, + importBackup, +} from 'cli/lib/import-export/import/import-manager'; +import { keepSqliteIntegrationUpdated } from 'cli/lib/sqlite-integration'; +import { isServerRunning, stopWordPressServer } from 'cli/lib/wordpress-server-manager'; +import { Logger, LoggerError } from 'cli/logger'; +import { runCommand } from '../../import'; +import type { SiteData } from 'cli/lib/cli-config/core'; + +vi.mock( 'cli/lib/cli-config/sites', () => ( { + clearSiteLatestCliPid: vi.fn(), + getSiteByFolder: vi.fn(), + getSiteUrl: vi.fn(), +} ) ); +vi.mock( 'cli/lib/daemon-client' ); +vi.mock( 'cli/lib/sqlite-integration', () => ( { + keepSqliteIntegrationUpdated: vi.fn(), +} ) ); +vi.mock( 'cli/lib/wordpress-server-manager', () => ( { + isServerRunning: vi.fn(), + stopWordPressServer: vi.fn(), +} ) ); +vi.mock( 'cli/lib/import-export/import/import-manager', () => ( { + DEFAULT_IMPORTER_OPTIONS: [], + importBackup: vi.fn(), +} ) ); +vi.mock( '@studio/common/lib/well-known-paths' ); +vi.mock( '@studio/common/lib/fs-utils', () => ( { + isWordPressDirectory: vi.fn(), + 
recursiveCopyDirectory: vi.fn(), +} ) ); + +describe( 'CLI: studio import', () => { + const testSitePath = '/test/site'; + const testImportPath = '/tmp/backup.zip'; + const testSite: SiteData = { + id: 'site-1', + name: 'Test Site', + path: testSitePath, + port: 8080, + phpVersion: '8.0', + adminUsername: 'admin', + adminPassword: 'password123', + }; + + beforeEach( () => { + vi.clearAllMocks(); + + vi.mocked( connectToDaemon ).mockResolvedValue( undefined ); + vi.mocked( disconnectFromDaemon ).mockResolvedValue( undefined ); + vi.mocked( getSiteByFolder ).mockResolvedValue( testSite ); + vi.mocked( isServerRunning ).mockResolvedValue( undefined ); + vi.mocked( stopWordPressServer ).mockResolvedValue( undefined ); + vi.mocked( keepSqliteIntegrationUpdated ).mockResolvedValue( false ); + vi.mocked( isWordPressDirectory ).mockReturnValue( true ); + vi.mocked( getServerFilesPath ).mockReturnValue( '/server-files' ); + vi.mocked( recursiveCopyDirectory ).mockResolvedValue( undefined ); + vi.spyOn( fs, 'existsSync' ).mockImplementation( ( p ) => p === testImportPath ); + vi.mocked( importBackup ).mockResolvedValue( { + extractionDirectory: '/tmp/extracted', + sqlFiles: [], + wpContentFiles: [], + wpContentDirectory: '/tmp/extracted/wp-content', + wpConfig: '/tmp/extracted/wp-config.php', + importerType: 'LocalImporter', + } ); + } ); + + afterEach( () => { + vi.restoreAllMocks(); + } ); + + it( 'loads site and imports backup', async () => { + await runCommand( testSitePath, testImportPath ); + + expect( connectToDaemon ).toHaveBeenCalled(); + expect( getSiteByFolder ).toHaveBeenCalledWith( testSitePath ); + expect( importBackup ).toHaveBeenCalledWith( + { + path: testImportPath, + type: 'application/zip', + }, + testSite, + expect.any( Function ), + DEFAULT_IMPORTER_OPTIONS + ); + expect( disconnectFromDaemon ).toHaveBeenCalled(); + } ); + + it( 'sets up WordPress files when site path is not a WordPress directory', async () => { + vi.mocked( isWordPressDirectory 
).mockReturnValue( false ); + vi.spyOn( fs, 'existsSync' ).mockImplementation( + ( p ) => + p === testImportPath || p === path.join( '/server-files', 'wordpress-versions', 'latest' ) + ); + + await runCommand( testSitePath, testImportPath ); + + expect( recursiveCopyDirectory ).toHaveBeenCalledWith( + path.join( '/server-files', 'wordpress-versions', 'latest' ), + testSitePath + ); + } ); + + it( 'throws when bundled WordPress files are unavailable', async () => { + vi.mocked( isWordPressDirectory ).mockReturnValue( false ); + vi.spyOn( fs, 'existsSync' ).mockImplementation( ( p ) => p === testImportPath ); + + const command = runCommand( testSitePath, testImportPath ); + await expect( command ).rejects.toThrow( LoggerError ); + await expect( command ).rejects.toThrow( 'Bundled WordPress files not found' ); + } ); + + it( 'maps importer events to logger actions', async () => { + const reportStartSpy = vi.spyOn( Logger.prototype, 'reportStart' ); + const reportProgressSpy = vi.spyOn( Logger.prototype, 'reportProgress' ); + const reportSuccessSpy = vi.spyOn( Logger.prototype, 'reportSuccess' ); + + vi.mocked( importBackup ).mockImplementation( async ( _backupFile, _site, onEvent ) => { + onEvent( { + event: ValidatorEvents.IMPORT_VALIDATION_START, + data: undefined, + } ); + onEvent( { + event: ImporterEvents.IMPORT_DATABASE_START, + data: undefined, + } ); + onEvent( { + event: ImporterEvents.IMPORT_DATABASE_PROGRESS, + data: { processedFiles: 1, totalFiles: 2 }, + } ); + onEvent( { + event: ImporterEvents.IMPORT_COMPLETE, + data: undefined, + } ); + return { + extractionDirectory: '/tmp/extracted', + sqlFiles: [], + wpContentFiles: [], + wpContentDirectory: '/tmp/extracted/wp-content', + wpConfig: '/tmp/extracted/wp-config.php', + importerType: 'LocalImporter', + }; + } ); + + await runCommand( testSitePath, testImportPath ); + + expect( reportStartSpy ).toHaveBeenCalledWith( LoggerAction.VALIDATE, 'Validating backup…' ); + expect( reportStartSpy 
).toHaveBeenCalledWith( + LoggerAction.IMPORT_DATABASE, + 'Importing database…' + ); + expect( reportProgressSpy ).toHaveBeenCalledWith( 'Importing database files… (1/2)' ); + expect( reportSuccessSpy ).toHaveBeenCalledWith( 'Site imported successfully' ); + } ); + + it( 'preserves import error when restore steps fail', async () => { + vi.mocked( isServerRunning ).mockResolvedValue( { pid: 1234 } as never ); + vi.mocked( importBackup ).mockRejectedValue( new Error( 'import failed' ) ); + vi.mocked( keepSqliteIntegrationUpdated ).mockRejectedValue( new Error( 'restart failed' ) ); + + await expect( runCommand( testSitePath, testImportPath ) ).rejects.toThrow( 'import failed' ); + expect( stopWordPressServer ).toHaveBeenCalledWith( testSite.id ); + } ); +} ); diff --git a/apps/cli/commands/site/tests/set.test.ts b/apps/cli/commands/site/tests/set.test.ts index 24b4f15b2b..5a288e31b7 100644 --- a/apps/cli/commands/site/tests/set.test.ts +++ b/apps/cli/commands/site/tests/set.test.ts @@ -255,10 +255,10 @@ describe( 'CLI: studio site set', () => { const mockResponse: Partial< StreamedPHPResponse > = { exitCode: Promise.resolve( 0 ), }; - vi.mocked( runWpCliCommand ).mockResolvedValue( [ - mockResponse as StreamedPHPResponse, - vi.fn().mockResolvedValue( undefined ), - ] ); + vi.mocked( runWpCliCommand ).mockResolvedValue( { + response: mockResponse as StreamedPHPResponse, + [ Symbol.dispose ]: vi.fn().mockResolvedValue( undefined ), + } ); } ); it( 'should run WP-CLI to update WordPress version', async () => { @@ -285,10 +285,10 @@ describe( 'CLI: studio site set', () => { const mockResponse: Partial< StreamedPHPResponse > = { exitCode: Promise.resolve( 1 ), }; - vi.mocked( runWpCliCommand ).mockResolvedValue( [ - mockResponse as StreamedPHPResponse, - vi.fn().mockResolvedValue( undefined ), - ] ); + vi.mocked( runWpCliCommand ).mockResolvedValue( { + response: mockResponse as StreamedPHPResponse, + [ Symbol.dispose ]: vi.fn().mockResolvedValue( undefined ), + } ); 
await expect( runCommand( testSitePath, { wp: '6.7' } ) ).rejects.toThrow( 'Failed to update WordPress version to 6.7' diff --git a/apps/cli/commands/wp.ts b/apps/cli/commands/wp.ts index 3ef1cc6258..19011d6050 100644 --- a/apps/cli/commands/wp.ts +++ b/apps/cli/commands/wp.ts @@ -48,11 +48,10 @@ export async function runCommand( ): Promise< void > { // Handle global WP-CLI commands that don't require a site path (--studio-no-path) if ( mode === Mode.GLOBAL ) { - const [ response, exitPhp ] = await runGlobalWpCliCommand( args ); + await using command = await runGlobalWpCliCommand( args ); - await pipePHPResponse( response ); - process.exitCode = await response.exitCode; - exitPhp(); + await pipePHPResponse( command.response ); + process.exitCode = await command.response.exitCode; return; } @@ -86,11 +85,10 @@ export async function runCommand( process.on( 'SIGTERM', () => process.exit( 1 ) ); // …If not, run the command in a new PHP-WASM instance - const [ response, exitPhp ] = await runWpCliCommand( siteFolder, phpVersion, args ); + await using command = await runWpCliCommand( siteFolder, phpVersion, args ); - await pipePHPResponse( response ); - process.exitCode = await response.exitCode; - exitPhp(); + await pipePHPResponse( command.response ); + process.exitCode = await command.response.exitCode; } function removeArgumentFromArgv( diff --git a/apps/cli/index.ts b/apps/cli/index.ts index 2f3454f3df..5e09aae664 100644 --- a/apps/cli/index.ts +++ b/apps/cli/index.ts @@ -2,6 +2,11 @@ import path from 'node:path'; import { suppressPunycodeWarning } from '@studio/common/lib/suppress-punycode-warning'; import { __ } from '@wordpress/i18n'; import yargs from 'yargs'; +import { registerCommand as registerExportCommand } from 'cli/commands/export'; +import { registerCommand as registerImportCommand } from 'cli/commands/import'; +import { registerCommand as registerMcpCommand } from 'cli/commands/mcp'; +import { registerCommand as registerPullCommand } from 
'cli/commands/pull'; +import { registerCommand as registerPushCommand } from 'cli/commands/push'; import { bumpAggregatedUniqueStat, getPlatformMetric } from 'cli/lib/bump-stat'; import { setupServerFiles } from 'cli/lib/dependency-management/setup'; import { loadTranslations } from 'cli/lib/i18n'; @@ -85,29 +90,39 @@ async function main() { registerAuthLogoutCommand( authYargs ); registerAuthStatusCommand( authYargs ); authYargs.version( false ).demandCommand( 1, __( 'You must provide a valid auth command' ) ); - } ) - .command( 'preview', __( 'Manage preview sites' ), async ( previewYargs ) => { - const [ - { registerCommand: registerPreviewCreateCommand }, - { registerCommand: registerPreviewListCommand }, - { registerCommand: registerPreviewDeleteCommand }, - { registerCommand: registerPreviewUpdateCommand }, - { registerCommand: registerPreviewSetCommand }, - ] = await Promise.all( [ - import( 'cli/commands/preview/create' ), - import( 'cli/commands/preview/list' ), - import( 'cli/commands/preview/delete' ), - import( 'cli/commands/preview/update' ), - import( 'cli/commands/preview/set' ), - ] ); + } ); - registerPreviewCreateCommand( previewYargs ); - registerPreviewListCommand( previewYargs ); - registerPreviewDeleteCommand( previewYargs ); - registerPreviewUpdateCommand( previewYargs ); - registerPreviewSetCommand( previewYargs ); - previewYargs.version( false ).demandCommand( 1, __( 'You must provide a valid command' ) ); - } ) + registerExportCommand( studioArgv ); + registerImportCommand( studioArgv ); + registerMcpCommand( studioArgv ); + + studioArgv.command( 'preview', __( 'Manage preview sites' ), async ( previewYargs ) => { + const [ + { registerCommand: registerPreviewCreateCommand }, + { registerCommand: registerPreviewListCommand }, + { registerCommand: registerPreviewDeleteCommand }, + { registerCommand: registerPreviewUpdateCommand }, + { registerCommand: registerPreviewSetCommand }, + ] = await Promise.all( [ + import( 
'cli/commands/preview/create' ), + import( 'cli/commands/preview/list' ), + import( 'cli/commands/preview/delete' ), + import( 'cli/commands/preview/update' ), + import( 'cli/commands/preview/set' ), + ] ); + + registerPreviewCreateCommand( previewYargs ); + registerPreviewListCommand( previewYargs ); + registerPreviewDeleteCommand( previewYargs ); + registerPreviewUpdateCommand( previewYargs ); + registerPreviewSetCommand( previewYargs ); + previewYargs.version( false ).demandCommand( 1, __( 'You must provide a valid command' ) ); + } ); + + registerPullCommand( studioArgv ); + registerPushCommand( studioArgv ); + + studioArgv .command( 'site', __( 'Manage sites' ), async ( sitesYargs ) => { const [ { registerCommand: registerSiteStatusCommand }, @@ -134,6 +149,9 @@ async function main() { registerSiteStopCommand( sitesYargs ); registerSiteDeleteCommand( sitesYargs ); registerSiteSetCommand( sitesYargs ); + registerImportCommand( studioArgv ); + registerExportCommand( studioArgv ); + sitesYargs.version( false ).demandCommand( 1, __( 'You must provide a valid command' ) ); } ) .command( { @@ -208,8 +226,6 @@ async function main() { ); studioArgv.command( 'ai', false, studioCodeCommandBuilder ); } - const { registerCommand: registerMcpCommand } = await import( 'cli/commands/mcp' ); - registerMcpCommand( studioArgv ); await studioArgv.argv; } diff --git a/apps/cli/lib/dependency-management/wp-cli.ts b/apps/cli/lib/dependency-management/wp-cli.ts index 32bbb0071e..e1346bed4a 100644 --- a/apps/cli/lib/dependency-management/wp-cli.ts +++ b/apps/cli/lib/dependency-management/wp-cli.ts @@ -9,15 +9,11 @@ import { getWpCliPharPath } from '../server-files'; import { downloadFile, fetchLatestGithubRelease } from './utils'; async function getWPCliVersionFromInstallation(): Promise< string > { - const [ response, exitPhp ] = await runGlobalWpCliCommand( [ 'wp', '--version' ] ); + await using command = await runGlobalWpCliCommand( [ 'wp', '--version' ] ); + const stdout = await 
command.response.stdoutText; - try { - const stdout = await response.stdoutText; - if ( stdout.startsWith( 'WP-CLI ' ) ) { - return stdout.split( ' ' )[ 1 ]; - } - } finally { - exitPhp(); + if ( stdout.startsWith( 'WP-CLI ' ) ) { + return stdout.split( ' ' )[ 1 ]; } return ''; diff --git a/apps/cli/lib/import-export/export/events.ts b/apps/cli/lib/import-export/export/events.ts new file mode 100644 index 0000000000..d5b6550618 --- /dev/null +++ b/apps/cli/lib/import-export/export/events.ts @@ -0,0 +1,18 @@ +export const ExportEvents = { + EXPORT_START: 'export_start', + EXPORT_COMPLETE: 'export_complete', + EXPORT_ERROR: 'export_error', + BACKUP_CREATE_START: 'backup_create_start', + BACKUP_CREATE_PROGRESS: 'backup_create_progress', + BACKUP_CREATE_COMPLETE: 'backup_create_complete', + WP_CONTENT_EXPORT_START: 'wp_content_export_start', + WP_CONTENT_EXPORT_PROGRESS: 'wp_content_export_progress', + WP_CONTENT_EXPORT_COMPLETE: 'wp_content_export_complete', + DATABASE_EXPORT_START: 'database_export_start', + DATABASE_EXPORT_PROGRESS: 'database_export_progress', + DATABASE_EXPORT_COMPLETE: 'database_export_complete', + CONFIG_EXPORT_START: 'config_export_start', + CONFIG_EXPORT_COMPLETE: 'config_export_complete', +} as const; + +export type ExportEventType = ( typeof ExportEvents )[ keyof typeof ExportEvents ]; diff --git a/apps/cli/lib/import-export/export/export-database.ts b/apps/cli/lib/import-export/export/export-database.ts new file mode 100644 index 0000000000..0752b3cf58 --- /dev/null +++ b/apps/cli/lib/import-export/export/export-database.ts @@ -0,0 +1,106 @@ +import path from 'path'; +import { DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { parseJsonFromPhpOutput } from '@studio/common/lib/php-output-parser'; +import { __, sprintf } from '@wordpress/i18n'; +import { move } from 'fs-extra'; +import { runWpCliCommand } from 'cli/lib/run-wp-cli-command'; +import { generateBackupFilename } from './generate-backup-filename'; + +export async 
function exportDatabaseToFile( + siteFolder: string, + finalDestination: string +): Promise< void > { + // Generate a temporary file name in the project directory + const tempFileName = `${ generateBackupFilename( 'db-export' ) }.sql`; + + // Execute the command to export directly to the temp file + // Use absolute path /wordpress/ because that's where site.path is mounted in the WASM filesystem + await using command = await runWpCliCommand( siteFolder, DEFAULT_PHP_VERSION, [ + 'sqlite', + 'export', + `/wordpress/${ tempFileName }`, + '--require=/tmp/sqlite-command/command.php', + '--enable-ast-driver', + '--skip-plugins', + '--skip-themes', + ] ); + + const exitCode = await command.response.exitCode; + if ( exitCode !== 0 ) { + throw new Error( __( 'Database export failed' ) ); + } + + // Move the file to its final destination + const tempFilePath = path.join( siteFolder, tempFileName ); + await move( tempFilePath, finalDestination ); +} + +export async function exportDatabaseToMultipleFiles( + siteFolder: string, + finalDestinationDir: string +): Promise< string[] > { + await using command = await runWpCliCommand( siteFolder, DEFAULT_PHP_VERSION, [ + 'sqlite', + 'tables', + '--format=json', + '--require=/tmp/sqlite-command/command.php', + '--enable-ast-driver', + '--skip-plugins', + '--skip-themes', + ] ); + + const tablesStdout = await command.response.stdoutText; + const exitCode = await command.response.exitCode; + if ( exitCode !== 0 ) { + throw new Error( __( 'Database export failed' ) ); + } + + let tables; + + try { + tables = parseJsonFromPhpOutput( tablesStdout ); + } catch ( error ) { + console.error( + sprintf( __( 'Could not get list of database tables. The WP CLI output: %s' ), tablesStdout ) + ); + throw new Error( __( 'Could not get list of database tables to export.' 
) ); + } + + const tmpFiles: string[] = []; + + for ( const table of tables ) { + if ( table === 'wp_users' || table === 'wp_usermeta' ) { + // Skip the wp_users and wp_usermeta tables as they are not needed + continue; + } + + const fileName = `${ table }.sql`; + + // Execute the command to export directly to a temporary file in the project directory + await using command = await runWpCliCommand( siteFolder, DEFAULT_PHP_VERSION, [ + 'sqlite', + 'export', + // Use absolute path /wordpress/ because that's where site.path is mounted in the WASM filesystem + `/wordpress/${ fileName }`, + `--tables=${ table }`, + '--require=/tmp/sqlite-command/command.php', + '--enable-ast-driver', + '--skip-plugins', + '--skip-themes', + ] ); + + const exitCode = await command.response.exitCode; + if ( exitCode !== 0 ) { + throw new Error( sprintf( __( 'Database export failed for table %s' ), table ) ); + } + + // Move the file to its final destination + const tempFilePath = path.join( siteFolder, fileName ); + const finalDestination = path.join( finalDestinationDir, fileName ); + await move( tempFilePath, finalDestination ); + + tmpFiles.push( finalDestination ); + } + + return tmpFiles; +} diff --git a/apps/cli/lib/import-export/export/export-manager.ts b/apps/cli/lib/import-export/export/export-manager.ts new file mode 100644 index 0000000000..1a9b342065 --- /dev/null +++ b/apps/cli/lib/import-export/export/export-manager.ts @@ -0,0 +1,32 @@ +import { ImportExportEventData, handleEvents } from '../handle-events'; +import { ExportEvents } from './events'; +import { DefaultExporter, SqlExporter } from './exporters'; +import { ExportOptions, NewExporter } from './types'; + +export async function exportBackup( + exportOptions: ExportOptions, + onEvent: ( data: ImportExportEventData ) => void, + exporters: NewExporter[] = defaultExporterOptions +): Promise< boolean > { + let foundValidExporter; + for ( const Exporter of exporters ) { + const exporterInstance = new Exporter( 
exportOptions ); + const removeExportListeners = handleEvents( exporterInstance, onEvent, ExportEvents ); + foundValidExporter = await exporterInstance.canHandle(); + if ( foundValidExporter ) { + try { + await exporterInstance.export(); + } finally { + removeExportListeners(); + } + break; + } + } + if ( ! foundValidExporter ) { + onEvent( { event: ExportEvents.EXPORT_ERROR, data: null } ); + return false; + } + return true; +} + +const defaultExporterOptions: NewExporter[] = [ DefaultExporter, SqlExporter ]; diff --git a/apps/cli/lib/import-export/export/exporters/default-exporter.ts b/apps/cli/lib/import-export/export/exporters/default-exporter.ts new file mode 100644 index 0000000000..7ffcf8bac8 --- /dev/null +++ b/apps/cli/lib/import-export/export/exporters/default-exporter.ts @@ -0,0 +1,372 @@ +import { EventEmitter } from 'events'; +import fs from 'fs'; +import fsPromises from 'fs/promises'; +import os from 'os'; +import path from 'path'; +import { ARCHIVER_OPTIONS, DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { parseJsonFromPhpOutput } from '@studio/common/lib/php-output-parser'; +import { + hasDefaultDbBlock, + removeDbConstants, +} from '@studio/common/lib/remove-default-db-constants'; +import { __, sprintf } from '@wordpress/i18n'; +import archiver from 'archiver'; +import { getSiteUrl } from 'cli/lib/cli-config/sites'; +import { getWordPressVersionFromInstallation } from 'cli/lib/dependency-management/wordpress'; +import { runWpCliCommand } from 'cli/lib/run-wp-cli-command'; +import { ExportEvents } from '../events'; +import { exportDatabaseToFile, exportDatabaseToMultipleFiles } from '../export-database'; +import { generateBackupFilename } from '../generate-backup-filename'; +import { + ExportOptions, + BackupContents, + Exporter, + BackupCreateProgressEventData, + StudioJson, +} from '../types'; + +export class DefaultExporter extends EventEmitter implements Exporter { + private archiveBuilder!: archiver.Archiver; + private backup: 
BackupContents; + private readonly options: ExportOptions; + + isExactPathExcluded( pathToCheck: string ) { + const PATHS_TO_EXCLUDE = [ + 'wp-content/mu-plugins/sqlite-database-integration', + 'wp-content/database', + 'wp-content/db.php', + 'wp-content/debug.log', + 'wp-content/mu-plugins/0-allowed-redirect-hosts.php', + 'wp-content/mu-plugins/0-check-theme-availability.php', + 'wp-content/mu-plugins/0-deactivate-jetpack-modules.php', + 'wp-content/mu-plugins/0-dns-functions.php', + 'wp-content/mu-plugins/0-permalinks.php', + 'wp-content/mu-plugins/0-wp-config-constants-polyfill.php', + 'wp-content/mu-plugins/0-sqlite.php', + 'wp-content/mu-plugins/0-thumbnails.php', + 'wp-content/mu-plugins/0-https-for-reverse-proxy.php', + 'wp-content/mu-plugins/0-sqlite-command.php', + ]; + + return PATHS_TO_EXCLUDE.some( ( pathToExclude ) => + pathToCheck.startsWith( path.normalize( pathToExclude ) ) + ); + } + + // Look for disallowed directory names in a given path. If found, determine whether that part of + // the path is a directory or not. + isPathExcludedByPattern( pathToCheck: string ) { + const DIRECTORY_NAMES_TO_EXCLUDE = [ '.git', 'node_modules', 'cache' ]; + const pathParts = pathToCheck.split( path.sep ); + + for ( const directoryName of DIRECTORY_NAMES_TO_EXCLUDE ) { + if ( ! pathParts.includes( directoryName ) ) { + continue; + } + const offenderIndex = pathToCheck.lastIndexOf( directoryName ); + const offenderPath = pathToCheck.substring( 0, offenderIndex + directoryName.length ); + try { + const stat = fs.statSync( offenderPath ); + return stat.isDirectory(); + } catch ( error ) { + return false; + } + } + + return false; + } + + constructor( options: ExportOptions ) { + super(); + this.options = options; + this.backup = { + backupFile: options.backupFile, + sqlFiles: [], + }; + } + + async canHandle(): Promise< boolean > { + const supportedExtension = [ 'tar.gz', 'tzg', 'zip' ].find( ( ext ) => + this.options.backupFile.endsWith( ext ) + ); + + if ( ! 
supportedExtension ) { + return false; + } + + const requiredPaths = [ + { path: 'wp-content', isDir: true }, + { path: 'wp-includes', isDir: true }, + { path: 'wp-load.php', isDir: false }, + { path: 'wp-config.php', isDir: false }, + ]; + + try { + for ( const requiredPath of requiredPaths ) { + const stats = await fsPromises.stat( + path.join( this.options.site.path, requiredPath.path ) + ); + if ( requiredPath.isDir && ! stats.isDirectory() ) { + return false; + } + if ( ! requiredPath.isDir && ! stats.isFile() ) { + return false; + } + } + return true; + } catch ( error ) { + return false; + } + } + + async export(): Promise< void > { + this.emit( ExportEvents.EXPORT_START ); + const output = fs.createWriteStream( this.options.backupFile ); + this.archiveBuilder = this.createArchiveBuilder(); + + const archiveClosedPromise = this.setupArchiveListeners( output ); + + this.archiveBuilder.pipe( output ); + + try { + this.addWpConfig(); + await this.addWpContent(); + await this.addDatabase(); + const studioJsonPath = await this.createStudioJsonFile(); + this.archiveBuilder.file( studioJsonPath, { name: 'meta.json' } ); + await this.archiveBuilder.finalize(); + this.emit( ExportEvents.BACKUP_CREATE_COMPLETE ); + await archiveClosedPromise; + this.emit( ExportEvents.EXPORT_COMPLETE ); + } catch ( error ) { + this.archiveBuilder.abort(); + this.emit( ExportEvents.EXPORT_ERROR, error ); + throw error; + } finally { + if ( this.options.includes.database ) { + await this.cleanupTempFiles(); + } + } + } + + private createArchiveBuilder(): archiver.Archiver { + this.emit( ExportEvents.BACKUP_CREATE_START ); + const isZip = this.options.backupFile.endsWith( '.zip' ); + const format = isZip ? 
'zip' : 'tar'; + return archiver( format, ARCHIVER_OPTIONS[ format ] ); + } + + private setupArchiveListeners( output: fs.WriteStream ): Promise< void > { + return new Promise( ( resolve, reject ) => { + output.on( 'close', () => { + resolve(); + } ); + + this.archiveBuilder.on( 'warning', ( err ) => { + if ( err.code === 'ENOENT' ) { + console.warn( __( 'Archiver warning:' ), err ); + } else { + reject( err ); + } + } ); + this.archiveBuilder.on( 'progress', ( progress ) => { + this.emit( ExportEvents.BACKUP_CREATE_PROGRESS, { + progress, + } as BackupCreateProgressEventData ); + } ); + + this.archiveBuilder.on( 'error', reject ); + } ); + } + + private addWpConfig(): void { + const wpConfigPath = path.join( this.options.site.path, 'wp-config.php' ); + if ( fs.existsSync( wpConfigPath ) ) { + const content = fs.readFileSync( wpConfigPath, 'utf-8' ); + if ( hasDefaultDbBlock( content ) ) { + const modifiedContent = removeDbConstants( content ); + fs.writeFileSync( wpConfigPath, modifiedContent, 'utf-8' ); + } + this.archiveBuilder.file( wpConfigPath, { + name: 'wp-config.php', + } ); + } + } + + private async addWpContent(): Promise< void > { + if ( ! this.options.includes.wpContent ) { + return; + } + this.emit( ExportEvents.WP_CONTENT_EXPORT_START ); + + let pathsToArchive = this.options.specificSelectionPaths; + if ( ! pathsToArchive ) { + // Read the wp-content directory and get all the paths to be archived + pathsToArchive = fs.readdirSync( path.join( this.options.site.path, 'wp-content' ) ); + } + + if ( Array.isArray( pathsToArchive ) ) { + for ( const itemPath of pathsToArchive ) { + const fullPath = path.join( this.options.site.path, 'wp-content', itemPath ); + const archivePath = path.join( 'wp-content', itemPath ); + + if ( ! 
fs.existsSync( fullPath ) ) { + continue; + } + + const stat = await fsPromises.stat( fullPath ); + if ( stat.isDirectory() ) { + this.archiveBuilder.directory( fullPath, archivePath, ( entry ) => { + const entryPathRelativeToArchiveRoot = path.join( archivePath, entry.name ); + const fullEntryPathOnDisk = path.join( + this.options.site.path, + entryPathRelativeToArchiveRoot + ); + if ( + this.isExactPathExcluded( entryPathRelativeToArchiveRoot ) || + this.isPathExcludedByPattern( fullEntryPathOnDisk ) + ) { + return false; + } + return entry; + } ); + } else { + if ( this.isExactPathExcluded( archivePath ) ) { + continue; + } + this.archiveBuilder.file( fullPath, { name: archivePath } ); + } + } + } + + this.emit( ExportEvents.WP_CONTENT_EXPORT_COMPLETE ); + } + + private async addDatabase(): Promise< void > { + if ( ! this.options.includes.database ) { + return; + } + + this.emit( ExportEvents.DATABASE_EXPORT_START ); + const tmpFolder = await fsPromises.mkdtemp( path.join( os.tmpdir(), 'studio_export' ) ); + + if ( this.options.splitDatabaseDumpByTable ) { + const sqlFiles = await exportDatabaseToMultipleFiles( this.options.site.path, tmpFolder ); + sqlFiles.forEach( ( file ) => + this.archiveBuilder.file( file, { name: `sql/${ path.basename( file ) }` } ) + ); + this.backup.sqlFiles.push( ...sqlFiles ); + } else { + const fileName = `${ generateBackupFilename( 'db-export' ) }.sql`; + const sqlDumpPath = path.join( tmpFolder, fileName ); + await exportDatabaseToFile( this.options.site.path, sqlDumpPath ); + this.archiveBuilder.file( sqlDumpPath, { name: `sql/${ fileName }` } ); + this.backup.sqlFiles.push( sqlDumpPath ); + } + + this.emit( ExportEvents.DATABASE_EXPORT_COMPLETE ); + } + + private async cleanupTempFiles(): Promise< void > { + for ( const sqlFile of this.backup.sqlFiles ) { + await fsPromises + .unlink( sqlFile ) + .catch( ( err ) => console.error( `Failed to delete temporary file ${ sqlFile }:`, err ) ); + } + } + + private async 
createStudioJsonFile(): Promise< string > { + const wpVersion = await getWordPressVersionFromInstallation( this.options.site.path ); + const studioJson: StudioJson = { + siteUrl: getSiteUrl( this.options.site ), + phpVersion: this.options.phpVersion, + wordpressVersion: wpVersion ? wpVersion : '', + plugins: [], + themes: [], + }; + + const [ plugins, themes ] = await Promise.all( [ + this.getSitePlugins( this.options.site.path ), + this.getSiteThemes( this.options.site.path ), + ] ); + + studioJson.plugins = plugins; + studioJson.themes = themes; + + const tempDir = await fsPromises.mkdtemp( path.join( os.tmpdir(), 'studio-export-' ) ); + const studioJsonPath = path.join( tempDir, 'meta.json' ); + await fsPromises.writeFile( studioJsonPath, JSON.stringify( studioJson, null, 2 ) ); + return studioJsonPath; + } + + private async getSitePlugins( siteFolder: string ) { + await using command = await runWpCliCommand( siteFolder, DEFAULT_PHP_VERSION, [ + 'plugin', + 'list', + '--status=active,inactive', + '--fields=name,status,version', + '--format=json', + '--skip-plugins', + '--skip-themes', + ] ); + + const exitCode = await command.response.exitCode; + const stderr = await command.response.stderrText; + const stdout = await command.response.stdoutText; + + if ( exitCode !== 0 ) { + throw new Error( sprintf( __( 'Failed to get site plugins: %s' ), stderr ) ); + } + + try { + return parseJsonFromPhpOutput( stdout ); + } catch ( error ) { + if ( stderr ) { + console.error( sprintf( __( 'Could not get information about plugins: %s' ), stderr ) ); + } else { + console.error( + sprintf( __( 'Could not parse plugins list. The WP CLI output: %s' ), stdout ) + ); + } + + throw new Error( + __( 'Could not parse information about installed plugins to create meta.json file.' 
) + ); + } + } + + private async getSiteThemes( siteFolder: string ) { + await using command = await runWpCliCommand( siteFolder, DEFAULT_PHP_VERSION, [ + 'theme', + 'list', + '--fields=name,status,version', + '--format=json', + '--skip-plugins', + '--skip-themes', + ] ); + + const exitCode = await command.response.exitCode; + const stderr = await command.response.stderrText; + const stdout = await command.response.stdoutText; + + if ( exitCode !== 0 ) { + throw new Error( sprintf( __( 'Failed to get site themes: %s' ), stderr ) ); + } + + try { + return parseJsonFromPhpOutput( stdout ); + } catch ( error ) { + if ( stderr ) { + console.error( sprintf( __( 'Could not get information about themes: %s' ), stderr ) ); + } else { + console.error( + sprintf( __( 'Could not parse themes list. The WP CLI output: %s' ), stdout ) + ); + } + + throw new Error( + __( 'Could not parse information about installed themes to create meta.json file.' ) + ); + } + } +} diff --git a/apps/cli/lib/import-export/export/exporters/index.ts b/apps/cli/lib/import-export/export/exporters/index.ts new file mode 100644 index 0000000000..99fcc24b48 --- /dev/null +++ b/apps/cli/lib/import-export/export/exporters/index.ts @@ -0,0 +1,2 @@ +export * from './default-exporter'; +export * from './sql-exporter'; diff --git a/apps/cli/lib/import-export/export/exporters/sql-exporter.ts b/apps/cli/lib/import-export/export/exporters/sql-exporter.ts new file mode 100644 index 0000000000..6f2afb11cf --- /dev/null +++ b/apps/cli/lib/import-export/export/exporters/sql-exporter.ts @@ -0,0 +1,29 @@ +import { EventEmitter } from 'events'; +import { ExportEvents } from '../events'; +import { exportDatabaseToFile } from '../export-database'; +import { ExportOptions, Exporter } from '../types'; + +export class SqlExporter extends EventEmitter implements Exporter { + constructor( private options: ExportOptions ) { + super(); + } + async export(): Promise< void > { + this.emit( ExportEvents.EXPORT_START ); + try { + 
await exportDatabaseToFile( this.options.site.path, this.options.backupFile ); + this.emit( ExportEvents.EXPORT_COMPLETE ); + } catch ( error ) { + this.emit( ExportEvents.EXPORT_ERROR, error ); + throw error; + } + } + + async canHandle(): Promise< boolean > { + // Check for extension of the backup file to be sql. + if ( ! this.options.backupFile.toLowerCase().endsWith( '.sql' ) ) { + return false; + } + + return this.options.includes.database; + } +} diff --git a/apps/cli/lib/import-export/export/generate-backup-filename.ts b/apps/cli/lib/import-export/export/generate-backup-filename.ts new file mode 100644 index 0000000000..29d3f14103 --- /dev/null +++ b/apps/cli/lib/import-export/export/generate-backup-filename.ts @@ -0,0 +1,7 @@ +import { sanitizeFolderName } from '@studio/common/lib/sanitize-folder-name'; +import { format } from 'date-fns'; + +export function generateBackupFilename( name: string ) { + const timestamp = format( new Date(), 'yyyy-MM-dd-HH-mm-ss' ); + return sanitizeFolderName( `studio-backup-${ name }-${ timestamp }` ); +} diff --git a/apps/cli/lib/import-export/export/types.ts b/apps/cli/lib/import-export/export/types.ts new file mode 100644 index 0000000000..35cd57884a --- /dev/null +++ b/apps/cli/lib/import-export/export/types.ts @@ -0,0 +1,44 @@ +import { SiteData } from 'cli/lib/cli-config/core'; +import type { ProgressData } from 'archiver'; +import type { EventEmitter } from 'events'; + +export interface ExportOptions { + site: SiteData; + backupFile: string; + includes: { [ index in ExportOptionsIncludes ]: boolean }; + phpVersion: string; + splitDatabaseDumpByTable?: boolean; + specificSelectionPaths?: string[]; +} + +export type ExportOptionsIncludes = 'wpContent' | 'database'; + +export interface BackupContents { + backupFile: string; + sqlFiles: string[]; +} + +export interface Exporter extends Partial< EventEmitter > { + canHandle(): Promise< boolean >; + export(): Promise< void >; +} + +export interface 
BackupCreateProgressEventData { + progress: ProgressData; +} + +export type NewExporter = new ( options: ExportOptions ) => Exporter; + +export interface StudioJson { + phpVersion: string; + wordpressVersion?: string; + siteUrl: string; + plugins: StudioJsonPluginOrTheme[]; + themes: StudioJsonPluginOrTheme[]; +} + +export interface StudioJsonPluginOrTheme { + name: string; + status: 'active' | 'inactive'; + version: string; +} diff --git a/apps/cli/lib/import-export/handle-events.ts b/apps/cli/lib/import-export/handle-events.ts new file mode 100644 index 0000000000..33633d3d59 --- /dev/null +++ b/apps/cli/lib/import-export/handle-events.ts @@ -0,0 +1,31 @@ +import { EventEmitter } from 'events'; +import { ExportEventType } from './export/events'; +import { ImportEventType } from './import/events'; + +export type ImportExportEventType = ImportEventType | ExportEventType; + +export interface ImportExportEventData { + event: ImportExportEventType; + data: unknown; +} + +export const handleEvents = ( + emitter: Partial< EventEmitter >, + onEvent: ( data: ImportExportEventData ) => void, + events: Record< string, string > +) => { + const removeListeners: ( () => void )[] = []; + Object.values( events ).forEach( ( eventName ) => { + const listener = ( data: unknown ) => { + onEvent( { + event: eventName as ImportExportEventType, + data, + } ); + }; + emitter.on?.( eventName, listener ); + removeListeners.push( () => emitter.off?.( eventName, listener ) ); + } ); + return () => { + removeListeners.forEach( ( remove ) => remove() ); + }; +}; diff --git a/apps/cli/lib/import-export/import/events.ts b/apps/cli/lib/import-export/import/events.ts new file mode 100644 index 0000000000..620e3b6099 --- /dev/null +++ b/apps/cli/lib/import-export/import/events.ts @@ -0,0 +1,36 @@ +export const BackupExtractEvents = { + BACKUP_EXTRACT_START: 'backup_extract_start', + BACKUP_EXTRACT_PROGRESS: 'backup_extract_progress', + BACKUP_EXTRACT_FILE_START: 'backup_extract_file_start', + 
BACKUP_EXTRACT_COMPLETE: 'backup_extract_complete', + BACKUP_EXTRACT_WARNING: 'backup_extract_warning', + BACKUP_EXTRACT_ERROR: 'backup_extract_error', +} as const; + +export const ValidatorEvents = { + IMPORT_VALIDATION_START: 'import_validation_start', + IMPORT_VALIDATION_COMPLETE: 'import_validation_complete', + IMPORT_VALIDATION_ERROR: 'import_validation_error', +} as const; + +export const ImporterEvents = { + IMPORT_START: 'import_start', + IMPORT_DATABASE_START: 'import_database_start', + IMPORT_DATABASE_PROGRESS: 'import_database_progress', + IMPORT_DATABASE_COMPLETE: 'import_database_complete', + IMPORT_WP_CONTENT_START: 'import_wp_content_start', + IMPORT_WP_CONTENT_PROGRESS: 'import_wp_content_progress', + IMPORT_WP_CONTENT_COMPLETE: 'import_wp_content_complete', + IMPORT_META_START: 'import_meta', + IMPORT_META_COMPLETE: 'import_meta_complete', + IMPORT_COMPLETE: 'import_complete', + IMPORT_ERROR: 'import_error', +} as const; + +export const ImportEvents = { + ...BackupExtractEvents, + ...ValidatorEvents, + ...ImporterEvents, +} as const; + +export type ImportEventType = ( typeof ImportEvents )[ keyof typeof ImportEvents ]; diff --git a/apps/cli/lib/import-export/import/handlers/backup-handler-factory.ts b/apps/cli/lib/import-export/import/handlers/backup-handler-factory.ts new file mode 100644 index 0000000000..6b43909af9 --- /dev/null +++ b/apps/cli/lib/import-export/import/handlers/backup-handler-factory.ts @@ -0,0 +1,86 @@ +import { EventEmitter } from 'events'; +import { BackupArchiveInfo } from '../types'; +import { BackupHandlerSql } from './backup-handler-sql'; +import { BackupHandlerTarGz } from './backup-handler-tar-gz'; +import { BackupHandlerWpress } from './backup-handler-wpress'; +import { BackupHandlerZip } from './backup-handler-zip'; + +export interface BackupHandler extends Partial< EventEmitter > { + listFiles( file: BackupArchiveInfo ): Promise< string[] >; + extractFiles( file: BackupArchiveInfo, extractionDirectory: string ): 
Promise< void >; +} + +const EXCLUDED_FILES_PATTERNS = [ + /^__MACOSX\/.*/, // macOS resource fork folder in zip archives + /(^|\/)\.DS_Store$/, // macOS Finder metadata +]; + +export function isFileAllowed( filePath: string ) { + return EXCLUDED_FILES_PATTERNS.every( ( pattern ) => ! pattern.test( filePath ) ); +} + +export class BackupHandlerFactory { + private static zipTypes = [ + 'application/zip', + 'application/x-zip', + 'application/x-zip-compressed', + 'application/octet-stream', + ]; + private static zipExtensions = [ '.zip' ]; + + private static tarGzTypes = [ + 'application/gzip', + 'application/x-gzip', + 'application/x-gtar', + 'application/x-tgz', + 'application/x-compressed-tar', + 'application/tar+gzip', + ]; + private static tarGzExtensions = [ '.tar.gz', '.tgz' ]; + + private static sqlTypes = [ + 'application/sql', + 'application/x-sql', + 'text/sql', + 'text/x-sql', + 'text/plain', + ]; + private static sqlExtensions = [ '.sql' ]; + + static create( file: BackupArchiveInfo ): BackupHandler | undefined { + if ( this.isZip( file ) ) { + return new BackupHandlerZip(); + } else if ( this.isTarGz( file ) ) { + return new BackupHandlerTarGz(); + } else if ( this.isSql( file ) ) { + return new BackupHandlerSql(); + } else if ( this.isWpress( file ) ) { + return new BackupHandlerWpress(); + } + } + + private static isZip( file: BackupArchiveInfo ): boolean { + return ( + this.zipTypes.includes( file.type ) && + this.zipExtensions.some( ( ext ) => file.path.endsWith( ext ) ) + ); + } + + private static isTarGz( file: BackupArchiveInfo ): boolean { + return ( + this.tarGzTypes.includes( file.type ) && + this.tarGzExtensions.some( ( ext ) => file.path.endsWith( ext ) ) + ); + } + + private static isSql( file: BackupArchiveInfo ): boolean { + return ( + ( this.sqlTypes.includes( file.type ) || ! 
file.type ) && + this.sqlExtensions.some( ( ext ) => file.path.endsWith( ext ) ) + ); + } + + private static isWpress( file: BackupArchiveInfo ): boolean { + return file.path.endsWith( '.wpress' ); + } +} diff --git a/apps/cli/lib/import-export/import/handlers/backup-handler-sql.ts b/apps/cli/lib/import-export/import/handlers/backup-handler-sql.ts new file mode 100644 index 0000000000..e6cc0f94c7 --- /dev/null +++ b/apps/cli/lib/import-export/import/handlers/backup-handler-sql.ts @@ -0,0 +1,48 @@ +import { EventEmitter } from 'events'; +import fs from 'fs'; +import path from 'path'; +import { ImportEvents } from '../events'; +import { BackupHandler } from '../handlers/backup-handler-factory'; +import { BackupArchiveInfo } from '../types'; + +export class BackupHandlerSql extends EventEmitter implements BackupHandler { + constructor() { + super(); + } + async listFiles( backup: BackupArchiveInfo ): Promise< string[] > { + return [ path.basename( backup.path ) ]; + } + + async extractFiles( file: BackupArchiveInfo, extractionDirectory: string ): Promise< void > { + const fileName = path.basename( file.path ); + const destPath = path.join( extractionDirectory, fileName ); + + this.emit( ImportEvents.BACKUP_EXTRACT_START, { + progress: 0, + totalFiles: 1, + processedFiles: 0, + } ); + + this.emit( ImportEvents.BACKUP_EXTRACT_FILE_START, { + progress: 0, + processedFiles: 0, + totalFiles: 1, + currentFile: fileName, + } ); + + await fs.promises.copyFile( file.path, destPath ); + + this.emit( ImportEvents.BACKUP_EXTRACT_PROGRESS, { + progress: 100, + processedFiles: 1, + totalFiles: 1, + currentFile: fileName, + } ); + + this.emit( ImportEvents.BACKUP_EXTRACT_COMPLETE, { + progress: 100, + processedFiles: 1, + totalFiles: 1, + } ); + } +} diff --git a/apps/cli/lib/import-export/import/handlers/backup-handler-tar-gz.ts b/apps/cli/lib/import-export/import/handlers/backup-handler-tar-gz.ts new file mode 100644 index 0000000000..6f6bbb9ae4 --- /dev/null +++ 
b/apps/cli/lib/import-export/import/handlers/backup-handler-tar-gz.ts @@ -0,0 +1,88 @@ +import { EventEmitter } from 'events'; +import fs from 'fs'; +import zlib from 'zlib'; +import * as tar from 'tar'; +import { ImportEvents } from '../events'; +import { BackupArchiveInfo, BackupExtractProgressEventData } from '../types'; +import { BackupHandler, isFileAllowed } from './backup-handler-factory'; + +export class BackupHandlerTarGz extends EventEmitter implements BackupHandler { + async listFiles( backup: BackupArchiveInfo ): Promise< string[] > { + const filesSet = new Set< string >(); + await tar.t( { + file: backup.path, + onReadEntry: ( entry ) => { + if ( isFileAllowed( entry.path ) ) { + let path = entry.path; + if ( entry.path.startsWith( '/' ) ) { + path = path.slice( 1 ); + } + filesSet.add( path ); + } + }, + } ); + return Array.from( filesSet ); + } + + async extractFiles( file: BackupArchiveInfo, extractionDirectory: string ): Promise< void > { + let totalSize: number; + let processedSize = 0; + let processedFiles = 0; + let currentFile = ''; + + try { + totalSize = fs.statSync( file.path ).size; + } catch ( error ) { + this.emit( ImportEvents.BACKUP_EXTRACT_ERROR, error ); + throw error; + } + + // Get total file count first + const fileList = await this.listFiles( file ); + const totalFiles = fileList.length; + + return new Promise< void >( ( resolve, reject ) => { + this.emit( ImportEvents.BACKUP_EXTRACT_START ); + fs.createReadStream( file.path ) + .on( 'data', ( chunk ) => { + processedSize += chunk.length; + this.emit( ImportEvents.BACKUP_EXTRACT_PROGRESS, { + progress: processedSize / totalSize, + processedFiles, + totalFiles, + currentFile, + extractedBytes: processedSize, + totalBytes: totalSize, + } as BackupExtractProgressEventData ); + } ) + .on( 'error', ( error ) => { + this.emit( ImportEvents.BACKUP_EXTRACT_ERROR, error ); + reject( error ); + } ) + .pipe( zlib.createGunzip() ) + .pipe( + tar.extract( { + cwd: extractionDirectory, + 
onwarn: ( _code, message ) => { + this.emit( ImportEvents.BACKUP_EXTRACT_WARNING, message ); + }, + onReadEntry: ( entry ) => { + if ( isFileAllowed( entry.path ) ) { + currentFile = entry.path; + processedFiles++; + this.emit( ImportEvents.BACKUP_EXTRACT_FILE_START, { + currentFile, + processedFiles, + totalFiles, + } as BackupExtractProgressEventData ); + } + }, + } ) + ) + .on( 'finish', () => { + this.emit( ImportEvents.BACKUP_EXTRACT_COMPLETE ); + resolve(); + } ); + } ); + } +} diff --git a/apps/cli/lib/import-export/import/handlers/backup-handler-wpress.ts b/apps/cli/lib/import-export/import/handlers/backup-handler-wpress.ts new file mode 100644 index 0000000000..2ed818ba27 --- /dev/null +++ b/apps/cli/lib/import-export/import/handlers/backup-handler-wpress.ts @@ -0,0 +1,257 @@ +import { EventEmitter } from 'events'; +import * as fs from 'fs'; +import { constants } from 'fs'; +import * as path from 'path'; +import { __, sprintf } from '@wordpress/i18n'; +import * as fse from 'fs-extra'; +import { ImportEvents } from '../events'; +import { BackupArchiveInfo } from '../types'; +import { BackupHandler } from './backup-handler-factory'; + +/** + * The .wpress format is a custom archive format used by All-In-One WP Migration. + * It is designed to encapsulate all necessary components of a WordPress site, including the database, + * plugins, themes, uploads, and other wp-content files, into a single file for easy transport and restoration. + * + * The .wpress file is structured as follows: + * 1. Header: Contains metadata about the file, such as the name, size, modification time, and prefix. + * The header is a fixed size of 4377 bytes. + * 2. Data Blocks: The actual content of the files, stored in 512-byte blocks. Each file's data is stored + * sequentially, following its corresponding header. + * 3. End of File Marker: A special marker indicating the end of the archive. This is represented by a + * block of 4377 bytes filled with zeroes. 
+ * + * The .wpress format ensures that all necessary components of a WordPress site are included in the backup, + * making it easy to restore the site to its original state. The format is designed to be efficient and + * easy to parse, allowing for quick extraction and restoration of the site's contents. + */ + +const HEADER_SIZE = 4377; +const HEADER_CHUNK_EOF = Buffer.alloc( HEADER_SIZE ); +const CHUNK_SIZE_TO_READ = 1024; + +interface Header { + name: string; + size: number; + mTime: string; + prefix: string; +} + +/** + * Reads a string from a buffer at a given start and end position. + * + * @param {Buffer} buffer - The buffer to read from. + * @param {number} start - The start position of the string in the buffer. + * @param {number} end - The end position of the string in the buffer. + * @returns {string} - The substring buffer, stopping at a null-terminator if present. + */ +function readFromBuffer( buffer: Buffer, start: number, end: number ): string { + const _buffer = buffer.subarray( start, end ); + return _buffer.subarray( 0, _buffer.indexOf( 0x00 ) ).toString(); +} + +/** + * Reads the header of a .wpress file. + * + * @param {fs.promises.FileHandle} fd - The file handle to read from. + * @returns {Promise
} - A promise that resolves to the header or null if the end of the file is reached.
+ */
+async function readHeader( fd: fs.promises.FileHandle ): Promise< Header | null > {
+	const headerChunk = Buffer.alloc( HEADER_SIZE );
+	// NOTE(review): bytesRead is not checked — a truncated archive can produce a
+	// short read, leaving zero bytes in headerChunk and mis-parsing the fixed
+	// fields below. Consider looping until HEADER_SIZE bytes are consumed.
+	await fd.read( headerChunk, 0, HEADER_SIZE );
+
+	// A header block of all zeroes is the end-of-archive marker.
+	if ( Buffer.compare( headerChunk, HEADER_CHUNK_EOF ) === 0 ) {
+		return null;
+	}
+
+	// Fixed-width, null-padded fields at the offsets defined by the .wpress format.
+	const name = readFromBuffer( headerChunk, 0, 255 );
+	const size = parseInt( readFromBuffer( headerChunk, 255, 269 ), 10 );
+	const mTime = readFromBuffer( headerChunk, 269, 281 );
+	const prefix = readFromBuffer( headerChunk, 281, HEADER_SIZE );
+
+	return {
+		name,
+		size,
+		mTime,
+		prefix,
+	};
+}
+
+/**
+ * Reads a block of data from a .wpress file and writes it to a file.
+ *
+ * NOTE(review): the copy loop below writes each allocated chunk in full even
+ * when fd.read() returns fewer bytes than requested (bytesRead is only used to
+ * decrement the remaining count), which would append zero padding to the
+ * output file on a short read; write() backpressure is also not awaited.
+ *
+ * @param {fs.promises.FileHandle} fd - The file handle to read from.
+ * @param {Header} header - The header of the file to read.
+ * @param {string} outputPath - The path to write the file to.
+ */
+async function readBlockToFile( fd: fs.promises.FileHandle, header: Header, outputPath: string ) {
+	const outputFilePath = path.join( outputPath, header.prefix, header.name );
+	await fse.ensureDir( path.dirname( outputFilePath ) );
+	const outputStream = fs.createWriteStream( outputFilePath );
+
+	let totalBytesToRead = header.size;
+	let errored = false;
+	let streamEnded = false;
+
+	// Latch the first error; the copy loop checks the flag before writing.
+	const errorHandler = () => {
+		if ( ! errored ) {
+			errored = true;
+		}
+	};
+
+	// Close the output stream exactly once, and only if it is still writable.
+	const endStream = () => {
+		if ( ! streamEnded && ! 
outputStream.destroyed ) { + streamEnded = true; + outputStream.end(); + } + }; + + outputStream.once( 'error', errorHandler ); + + try { + while ( totalBytesToRead > 0 ) { + let bytesToRead = CHUNK_SIZE_TO_READ; + if ( bytesToRead > totalBytesToRead ) { + bytesToRead = totalBytesToRead; + } + if ( bytesToRead === 0 ) break; + const buffer = Buffer.alloc( bytesToRead ); + const data = await fd.read( buffer, 0, bytesToRead ); + if ( errored || outputStream.destroyed ) { + return; + } + outputStream.write( buffer ); + totalBytesToRead -= data.bytesRead; + } + } catch ( err ) { + errorHandler(); + } finally { + endStream(); + } +} + +export class BackupHandlerWpress extends EventEmitter implements BackupHandler { + private bytesRead: number; + private eof: Buffer; + private totalFiles: number = 0; + private processedFiles: number = 0; + + constructor() { + super(); + this.bytesRead = 0; + this.eof = Buffer.alloc( HEADER_SIZE, '\0' ); + } + + private calculateProgress(): number { + return this.totalFiles > 0 ? Math.round( ( this.processedFiles / this.totalFiles ) * 100 ) : 0; + } + + /** + * Lists all files in a .wpress backup file by reading the headers sequentially. + * + * It opens the .wpress file, reads each header to get the file names, and stores them in an array. + * The function continues reading headers until it reaches the end of the file. + * + * @param {BackupArchiveInfo} file - The backup archive information, including the file path. + * @returns {Promise} - A promise that resolves to an array of file names. + */ + async listFiles( file: BackupArchiveInfo ): Promise< string[] > { + const fileNames: string[] = []; + + try { + await fs.promises.access( file.path, constants.F_OK ); + } catch ( error ) { + throw new Error( + sprintf( __( 'Input file at location "%s" could not be found.' ), file.path ) + ); + } + + const inputFile = await fs.promises.open( file.path, 'r' ); + + // Read all of the headers and file data into memory. 
+ try { + let header; + do { + header = await readHeader( inputFile ); + if ( header ) { + fileNames.push( path.join( header.prefix, header.name ) ); + await inputFile.read( Buffer.alloc( header.size ), 0, header.size, null ); + } + } while ( header ); + } finally { + await inputFile.close(); + } + + return fileNames; + } + + /** + * Extracts files from a .wpress backup file into a specified extraction directory. + * + * @param {BackupArchiveInfo} file - The backup archive information, including the file path. + * @param {string} extractionDirectory - The directory where the files will be extracted. + * @returns {Promise} - A promise that resolves when the extraction is complete. + */ + async extractFiles( file: BackupArchiveInfo, extractionDirectory: string ): Promise< void > { + try { + await fs.promises.access( file.path, constants.F_OK ); + } catch ( error ) { + throw new Error( + sprintf( __( 'Input file at location "%s" could not be found.' ), file.path ) + ); + } + + await fse.emptyDir( extractionDirectory ); + + // First pass: count total files + const fileNames = await this.listFiles( file ); + this.totalFiles = fileNames.length; + this.processedFiles = 0; + + this.emit( ImportEvents.BACKUP_EXTRACT_START, { + progress: 0, + totalFiles: this.totalFiles, + processedFiles: 0, + } ); + + const inputFile = await fs.promises.open( file.path, 'r' ); + + let header; + try { + while ( ( header = await readHeader( inputFile ) ) !== null ) { + if ( ! 
header ) { + break; + } + + // Emit progress before processing file + const currentFile = path.join( header.prefix, header.name ); + + this.emit( ImportEvents.BACKUP_EXTRACT_FILE_START, { + progress: this.calculateProgress(), + processedFiles: this.processedFiles, + totalFiles: this.totalFiles, + currentFile, + } ); + + await readBlockToFile( inputFile, header, extractionDirectory ); + this.processedFiles++; + + // Emit progress after processing file + this.emit( ImportEvents.BACKUP_EXTRACT_PROGRESS, { + progress: this.calculateProgress(), + processedFiles: this.processedFiles, + totalFiles: this.totalFiles, + currentFile, + } ); + } + + this.emit( ImportEvents.BACKUP_EXTRACT_COMPLETE, { + progress: 100, + processedFiles: this.totalFiles, + totalFiles: this.totalFiles, + } ); + } finally { + await inputFile.close(); + } + } +} diff --git a/apps/cli/lib/import-export/import/handlers/backup-handler-zip.ts b/apps/cli/lib/import-export/import/handlers/backup-handler-zip.ts new file mode 100644 index 0000000000..a9d95a7fe5 --- /dev/null +++ b/apps/cli/lib/import-export/import/handlers/backup-handler-zip.ts @@ -0,0 +1,133 @@ +import { EventEmitter } from 'events'; +import fs from 'fs'; +import path from 'path'; +import { promisify } from 'util'; +import fse from 'fs-extra'; +import yauzl from 'yauzl'; +import { ImportEvents } from '../events'; +import { BackupArchiveInfo, BackupExtractProgressEventData } from '../types'; +import { BackupHandler, isFileAllowed } from './backup-handler-factory'; + +const openZip = promisify< string, yauzl.Options, yauzl.ZipFile >( yauzl.open ); + +export class BackupHandlerZip extends EventEmitter implements BackupHandler { + async listFiles( backup: BackupArchiveInfo ): Promise< string[] > { + const zipFile = await openZip( backup.path, { lazyEntries: true } ); + const fileNames: string[] = []; + + return new Promise( ( resolve, reject ) => { + zipFile.on( 'entry', ( entry ) => { + if ( isFileAllowed( entry.fileName ) ) { + 
fileNames.push( entry.fileName ); + } + zipFile.readEntry(); + } ); + + zipFile.on( 'end', () => { + resolve( fileNames ); + } ); + + zipFile.on( 'error', reject ); + zipFile.readEntry(); + } ); + } + + async extractFiles( file: BackupArchiveInfo, extractionDirectory: string ): Promise< void > { + const zipFile = await openZip( file.path, { lazyEntries: true } ); + const openReadStream = promisify( zipFile.openReadStream.bind( zipFile ) ); + const totalSize = fs.statSync( file.path ).size; + let processedSize = 0; + let processedFiles = 0; + const totalFiles = zipFile.entryCount; + + this.emit( ImportEvents.BACKUP_EXTRACT_START ); + + return new Promise( ( resolve, reject ) => { + let extractionFailed = false; + const failOnce = ( err: Error ) => { + if ( ! extractionFailed ) { + extractionFailed = true; + reject( err ); + } + }; + + zipFile.on( 'entry', async ( entry ) => { + if ( ! isFileAllowed( entry.fileName ) ) { + zipFile.readEntry(); + return; + } + + const fullPath = path.join( extractionDirectory, entry.fileName ); + await fse.ensureDir( path.dirname( fullPath ) ); + + if ( entry.fileName.endsWith( '/' ) ) { + zipFile.readEntry(); + return; + } + + this.emit( ImportEvents.BACKUP_EXTRACT_FILE_START, { + currentFile: entry.fileName, + processedFiles, + totalFiles, + } as BackupExtractProgressEventData ); + + try { + const readStream = await openReadStream( entry ); + const writeStream = fs.createWriteStream( fullPath ); + + const onError = ( err: Error ) => { + if ( ! readStream.destroyed ) { + readStream.destroy(); + } + if ( ! 
writeStream.destroyed ) { + writeStream.destroy(); + } + failOnce( err ); + }; + + readStream.once( 'error', onError ); + writeStream.once( 'error', onError ); + + readStream.on( 'data', ( chunk ) => { + processedSize += chunk.length; + this.emit( ImportEvents.BACKUP_EXTRACT_PROGRESS, { + progress: processedSize / totalSize, + processedFiles, + totalFiles, + currentFile: entry.fileName, + extractedBytes: processedSize, + totalBytes: totalSize, + } as BackupExtractProgressEventData ); + } ); + + writeStream.once( 'finish', () => { + if ( ! extractionFailed ) { + processedFiles++; + zipFile.readEntry(); + } + } ); + + readStream.pipe( writeStream ); + } catch ( err ) { + if ( err instanceof Error ) { + failOnce( err ); + } + } + } ); + + zipFile.on( 'end', () => { + if ( ! extractionFailed ) { + this.emit( ImportEvents.BACKUP_EXTRACT_COMPLETE ); + resolve(); + } + } ); + + zipFile.on( 'error', ( error ) => { + failOnce( error ); + this.emit( ImportEvents.BACKUP_EXTRACT_ERROR, error ); + } ); + + zipFile.readEntry(); + } ); + } +} diff --git a/apps/cli/lib/import-export/import/import-manager.ts b/apps/cli/lib/import-export/import/import-manager.ts new file mode 100644 index 0000000000..8d9f40b9f8 --- /dev/null +++ b/apps/cli/lib/import-export/import/import-manager.ts @@ -0,0 +1,89 @@ +import fs from 'fs'; +import os from 'os'; +import path from 'path'; +import { __ } from '@wordpress/i18n'; +import { SiteData } from 'cli/lib/cli-config/core'; +import { ImportExportEventData, handleEvents } from '../handle-events'; +import { BackupExtractEvents, ImporterEvents, ValidatorEvents } from './events'; +import { BackupHandlerFactory } from './handlers/backup-handler-factory'; +import { + Importer, + ImporterResult, + JetpackImporter, + LocalImporter, + PlaygroundImporter, + SQLImporter, + WpressImporter, +} from './importers/importer'; +import { BackupArchiveInfo, NewImporter } from './types'; +import { JetpackValidator } from './validators/jetpack-validator'; +import { 
LocalValidator } from './validators/local-validator'; +import { PlaygroundValidator } from './validators/playground-validator'; +import { SqlValidator } from './validators/sql-validator'; +import { Validator } from './validators/validator'; +import { WpressValidator } from './validators/wpress-validator'; + +interface ImporterOption { + validator: Validator; + importer: NewImporter; +} + +export function selectImporter( + allFiles: string[], + extractionDirectory: string, + onEvent: ( data: ImportExportEventData ) => void, + options: ImporterOption[] +): Importer | null { + for ( const { validator, importer } of options ) { + if ( validator.canHandle( allFiles ) ) { + const removeValidatorListeners = handleEvents( validator, onEvent, ValidatorEvents ); + const files = validator.parseBackupContents( allFiles, extractionDirectory ); + removeValidatorListeners(); + return new importer( files ); + } + } + return null; +} + +export async function importBackup( + backupFile: BackupArchiveInfo, + site: SiteData, + onEvent: ( data: ImportExportEventData ) => void, + options: ImporterOption[] +): Promise< ImporterResult > { + const backupHandler = BackupHandlerFactory.create( backupFile ); + if ( ! backupHandler ) { + throw new Error( __( 'No suitable backup handler found for the provided backup file' ) ); + } + + const extractionDirectory = await fs.promises.mkdtemp( + path.join( os.tmpdir(), 'studio_backup' ) + ); + const fileList = await backupHandler.listFiles( backupFile ); + const importer = selectImporter( fileList, extractionDirectory, onEvent, options ); + + if ( ! 
importer ) { + throw new Error( __( 'No suitable importer found for the provided backup contents' ) ); + } + + let removeBackupListeners; + let removeImportListeners; + try { + removeBackupListeners = handleEvents( backupHandler, onEvent, BackupExtractEvents ); + removeImportListeners = handleEvents( importer, onEvent, ImporterEvents ); + await backupHandler.extractFiles( backupFile, extractionDirectory ); + return await importer.import( site ); + } finally { + removeBackupListeners?.(); + removeImportListeners?.(); + await fs.promises.rm( extractionDirectory, { recursive: true } ); + } +} + +export const DEFAULT_IMPORTER_OPTIONS: ImporterOption[] = [ + { validator: new PlaygroundValidator(), importer: PlaygroundImporter }, + { validator: new JetpackValidator(), importer: JetpackImporter }, + { validator: new LocalValidator(), importer: LocalImporter }, + { validator: new SqlValidator(), importer: SQLImporter }, + { validator: new WpressValidator(), importer: WpressImporter }, +]; diff --git a/apps/cli/lib/import-export/import/importers/importer.ts b/apps/cli/lib/import-export/import/importers/importer.ts new file mode 100644 index 0000000000..888d264cb6 --- /dev/null +++ b/apps/cli/lib/import-export/import/importers/importer.ts @@ -0,0 +1,468 @@ +import { EventEmitter } from 'events'; +import fs from 'fs'; +import path from 'path'; +import { createInterface } from 'readline'; +import { DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { serializePlugins } from '@studio/common/lib/serialize-plugins'; +import { SupportedPHPVersionsList } from '@studio/common/types/php-versions'; +import { __, sprintf } from '@wordpress/i18n'; +import { move } from 'fs-extra'; +import semver from 'semver'; +import trash from 'trash'; +import { SiteData } from 'cli/lib/cli-config/core'; +import { runWpCliCommand } from 'cli/lib/run-wp-cli-command'; +import { generateBackupFilename } from '../../export/generate-backup-filename'; +import { ImportEvents } from '../events'; 
+import { BackupContents, MetaFileData, ImportWpContentProgressEventData } from '../types'; +import { updateSiteUrl } from '../update-site-url'; + +export interface ImporterResult extends Omit< BackupContents, 'metaFile' > { + meta?: MetaFileData; + importerType?: string; +} + +export interface Importer extends Partial< EventEmitter > { + import( site: SiteData ): Promise< ImporterResult >; +} + +abstract class BaseImporter extends EventEmitter implements Importer { + protected meta?: MetaFileData; + + constructor( protected backup: BackupContents ) { + super(); + } + + abstract import( site: SiteData ): Promise< ImporterResult >; + + protected async importDatabase( site: SiteData, sqlFiles: string[] ): Promise< void > { + if ( ! sqlFiles.length ) { + return; + } + + this.emit( ImportEvents.IMPORT_DATABASE_START ); + + const sortedSqlFiles = sqlFiles.sort( ( a, b ) => a.localeCompare( b ) ); + let processedFiles = 0; + const totalFiles = sortedSqlFiles.length; + + for ( const sqlFile of sortedSqlFiles ) { + const sqlTempFile = `${ generateBackupFilename( 'sql' ) }.sql`; + const tmpPath = path.join( site.path, sqlTempFile ); + processedFiles++; + + this.emit( ImportEvents.IMPORT_DATABASE_PROGRESS, { + currentFile: path.basename( sqlFile ), + processedFiles, + totalFiles, + } ); + + try { + await move( sqlFile, tmpPath ); + await this.prepareSqlFile( tmpPath ); + + await using command = await runWpCliCommand( site.path, DEFAULT_PHP_VERSION, [ + 'sqlite', + 'import', + `/wordpress/${ sqlTempFile }`, + '--require=/tmp/sqlite-command/command.php', + '--enable-ast-driver', + '--skip-plugins', + '--skip-themes', + ] ); + + const exitCode = await command.response.exitCode; + const stderr = await command.response.stderrText; + + if ( stderr ) { + console.error( sprintf( __( 'Error during import of %s:' ), sqlFile ), stderr ); + } + + if ( exitCode !== 0 ) { + throw new Error( sprintf( __( 'Database import failed: %s' ), stderr ) ); + } + } finally { + await 
this.safelyDeletePath( tmpPath ); + } + } + + await updateSiteUrl( site ); + this.emit( ImportEvents.IMPORT_DATABASE_COMPLETE ); + } + + protected async prepareSqlFile( _tmpPath: string ): Promise< void > { + // This method can be overridden by subclasses to prepare the SQL file before import. + } + + protected async safelyDeletePath( path: string ): Promise< void > { + try { + await fs.promises.rm( path, { recursive: true, force: true } ); + } catch ( error ) { + console.error( `Failed to safely delete path ${ path }:`, error ); + } + } +} + +abstract class BaseBackupImporter extends BaseImporter { + protected shouldCleanUpBeforeImport: boolean = true; + + async import( site: SiteData ): Promise< ImporterResult > { + this.emit( ImportEvents.IMPORT_START ); + + try { + if ( this.shouldCleanUpBeforeImport ) { + await this.moveExistingWpContentToTrash( site.path ); + } + await this.importWpConfig( site.path ); + await this.importWpContent( site.path ); + if ( this.backup.metaFile ) { + this.meta = await this.parseMetaFile(); + } + if ( this.backup.sqlFiles.length ) { + const databaseDir = path.join( site.path, 'wp-content', 'database' ); + const dbPath = path.join( databaseDir, '.ht.sqlite' ); + + await this.moveExistingDatabaseToTrash( dbPath ); + await this.createEmptyDatabase( dbPath ); + await this.importDatabase( site, this.backup.sqlFiles ); + } + + this.emit( ImportEvents.IMPORT_COMPLETE ); + return { + extractionDirectory: this.backup.extractionDirectory, + sqlFiles: this.backup.sqlFiles, + wpContentFiles: this.backup.wpContentFiles, + wpContentDirectory: this.backup.wpContentDirectory, + wpConfig: this.backup.wpConfig, + meta: this.meta, + importerType: this.constructor.name, + }; + } catch ( error ) { + this.emit( ImportEvents.IMPORT_ERROR, error ); + throw error; + } + } + + protected abstract parseMetaFile(): Promise< MetaFileData | undefined >; + + protected async createEmptyDatabase( dbPath: string ): Promise< void > { + await fs.promises.writeFile( 
dbPath, '' ); + } + + protected async moveExistingDatabaseToTrash( dbPath: string ): Promise< void > { + if ( ! fs.existsSync( dbPath ) ) { + return; + } + await trash( dbPath ); + } + + protected async moveExistingWpContentToTrash( rootPath: string ): Promise< void > { + const wpContentDir = path.join( rootPath, 'wp-content' ); + try { + if ( ! fs.existsSync( wpContentDir ) ) { + return; + } + const contentToKeep = [ + /^mu-plugins$/, // Match mu-plugins directory exactly + /^mu-plugins(\/|\\)sqlite-database-integration(\/|\\)?.*/, // Match sqlite-database-integration dir and contents + /^database(\/|\\)?.*/, // Match database dir and all contents + /^db\.php$/, // Exact match for db.php + /^index\.php$/, // Exact match for index.php + /^languages(\/|\\)?.*/, // Match languages dir and all contents + ]; + + const contents = await fs.promises.readdir( wpContentDir, { recursive: true } ); + + for ( const content of contents ) { + if ( contentToKeep.some( ( pattern ) => pattern.test( content ) ) ) { + continue; + } + await this.safelyDeletePath( path.join( wpContentDir, content ) ); + } + } catch { + return; + } + } + + protected async importWpConfig( rootPath: string ): Promise< void > { + const wpConfigPath = path.join( rootPath, 'wp-config.php' ); + const wpConfigSamplePath = path.join( rootPath, 'wp-config-sample.php' ); + + if ( this.backup.wpConfig ) { + await fs.promises.copyFile( this.backup.wpConfig, wpConfigPath ); + } else if ( ! 
fs.existsSync( wpConfigPath ) && fs.existsSync( wpConfigSamplePath ) ) { + await fs.promises.copyFile( wpConfigSamplePath, wpConfigPath ); + } + } + + protected async importWpContent( rootPath: string ): Promise< void > { + this.emit( ImportEvents.IMPORT_WP_CONTENT_START ); + const extractionDirectory = this.backup.extractionDirectory; + const wpContentSourceDir = this.backup.wpContentDirectory; + const wpContentDestDir = path.join( rootPath, 'wp-content' ); + + // Group files by type + const filesByType = this.categorizeWpContentFiles( this.backup.wpContentFiles ); + let processedItems = 0; + const totalItems = this.backup.wpContentFiles.length; + + for ( const [ type, files ] of Object.entries( filesByType ) ) { + for ( const file of files ) { + try { + const stats = await fs.promises.lstat( file ); + // Skip if it's a directory + if ( stats.isDirectory() ) { + continue; + } + } catch { + /** + * If the file does not exist, skip it. + * This can happen if a empty directory is included in the backup + * because the empty directory won't be included in the extraction. 
+ */ + continue; + } + + const relativePath = path.relative( + path.join( extractionDirectory, wpContentSourceDir ), + file + ); + + const destPath = path.join( wpContentDestDir, relativePath ); + await fs.promises.mkdir( path.dirname( destPath ), { recursive: true } ); + await fs.promises.copyFile( file, destPath ); + + processedItems++; + + // Emit progress event after file is copied + this.emit( ImportEvents.IMPORT_WP_CONTENT_PROGRESS, { + type: type as 'plugins' | 'themes' | 'uploads' | 'other', + currentItem: relativePath, + processedItems, + totalItems, + } as ImportWpContentProgressEventData ); + } + } + this.emit( ImportEvents.IMPORT_WP_CONTENT_COMPLETE ); + } + + protected categorizeWpContentFiles( files: string[] ): Record< string, string[] > { + const categorized: Record< string, string[] > = { + plugins: [], + themes: [], + uploads: [], + other: [], + }; + + for ( const file of files ) { + const segments = file.split( /[/\\]/ ); + + if ( segments.includes( 'plugins' ) ) { + categorized.plugins.push( file ); + } else if ( segments.includes( 'themes' ) ) { + categorized.themes.push( file ); + } else if ( segments.includes( 'uploads' ) ) { + categorized.uploads.push( file ); + } else { + categorized.other.push( file ); + } + } + + return categorized; + } + + protected parsePhpVersion( version: string | undefined ): string { + if ( ! version ) { + return DEFAULT_PHP_VERSION; + } + const phpVersion = semver.coerce( version ); + if ( ! phpVersion ) { + return DEFAULT_PHP_VERSION; + } + + const parsedVersion = `${ phpVersion.major }.${ phpVersion.minor }`; + + return SupportedPHPVersionsList.includes( parsedVersion ) ? 
parsedVersion : DEFAULT_PHP_VERSION; + } +} + +export class JetpackImporter extends BaseBackupImporter { + // Jetpack importer follows merge strategy to support selective sync + protected shouldCleanUpBeforeImport = false; + + protected async parseMetaFile(): Promise< MetaFileData | undefined > { + const metaFilePath = this.backup.metaFile; + if ( ! metaFilePath ) { + return; + } + this.emit( ImportEvents.IMPORT_META_START ); + try { + const metaContent = await fs.promises.readFile( metaFilePath, 'utf-8' ); + const meta = JSON.parse( metaContent ); + return { + phpVersion: this.parsePhpVersion( meta?.phpVersion ), + wordpressVersion: meta?.wordpressVersion || '', + }; + } catch ( e ) { + return; + } finally { + this.emit( ImportEvents.IMPORT_META_COMPLETE ); + } + } +} + +export class LocalImporter extends BaseBackupImporter { + protected async parseMetaFile(): Promise< MetaFileData | undefined > { + const metaFilePath = this.backup.metaFile; + if ( ! metaFilePath ) { + return; + } + this.emit( ImportEvents.IMPORT_META_START ); + try { + const metaContent = await fs.promises.readFile( metaFilePath, 'utf-8' ); + const meta = JSON.parse( metaContent ); + return { + phpVersion: this.parsePhpVersion( meta?.services?.php?.version ), + wordpressVersion: '', + }; + } catch ( e ) { + return; + } finally { + this.emit( ImportEvents.IMPORT_META_COMPLETE ); + } + } +} + +export class PlaygroundImporter extends BaseBackupImporter { + protected async importDatabase( site: SiteData, sqlFiles: string[] ): Promise< void > { + if ( ! 
sqlFiles.length ) { + return; + } + + this.emit( ImportEvents.IMPORT_DATABASE_START ); + + for ( const sqlFile of sqlFiles ) { + await move( sqlFile, path.join( site.path, 'wp-content', 'database', '.ht.sqlite' ), { + overwrite: true, + } ); + } + await updateSiteUrl( site ); + + this.emit( ImportEvents.IMPORT_DATABASE_COMPLETE ); + } + + protected async parseMetaFile(): Promise< MetaFileData | undefined > { + return undefined; + } +} + +export class SQLImporter extends BaseImporter { + async import( site: SiteData ): Promise< ImporterResult > { + this.emit( ImportEvents.IMPORT_START ); + + try { + await this.importDatabase( site, this.backup.sqlFiles ); + + this.emit( ImportEvents.IMPORT_COMPLETE ); + return { + extractionDirectory: this.backup.extractionDirectory, + sqlFiles: this.backup.sqlFiles, + wpConfig: this.backup.wpConfig, + wpContentFiles: this.backup.wpContentFiles, + wpContentDirectory: this.backup.wpContentDirectory, + importerType: this.constructor.name, + }; + } catch ( error ) { + this.emit( ImportEvents.IMPORT_ERROR, error ); + throw error; + } + } +} + +export class WpressImporter extends BaseBackupImporter { + protected async parseMetaFile(): Promise< MetaFileData > { + const packageJsonPath = path.join( this.backup.extractionDirectory, 'package.json' ); + try { + const packageContent = await fs.promises.readFile( packageJsonPath, 'utf8' ); + const { + Template: template = '', + Stylesheet: stylesheet = '', + Plugins: plugins = [], + } = JSON.parse( packageContent ); + return { template, stylesheet, plugins }; + } catch ( error ) { + console.error( 'Error reading package.json:', error ); + return { template: '', stylesheet: '', plugins: [] }; + } + } + + protected async prepareSqlFile( tmpPath: string ): Promise< void > { + const tempOutputPath = `${ tmpPath }.tmp`; + const readStream = fs.createReadStream( tmpPath, 'utf8' ); + const writeStream = fs.createWriteStream( tempOutputPath, 'utf8' ); + + const rl = createInterface( { + input: 
readStream,
+			crlfDelay: Infinity,
+		} );
+
+		// NOTE(review): write() backpressure is ignored here — for a large SQL
+		// dump the rewritten lines can accumulate in the write stream's buffer.
+		// Consider pausing `rl` while write() returns false.
+		rl.on( 'line', ( line: string ) => {
+			writeStream.write( line.replace( /SERVMASK_PREFIX/g, 'wp' ) + '\n' );
+		} );
+
+		await new Promise( ( resolve, reject ) => {
+			rl.on( 'close', resolve );
+			rl.on( 'error', reject );
+		} );
+
+		await new Promise( ( resolve, reject ) => {
+			writeStream.end( resolve );
+			writeStream.on( 'error', reject );
+		} );
+
+		// Atomically replace the original dump with the rewritten copy.
+		await fs.promises.rename( tempOutputPath, tmpPath );
+	}
+
+	/**
+	 * Appends a generated SQL file that sets the active theme (the `template`
+	 * and `stylesheet` options) from the metadata parsed out of the archive's
+	 * package.json.
+	 *
+	 * NOTE(review): `template` and `stylesheet` originate from the backup
+	 * archive and are interpolated into SQL without escaping — a value
+	 * containing a single quote would break the statement. Confirm upstream
+	 * sanitization or escape these values here.
+	 */
+	protected async addSqlToSetTheme( sqlFiles: string[] ): Promise< void > {
+		const { template, stylesheet } = this.meta || {};
+		if ( ! template || ! stylesheet ) {
+			return;
+		}
+
+		const themeUpdateSql = `
+			UPDATE wp_options SET option_value = '${ template }' WHERE option_name = 'template';
+			UPDATE wp_options SET option_value = '${ stylesheet }' WHERE option_name = 'stylesheet';
+		`;
+		const sqliteSetThemePath = path.join(
+			this.backup.extractionDirectory,
+			'studio-wpress-theme.sql'
+		);
+		await fs.promises.writeFile( sqliteSetThemePath, themeUpdateSql );
+		sqlFiles.push( sqliteSetThemePath );
+	}
+
+	/**
+	 * Appends a generated SQL file that marks the archive's plugins as active.
+	 *
+	 * NOTE(review): `ON DUPLICATE KEY UPDATE` is MySQL syntax while the import
+	 * is executed through the SQLite integration (see
+	 * BaseImporter.importDatabase) — confirm the AST driver translates this
+	 * clause, otherwise the statement will fail. The serialized plugin list is
+	 * also interpolated unescaped, same caveat as addSqlToSetTheme.
+	 */
+	protected async addSqlToActivatePlugins( sqlFiles: string[] ): Promise< void > {
+		const { plugins = [] } = this.meta || {};
+		if ( plugins.length === 0 ) {
+			return;
+		}
+
+		const serializedPlugins = serializePlugins( plugins );
+		const activatePluginsSql = `
+			INSERT INTO wp_options (option_name, option_value, autoload) VALUES ('active_plugins', '${ serializedPlugins }', 'yes')
+			ON DUPLICATE KEY UPDATE option_value = VALUES(option_value), autoload = VALUES(autoload);
+		`;
+
+		const sqliteActivatePluginsPath = path.join(
+			this.backup.extractionDirectory,
+			'studio-wpress-activate-plugins.sql'
+		);
+		await fs.promises.writeFile( sqliteActivatePluginsPath, activatePluginsSql );
+		sqlFiles.push( sqliteActivatePluginsPath );
+	}
+
+	/**
+	 * Extends the base database import by first appending the generated theme
+	 * and plugin-activation SQL files to the import list.
+	 */
+	protected async importDatabase( site: SiteData, sqlFiles: string[] ): Promise< void > {
+		await this.addSqlToSetTheme( sqlFiles );
+		await this.addSqlToActivatePlugins( sqlFiles );
+		
await super.importDatabase( site, sqlFiles ); + } +} diff --git a/apps/cli/lib/import-export/import/types.ts b/apps/cli/lib/import-export/import/types.ts new file mode 100644 index 0000000000..381fffd471 --- /dev/null +++ b/apps/cli/lib/import-export/import/types.ts @@ -0,0 +1,51 @@ +import { Importer } from './importers/importer'; + +export interface MetaFileData { + phpVersion?: string; + wordpressVersion?: string; + template?: string; + stylesheet?: string; + plugins?: string[]; +} +export interface BackupContents { + extractionDirectory: string; + wpConfig: string; + sqlFiles: string[]; + wpContentFiles: string[]; + wpContentDirectory: string; + metaFile?: string; +} + +export interface BackupArchiveInfo { + path: string; + type: string; +} + +export type NewImporter = new ( backup: BackupContents ) => Importer; + +export interface BackupExtractProgressEventData { + progress: number; + processedFiles?: number; + totalFiles?: number; + currentFile?: string; + extractedBytes?: number; + totalBytes?: number; +} + +export interface ImportDatabaseProgressEventData { + currentTable?: string; + processedTables?: number; + totalTables?: number; + currentFile?: string; + processedFiles?: number; + totalFiles?: number; +} + +export interface ImportWpContentProgressEventData { + type?: 'plugins' | 'themes' | 'uploads' | 'other'; + currentItem?: string; + processedItems?: number; + totalItems?: number; + processedBytes?: number; + totalBytes?: number; +} diff --git a/apps/cli/lib/import-export/import/update-site-url.ts b/apps/cli/lib/import-export/import/update-site-url.ts new file mode 100644 index 0000000000..f29cf576c6 --- /dev/null +++ b/apps/cli/lib/import-export/import/update-site-url.ts @@ -0,0 +1,62 @@ +import { DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { getSiteUrl } from 'cli/lib/cli-config/sites'; +import { runWpCliCommand } from 'cli/lib/run-wp-cli-command'; +import type { SiteData } from 'cli/lib/cli-config/core'; + +// Search the database 
for the old site URL and replace it with the new one
+// using wp-cli search-replace. Plain, URL-encoded (e.g. video posterUrl) and
+// JSON-escaped (e.g. Elementor) variants of the old URL are each replaced.
+// Best effort: a failing variant is logged and the remaining variants still run.
+export const updateSiteUrl = async ( site: SiteData ) => {
+	const newUrl = getSiteUrl( site );
+
+	// Read the current siteurl option so we know what string to search for.
+	await using command = await runWpCliCommand( site.path, DEFAULT_PHP_VERSION, [
+		'option',
+		'get',
+		'siteurl',
+		'--skip-plugins',
+		'--skip-themes',
+	] );
+
+	const currentSiteUrl = await command.response.stdoutText;
+
+	// No output means there is nothing to replace.
+	if ( ! currentSiteUrl ) {
+		return;
+	}
+
+	const oldUrl = currentSiteUrl.trim();
+	if ( newUrl === oldUrl ) {
+		return;
+	}
+	const urlWithoutProtocol = oldUrl.replace( /^https?:\/\//, '' );
+
+	const oldUrlVariants = [
+		`http://${ urlWithoutProtocol }`,
+		`https://${ urlWithoutProtocol }`,
+		// e.g. "posterUrl" for videos uses encoded URLs
+		`http%3A%2F%2F${ urlWithoutProtocol }`,
+		`https%3A%2F%2F${ urlWithoutProtocol }`,
+		// e.g. Elementor plugin uses escaped URLs
+		String.raw`http:\/\/${ urlWithoutProtocol }`,
+		String.raw`https:\/\/${ urlWithoutProtocol }`,
+	];
+
+	for ( const urlToReplace of oldUrlVariants ) {
+		// Shadows the outer `command` deliberately; each handle is disposed at
+		// the end of its own block via `await using`.
+		// `guid` is skipped, matching wp-cli's recommendation for search-replace.
+		await using command = await runWpCliCommand( site.path, DEFAULT_PHP_VERSION, [
+			'search-replace',
+			urlToReplace,
+			newUrl,
+			'--skip-columns=guid',
+			'--skip-plugins',
+			'--skip-themes',
+		] );
+
+		const stderr = await command.response.stderrText;
+		const exitCode = await command.response.exitCode;
+
+		if ( stderr ) {
+			console.error( `Warning during replacing URLs (${ urlToReplace }): ${ stderr }` );
+		}
+
+		if ( exitCode ) {
+			console.error( `Error during replacing URLs (${ urlToReplace }), Exit Code: ${ exitCode }` );
+		}
+	}
+};
diff --git a/apps/cli/lib/import-export/import/validators/jetpack-validator.ts b/apps/cli/lib/import-export/import/validators/jetpack-validator.ts
new file mode 100644
index 0000000000..a333822ed2
--- /dev/null
+++ b/apps/cli/lib/import-export/import/validators/jetpack-validator.ts
@@ -0,0 +1,63 @@
+import { EventEmitter } from 'events';
+import path from 'path';
+import { ImportEvents } from '../events';
+import { BackupContents } from 
'../types'; +import { Validator } from './validator'; + +export class JetpackValidator extends EventEmitter implements Validator { + canHandle( fileList: string[] ): boolean { + const optionalDirs = [ + 'sql', + 'wp-content', + 'wp-content/uploads', + 'wp-content/plugins', + 'wp-content/themes', + ]; + const optionalFiles = [ 'wp-config.php', 'meta.json' ]; + + const hasOptionalDir = optionalDirs.some( ( dir ) => + fileList.some( ( file ) => file.startsWith( dir + '/' ) ) + ); + const hasOptionalFile = optionalFiles.some( ( file ) => fileList.includes( file ) ); + + return hasOptionalDir || hasOptionalFile; + } + + parseBackupContents( fileList: string[], extractionDirectory: string ): BackupContents { + this.emit( ImportEvents.IMPORT_VALIDATION_START ); + const extractedBackup: BackupContents = { + extractionDirectory: extractionDirectory, + sqlFiles: [], + wpConfig: '', + wpContentFiles: [], + wpContentDirectory: 'wp-content', + }; + /* File rules: + * - Accept .zip in addition to tar.gz ( Handled by backup handler ) + * - Do not reject the archive that includes core WP files in addition to files and directories required by Jetpack format, and ignore those instead. + * - Support optional meta file, e.g., meta.json, that stores desired PHP and WP versions. 
+ * */ + + for ( const file of fileList ) { + const fullPath = path.join( extractionDirectory, file ); + if ( file === 'wp-config.php' ) { + extractedBackup.wpConfig = fullPath; + continue; + } + + if ( file.startsWith( 'sql/' ) && file.endsWith( '.sql' ) ) { + extractedBackup.sqlFiles.push( fullPath ); + } else if ( file.startsWith( 'wp-content/' ) ) { + extractedBackup.wpContentFiles.push( fullPath ); + } else if ( file === 'studio.json' || file === 'meta.json' ) { + extractedBackup.metaFile = fullPath; + } + } + extractedBackup.sqlFiles.sort( ( a: string, b: string ) => + path.basename( a ).localeCompare( path.basename( b ) ) + ); + + this.emit( ImportEvents.IMPORT_VALIDATION_COMPLETE ); + return extractedBackup; + } +} diff --git a/apps/cli/lib/import-export/import/validators/local-validator.ts b/apps/cli/lib/import-export/import/validators/local-validator.ts new file mode 100644 index 0000000000..bbe4e05ab0 --- /dev/null +++ b/apps/cli/lib/import-export/import/validators/local-validator.ts @@ -0,0 +1,58 @@ +import { EventEmitter } from 'events'; +import path from 'path'; +import { ImportEvents } from '../events'; +import { BackupContents } from '../types'; +import { Validator } from './validator'; + +export class LocalValidator extends EventEmitter implements Validator { + canHandle( fileList: string[] ): boolean { + const requiredDirs = [ + 'app/sql', + 'app/public/wp-content/uploads', + 'app/public/wp-content/plugins', + 'app/public/wp-content/themes', + ]; + return ( + requiredDirs.some( ( dir ) => fileList.some( ( file ) => file.startsWith( dir + '/' ) ) ) && + fileList.some( ( file ) => file.startsWith( 'app/sql/' ) && file.endsWith( '.sql' ) ) + ); + } + + parseBackupContents( fileList: string[], extractionDirectory: string ): BackupContents { + this.emit( ImportEvents.IMPORT_VALIDATION_START ); + const extractedBackup: BackupContents = { + extractionDirectory: extractionDirectory, + sqlFiles: [], + wpConfig: '', + wpContentFiles: [], + 
wpContentDirectory: path.normalize( 'app/public/wp-content' ), + }; + /* File rules: + * - Accept .zip + * - Do not reject the archive that includes core WP files, and ignore those instead. + * - Support optional meta file, local-site.json, that stores desired PHP and WP versions. + * */ + + for ( const file of fileList ) { + const fullPath = path.join( extractionDirectory, file ); + if ( file.startsWith( 'app/public/' ) && file.endsWith( 'wp-config.php' ) ) { + extractedBackup.wpConfig = fullPath; + continue; + } + + if ( file.startsWith( 'app/sql/' ) && file.endsWith( '.sql' ) ) { + extractedBackup.sqlFiles.push( fullPath ); + } else if ( file.startsWith( 'app/public/wp-content/' ) ) { + extractedBackup.wpContentFiles.push( fullPath ); + } else if ( file === 'local-site.json' ) { + extractedBackup.metaFile = fullPath; + } + } + extractedBackup.sqlFiles.sort( ( a: string, b: string ) => + path.basename( a ).localeCompare( path.basename( b ) ) + ); + + this.emit( ImportEvents.IMPORT_VALIDATION_COMPLETE ); + return extractedBackup; + } +} diff --git a/apps/cli/lib/import-export/import/validators/playground-validator.ts b/apps/cli/lib/import-export/import/validators/playground-validator.ts new file mode 100644 index 0000000000..0bc54ce692 --- /dev/null +++ b/apps/cli/lib/import-export/import/validators/playground-validator.ts @@ -0,0 +1,55 @@ +import { EventEmitter } from 'events'; +import path from 'path'; +import { ImportEvents } from '../events'; +import { BackupContents } from '../types'; +import { Validator } from './validator'; + +export class PlaygroundValidator extends EventEmitter implements Validator { + canHandle( fileList: string[] ): boolean { + const requiredDirs = [ + 'wp-content/database', + 'wp-content/uploads', + 'wp-content/plugins', + 'wp-content/themes', + ]; + return ( + requiredDirs.some( ( dir ) => fileList.some( ( file ) => file.startsWith( dir + '/' ) ) ) && + fileList.some( + ( file ) => file.startsWith( 'wp-content/database' ) && 
file.endsWith( '.ht.sqlite' ) + ) + ); + } + + parseBackupContents( fileList: string[], extractionDirectory: string ): BackupContents { + this.emit( ImportEvents.IMPORT_VALIDATION_START ); + const extractedBackup: BackupContents = { + extractionDirectory: extractionDirectory, + sqlFiles: [], + wpConfig: '', + wpContentFiles: [], + wpContentDirectory: 'wp-content', + }; + + /* File rules: + * - Accept .zip + * - Do not reject the archive that includes core WP files, and ignore those instead. + * - Support .ht.sqlite database files + * */ + + for ( const file of fileList ) { + const fullPath = path.join( extractionDirectory, file ); + if ( file === 'wp-config.php' ) { + extractedBackup.wpConfig = fullPath; + continue; + } + + if ( file.startsWith( 'wp-content/database' ) && file.endsWith( '.ht.sqlite' ) ) { + extractedBackup.sqlFiles.push( fullPath ); + } else if ( file.startsWith( 'wp-content/' ) ) { + extractedBackup.wpContentFiles.push( fullPath ); + } + } + this.emit( ImportEvents.IMPORT_VALIDATION_COMPLETE ); + return extractedBackup; + } +} diff --git a/apps/cli/lib/import-export/import/validators/sql-validator.ts b/apps/cli/lib/import-export/import/validators/sql-validator.ts new file mode 100644 index 0000000000..fbf9038fbf --- /dev/null +++ b/apps/cli/lib/import-export/import/validators/sql-validator.ts @@ -0,0 +1,32 @@ +import { EventEmitter } from 'events'; +import path from 'path'; +import { ImportEvents } from '../events'; +import { BackupContents } from '../types'; +import { Validator } from './validator'; + +export class SqlValidator extends EventEmitter implements Validator { + canHandle( fileList: string[] ): boolean { + return fileList.length === 1 && fileList[ 0 ].endsWith( '.sql' ); + } + + parseBackupContents( fileList: string[], extractionDirectory: string ): BackupContents { + this.emit( ImportEvents.IMPORT_VALIDATION_START ); + const extractedBackup: BackupContents = { + extractionDirectory: extractionDirectory, + sqlFiles: [], + wpConfig: '', 
+ wpContentFiles: [], + wpContentDirectory: '', + }; + + for ( const file of fileList ) { + const fullPath = path.join( extractionDirectory, file ); + + if ( file.endsWith( '.sql' ) ) { + extractedBackup.sqlFiles.push( fullPath ); + } + } + this.emit( ImportEvents.IMPORT_VALIDATION_COMPLETE ); + return extractedBackup; + } +} diff --git a/apps/cli/lib/import-export/import/validators/validator.ts b/apps/cli/lib/import-export/import/validators/validator.ts new file mode 100644 index 0000000000..0cd62eeab2 --- /dev/null +++ b/apps/cli/lib/import-export/import/validators/validator.ts @@ -0,0 +1,7 @@ +import { EventEmitter } from 'events'; +import { BackupContents } from '../types'; + +export interface Validator extends Partial< EventEmitter > { + canHandle( allFiles: string[] ): boolean; + parseBackupContents( allFiles: string[], extractionDirectory: string ): BackupContents; +} diff --git a/apps/cli/lib/import-export/import/validators/wpress-validator.ts b/apps/cli/lib/import-export/import/validators/wpress-validator.ts new file mode 100644 index 0000000000..6930b4c9db --- /dev/null +++ b/apps/cli/lib/import-export/import/validators/wpress-validator.ts @@ -0,0 +1,55 @@ +import { EventEmitter } from 'events'; +import path from 'path'; +import { ImportEvents } from '../events'; +import { BackupContents } from '../types'; +import { Validator } from './validator'; + +export class WpressValidator extends EventEmitter implements Validator { + canHandle( fileList: string[] ): boolean { + const requiredFiles = [ 'database.sql', 'package.json' ]; + const optionalDirs = [ 'uploads', 'plugins', 'themes', 'fonts' ]; + return ( + requiredFiles.every( ( file ) => fileList.includes( file ) ) && + optionalDirs.some( ( dir ) => fileList.some( ( file ) => file.startsWith( dir + path.sep ) ) ) + ); + } + + parseBackupContents( fileList: string[], extractionDirectory: string ): BackupContents { + this.emit( ImportEvents.IMPORT_VALIDATION_START ); + const extractedBackup: BackupContents = 
{ + extractionDirectory, + sqlFiles: [], + wpConfig: '', + wpContentFiles: [], + wpContentDirectory: '', + }; + /* File rules: + * - Accept .wpress + * - Must include database.sql in the root + * - Support optional directories: uploads, plugins, themes, mu-plugins, fonts + * */ + + for ( const file of fileList ) { + const fullPath = path.join( extractionDirectory, file ); + if ( file === 'database.sql' ) { + extractedBackup.sqlFiles.push( fullPath ); + } else if ( + file.startsWith( 'uploads' + path.sep ) || + file.startsWith( 'plugins' + path.sep ) || + file.startsWith( 'themes' + path.sep ) || + file.startsWith( 'mu-plugins' + path.sep ) || + file.startsWith( 'fonts' + path.sep ) + ) { + extractedBackup.wpContentFiles.push( fullPath ); + } else if ( file === 'package.json' ) { + extractedBackup.metaFile = fullPath; + } + } + extractedBackup.sqlFiles.sort( ( a: string, b: string ) => + path.basename( a ).localeCompare( path.basename( b ) ) + ); + + this.emit( ImportEvents.IMPORT_VALIDATION_COMPLETE ); + return extractedBackup; + } +} diff --git a/apps/cli/lib/import-export/utils.ts b/apps/cli/lib/import-export/utils.ts new file mode 100644 index 0000000000..d255430d80 --- /dev/null +++ b/apps/cli/lib/import-export/utils.ts @@ -0,0 +1,17 @@ +export function getBackupFileType( importFile: string ): string { + const normalizedPath = importFile.toLowerCase(); + + if ( normalizedPath.endsWith( '.tar.gz' ) || normalizedPath.endsWith( '.tgz' ) ) { + return 'application/gzip'; + } + + if ( normalizedPath.endsWith( '.zip' ) ) { + return 'application/zip'; + } + + if ( normalizedPath.endsWith( '.sql' ) ) { + return 'application/sql'; + } + + return ''; +} diff --git a/apps/cli/lib/run-wp-cli-command.ts b/apps/cli/lib/run-wp-cli-command.ts index 0e58b60e67..54bcdd8ab7 100644 --- a/apps/cli/lib/run-wp-cli-command.ts +++ b/apps/cli/lib/run-wp-cli-command.ts @@ -40,6 +40,10 @@ export interface RunWpCliCommandOptions { siteUrl?: string; } +interface DisposableWpCliResponse 
extends Disposable { + response: StreamedPHPResponse; +} + // Run a WP-CLI command in a PHP-WASM instance. This function can be used even if the targeted // Studio site is already running, but it is typically faster to use the `sendWpCliCommand` // function in that case. @@ -47,7 +51,7 @@ export async function runWpCliCommand( siteFolder: string, phpVersion: SupportedPHPVersion, args: string[] -): Promise< [ StreamedPHPResponse, exitPhp: () => void ] > { +): Promise< DisposableWpCliResponse > { const id = await loadNodeRuntime( phpVersion, { followSymlinks: true, withRedis: IS_JSPI_AVAILABLE, @@ -94,11 +98,16 @@ export async function runWpCliCommand( await setupPlatformLevelMuPlugins( php ); - return [ - await php.cli( [ 'php', '/tmp/wp-cli.phar', '--path=/wordpress', ...args ] ), - () => php.exit(), - ]; + const response = await php.cli( [ 'php', '/tmp/wp-cli.phar', '--path=/wordpress', ...args ] ); + + return { + response, + [ Symbol.dispose ]() { + php.exit(); + }, + }; } catch ( error ) { + php.exit(); throw new Error( __( 'An error occurred while running the WP-CLI command.' ) ); } } @@ -107,9 +116,7 @@ export async function runWpCliCommand( * Run a global WP-CLI command without requiring a site. * Useful for commands like --version that don't need a WordPress installation. 
*/ -export async function runGlobalWpCliCommand( - args: string[] -): Promise< [ StreamedPHPResponse, exitPhp: () => void ] > { +export async function runGlobalWpCliCommand( args: string[] ): Promise< DisposableWpCliResponse > { const id = await loadNodeRuntime( LatestSupportedPHPVersion, { followSymlinks: true, withRedis: false, @@ -134,8 +141,16 @@ export async function runGlobalWpCliCommand( await php.mount( '/tmp/wp-cli.phar', createNodeFsMountHandler( getWpCliPharPath() ) ); - return [ await php.cli( [ 'php', '/tmp/wp-cli.phar', ...args ] ), () => php.exit() ]; + const response = await php.cli( [ 'php', '/tmp/wp-cli.phar', ...args ] ); + + return { + response, + [ Symbol.dispose ]() { + php.exit(); + }, + }; } catch ( error ) { + php.exit(); throw new Error( __( 'An error occurred while running the WP-CLI command.' ) ); } } diff --git a/apps/cli/lib/sync-api.ts b/apps/cli/lib/sync-api.ts new file mode 100644 index 0000000000..0a671ac280 --- /dev/null +++ b/apps/cli/lib/sync-api.ts @@ -0,0 +1,145 @@ +import { + checkBackupSize as checkBackupSizeBase, + fetchSyncableSites as fetchSyncableSitesBase, + initiateBackup as initiateBackupBase, + pollBackupStatus as pollBackupStatusBase, + initiateImport as initiateImportBase, + pollImportStatus as pollImportStatusBase, + downloadBackup as downloadBackupBase, + fetchLatestRewindId as fetchLatestRewindIdBase, + fetchRemoteFileTree as fetchRemoteFileTreeBase, +} from '@studio/common/lib/sync/sync-api'; +import { syncOptionSchema } from '@studio/common/types/sync'; +import { __, sprintf } from '@wordpress/i18n'; +import { z } from 'zod'; +import { LoggerError } from 'cli/logger'; +import type { SyncSite, ImportResponse, SyncOption } from '@studio/common/types/sync'; + +export function parseSyncOptions( optionsString?: string ): SyncOption[] { + if ( ! optionsString ) { + return [ 'all' ]; + } + + return optionsString.split( ',' ).map( ( o ) => { + const result = syncOptionSchema.safeParse( o.trim() ); + if ( ! 
result.success ) { + throw new LoggerError( + sprintf( + __( 'Invalid sync option: %s. Valid options: %s' ), + o.trim(), + syncOptionSchema.options.join( ', ' ) + ) + ); + } + return result.data; + } ); +} + +function wrapError( message: string, error: unknown ): LoggerError { + if ( error instanceof LoggerError ) { + return error; + } + if ( error instanceof z.ZodError ) { + return new LoggerError( __( 'Invalid API response format' ), error ); + } + return new LoggerError( message, error ); +} + +export async function fetchSyncableSites( token: string ): Promise< SyncSite[] > { + try { + return await fetchSyncableSitesBase( token ); + } catch ( error ) { + throw wrapError( __( 'Failed to fetch WordPress.com sites' ), error ); + } +} + +export async function initiateBackup( + token: string, + remoteSiteId: number, + options: { optionsToSync: SyncOption[]; includePathList?: string[] } +): Promise< number > { + try { + return await initiateBackupBase( token, remoteSiteId, options ); + } catch ( error ) { + throw wrapError( __( 'Failed to initiate backup' ), error ); + } +} + +export async function pollBackupStatus( token: string, remoteSiteId: number, backupId: number ) { + try { + return await pollBackupStatusBase( token, remoteSiteId, backupId ); + } catch ( error ) { + throw wrapError( __( 'Failed to check backup status' ), error ); + } +} + +export async function initiateImport( + token: string, + remoteSiteId: number, + attachmentId: string, + options?: { optionsToSync?: SyncOption[]; specificSelectionPaths?: string[] } +): Promise< void > { + try { + await initiateImportBase( token, remoteSiteId, attachmentId, options ); + } catch ( error ) { + if ( error instanceof Error && 'statusCode' in error && error.statusCode === 409 ) { + throw new LoggerError( + __( + 'A sync operation is already in progress on this site. Please wait for it to finish and try again.' 
+ ) + ); + } + throw wrapError( __( 'Failed to initiate import on remote site' ), error ); + } +} + +export async function pollImportStatus( + token: string, + remoteSiteId: number +): Promise< ImportResponse > { + try { + return await pollImportStatusBase( token, remoteSiteId ); + } catch ( error ) { + throw wrapError( __( 'Failed to check import status' ), error ); + } +} + +export async function checkBackupSize( url: string ): Promise< number > { + try { + return await checkBackupSizeBase( url ); + } catch ( error ) { + throw wrapError( __( 'Failed to check backup size' ), error ); + } +} + +export async function downloadBackup( url: string, destPath: string ): Promise< void > { + try { + await downloadBackupBase( url, destPath ); + } catch ( error ) { + throw wrapError( __( 'Failed to download backup' ), error ); + } +} + +export async function fetchLatestRewindId( + token: string, + remoteSiteId: number +): Promise< string > { + try { + return await fetchLatestRewindIdBase( token, remoteSiteId ); + } catch ( error ) { + throw wrapError( __( 'Failed to fetch latest rewind ID' ), error ); + } +} + +export async function fetchRemoteFileTree( + token: string, + remoteSiteId: number, + rewindId: string, + treePath: string = 'wp-content/' +) { + try { + return await fetchRemoteFileTreeBase( token, remoteSiteId, rewindId, treePath ); + } catch ( error ) { + throw wrapError( __( 'Failed to fetch remote file tree' ), error ); + } +} diff --git a/apps/cli/lib/sync-file-tree.ts b/apps/cli/lib/sync-file-tree.ts new file mode 100644 index 0000000000..797564bdfa --- /dev/null +++ b/apps/cli/lib/sync-file-tree.ts @@ -0,0 +1,53 @@ +import fs from 'fs'; +import nodePath from 'path'; +import { shouldExcludeFromSync, shouldLimitDepth } from '@studio/common/lib/sync/tree-utils'; +import type { RawDirectoryEntry } from '@studio/common/types/sync-tree'; + +export async function listLocalFileTree( + sitePath: string, + relativePath: string, + maxDepth: number = 2, + currentDepth: 
number = 0 +): Promise< RawDirectoryEntry[] > { + const fullPath = nodePath.join( sitePath, relativePath ); + + try { + const entries = await fs.promises.readdir( fullPath, { withFileTypes: true } ); + const result: RawDirectoryEntry[] = []; + + for ( const entry of entries ) { + if ( shouldExcludeFromSync( entry.name ) ) { + continue; + } + + const isDirectory = entry.isDirectory(); + const itemPath = nodePath.join( relativePath, entry.name ).replace( /\\/g, '/' ); + + const directoryEntry: RawDirectoryEntry = { + name: entry.name, + isDirectory, + path: itemPath, + }; + + const shouldLimit = shouldLimitDepth( itemPath ); + if ( isDirectory && currentDepth < maxDepth && ! shouldLimit ) { + try { + directoryEntry.children = await listLocalFileTree( + sitePath, + itemPath, + maxDepth, + currentDepth + 1 + ); + } catch { + directoryEntry.children = []; + } + } + + result.push( directoryEntry ); + } + + return result; + } catch { + return []; + } +} diff --git a/apps/cli/lib/sync-selector.ts b/apps/cli/lib/sync-selector.ts new file mode 100644 index 0000000000..80194bf1f2 --- /dev/null +++ b/apps/cli/lib/sync-selector.ts @@ -0,0 +1,219 @@ +import { categorizePath } from '@studio/common/lib/sync/tree-utils'; +import { __ } from '@wordpress/i18n'; +import { fetchLatestRewindId, fetchRemoteFileTree } from 'cli/lib/sync-api'; +import { listLocalFileTree } from 'cli/lib/sync-file-tree'; +import treeCheckbox from 'cli/lib/tree-checkbox'; +import type { RemoteFileEntry } from '@studio/common/lib/sync/sync-api'; +import type { SyncOption } from '@studio/common/types/sync'; +import type { RawDirectoryEntry } from '@studio/common/types/sync-tree'; +import type { TreeNode } from 'cli/lib/tree-checkbox'; + +function sortNodes( nodes: TreeNode[] ): TreeNode[] { + return [ ...nodes ].sort( ( a, b ) => a.name.localeCompare( b.name ) ); +} + +function buildTreeFromLocal( entries: RawDirectoryEntry[], depth: number = 1 ): TreeNode[] { + return sortNodes( + entries.map( ( entry ) => { 
+ const relativePath = entry.path.replace( /^wp-content\//, '' ); + return { + name: entry.name + ( entry.isDirectory ? '/' : '' ), + value: relativePath, + isDirectory: entry.isDirectory, + checked: true, + expanded: false, + depth, + children: entry.children?.length + ? buildTreeFromLocal( entry.children, depth + 1 ) + : undefined, + }; + } ) + ); +} + +function buildTreeFromRemote( entries: RemoteFileEntry[], depth: number = 1 ): TreeNode[] { + return sortNodes( + entries.map( ( entry ) => ( { + name: entry.name + ( entry.isDirectory ? '/' : '' ), + value: entry.path.replace( /^\/?wp-content\//, '' ), + isDirectory: entry.isDirectory, + checked: true, + expanded: false, + depth, + pathId: entry.pathId, + } ) ) + ); +} + +function buildRootTree( wpContentChildren: TreeNode[] ): TreeNode[] { + return [ + { + name: __( 'Database (SQL)' ), + value: 'database', + isDirectory: false, + checked: true, + expanded: false, + depth: 0, + }, + { + name: 'wp-content/', + value: 'wp-content', + isDirectory: true, + checked: true, + expanded: true, + depth: 0, + children: wpContentChildren, + }, + ]; +} + +function convertCheckedToSyncOptions( selected: TreeNode[] ): { + optionsToSync: SyncOption[]; + specificSelectionPaths?: string[]; +} { + const hasDatabase = selected.some( ( n ) => n.value === 'database' ); + const wpContentItems = selected.filter( + ( n ) => n.value !== 'database' && n.value !== 'wp-content' + ); + + if ( hasDatabase && wpContentItems.length === 0 ) { + return { optionsToSync: [ 'sqls' ] }; + } + + const optionsToSync: SyncOption[] = []; + const specificSelectionPaths: string[] = []; + + if ( hasDatabase ) { + optionsToSync.push( 'sqls' ); + } + + const categories = new Set< SyncOption >(); + for ( const node of wpContentItems ) { + categories.add( categorizePath( node.value ) ); + specificSelectionPaths.push( node.value ); + } + + optionsToSync.push( ...categories ); + + return { + optionsToSync, + specificSelectionPaths: specificSelectionPaths.length > 
0 ? specificSelectionPaths : undefined, + }; +} + +function convertCheckedToPullOptions( selected: TreeNode[] ): { + optionsToSync: SyncOption[]; + includePathList?: string[]; +} { + const hasDatabase = selected.some( ( n ) => n.value === 'database' ); + const wpContentItems = selected.filter( + ( n ) => n.value !== 'database' && n.value !== 'wp-content' + ); + + if ( hasDatabase && wpContentItems.length === 0 ) { + return { optionsToSync: [ 'sqls' ] }; + } + + const optionsToSync: SyncOption[] = []; + const pathIds: string[] = []; + + if ( hasDatabase ) { + optionsToSync.push( 'sqls' ); + } + + for ( const node of wpContentItems ) { + if ( node.pathId ) { + pathIds.push( node.pathId ); + } + } + + if ( pathIds.length > 0 ) { + optionsToSync.unshift( 'paths' ); + } + + return { + optionsToSync, + includePathList: pathIds.length > 0 ? pathIds : undefined, + }; +} + +function isAllSelected( selected: TreeNode[] ): boolean { + const hasDatabase = selected.some( ( n ) => n.value === 'database' ); + const hasWpContent = selected.some( ( n ) => n.value === 'wp-content' && n.checked ); + return hasDatabase && hasWpContent; +} + +export async function selectSyncItemsForPush( + sitePath: string +): Promise< { optionsToSync: SyncOption[]; specificSelectionPaths?: string[] } | undefined > { + const entries = await listLocalFileTree( sitePath, 'wp-content', 2 ); + + if ( entries.length === 0 ) { + return { optionsToSync: [ 'all' ] }; + } + + const wpContentChildren = buildTreeFromLocal( entries ); + const tree = buildRootTree( wpContentChildren ); + + const selected = await treeCheckbox( { + message: __( 'Select items to sync' ), + tree, + } ); + + if ( selected.length === 0 ) { + return undefined; + } + + if ( isAllSelected( selected ) ) { + return { optionsToSync: [ 'all' ] }; + } + + return convertCheckedToSyncOptions( selected ); +} + +export async function fetchPullTree( + token: string, + remoteSiteId: number +): Promise< { tree: TreeNode[]; rewindId: string } > { + 
const rewindId = await fetchLatestRewindId( token, remoteSiteId ); + const entries = await fetchRemoteFileTree( token, remoteSiteId, rewindId, '/wp-content/' ); + + const wpContentChildren = buildTreeFromRemote( entries ); + const tree = buildRootTree( wpContentChildren ); + return { tree, rewindId }; +} + +export async function selectSyncItemsForPull( + token: string, + remoteSiteId: number, + tree: TreeNode[] +): Promise< { optionsToSync: SyncOption[]; includePathList?: string[] } | undefined > { + if ( tree.length === 0 ) { + return { optionsToSync: [ 'all' ] }; + } + + const selected = await treeCheckbox( { + message: __( 'Select items to sync' ), + tree, + onExpand: async ( node ) => { + const rewindId = await fetchLatestRewindId( token, remoteSiteId ); + const entries = await fetchRemoteFileTree( + token, + remoteSiteId, + rewindId, + `/wp-content/${ node.value }` + ); + return buildTreeFromRemote( entries, node.depth + 1 ); + }, + } ); + + if ( selected.length === 0 ) { + return undefined; + } + + if ( isAllSelected( selected ) ) { + return { optionsToSync: [ 'all' ] }; + } + + return convertCheckedToPullOptions( selected ); +} diff --git a/apps/cli/lib/sync-site-picker.ts b/apps/cli/lib/sync-site-picker.ts new file mode 100644 index 0000000000..b922555d41 --- /dev/null +++ b/apps/cli/lib/sync-site-picker.ts @@ -0,0 +1,171 @@ +import { search } from '@inquirer/prompts'; +import { __, sprintf } from '@wordpress/i18n'; +import chalk from 'chalk'; +import { normalizeHostname } from 'cli/lib/utils'; +import { LoggerError } from 'cli/logger'; +import type { SyncSite } from '@studio/common/types/sync'; + +export function findSyncSiteByIdentifier( sites: SyncSite[], identifier: string ): SyncSite { + // Try numeric ID match first + const numericId = Number( identifier ); + if ( ! 
isNaN( numericId ) ) { + const site = sites.find( ( s ) => s.id === numericId ); + if ( site ) { + if ( site.syncSupport !== 'syncable' ) { + throw new LoggerError( + sprintf( __( 'Site %s is not syncable (%s)' ), site.name, site.syncSupport ) + ); + } + return site; + } + } + + // Try URL/hostname match + const normalizedIdentifier = normalizeHostname( identifier ); + const matched = sites.filter( ( s ) => normalizeHostname( s.url ) === normalizedIdentifier ); + + if ( matched.length === 0 ) { + throw new LoggerError( sprintf( __( 'No site found matching "%s"' ), identifier ) ); + } + + if ( matched.length > 1 ) { + throw new LoggerError( + sprintf( + __( 'Multiple sites match "%s". Use the site ID instead: %s' ), + identifier, + matched.map( ( s ) => `${ s.name } (ID: ${ s.id })` ).join( ', ' ) + ) + ); + } + + const site = matched[ 0 ]; + if ( site.syncSupport !== 'syncable' ) { + throw new LoggerError( + sprintf( __( 'Site %s is not syncable (%s)' ), site.name, site.syncSupport ) + ); + } + + return site; +} + +function getSyncSupportLabel( syncSupport: SyncSite[ 'syncSupport' ] ): string { + switch ( syncSupport ) { + case 'needs-upgrade': + return __( 'Plan upgrade required' ); + case 'needs-transfer': + return __( 'Transfer required' ); + case 'unsupported': + return __( 'Unsupported site' ); + case 'deleted': + return __( 'Deleted' ); + case 'missing-permissions': + return __( 'Missing permissions' ); + default: + return syncSupport; + } +} + +function formatSiteChoice( site: SyncSite ): string { + const parts = [ site.name ]; + + const hostname = normalizeHostname( site.url ); + parts.push( chalk.dim( hostname ) ); + + if ( site.isStaging ) { + parts.push( chalk.yellow( __( '[staging]' ) ) ); + } + + return parts.join( ' ' ); +} + +export async function pickSyncSite( + sites: SyncSite[], + message: string +): Promise< SyncSite | undefined > { + const syncable = sites.filter( ( s ) => s.syncSupport === 'syncable' ); + const nonSyncable = sites.filter( ( s ) 
=> s.syncSupport !== 'syncable' ); + + if ( syncable.length === 0 ) { + console.log( __( 'No syncable sites found.' ) ); + return undefined; + } + + const allChoices = [ + ...syncable.map( ( site ) => ( { + name: formatSiteChoice( site ), + value: site.id, + } ) ), + ...nonSyncable.map( ( site ) => ( { + name: formatSiteChoice( site ), + value: site.id, + disabled: chalk.red( `(${ getSyncSupportLabel( site.syncSupport ) })` ), + } ) ), + ]; + + const abortController = new AbortController(); + const handleEscKey = ( chunk: Buffer | string ) => { + const bytes = Buffer.isBuffer( chunk ) ? chunk : Buffer.from( chunk ); + if ( bytes.length === 1 && bytes[ 0 ] === 0x1b ) { + abortController.abort(); + } + }; + + if ( process.stdin.isTTY ) { + process.stdin.on( 'data', handleEscKey ); + } + + try { + const selectedId = await search( + { + message, + source: ( term ) => { + if ( ! term ) { + return allChoices; + } + const lowerTerm = term.toLowerCase(); + return allChoices.filter( ( choice ) => { + const site = sites.find( ( s ) => s.id === choice.value ); + if ( ! 
site ) { + return false; + } + return ( + site.name.toLowerCase().includes( lowerTerm ) || + normalizeHostname( site.url ).toLowerCase().includes( lowerTerm ) + ); + } ); + }, + pageSize: 12, + theme: { + style: { + keysHelpTip: () => + chalk.dim( + [ + __( '↑↓ navigate' ), + __( 'type to filter' ), + __( '⏎ select' ), + __( 'esc cancel' ), + ].join( ' · ' ) + ), + }, + }, + }, + { + signal: abortController.signal, + } + ); + + return sites.find( ( site ) => site.id === selectedId ); + } catch ( error ) { + if ( + error instanceof Error && + ( error.name === 'AbortPromptError' || error.name === 'ExitPromptError' ) + ) { + return undefined; + } + throw error; + } finally { + if ( process.stdin.isTTY ) { + process.stdin.off( 'data', handleEscKey ); + } + } +} diff --git a/apps/cli/lib/tree-checkbox.ts b/apps/cli/lib/tree-checkbox.ts new file mode 100644 index 0000000000..01d7644c91 --- /dev/null +++ b/apps/cli/lib/tree-checkbox.ts @@ -0,0 +1,319 @@ +import { cursorHide } from '@inquirer/ansi'; +import { + createPrompt, + useState, + useKeypress, + usePrefix, + usePagination, + makeTheme, + isUpKey, + isDownKey, + isSpaceKey, + isEnterKey, +} from '@inquirer/core'; +import figures from '@inquirer/figures'; +import { __, sprintf } from '@wordpress/i18n'; +import chalk from 'chalk'; + +export type TreeNode = { + name: string; + value: string; + isDirectory: boolean; + checked: boolean; + indeterminate?: boolean; + expanded: boolean; + depth: number; + children?: TreeNode[]; + loading?: boolean; + pathId?: string; +}; + +type TreeCheckboxConfig = { + message: string; + tree: TreeNode[]; + onExpand?: ( node: TreeNode ) => Promise< TreeNode[] >; + pageSize?: number; +}; + +type FlatItem = { + node: TreeNode; + path: number[]; +}; + +function flattenTree( nodes: TreeNode[], path: number[] = [] ): FlatItem[] { + const result: FlatItem[] = []; + for ( let i = 0; i < nodes.length; i++ ) { + const currentPath = [ ...path, i ]; + result.push( { node: nodes[ i ], path: 
currentPath } ); + if ( nodes[ i ].expanded && nodes[ i ].children?.length ) { + result.push( ...flattenTree( nodes[ i ].children!, currentPath ) ); + } + } + return result; +} + +function updateNodeAtPath( + tree: TreeNode[], + path: number[], + updater: ( node: TreeNode ) => TreeNode +): TreeNode[] { + const newTree = [ ...tree ]; + if ( path.length === 1 ) { + newTree[ path[ 0 ] ] = updater( newTree[ path[ 0 ] ] ); + return newTree; + } + newTree[ path[ 0 ] ] = { + ...newTree[ path[ 0 ] ], + children: updateNodeAtPath( newTree[ path[ 0 ] ].children || [], path.slice( 1 ), updater ), + }; + return newTree; +} + +function setCheckedRecursive( node: TreeNode, checked: boolean ): TreeNode { + return { + ...node, + checked, + children: node.children?.map( ( child ) => setCheckedRecursive( child, checked ) ), + }; +} + +function propagateCheckedState( nodes: TreeNode[] ): TreeNode[] { + return nodes.map( ( node ) => { + if ( ! node.children?.length ) { + return { ...node, indeterminate: false }; + } + + const updatedChildren = propagateCheckedState( node.children ); + const allChecked = updatedChildren.every( ( c ) => c.checked ); + const noneChecked = updatedChildren.every( ( c ) => ! c.checked && ! c.indeterminate ); + + return { + ...node, + children: updatedChildren, + checked: allChecked, + indeterminate: ! allChecked && ! 
noneChecked, + }; + } ); +} + +function collectCheckedValues( nodes: TreeNode[] ): TreeNode[] { + const result: TreeNode[] = []; + for ( const node of nodes ) { + if ( node.checked ) { + result.push( node ); + } + if ( node.children?.length ) { + result.push( ...collectCheckedValues( node.children ) ); + } + } + return result; +} + +const treeTheme = { + icon: { + checked: chalk.green( figures.circleFilled ), + unchecked: figures.circle, + cursor: figures.pointer, + expanded: '▾', + collapsed: '▸', + }, + style: { + renderSelectedChoices: ( selectedChoices: TreeNode[] ) => + selectedChoices.map( ( c ) => c.name ).join( ', ' ), + }, +}; + +export default createPrompt< TreeNode[], TreeCheckboxConfig >( ( config, done ) => { + const { pageSize = 15, onExpand } = config; + const theme = makeTheme( treeTheme, {} ); + const [ status, setStatus ] = useState< 'idle' | 'loading' | 'done' >( 'idle' ); + const prefix = usePrefix( { status: status === 'loading' ? 'loading' : status, theme } ); + const [ tree, setTree ] = useState< TreeNode[] >( config.tree ); + const [ active, setActive ] = useState( 0 ); + + const flatItems = flattenTree( tree ); + + useKeypress( async ( key ) => { + if ( status === 'loading' ) { + return; + } + + if ( key.name === 'escape' ) { + setStatus( 'done' ); + done( [] ); + return; + } + + if ( isEnterKey( key ) ) { + setStatus( 'done' ); + done( collectCheckedValues( tree ) ); + return; + } + + if ( isUpKey( key ) || isDownKey( key ) ) { + const offset = isUpKey( key ) ? -1 : 1; + const next = Math.max( 0, Math.min( flatItems.length - 1, active + offset ) ); + setActive( next ); + return; + } + + if ( isSpaceKey( key ) ) { + const item = flatItems[ active ]; + if ( ! item ) { + return; + } + const toggled = updateNodeAtPath( tree, item.path, ( node ) => + setCheckedRecursive( node, ! 
node.checked ) + ); + setTree( propagateCheckedState( toggled ) ); + return; + } + + // Right arrow: expand folder + if ( key.name === 'right' ) { + const item = flatItems[ active ]; + if ( ! item || ! item.node.isDirectory ) { + return; + } + + if ( item.node.expanded ) { + return; // already expanded + } + + if ( item.node.children && item.node.children.length > 0 ) { + // Already has children, just expand + const newTree = updateNodeAtPath( tree, item.path, ( node ) => ( { + ...node, + expanded: true, + } ) ); + setTree( newTree ); + return; + } + + if ( onExpand ) { + setStatus( 'loading' ); + const newTree = updateNodeAtPath( tree, item.path, ( node ) => ( { + ...node, + loading: true, + } ) ); + setTree( newTree ); + + try { + const children = await onExpand( item.node ); + const updatedTree = updateNodeAtPath( tree, item.path, ( node ) => ( { + ...node, + expanded: true, + loading: false, + children: children.map( ( child ) => ( { + ...child, + checked: node.checked, + } ) ), + } ) ); + setTree( updatedTree ); + } catch { + const updatedTree = updateNodeAtPath( tree, item.path, ( node ) => ( { + ...node, + loading: false, + } ) ); + setTree( updatedTree ); + } + setStatus( 'idle' ); + } + return; + } + + // Left arrow: collapse folder + if ( key.name === 'left' ) { + const item = flatItems[ active ]; + if ( ! 
item ) { + return; + } + + if ( item.node.isDirectory && item.node.expanded ) { + const newTree = updateNodeAtPath( tree, item.path, ( node ) => ( { + ...node, + expanded: false, + } ) ); + setTree( newTree ); + return; + } + + // If on a child, move cursor to parent + if ( item.path.length > 1 ) { + const parentPath = item.path.slice( 0, -1 ); + const parentIndex = flatItems.findIndex( + ( fi ) => + fi.path.length === parentPath.length && + fi.path.every( ( v, i ) => v === parentPath[ i ] ) + ); + if ( parentIndex >= 0 ) { + setActive( parentIndex ); + } + } + return; + } + + // 'a' to toggle all + if ( key.name === 'a' ) { + const anyUnchecked = flatItems.some( ( item ) => ! item.node.checked ); + const newTree = tree.map( ( node ) => setCheckedRecursive( node, anyUnchecked ) ); + setTree( newTree ); + } + } ); + + const message = theme.style.message( config.message, status ); + + if ( status === 'done' ) { + const selected = collectCheckedValues( tree ); + const summary = + selected.length > 3 + ? sprintf( __( '%d items selected' ), selected.length ) + : selected.map( ( n ) => n.name ).join( ', ' ); + return `${ prefix } ${ message } ${ chalk.cyan( summary ) }`; + } + + const page = usePagination( { + items: flatItems, + active, + renderItem: ( { item, isActive }: { item: FlatItem; isActive: boolean } ) => { + const { node } = item; + const indent = ' '.repeat( node.depth ); + const cursor = isActive ? chalk.cyan( figures.pointer ) : ' '; + let check = figures.circle; + if ( node.checked ) { + check = chalk.green( figures.circleFilled ); + } else if ( node.indeterminate ) { + check = chalk.yellow( '◐' ); + } + + let icon = ''; + if ( node.isDirectory ) { + if ( node.loading ) { + icon = chalk.yellow( '⟳ ' ); + } else if ( node.expanded ) { + icon = '▾ '; + } else { + icon = '▸ '; + } + } + + const label = isActive ? 
chalk.cyan( node.name ) : node.name; + return `${ cursor } ${ indent }${ check } ${ icon }${ label }`; + }, + pageSize, + loop: false, + } ); + + const helpTip = chalk.dim( + [ + __( '↑↓ navigate' ), + __( 'space toggle' ), + __( '→← expand/collapse' ), + __( 'a all' ), + __( '⏎ confirm' ), + __( 'esc cancel' ), + ].join( ' · ' ) + ); + + return `${ prefix } ${ message }${ cursorHide }\n${ page }\n\n${ helpTip }`; +} ); diff --git a/apps/cli/package.json b/apps/cli/package.json index 90208eaf47..82136e13e1 100644 --- a/apps/cli/package.json +++ b/apps/cli/package.json @@ -37,18 +37,22 @@ "@wp-playground/common": "3.1.18", "@wp-playground/storage": "3.1.18", "@wp-playground/wordpress": "3.1.18", + "archiver": "^6.0.2", "atomically": "^2.1.1", "chalk": "^5.6.2", "cli-table3": "^0.6.5", + "fs-extra": "^11.3.4", "http-proxy": "^1.18.1", "node-forge": "^1.3.3", "ora": "^8.2.0", "patch-package": "^8.0.1", "playwright": "^1.52.0", "semver": "^7.7.4", + "tar": "^7.5.13", "trash": "^10.0.1", "yargs": "^18.0.0", "yargs-parser": "^22.0.0", + "yauzl": "^3.3.0", "zod": "^4.3.6" }, "scripts": { @@ -68,6 +72,7 @@ "@types/http-proxy": "^1.17.17", "@types/node-forge": "^1.3.14", "@types/yargs": "^17.0.35", + "glob": "^13.0.6", "vite": "^7.3.1", "vite-plugin-static-copy": "^3.1.5" } diff --git a/apps/cli/vite.config.prod.ts b/apps/cli/vite.config.prod.ts index 5c8f4e0d15..f2ce360ab7 100644 --- a/apps/cli/vite.config.prod.ts +++ b/apps/cli/vite.config.prod.ts @@ -3,13 +3,13 @@ import { resolve } from 'path'; import { globSync } from 'glob'; import { defineConfig, mergeConfig } from 'vite'; import { viteStaticCopy } from 'vite-plugin-static-copy'; -import devConfig from './vite.config.dev'; +import { baseConfig } from './vite.config.base'; const cliNodeModulesPath = resolve( __dirname, 'node_modules' ); const distCliNodeModulesPath = resolve( __dirname, 'dist/cli/node_modules' ); export default mergeConfig( - devConfig, + baseConfig, defineConfig( { plugins: [ ...( existsSync( 
cliNodeModulesPath ) diff --git a/apps/studio/e2e/page-objects/add-site-modal.ts b/apps/studio/e2e/page-objects/add-site-modal.ts index 3a597fead7..be787bf253 100644 --- a/apps/studio/e2e/page-objects/add-site-modal.ts +++ b/apps/studio/e2e/page-objects/add-site-modal.ts @@ -1,5 +1,5 @@ import { type Page } from '@playwright/test'; -import { ACCEPTED_IMPORT_FILE_TYPES } from 'src/constants'; +import { ACCEPTED_IMPORT_FILE_TYPES } from '@studio/common/constants'; import SiteForm from './site-form'; export default class AddSiteModal { diff --git a/apps/studio/src/components/content-tab-import-export.tsx b/apps/studio/src/components/content-tab-import-export.tsx index 4dda81361d..4d72fef792 100644 --- a/apps/studio/src/components/content-tab-import-export.tsx +++ b/apps/studio/src/components/content-tab-import-export.tsx @@ -1,3 +1,4 @@ +import { ACCEPTED_IMPORT_FILE_TYPES } from '@studio/common/constants'; import { speak } from '@wordpress/a11y'; import { Notice } from '@wordpress/components'; import { createInterpolateElement } from '@wordpress/element'; @@ -11,7 +12,6 @@ import { ErrorIcon } from 'src/components/error-icon'; import { LearnMoreLink } from 'src/components/learn-more'; import ProgressBar from 'src/components/progress-bar'; import { Tooltip } from 'src/components/tooltip'; -import { ACCEPTED_IMPORT_FILE_TYPES } from 'src/constants'; import { useAuth } from 'src/hooks/use-auth'; import { useConfirmationDialog } from 'src/hooks/use-confirmation-dialog'; import { useDragAndDropFile } from 'src/hooks/use-drag-and-drop-file'; diff --git a/apps/studio/src/constants.ts b/apps/studio/src/constants.ts index 9c55c6ed2e..541b6603a6 100644 --- a/apps/studio/src/constants.ts +++ b/apps/studio/src/constants.ts @@ -11,8 +11,6 @@ export const SCREENSHOT_HEIGHT = 1248; export const LIMIT_OF_ZIP_SITES_PER_USER = 10; export const LIMIT_OF_PROMPTS_PER_USER = 200; export const UPDATED_MESSAGE_DURATION_MS = 60000; // 1 minute -export const SYNC_PUSH_SIZE_LIMIT_GB = 5; 
-export const SYNC_PUSH_SIZE_LIMIT_BYTES = SYNC_PUSH_SIZE_LIMIT_GB * 1024 * 1024 * 1024; // 5GB export const AUTO_UPDATE_INTERVAL_MS = 60 * 60 * 1000; export const MACOS_TRAFFIC_LIGHT_POSITION = { x: 20, y: 20 }; export const WINDOWS_TITLEBAR_HEIGHT = 32; @@ -33,23 +31,6 @@ export const LOCAL_STORAGE_CHAT_MESSAGES_KEY = 'ai_chat_messages'; export const LOCAL_STORAGE_CHAT_API_IDS_KEY = 'ai_chat_ids'; export const DEFAULT_TERMINAL = 'terminal'; -//Import file constants - -export const ACCEPTED_IMPORT_FILE_TYPES = [ '.zip', '.gz', '.gzip', '.tar', '.tar.gz', '.wpress' ]; - -// Archiver options -export const ARCHIVER_OPTIONS = { - zip: { - zlib: { level: 9 }, - followSymlinks: true, - }, - tar: { - gzip: true, - gzipOptions: { level: 9 }, - followSymlinks: true, - }, -}; - export const SYNC_OPTIONS = { // Options sent for pull and push all: 'all', diff --git a/apps/studio/src/hooks/sync-sites/use-listen-deep-link-connection.ts b/apps/studio/src/hooks/sync-sites/use-listen-deep-link-connection.ts index 04983732ad..c9a5e45f6e 100644 --- a/apps/studio/src/hooks/sync-sites/use-listen-deep-link-connection.ts +++ b/apps/studio/src/hooks/sync-sites/use-listen-deep-link-connection.ts @@ -1,9 +1,9 @@ +import { SyncSite } from '@studio/common/types/sync'; import { useAuth } from 'src/hooks/use-auth'; import { useContentTabs } from 'src/hooks/use-content-tabs'; import { useIpcListener } from 'src/hooks/use-ipc-listener'; import { useSiteDetails } from 'src/hooks/use-site-details'; import { getIpcApi } from 'src/lib/get-ipc-api'; -import { SyncSite } from 'src/modules/sync/types'; import { useAppDispatch } from 'src/stores'; import { connectedSitesActions, diff --git a/apps/studio/src/hooks/tests/get-sync-support.test.ts b/apps/studio/src/hooks/tests/get-sync-support.test.ts index 26524d0bfb..118747594b 100644 --- a/apps/studio/src/hooks/tests/get-sync-support.test.ts +++ b/apps/studio/src/hooks/tests/get-sync-support.test.ts @@ -1,5 +1,5 @@ +import { getSyncSupport } from 
'@studio/common/lib/sync/sync-support'; import { vi } from 'vitest'; -import { getSyncSupport } from 'src/modules/sync/lib/sync-support'; // Mocks for site shapes const baseSite = { diff --git a/apps/studio/src/hooks/tests/reconcile-connected-sites.test.ts b/apps/studio/src/hooks/tests/reconcile-connected-sites.test.ts index e13e6192d5..266e512fb0 100644 --- a/apps/studio/src/hooks/tests/reconcile-connected-sites.test.ts +++ b/apps/studio/src/hooks/tests/reconcile-connected-sites.test.ts @@ -1,5 +1,5 @@ import { reconcileConnectedSites } from 'src/modules/sync/lib/reconcile-connected-sites'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; describe( 'reconcileConnectedSites', () => { test( 'should update relevant properties', () => { diff --git a/apps/studio/src/hooks/tests/use-add-site.test.tsx b/apps/studio/src/hooks/tests/use-add-site.test.tsx index e6eb4a1630..db203d7afc 100644 --- a/apps/studio/src/hooks/tests/use-add-site.test.tsx +++ b/apps/studio/src/hooks/tests/use-add-site.test.tsx @@ -8,7 +8,7 @@ import { useAuth } from 'src/hooks/use-auth'; import { useContentTabs } from 'src/hooks/use-content-tabs'; import { useSiteDetails } from 'src/hooks/use-site-details'; import { store } from 'src/stores'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; import type { WPCOM } from 'wpcom/types'; vi.mock( 'src/hooks/use-site-details' ); diff --git a/apps/studio/src/hooks/use-add-site.ts b/apps/studio/src/hooks/use-add-site.ts index 9c9b9941a8..aa9e2b13c7 100644 --- a/apps/studio/src/hooks/use-add-site.ts +++ b/apps/studio/src/hooks/use-add-site.ts @@ -16,7 +16,7 @@ import { syncOperationsThunks } from 'src/stores/sync'; import { useConnectSiteMutation } from 'src/stores/sync/connected-sites'; import { Blueprint } from 'src/stores/wpcom-api'; import type { BlueprintPreferredVersions } from '@studio/common/lib/blueprint-validation'; 
-import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; import type { SyncOption } from 'src/types'; /** diff --git a/apps/studio/src/ipc-handlers.ts b/apps/studio/src/ipc-handlers.ts index 2287e671df..592311509a 100644 --- a/apps/studio/src/ipc-handlers.ts +++ b/apps/studio/src/ipc-handlers.ts @@ -40,6 +40,7 @@ import { getAuthenticationUrl } from '@studio/common/lib/oauth'; import { decodePassword, encodePassword } from '@studio/common/lib/passwords'; import { sanitizeFolderName } from '@studio/common/lib/sanitize-folder-name'; import { readSharedConfig, updateSharedConfig } from '@studio/common/lib/shared-config'; +import { shouldExcludeFromSync, shouldLimitDepth } from '@studio/common/lib/sync/tree-utils'; import { isWordPressDevVersion } from '@studio/common/lib/wordpress-version-utils'; import { __, sprintf, LocaleData, defaultI18n } from '@wordpress/i18n'; import { MACOS_TRAFFIC_LIGHT_POSITION, MAIN_MIN_WIDTH, SIDEBAR_WIDTH } from 'src/constants'; @@ -94,7 +95,6 @@ import { import { editSiteViaCli, EditSiteOptions } from 'src/modules/cli/lib/cli-site-editor'; import { isStudioCliInstalled } from 'src/modules/cli/lib/ipc-handlers'; import { STABLE_BIN_DIR_PATH } from 'src/modules/cli/lib/windows-installation-manager'; -import { shouldExcludeFromSync, shouldLimitDepth } from 'src/modules/sync/lib/tree-utils'; import { supportedEditorConfig, SupportedEditor } from 'src/modules/user-settings/lib/editor'; import { getUserEditor, getUserTerminal } from 'src/modules/user-settings/lib/ipc-handlers'; import { winFindEditorPath } from 'src/modules/user-settings/lib/win-editor-path'; @@ -109,7 +109,7 @@ import { } from 'src/storage/user-data'; import { Blueprint } from 'src/stores/wpcom-api'; import { captureSiteThumbnail } from './lib/capture-site-thumbnail'; -import type { RawDirectoryEntry } from 'src/modules/sync/types'; +import type { RawDirectoryEntry } from '@studio/common/types/sync-tree'; import type 
{ WpCliResult } from 'src/site-server'; export { diff --git a/apps/studio/src/ipc-utils.ts b/apps/studio/src/ipc-utils.ts index f10917f959..481a3a9a58 100644 --- a/apps/studio/src/ipc-utils.ts +++ b/apps/studio/src/ipc-utils.ts @@ -4,8 +4,8 @@ import { BlueprintValidationWarning } from '@studio/common/lib/blueprint-validat import { SiteEvent, SnapshotEvent } from '@studio/common/lib/cli-events'; import { PreviewCommandLoggerAction } from '@studio/common/logger-actions'; import { ImportExportEventData } from 'src/lib/import-export/handle-events'; -import { StoredAuthToken } from 'src/lib/oauth'; import { getMainWindow } from 'src/main-window'; +import type { StoredAuthToken } from '@studio/common/lib/shared-config'; import type { UserData } from 'src/storage/storage-types'; type SnapshotEventData = { diff --git a/apps/studio/src/lib/import-export/export/exporters/default-exporter.ts b/apps/studio/src/lib/import-export/export/exporters/default-exporter.ts index 5508d1b32e..9d2ccff1e0 100644 --- a/apps/studio/src/lib/import-export/export/exporters/default-exporter.ts +++ b/apps/studio/src/lib/import-export/export/exporters/default-exporter.ts @@ -2,13 +2,13 @@ import { EventEmitter } from 'events'; import fs from 'fs'; import os from 'os'; import path from 'path'; +import { ARCHIVER_OPTIONS } from '@studio/common/constants'; import { parseJsonFromPhpOutput } from '@studio/common/lib/php-output-parser'; import { hasDefaultDbBlock, removeDbConstants, } from '@studio/common/lib/remove-default-db-constants'; import archiver from 'archiver'; -import { ARCHIVER_OPTIONS } from 'src/constants'; import { getSiteUrl } from 'src/lib/get-site-url'; import { ExportEvents } from 'src/lib/import-export/export/events'; import { diff --git a/apps/studio/src/lib/import-export/import/importers/importer.ts b/apps/studio/src/lib/import-export/import/importers/importer.ts index 3e67ba3437..44c97f57f2 100644 --- a/apps/studio/src/lib/import-export/import/importers/importer.ts +++ 
b/apps/studio/src/lib/import-export/import/importers/importer.ts @@ -4,6 +4,7 @@ import fs from 'fs'; import path from 'path'; import { createInterface } from 'readline'; import { DEFAULT_PHP_VERSION } from '@studio/common/constants'; +import { serializePlugins } from '@studio/common/lib/serialize-plugins'; import { SupportedPHPVersionsList } from '@studio/common/types/php-versions'; import { lstat, move } from 'fs-extra'; import semver from 'semver'; @@ -15,7 +16,6 @@ import { MetaFileData, ImportWpContentProgressEventData, } from 'src/lib/import-export/import/types'; -import { serializePlugins } from 'src/lib/serialize-plugins'; import { updateSiteUrl } from 'src/lib/update-site-url'; import { SiteServer } from 'src/site-server'; diff --git a/apps/studio/src/lib/oauth.ts b/apps/studio/src/lib/oauth.ts index 5db8c37c65..0a081c30e2 100644 --- a/apps/studio/src/lib/oauth.ts +++ b/apps/studio/src/lib/oauth.ts @@ -3,8 +3,6 @@ import { SupportedLocale } from '@studio/common/lib/locale'; import { getAuthenticationUrl } from '@studio/common/lib/oauth'; import { readAuthToken, type StoredAuthToken } from '@studio/common/lib/shared-config'; -export type { StoredAuthToken } from '@studio/common/lib/shared-config'; - export function getSignUpUrl( locale: SupportedLocale ) { const oauth2Redirect = encodeURIComponent( getAuthenticationUrl( locale ) ); return `https://wordpress.com/start/wpcc/oauth2-user?oauth2_client_id=${ CLIENT_ID }&oauth2_redirect=${ oauth2Redirect }&locale=${ locale }`; diff --git a/apps/studio/src/lib/tests/windows-helpers.test.ts b/apps/studio/src/lib/tests/windows-helpers.test.ts index d891cf0131..8ddd8b8e14 100644 --- a/apps/studio/src/lib/tests/windows-helpers.test.ts +++ b/apps/studio/src/lib/tests/windows-helpers.test.ts @@ -9,13 +9,12 @@ import { promptWindowsSpeedUpSites } from '../windows-helpers'; vi.mock( 'src/main-window' ); vi.mock( 'src/storage/user-data' ); -vi.mock( 'electron', async () => { - const actual = await vi.importActual< typeof 
import('electron') >( 'electron' ); - // Mock BrowserWindow class +vi.mock( import( 'electron' ), async ( importActual ) => { + const actual = await importActual(); class MockBrowserWindow {} return { ...actual, - BrowserWindow: MockBrowserWindow, + BrowserWindow: MockBrowserWindow as typeof actual.BrowserWindow, app: { ...actual.app, getVersion: vi.fn(), diff --git a/apps/studio/src/modules/add-site/components/import-backup.tsx b/apps/studio/src/modules/add-site/components/import-backup.tsx index 97f36806a1..1675dc6ed1 100644 --- a/apps/studio/src/modules/add-site/components/import-backup.tsx +++ b/apps/studio/src/modules/add-site/components/import-backup.tsx @@ -1,3 +1,4 @@ +import { ACCEPTED_IMPORT_FILE_TYPES } from '@studio/common/constants'; import { __experimentalVStack as VStack, __experimentalHStack as HStack, @@ -11,7 +12,6 @@ import { useI18n } from '@wordpress/react-i18n'; import { useCallback, useRef, useState } from 'react'; import { ErrorIcon } from 'src/components/error-icon'; import { LearnMoreLink } from 'src/components/learn-more'; -import { ACCEPTED_IMPORT_FILE_TYPES } from 'src/constants'; import { cx } from 'src/lib/cx'; const formatFileSize = ( bytes: number ) => { diff --git a/apps/studio/src/modules/add-site/components/pull-remote-site.tsx b/apps/studio/src/modules/add-site/components/pull-remote-site.tsx index a239b61a3e..2b75022f74 100644 --- a/apps/studio/src/modules/add-site/components/pull-remote-site.tsx +++ b/apps/studio/src/modules/add-site/components/pull-remote-site.tsx @@ -16,7 +16,7 @@ import { NoWpcomSitesContent } from 'src/modules/sync/components/no-wpcom-sites- import { SitesListContent } from 'src/modules/sync/components/sync-sites-modal-selector'; import { SyncTabImage } from 'src/modules/sync/components/sync-tab-image'; import { useGetWpComSitesQuery } from 'src/stores/sync/wpcom-sites'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; function 
SiteSyncDescription( { children }: PropsWithChildren ) { const { __ } = useI18n(); diff --git a/apps/studio/src/modules/add-site/index.tsx b/apps/studio/src/modules/add-site/index.tsx index e57ff4abf2..644007c957 100644 --- a/apps/studio/src/modules/add-site/index.tsx +++ b/apps/studio/src/modules/add-site/index.tsx @@ -9,6 +9,7 @@ import { BlueprintValidationWarning, } from '@studio/common/lib/blueprint-validation'; import { SupportedPHPVersion, SupportedPHPVersionsList } from '@studio/common/types/php-versions'; +import { SyncSite } from '@studio/common/types/sync'; import { speak } from '@wordpress/a11y'; import { Navigator, useNavigator } from '@wordpress/components'; import { sprintf } from '@wordpress/i18n'; @@ -21,7 +22,6 @@ import { useIpcListener } from 'src/hooks/use-ipc-listener'; import { useSiteDetails } from 'src/hooks/use-site-details'; import { getIpcApi } from 'src/lib/get-ipc-api'; import { useBlueprintDeeplink } from 'src/modules/add-site/hooks/use-blueprint-deeplink'; -import { SyncSite } from 'src/modules/sync/types'; import { useRootSelector, useAppDispatch, useI18nLocale } from 'src/stores'; import { formatRtkError } from 'src/stores/format-rtk-error'; import { openAddSiteModal, closeAddSiteModal, selectIsAddSiteModalOpen } from 'src/stores/ui-slice'; diff --git a/apps/studio/src/modules/sync/components/sync-connected-sites.tsx b/apps/studio/src/modules/sync/components/sync-connected-sites.tsx index a98a9457b4..e4d1792d7f 100644 --- a/apps/studio/src/modules/sync/components/sync-connected-sites.tsx +++ b/apps/studio/src/modules/sync/components/sync-connected-sites.tsx @@ -46,7 +46,7 @@ import { connectedSitesSelectors, useGetConnectedSitesForLocalSiteQuery, } from 'src/stores/sync/connected-sites'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; const SyncConnectedSiteControls = ( { connectedSite, diff --git a/apps/studio/src/modules/sync/components/sync-dialog.tsx 
b/apps/studio/src/modules/sync/components/sync-dialog.tsx index 0b022ec94c..96075b08b1 100644 --- a/apps/studio/src/modules/sync/components/sync-dialog.tsx +++ b/apps/studio/src/modules/sync/components/sync-dialog.tsx @@ -1,3 +1,4 @@ +import { SYNC_PUSH_SIZE_LIMIT_GB } from '@studio/common/lib/sync/constants'; import { Icon, SelectControl, @@ -17,7 +18,6 @@ import Modal from 'src/components/modal'; import { TwoColorProgressBar } from 'src/components/progress-bar'; import { Tooltip } from 'src/components/tooltip'; import { TreeView, TreeNode, updateNodeById } from 'src/components/tree-view'; -import { SYNC_PUSH_SIZE_LIMIT_GB } from 'src/constants'; import { useGetWpVersion } from 'src/hooks/use-get-wp-version'; import { useIsMultisite } from 'src/hooks/use-is-multisite'; import { cx } from 'src/lib/cx'; @@ -34,7 +34,7 @@ import { useI18nLocale } from 'src/stores'; import { useLatestRewindId, useRemoteFileTree, useLocalFileTree } from 'src/stores/sync'; import { useGetWordPressVersions } from 'src/stores/wordpress-versions-api'; import { TreeViewLoadingSkeleton } from './tree-view-loading-skeleton'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; type SyncDialogProps = { type: 'push' | 'pull'; diff --git a/apps/studio/src/modules/sync/components/sync-sites-modal-selector.tsx b/apps/studio/src/modules/sync/components/sync-sites-modal-selector.tsx index 07de65d3ff..df46e25473 100644 --- a/apps/studio/src/modules/sync/components/sync-sites-modal-selector.tsx +++ b/apps/studio/src/modules/sync/components/sync-sites-modal-selector.tsx @@ -24,7 +24,8 @@ import { useGetConnectedSitesForLocalSiteQuery, } from 'src/stores/sync/connected-sites'; import { useGetWpComSitesQuery } from 'src/stores/sync/wpcom-sites'; -import type { SyncSite, SyncModalMode } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; +import type { SyncModalMode } from 'src/modules/sync/types'; const 
SearchControl = process.env.NODE_ENV === 'test' ? () => null : SearchControlWp; diff --git a/apps/studio/src/modules/sync/constants.ts b/apps/studio/src/modules/sync/constants.ts deleted file mode 100644 index 8fd0cd8e10..0000000000 --- a/apps/studio/src/modules/sync/constants.ts +++ /dev/null @@ -1,11 +0,0 @@ -export const SYNC_EXCLUSIONS = [ - 'database', - 'db.php', - 'debug.log', - 'sqlite-database-integration', - '.DS_Store', - 'Thumbs.db', - '.git', - 'node_modules', - 'cache', -]; diff --git a/apps/studio/src/modules/sync/hooks/use-selected-items-push-size.ts b/apps/studio/src/modules/sync/hooks/use-selected-items-push-size.ts index 7d3f6391ad..25c3c83fbc 100644 --- a/apps/studio/src/modules/sync/hooks/use-selected-items-push-size.ts +++ b/apps/studio/src/modules/sync/hooks/use-selected-items-push-size.ts @@ -1,6 +1,6 @@ +import { SYNC_PUSH_SIZE_LIMIT_BYTES } from '@studio/common/lib/sync/constants'; import { useState, useEffect, useCallback } from 'react'; import { TreeNode } from 'src/components/tree-view'; -import { SYNC_PUSH_SIZE_LIMIT_BYTES } from 'src/constants'; import { getIpcApi } from 'src/lib/get-ipc-api'; const formatFileSize = ( bytes: number ) => { diff --git a/apps/studio/src/modules/sync/index.tsx b/apps/studio/src/modules/sync/index.tsx index b65642cef0..df1c298032 100644 --- a/apps/studio/src/modules/sync/index.tsx +++ b/apps/studio/src/modules/sync/index.tsx @@ -27,7 +27,7 @@ import { useGetConnectedSitesForLocalSiteQuery, } from 'src/stores/sync/connected-sites'; import { useGetWpComSitesQuery } from 'src/stores/sync/wpcom-sites'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; function SiteSyncDescription( { children }: PropsWithChildren ) { const { __ } = useI18n(); diff --git a/apps/studio/src/modules/sync/lib/convert-tree-to-sync-options.ts b/apps/studio/src/modules/sync/lib/convert-tree-to-sync-options.ts index 1a4172a800..6a1d683396 100644 --- 
a/apps/studio/src/modules/sync/lib/convert-tree-to-sync-options.ts +++ b/apps/studio/src/modules/sync/lib/convert-tree-to-sync-options.ts @@ -1,3 +1,4 @@ +import { categorizePath } from '@studio/common/lib/sync/tree-utils'; import { SYNC_OPTIONS } from 'src/constants'; import { PullSiteOptions } from 'src/stores/sync'; import type { TreeNode } from 'src/components/tree-view'; @@ -44,17 +45,7 @@ const convertTreeToSyncCategories = ( const nodePath = node.path.replace( /^\/?wp-content\//, '' ); paths.add( nodePath ); - - // Determine which category this belongs to for optionsToSync - if ( nodePath.startsWith( 'plugins/' ) || nodePath === 'plugins' ) { - options.add( SYNC_OPTIONS.plugins ); - } else if ( nodePath.startsWith( 'themes/' ) || nodePath === 'themes' ) { - options.add( SYNC_OPTIONS.themes ); - } else if ( nodePath.startsWith( 'uploads/' ) || nodePath === 'uploads' ) { - options.add( SYNC_OPTIONS.uploads ); - } else { - options.add( SYNC_OPTIONS.contents ); - } + options.add( categorizePath( nodePath ) ); } return { paths: [ ...paths ], options: [ ...options ] }; diff --git a/apps/studio/src/modules/sync/lib/environment-utils.ts b/apps/studio/src/modules/sync/lib/environment-utils.ts index bd30a8488a..204f82a251 100644 --- a/apps/studio/src/modules/sync/lib/environment-utils.ts +++ b/apps/studio/src/modules/sync/lib/environment-utils.ts @@ -1,6 +1,6 @@ +import { SyncSite } from '@studio/common/types/sync'; import { __ } from '@wordpress/i18n'; import { z } from 'zod'; -import { SyncSite } from 'src/modules/sync/types'; const EnvironmentSchema = z.enum( [ 'production', 'staging', 'development' ] ); export type EnvironmentType = z.infer< typeof EnvironmentSchema >; diff --git a/apps/studio/src/modules/sync/lib/ipc-handlers.ts b/apps/studio/src/modules/sync/lib/ipc-handlers.ts index 6ff3a7f51c..801d710868 100644 --- a/apps/studio/src/modules/sync/lib/ipc-handlers.ts +++ b/apps/studio/src/modules/sync/lib/ipc-handlers.ts @@ -6,6 +6,7 @@ import path from 
'node:path'; import { getCurrentUserId } from '@studio/common/lib/shared-config'; import wpcomFactory from '@studio/common/lib/wpcom-factory'; import wpcomXhrRequest from '@studio/common/lib/wpcom-xhr-request-factory'; +import { SyncSite } from '@studio/common/types/sync'; import { Upload } from 'tus-js-client'; import { z } from 'zod'; import { @@ -20,7 +21,6 @@ import { exportBackup } from 'src/lib/import-export/export/export-manager'; import { ExportOptions } from 'src/lib/import-export/export/types'; import { getAuthenticationToken } from 'src/lib/oauth'; import { keepSqliteIntegrationUpdated } from 'src/lib/sqlite-versions'; -import { SyncSite } from 'src/modules/sync/types'; import { SiteServer } from 'src/site-server'; import { loadUserData, lockAppdata, saveUserData, unlockAppdata } from 'src/storage/user-data'; import { SyncOption } from 'src/types'; diff --git a/apps/studio/src/modules/sync/lib/reconcile-connected-sites.tsx b/apps/studio/src/modules/sync/lib/reconcile-connected-sites.tsx index faea53b67d..1bc0b9e8ed 100644 --- a/apps/studio/src/modules/sync/lib/reconcile-connected-sites.tsx +++ b/apps/studio/src/modules/sync/lib/reconcile-connected-sites.tsx @@ -1,4 +1,4 @@ -import { SyncSite } from 'src/modules/sync/types'; +import { SyncSite } from '@studio/common/types/sync'; /** * Generate updated site data to be stored in `appdata-v1.json`: diff --git a/apps/studio/src/modules/sync/lib/tree-utils.ts b/apps/studio/src/modules/sync/lib/tree-utils.ts index e4a27a1623..18a2b3f6fb 100644 --- a/apps/studio/src/modules/sync/lib/tree-utils.ts +++ b/apps/studio/src/modules/sync/lib/tree-utils.ts @@ -1,39 +1,6 @@ +import { shouldLimitDepth } from '@studio/common/lib/sync/tree-utils'; import { TreeNode } from 'src/components/tree-view'; -import { SYNC_EXCLUSIONS } from '../constants'; -import type { RawDirectoryEntry } from '../types'; - -export const shouldExcludeFromSync = ( itemName: string ): boolean => { - if ( itemName.startsWith( '.' 
) ) { - return true; - } - - if ( SYNC_EXCLUSIONS.includes( itemName ) ) { - return true; - } - - return false; -}; - -export const shouldLimitDepth = ( relativePath: string ): boolean => { - const normalizedPath = relativePath.replace( /^wp-content\//, '' ); - - // Match plugins/plugin-name or plugins/plugin-name/ - if ( normalizedPath.match( /^plugins\/[^/]+\/?$/ ) ) { - return true; - } - - // Match themes/theme-name or themes/theme-name/ - if ( normalizedPath.match( /^themes\/[^/]+\/?$/ ) ) { - return true; - } - - // Match mu-plugins/mu-plugin or mu-plugins/mu-plugin/ - if ( normalizedPath.match( /^mu-plugins\/[^/]+\/?$/ ) ) { - return true; - } - - return false; -}; +import type { RawDirectoryEntry } from '@studio/common/types/sync-tree'; export const convertRawToTreeNodes = ( rawNodes: RawDirectoryEntry[] ): TreeNode[] => { const pluginRegex = /^plugins\/[^/]+$/; diff --git a/apps/studio/src/modules/sync/tests/index.test.tsx b/apps/studio/src/modules/sync/tests/index.test.tsx index 0a92ff1531..9480077470 100644 --- a/apps/studio/src/modules/sync/tests/index.test.tsx +++ b/apps/studio/src/modules/sync/tests/index.test.tsx @@ -1,4 +1,5 @@ // To run tests, execute `npm run test -- src/modules/sync/tests/index.test.tsx` from the root directory +import { SyncSite } from '@studio/common/types/sync'; import { render, screen, fireEvent } from '@testing-library/react'; import { Provider } from 'react-redux'; import { vi } from 'vitest'; @@ -9,7 +10,6 @@ import { useFeatureFlags } from 'src/hooks/use-feature-flags'; import { getIpcApi } from 'src/lib/get-ipc-api'; import { ContentTabSync } from 'src/modules/sync'; import { useSelectedItemsPushSize } from 'src/modules/sync/hooks/use-selected-items-push-size'; -import { SyncSite } from 'src/modules/sync/types'; import { store } from 'src/stores'; import { syncOperationsActions, useLatestRewindId, useRemoteFileTree } from 'src/stores/sync'; import { useGetWpComSitesQuery } from 'src/stores/sync/wpcom-sites'; diff --git 
a/apps/studio/src/modules/sync/tests/use-selected-items-push-size.test.ts b/apps/studio/src/modules/sync/tests/use-selected-items-push-size.test.ts index 766745a13f..52bb604ac8 100644 --- a/apps/studio/src/modules/sync/tests/use-selected-items-push-size.test.ts +++ b/apps/studio/src/modules/sync/tests/use-selected-items-push-size.test.ts @@ -1,7 +1,7 @@ +import { SYNC_PUSH_SIZE_LIMIT_BYTES } from '@studio/common/lib/sync/constants'; import { renderHook, waitFor } from '@testing-library/react'; import { vi } from 'vitest'; import { TreeNode } from 'src/components/tree-view'; -import { SYNC_PUSH_SIZE_LIMIT_BYTES } from 'src/constants'; import { getIpcApi } from 'src/lib/get-ipc-api'; import { useSelectedItemsPushSize } from 'src/modules/sync/hooks/use-selected-items-push-size'; diff --git a/apps/studio/src/modules/sync/types.ts b/apps/studio/src/modules/sync/types.ts index f2ea55a873..cf0c65f4e8 100644 --- a/apps/studio/src/modules/sync/types.ts +++ b/apps/studio/src/modules/sync/types.ts @@ -1,30 +1 @@ -export type RawDirectoryEntry = { - name: string; - isDirectory: boolean; - path: string; - children?: RawDirectoryEntry[]; -}; - export type SyncModalMode = 'push' | 'pull' | 'connect'; - -export type SyncSupport = - | 'unsupported' - | 'syncable' - | 'needs-transfer' - | 'already-connected' - | 'needs-upgrade' - | 'deleted' - | 'missing-permissions'; - -export type SyncSite = { - id: number; - localSiteId: string; - name: string; - url: string; - isStaging: boolean; - isPressable: boolean; - environmentType?: string | null; - syncSupport: SyncSupport; - lastPullTimestamp: string | null; - lastPushTimestamp: string | null; -}; diff --git a/apps/studio/src/storage/storage-types.ts b/apps/studio/src/storage/storage-types.ts index 8579ade614..d99f2b2e15 100644 --- a/apps/studio/src/storage/storage-types.ts +++ b/apps/studio/src/storage/storage-types.ts @@ -1,6 +1,6 @@ import { StatsMetric } from 'src/lib/bump-stats'; import { SupportedEditor } from 
'src/modules/user-settings/lib/editor'; -import type { SyncSite } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; import type { SupportedTerminal } from 'src/modules/user-settings/lib/terminal'; export interface WindowBounds { diff --git a/apps/studio/src/stores/sync/connected-sites.ts b/apps/studio/src/stores/sync/connected-sites.ts index 4342eafc2c..53786891d1 100644 --- a/apps/studio/src/stores/sync/connected-sites.ts +++ b/apps/studio/src/stores/sync/connected-sites.ts @@ -2,7 +2,8 @@ import { createSlice, PayloadAction } from '@reduxjs/toolkit'; import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react'; import { getIpcApi } from 'src/lib/get-ipc-api'; import { RootState } from 'src/stores'; -import type { SyncSite, SyncModalMode } from 'src/modules/sync/types'; +import type { SyncSite } from '@studio/common/types/sync'; +import type { SyncModalMode } from 'src/modules/sync/types'; type ConnectedSitesState = { isModalOpen: boolean; diff --git a/apps/studio/src/stores/sync/index.ts b/apps/studio/src/stores/sync/index.ts index df83eb1f79..ec8bcadc48 100644 --- a/apps/studio/src/stores/sync/index.ts +++ b/apps/studio/src/stores/sync/index.ts @@ -14,4 +14,3 @@ export type { SyncPushState, PushStates, } from './sync-operations-slice'; -export * from './sync-types'; diff --git a/apps/studio/src/stores/sync/sync-api.ts b/apps/studio/src/stores/sync/sync-api.ts index dc954efe5b..3073849bfd 100644 --- a/apps/studio/src/stores/sync/sync-api.ts +++ b/apps/studio/src/stores/sync/sync-api.ts @@ -1,14 +1,13 @@ import { createAsyncThunk } from '@reduxjs/toolkit'; +import { + backupLsItemSchema, + backupLsResponseSchema, + latestRewindIdResponseSchema, +} from '@studio/common/types/sync-tree'; import { TreeNode } from 'src/components/tree-view'; import { SYNC_OPTIONS } from 'src/constants'; import { wpcomApi } from 'src/stores/wpcom-api'; -import { - BackupLsItemSchema, - BackupLsRequest, - BackupLsResponseSchema, - 
LatestRewindIdResponseSchema, - type BackupLsItem, -} from './sync-types'; +import type { BackupLsItem, BackupLsRequest } from '@studio/common/types/sync-tree'; const getParentFolder = ( parentPath: string ) => { return parentPath.split( '/' ).filter( Boolean ).pop() ?? ''; @@ -63,7 +62,7 @@ const syncApi = wpcomApi.injectEndpoints( { apiNamespace: 'wpcom/v2', } ), transformResponse: ( response: unknown ) => { - const validationResult = LatestRewindIdResponseSchema.safeParse( { + const validationResult = latestRewindIdResponseSchema.safeParse( { body: response, status: 200, } ); @@ -123,7 +122,7 @@ export const fetchRemoteFileTree = createAsyncThunk( throw new Error( errorMessage ); } - const validationResult = BackupLsResponseSchema.shape.body.safeParse( rawResponse ); + const validationResult = backupLsResponseSchema.shape.body.safeParse( rawResponse ); if ( ! validationResult.success ) { console.error( 'Invalid response format:', validationResult.error ); throw new Error( 'Invalid response format from server' ); @@ -136,7 +135,7 @@ export const fetchRemoteFileTree = createAsyncThunk( const children: TreeNode[] = []; for ( const [ name, rawItem ] of Object.entries( response.contents ) ) { - const itemValidation = BackupLsItemSchema.safeParse( rawItem ); + const itemValidation = backupLsItemSchema.safeParse( rawItem ); if ( itemValidation.success ) { const node = convertBackupItemToTreeNode( name, itemValidation.data, path, parentChecked ); children.push( node ); diff --git a/apps/studio/src/stores/sync/sync-operations-slice.ts b/apps/studio/src/stores/sync/sync-operations-slice.ts index 2e7dd736ed..0479e6c27a 100644 --- a/apps/studio/src/stores/sync/sync-operations-slice.ts +++ b/apps/studio/src/stores/sync/sync-operations-slice.ts @@ -1,18 +1,25 @@ import { createSlice, createAsyncThunk, PayloadAction } from '@reduxjs/toolkit'; import * as Sentry from '@sentry/electron/renderer'; +import { + SYNC_PUSH_SIZE_LIMIT_BYTES, + SYNC_PUSH_SIZE_LIMIT_GB, +} from 
'@studio/common/lib/sync/constants'; +import { + pullSiteResponseSchema, + syncBackupResponseSchema, + importResponseSchema, +} from '@studio/common/types/sync'; import { __, sprintf } from '@wordpress/i18n'; -import { z } from 'zod'; -import { SYNC_PUSH_SIZE_LIMIT_BYTES, SYNC_PUSH_SIZE_LIMIT_GB } from 'src/constants'; import { generateStateId } from 'src/hooks/sync-sites/use-pull-push-states'; import { getIpcApi } from 'src/lib/get-ipc-api'; import { getHostnameFromUrl } from 'src/lib/url-utils'; import { store } from 'src/stores'; import { connectedSitesApi } from 'src/stores/sync/connected-sites'; +import type { ImportResponse, SyncSite } from '@studio/common/types/sync'; import type { PullStateProgressInfo, PushStateProgressInfo, } from 'src/hooks/use-sync-states-progress-info'; -import type { SyncSite } from 'src/modules/sync/types'; import type { AppDispatch, RootState } from 'src/stores'; import type { SyncOption } from 'src/types'; import type { WPCOM } from 'wpcom/types'; @@ -482,49 +489,6 @@ type PullSiteResult = { remoteSiteId: number; }; -const pullSiteResponseSchema = z.object( { - success: z.boolean(), - backup_id: z.number(), -} ); - -const importFailedResponseSchema = z.object( { - status: z.literal( 'failed' ), - success: z.boolean(), - error: z.string(), - error_data: z - .object( { - vp_restore_status: z.string().nullable(), - vp_restore_message: z.string().nullable(), - vp_rewind_id: z.string().nullable(), - } ) - .nullable(), -} ); - -const importWorkingResponseSchema = z.object( { - status: z.enum( [ - 'started', - 'initial_backup_started', - 'initial_backup_finished', - 'archive_import_started', - 'archive_import_finished', - 'finished', - ] ), - success: z.boolean(), - backup_progress: z.number().nullable(), - import_progress: z.number().nullable(), -} ); - -const importResponseSchema = z.discriminatedUnion( 'status', [ - importWorkingResponseSchema, - importFailedResponseSchema, -] ); - -const syncBackupResponseSchema = z.object( { - 
status: z.enum( [ 'in-progress', 'finished', 'failed' ] ), - download_url: z.string().nullable().optional(), - percent: z.number(), -} ); - export const pullSiteThunk = createTypedAsyncThunk< PullSiteResult, PullSitePayload >( 'syncOperations/pullSite', async ( { client, connectedSite, selectedSite, options }, { dispatch, rejectWithValue } ) => { @@ -599,8 +563,6 @@ type PollPushProgressPayload = { remoteSiteId: number; }; -type ImportResponse = z.infer< typeof importResponseSchema >; - const pollPushProgressThunk = createTypedAsyncThunk( 'syncOperations/pollPushProgress', async ( diff --git a/apps/studio/src/stores/sync/sync-types.ts b/apps/studio/src/stores/sync/sync-types.ts deleted file mode 100644 index a88d32dce9..0000000000 --- a/apps/studio/src/stores/sync/sync-types.ts +++ /dev/null @@ -1,35 +0,0 @@ -import { z } from 'zod'; - -export const LatestRewindIdResponseSchema = z.object( { - body: z.object( { - success: z.boolean(), - rewind_id: z.string(), - } ), - status: z.number(), -} ); - -export const BackupLsItemSchema = z.object( { - type: z.string(), - has_children: z.boolean().optional(), - period: z.string().optional(), - id: z.string(), - manifest_path: z.string().optional(), -} ); - -export const BackupLsResponseSchema = z.object( { - body: z.object( { - ok: z.boolean(), - error: z.string(), - contents: z.record( z.string(), BackupLsItemSchema ), - } ), - status: z.number(), - headers: z.object( { - Allow: z.string(), - } ), -} ); - -export type BackupLsItem = z.infer< typeof BackupLsItemSchema >; -export type BackupLsRequest = { - backup_id: string; - path: string; -}; diff --git a/apps/studio/src/stores/sync/wpcom-sites.ts b/apps/studio/src/stores/sync/wpcom-sites.ts index 9d1f634b56..ff7eb96d83 100644 --- a/apps/studio/src/stores/sync/wpcom-sites.ts +++ b/apps/studio/src/stores/sync/wpcom-sites.ts @@ -1,121 +1,16 @@ import { createApi, fetchBaseQuery } from '@reduxjs/toolkit/query/react'; import * as Sentry from '@sentry/electron/renderer'; 
-import { z } from 'zod'; +import { getSyncSupport } from '@studio/common/lib/sync/sync-support'; +import { + transformSingleSiteResponse, + transformSitesResponse, +} from '@studio/common/lib/sync/transform-sites'; +import { sitesEndpointSiteSchema, sitesEndpointResponseSchema } from '@studio/common/types/sync'; import { getIpcApi } from 'src/lib/get-ipc-api'; import { reconcileConnectedSites } from 'src/modules/sync/lib/reconcile-connected-sites'; -import { getSyncSupport, isPressableSite } from 'src/modules/sync/lib/sync-support'; import { withOfflineCheck } from 'src/stores/utils/with-offline-check'; import { getWpcomClient } from 'src/stores/wpcom-api'; -import type { SyncSite, SyncSupport } from 'src/modules/sync/types'; - -// Schema for WordPress.com sites endpoint -const sitesEndpointSiteSchema = z.object( { - ID: z.number(), - is_wpcom_atomic: z.boolean(), - name: z.string(), - URL: z.string(), - jetpack: z.boolean().optional(), - is_deleted: z.boolean(), - hosting_provider_guess: z.string().optional(), - environment_type: z - .enum( [ 'production', 'staging', 'development', 'sandbox', 'local' ] ) - .nullable() - .optional(), - is_a8c: z.boolean().optional(), - options: z - .object( { - created_at: z.string(), - wpcom_staging_blog_ids: z.array( z.number() ), - } ) - .optional(), - capabilities: z - .object( { - manage_options: z.boolean(), - } ) - .optional(), - plan: z - .object( { - expired: z.boolean().optional(), - features: z.object( { - active: z.array( z.string() ), - available: z.record( z.string(), z.array( z.string() ) ).optional(), - } ), - is_free: z.boolean().optional(), - product_id: z.coerce.number(), - product_name_short: z.string(), - product_slug: z.string(), - user_is_owner: z.boolean().optional(), - } ) - .optional(), -} ); - -export type SitesEndpointSite = z.infer< typeof sitesEndpointSiteSchema >; - -// We use a permissive schema for the API response to fail gracefully if a single site is malformed -const sitesEndpointResponseSchema 
= z.object( { - sites: z.array( z.unknown() ), -} ); - -function transformSingleSiteResponse( - site: SitesEndpointSite, - syncSupport: SyncSupport, - isStaging: boolean -): SyncSite { - return { - id: site.ID, - localSiteId: '', - name: site.name, - url: site.URL, - isStaging, - isPressable: isPressableSite( site ), - environmentType: site.environment_type, - syncSupport, - lastPullTimestamp: null, - lastPushTimestamp: null, - }; -} - -/** - * Transforms the WordPress.com sites API response into SyncSite objects. - * - * @param sites - Raw site data from the WordPress.com API - * @param connectedSiteIds - Optional IDs of sites already connected to the current local site. - * When provided, used to: 1) keep deleted sites in the list if they're connected, and - * 2) determine sync support status (already-connected vs syncable). - * When not provided, no filtering based on connected sites is applied. - */ -function transformSitesResponse( sites: unknown[], connectedSiteIds?: number[] ): SyncSite[] { - const validatedSites = sites.reduce< SitesEndpointSite[] >( ( acc, rawSite ) => { - try { - const site = sitesEndpointSiteSchema.parse( rawSite ); - return [ ...acc, site ]; - } catch ( error ) { - Sentry.captureException( error ); - return acc; - } - }, [] ); - - const allStagingSiteIds = validatedSites.flatMap( ( site ) => { - return site.options?.wpcom_staging_blog_ids ?? []; - } ); - - return validatedSites - .filter( ( site ) => ! site.is_a8c ) - .filter( - // Filter out deleted sites, except if they're in the connectedSiteIds list - ( site ) => - ! site.is_deleted || - ( connectedSiteIds && connectedSiteIds.some( ( id ) => id === site.ID ) ) - ) - .map( ( site ) => { - // The API returns the wrong value for the `is_wpcom_staging_site` prop while staging sites - // are being created. Hence the check in other sites' `wpcom_staging_blog_ids` arrays. 
- const isStaging = allStagingSiteIds.includes( site.ID ); - const syncSupport = getSyncSupport( site, connectedSiteIds ?? [] ); - - return transformSingleSiteResponse( site, syncSupport, isStaging ); - } ); -} +import type { SyncSite } from '@studio/common/types/sync'; const SITE_FIELDS = [ 'name', @@ -214,10 +109,12 @@ export const wpcomSitesApi = createApi( { const parsedResponse = sitesEndpointResponseSchema.parse( response ); - const syncSitesForReconciliation = transformSitesResponse( - parsedResponse.sites, - allConnectedSites.map( ( { id } ) => id ) - ); + const sentryOptions = { onParseError: Sentry.captureException }; + + const syncSitesForReconciliation = transformSitesResponse( parsedResponse.sites, { + connectedSiteIds: allConnectedSites.map( ( { id } ) => id ), + ...sentryOptions, + } ); const { updatedConnectedSites } = reconcileConnectedSites( allConnectedSites, @@ -225,10 +122,10 @@ export const wpcomSitesApi = createApi( { ); await getIpcApi().updateConnectedWpcomSites( updatedConnectedSites ); - const syncSitesForSelectedSite = transformSitesResponse( - parsedResponse.sites, - connectedSiteIds - ); + const syncSitesForSelectedSite = transformSitesResponse( parsedResponse.sites, { + connectedSiteIds, + ...sentryOptions, + } ); return { data: syncSitesForSelectedSite }; } catch ( error ) { diff --git a/package-lock.json b/package-lock.json index 91a3911190..b79e45bc15 100644 --- a/package-lock.json +++ b/package-lock.json @@ -65,18 +65,22 @@ "@wp-playground/common": "3.1.18", "@wp-playground/storage": "3.1.18", "@wp-playground/wordpress": "3.1.18", + "archiver": "^6.0.2", "atomically": "^2.1.1", "chalk": "^5.6.2", "cli-table3": "^0.6.5", + "fs-extra": "^11.3.4", "http-proxy": "^1.18.1", "node-forge": "^1.3.3", "ora": "^8.2.0", "patch-package": "^8.0.1", "playwright": "^1.52.0", "semver": "^7.7.4", + "tar": "^7.5.13", "trash": "^10.0.1", "yargs": "^18.0.0", "yargs-parser": "^22.0.0", + "yauzl": "^3.3.0", "zod": "^4.3.6" }, "bin": { @@ -88,6 +92,7 @@ 
"@types/http-proxy": "^1.17.17", "@types/node-forge": "^1.3.14", "@types/yargs": "^17.0.35", + "glob": "^13.0.6", "vite": "^7.3.1", "vite-plugin-static-copy": "^3.1.5" }, @@ -6274,6 +6279,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-1.1.2.tgz", "integrity": "sha512-q6XAnWQDIMA3+FTiOYajoYqySkO+JSat0ytXGSuRdq9uXE7o92gzuQwQM14xaCRlBLGq3v5miDGC4vkVTn54xA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6290,6 +6296,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/buffers/-/buffers-17.67.0.tgz", "integrity": "sha512-tfExRpYxBvi32vPs9ZHaTjSP4fHAfzSmcahOfNxtvGHcyJel+aibkPlGeBB+7AoC6hL7lXIE++8okecBxx7lcw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6306,6 +6313,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/codegen/-/codegen-1.0.0.tgz", "integrity": "sha512-E8Oy+08cmCf0EK/NMxpaJZmOxPqM+6iSe2S4nlSBrPZOORoDJILxtbSUEDKQyTamm/BVAhIGllOBNU79/dwf0g==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6322,6 +6330,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-core/-/fs-core-4.57.1.tgz", "integrity": "sha512-YrEi/ZPmgc+GfdO0esBF04qv8boK9Dg9WpRQw/+vM8Qt3nnVIJWIa8HwZ/LXVZ0DB11XUROM8El/7yYTJX+WtA==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/fs-node-builtins": "4.57.1", "@jsonjoy.com/fs-node-utils": "4.57.1", @@ -6343,6 +6352,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-fsa/-/fs-fsa-4.57.1.tgz", "integrity": "sha512-ooEPvSW/HQDivPDPZMibHGKZf/QS4WRir1czGZmXmp3MsQqLECZEpN0JobrD8iV9BzsuwdIv+PxtWX9WpPLsIA==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/fs-core": "4.57.1", "@jsonjoy.com/fs-node-builtins": "4.57.1", @@ -6365,6 +6375,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node/-/fs-node-4.57.1.tgz", "integrity": "sha512-3YaKhP8gXEKN+2O49GLNfNb5l2gbnCFHyAaybbA2JkkbQP3dpdef7WcUaHAulg/c5Dg4VncHsA3NWAUSZMR5KQ==", "dev": true, + "license": 
"Apache-2.0", "dependencies": { "@jsonjoy.com/fs-core": "4.57.1", "@jsonjoy.com/fs-node-builtins": "4.57.1", @@ -6390,6 +6401,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node-builtins/-/fs-node-builtins-4.57.1.tgz", "integrity": "sha512-XHkFKQ5GSH3uxm8c3ZYXVrexGdscpWKIcMWKFQpMpMJc8gA3AwOMBJXJlgpdJqmrhPyQXxaY9nbkNeYpacC0Og==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6406,6 +6418,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node-to-fsa/-/fs-node-to-fsa-4.57.1.tgz", "integrity": "sha512-pqGHyWWzNck4jRfaGV39hkqpY5QjRUQ/nRbNT7FYbBa0xf4bDG+TE1Gt2KWZrSkrkZZDE3qZUjYMbjwSliX6pg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/fs-fsa": "4.57.1", "@jsonjoy.com/fs-node-builtins": "4.57.1", @@ -6427,6 +6440,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-node-utils/-/fs-node-utils-4.57.1.tgz", "integrity": "sha512-vp+7ZzIB8v43G+GLXTS4oDUSQmhAsRz532QmmWBbdYA20s465JvwhkSFvX9cVTqRRAQg+vZ7zWDaIEh0lFe2gw==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/fs-node-builtins": "4.57.1" }, @@ -6446,6 +6460,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-print/-/fs-print-4.57.1.tgz", "integrity": "sha512-Ynct7ZJmfk6qoXDOKfpovNA36ITUx8rChLmRQtW08J73VOiuNsU8PB6d/Xs7fxJC2ohWR3a5AqyjmLojfrw5yw==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/fs-node-utils": "4.57.1", "tree-dump": "^1.1.0" @@ -6466,6 +6481,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/fs-snapshot/-/fs-snapshot-4.57.1.tgz", "integrity": "sha512-/oG8xBNFMbDXTq9J7vepSA1kerS5vpgd3p5QZSPd+nX59uwodGJftI51gDYyHRpP57P3WCQf7LHtBYPqwUg2Bg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/buffers": "^17.65.0", "@jsonjoy.com/fs-node-utils": "4.57.1", @@ -6488,6 +6504,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/base64/-/base64-17.67.0.tgz", "integrity": 
"sha512-5SEsJGsm15aP8TQGkDfJvz9axgPwAEm98S5DxOuYe8e1EbfajcDmgeXXzccEjh+mLnjqEKrkBdjHWS5vFNwDdw==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6504,6 +6521,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/codegen/-/codegen-17.67.0.tgz", "integrity": "sha512-idnkUplROpdBOV0HMcwhsCUS5TRUi9poagdGs70A6S4ux9+/aPuKbh8+UYRTLYQHtXvAdNfQWXDqZEx5k4Dj2Q==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6520,6 +6538,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pack/-/json-pack-17.67.0.tgz", "integrity": "sha512-t0ejURcGaZsn1ClbJ/3kFqSOjlryd92eQY465IYrezsXmPcfHPE/av4twRSxf6WE+TkZgLY+71vCZbiIiFKA/w==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/base64": "17.67.0", "@jsonjoy.com/buffers": "17.67.0", @@ -6546,6 +6565,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pointer/-/json-pointer-17.67.0.tgz", "integrity": "sha512-+iqOFInH+QZGmSuaybBUNdh7yvNrXvqR+h3wjXm0N/3JK1EyyFAeGJvqnmQL61d1ARLlk/wJdFKSL+LHJ1eaUA==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/util": "17.67.0" }, @@ -6565,6 +6585,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/util/-/util-17.67.0.tgz", "integrity": "sha512-6+8xBaz1rLSohlGh68D1pdw3AwDi9xydm8QNlAFkvnavCJYSze+pxoW2VKP8p308jtlMRLs5NTHfPlZLd4w7ew==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/buffers": "17.67.0", "@jsonjoy.com/codegen": "17.67.0" @@ -6585,6 +6606,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pack/-/json-pack-1.21.0.tgz", "integrity": "sha512-+AKG+R2cfZMShzrF2uQw34v3zbeDYUqnQ+jg7ORic3BGtfw9p/+N6RJbq/kkV8JmYZaINknaEQ2m0/f693ZPpg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/base64": "^1.1.2", "@jsonjoy.com/buffers": "^1.2.0", @@ -6611,6 +6633,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/buffers/-/buffers-1.2.1.tgz", "integrity": 
"sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -6627,6 +6650,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/json-pointer/-/json-pointer-1.0.2.tgz", "integrity": "sha512-Fsn6wM2zlDzY1U+v4Nc8bo3bVqgfNTGcn6dMgs6FjrEnt4ZCe60o6ByKRjOGlI2gow0aE/Q41QOigdTqkyK5fg==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/codegen": "^1.0.0", "@jsonjoy.com/util": "^1.9.0" @@ -6647,6 +6671,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/util/-/util-1.9.0.tgz", "integrity": "sha512-pLuQo+VPRnN8hfPqUTLTHk126wuYdXVxE6aDmjSeV4NCAgyxWbiOIeNJVtID3h1Vzpoi9m4jXezf73I6LgabgQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/buffers": "^1.0.0", "@jsonjoy.com/codegen": "^1.0.0" @@ -6667,6 +6692,7 @@ "resolved": "https://registry.npmjs.org/@jsonjoy.com/buffers/-/buffers-1.2.1.tgz", "integrity": "sha512-12cdlDwX4RUM3QxmUbVJWqZ/mrK6dFQH4Zxq6+r1YXKXYBNgZXndx2qbCJwh3+WWkCSn67IjnlG3XYTvmvYtgA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -17516,9 +17542,9 @@ } }, "node_modules/fs-extra": { - "version": "11.3.3", - "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.3.tgz", - "integrity": "sha512-VWSRii4t0AFm6ixFFmLLx1t7wS1gh+ckoa84aOeapGum0h+EZd1EhEumSB+ZdDLnEPuucsVB9oB7cxJHap6Afg==", + "version": "11.3.4", + "resolved": "https://registry.npmjs.org/fs-extra/-/fs-extra-11.3.4.tgz", + "integrity": "sha512-CTXd6rk/M3/ULNQj8FBqBWHYBVYybQ3VPBw0xGKFe3tuH7ytT6ACnvzpIQ3UZtB8yvUKC2cXn1a+x+5EVQLovA==", "license": "MIT", "dependencies": { "graceful-fs": "^4.2.0", @@ -17804,6 +17830,7 @@ "resolved": "https://registry.npmjs.org/glob-to-regex.js/-/glob-to-regex.js-1.2.0.tgz", "integrity": "sha512-QMwlOQKU/IzqMUOAZWubUOT8Qft+Y0KQWnX9nK3ch0CJg0tTp4TvGZsTfudYKv2NzoQSyPcnA6TYeIQ3jGichQ==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -18402,6 
+18429,7 @@ "resolved": "https://registry.npmjs.org/hyperdyperid/-/hyperdyperid-1.2.0.tgz", "integrity": "sha512-Y93lCzHYgGWdrJ66yIktxiaGULYc6oGiABxhcO5AufBeOyoIdZF7bIfLaOrbM0iGIOXQQgxxRrFEnb+Y6w1n4A==", "dev": true, + "license": "MIT", "engines": { "node": ">=10.18" } @@ -20287,6 +20315,7 @@ "resolved": "https://registry.npmjs.org/memfs/-/memfs-4.57.1.tgz", "integrity": "sha512-WvzrWPwMQT+PtbX2Et64R4qXKK0fj/8pO85MrUCzymX3twwCiJCdvntW3HdhG1teLJcHDDLIKx5+c3HckWYZtQ==", "dev": true, + "license": "Apache-2.0", "dependencies": { "@jsonjoy.com/fs-core": "4.57.1", "@jsonjoy.com/fs-fsa": "4.57.1", @@ -24748,9 +24777,9 @@ } }, "node_modules/tar": { - "version": "7.5.11", - "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.11.tgz", - "integrity": "sha512-ChjMH33/KetonMTAtpYdgUFr0tbz69Fp2v7zWxQfYZX4g5ZN2nOBXm1R2xyA+lMIKrLKIoKAwFj93jE/avX9cQ==", + "version": "7.5.13", + "resolved": "https://registry.npmjs.org/tar/-/tar-7.5.13.tgz", + "integrity": "sha512-tOG/7GyXpFevhXVh8jOPJrmtRpOTsYqUIkVdVooZYJS/z8WhfQUX8RJILmeuJNinGAMSu1veBr4asSHFt5/hng==", "license": "BlueOak-1.0.0", "dependencies": { "@isaacs/fs-minipass": "^4.0.0", @@ -24993,6 +25022,7 @@ "resolved": "https://registry.npmjs.org/thingies/-/thingies-2.6.0.tgz", "integrity": "sha512-rMHRjmlFLM1R96UYPvpmnc3LYtdFrT33JIB7L9hetGue1qAPfn1N2LJeEjxUSidu1Iku+haLZXDuEXUHNGO/lg==", "dev": true, + "license": "MIT", "engines": { "node": ">=10.18" }, @@ -25247,6 +25277,7 @@ "resolved": "https://registry.npmjs.org/tree-dump/-/tree-dump-1.1.0.tgz", "integrity": "sha512-rMuvhU4MCDbcbnleZTFezWsaZXRFemSqAM+7jPnzUl1fo9w3YEKOxAeui0fz3OI4EU4hf23iyA7uQRVko+UaBA==", "dev": true, + "license": "Apache-2.0", "engines": { "node": ">=10.0" }, @@ -27236,9 +27267,9 @@ } }, "node_modules/yauzl": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-3.2.1.tgz", - "integrity": "sha512-k1isifdbpNSFEHFJ1ZY4YDewv0IH9FR61lDetaRMD3j2ae3bIXGV+7c+LHCqtQGofSd8PIyV4X6+dHMAnSr60A==", + "version": "3.3.0", + "resolved": 
"https://registry.npmjs.org/yauzl/-/yauzl-3.3.0.tgz", + "integrity": "sha512-PtGEvEP30p7sbIBJKUBjUnqgTVOyMURc4dLo9iNyAJnNIEz9pm88cCXF21w94Kg3k6RXkeZh5DHOGS0qEONvNQ==", "license": "MIT", "dependencies": { "buffer-crc32": "~0.2.3", diff --git a/tools/common/constants.ts b/tools/common/constants.ts index c9f3c9134a..ef04d514b4 100644 --- a/tools/common/constants.ts +++ b/tools/common/constants.ts @@ -39,3 +39,19 @@ export const MINIMUM_WORDPRESS_VERSION = '6.2.1' as const; // https://wordpress. export const DEFAULT_WORDPRESS_VERSION = 'latest' as const; export const DEFAULT_PHP_VERSION: typeof RecommendedPHPVersion = RecommendedPHPVersion; export const SQLITE_FILENAME = 'sqlite-database-integration' as const; + +// Import file constants +export const ACCEPTED_IMPORT_FILE_TYPES = [ '.zip', '.gz', '.gzip', '.tar', '.tar.gz', '.wpress' ]; + +// Archiver options +export const ARCHIVER_OPTIONS = { + zip: { + zlib: { level: 9 }, + followSymlinks: true, + }, + tar: { + gzip: true, + gzipOptions: { level: 9 }, + followSymlinks: true, + }, +}; diff --git a/apps/studio/src/lib/serialize-plugins.ts b/tools/common/lib/serialize-plugins.ts similarity index 100% rename from apps/studio/src/lib/serialize-plugins.ts rename to tools/common/lib/serialize-plugins.ts diff --git a/tools/common/lib/sync/constants.ts b/tools/common/lib/sync/constants.ts new file mode 100644 index 0000000000..28d55f5439 --- /dev/null +++ b/tools/common/lib/sync/constants.ts @@ -0,0 +1,16 @@ +export const SYNC_POLL_INTERVAL_MS = 3000; +export const SYNC_MAX_STALLED_ATTEMPTS = 200; +export const SYNC_PUSH_SIZE_LIMIT_GB = 5; +export const SYNC_PUSH_SIZE_LIMIT_BYTES = SYNC_PUSH_SIZE_LIMIT_GB * 1024 * 1024 * 1024; // 5GB + +export const SYNC_EXCLUSIONS = [ + 'database', + 'db.php', + 'debug.log', + 'sqlite-database-integration', + '.DS_Store', + 'Thumbs.db', + '.git', + 'node_modules', + 'cache', +]; diff --git a/tools/common/lib/sync/sync-api.ts b/tools/common/lib/sync/sync-api.ts new file mode 100644 index 
0000000000..1c946bd0bd --- /dev/null +++ b/tools/common/lib/sync/sync-api.ts @@ -0,0 +1,235 @@ +import fs from 'fs'; +import { Readable } from 'stream'; +import { z } from 'zod'; +import wpcomFactory from '@studio/common/lib/wpcom-factory'; +import wpcomXhrRequest from '@studio/common/lib/wpcom-xhr-request-factory'; +import { + sitesEndpointResponseSchema, + pullSiteResponseSchema, + syncBackupResponseSchema, + importResponseSchema, +} from '@studio/common/types/sync'; +import { backupLsItemSchema, backupLsResponseBodySchema } from '@studio/common/types/sync-tree'; +import { transformSitesResponse } from './transform-sites'; +import type { SyncSite, ImportResponse, SyncOption } from '@studio/common/types/sync'; +import type { BackupLsItem } from '@studio/common/types/sync-tree'; + +const SITE_FIELDS = [ + 'name', + 'ID', + 'URL', + 'plan', + 'capabilities', + 'is_wpcom_atomic', + 'options', + 'jetpack', + 'is_deleted', + 'is_a8c', + 'hosting_provider_guess', + 'environment_type', +].join( ',' ); + +export async function fetchSyncableSites( token: string ): Promise< SyncSite[] > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const rawResponse = await wpcom.req.get( + { + apiNamespace: 'rest/v1.2', + path: '/me/sites', + }, + { + fields: SITE_FIELDS, + filter: 'atomic,wpcom', + options: 'created_at,wpcom_staging_blog_ids', + site_activity: 'active', + } + ); + + const parsed = sitesEndpointResponseSchema.parse( rawResponse ); + return transformSitesResponse( parsed.sites ); +} + +export async function initiateBackup( + token: string, + remoteSiteId: number, + options: { optionsToSync: SyncOption[]; includePathList?: string[] } +): Promise< number > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const body: { options: SyncOption[]; include_path_list?: string[] } = { + options: options.optionsToSync, + include_path_list: options.includePathList, + }; + + const rawResponse = await wpcom.req.post( { + path: `/sites/${ remoteSiteId 
}/studio-app/sync/backup`, + apiNamespace: 'wpcom/v2', + body, + } ); + + const response = pullSiteResponseSchema.parse( rawResponse ); + if ( ! response.success ) { + throw new Error( 'Backup request failed' ); + } + + return response.backup_id; +} + +export type BackupStatus = { + status: 'in-progress' | 'finished' | 'failed'; + downloadUrl: string | null; + percent: number; +}; + +export async function pollBackupStatus( + token: string, + remoteSiteId: number, + backupId: number +): Promise< BackupStatus > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const rawResponse = await wpcom.req.get( `/sites/${ remoteSiteId }/studio-app/sync/backup`, { + apiNamespace: 'wpcom/v2', + backup_id: backupId, + } ); + + const response = syncBackupResponseSchema.parse( rawResponse ); + return { + status: response.status, + downloadUrl: response.download_url ?? null, + percent: response.percent, + }; +} + +export async function initiateImport( + token: string, + remoteSiteId: number, + attachmentId: string, + options?: { optionsToSync?: SyncOption[]; specificSelectionPaths?: string[] } +): Promise< void > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const formData: [ string, unknown, Record< string, string >? 
][] = []; + formData.push( [ 'import_attachment_id', attachmentId ] ); + + if ( options?.specificSelectionPaths?.length ) { + formData.push( [ 'list_sync_items', options.specificSelectionPaths.join( ',' ) ] ); + } + + if ( options?.optionsToSync ) { + formData.push( [ 'options', options.optionsToSync.join( ',' ) ] ); + } + + await wpcom.req.post( { + path: `/sites/${ remoteSiteId }/studio-app/sync/import/initiate`, + apiNamespace: 'wpcom/v2', + formData, + } ); +} + +export async function pollImportStatus( + token: string, + remoteSiteId: number +): Promise< ImportResponse > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const rawResponse = await wpcom.req.get( `/sites/${ remoteSiteId }/studio-app/sync/import`, { + apiNamespace: 'wpcom/v2', + } ); + + return importResponseSchema.parse( rawResponse ); +} + +export async function checkBackupSize( url: string ): Promise< number > { + const response = await fetch( url, { method: 'HEAD' } ); + if ( ! response.ok ) { + throw new Error( `Failed to fetch backup size: ${ response.statusText }` ); + } + const contentLength = response.headers.get( 'content-length' ); + if ( ! contentLength ) { + return 0; + } + return parseInt( contentLength, 10 ); +} + +export async function downloadBackup( url: string, destPath: string ): Promise< void > { + const response = await fetch( url ); + if ( ! response.ok || ! 
response.body ) { + throw new Error( 'Failed to download backup' ); + } + + const fileStream = fs.createWriteStream( destPath ); + const readable = Readable.fromWeb( response.body as import('stream/web').ReadableStream ); + + return new Promise< void >( ( resolve, reject ) => { + readable.pipe( fileStream ); + fileStream.on( 'finish', resolve ); + fileStream.on( 'error', reject ); + readable.on( 'error', reject ); + } ); +} + +export async function fetchLatestRewindId( + token: string, + remoteSiteId: number +): Promise< string > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const rawResponse = await wpcom.req.get( + `/sites/${ remoteSiteId }/studio-app/sync/get-latest-rewind-id`, + { apiNamespace: 'wpcom/v2' } + ); + + const parsed = z.object( { success: z.boolean(), rewind_id: z.string() } ).parse( rawResponse ); + + if ( ! parsed.success || ! parsed.rewind_id ) { + throw new Error( 'No rewind ID available' ); + } + + return parsed.rewind_id; +} + +export type RemoteFileEntry = { + name: string; + isDirectory: boolean; + pathId: string; + path: string; +}; + +export async function fetchRemoteFileTree( + token: string, + remoteSiteId: number, + rewindId: string, + treePath: string = '/wp-content/' +): Promise< RemoteFileEntry[] > { + const wpcom = wpcomFactory( token, wpcomXhrRequest ); + + const rawResponse = await wpcom.req.post( { + path: `/sites/${ remoteSiteId }/rewind/backup/ls`, + apiNamespace: 'wpcom/v2', + body: { backup_id: rewindId, path: treePath }, + } ); + + const parsed = backupLsResponseBodySchema.parse( rawResponse ); + + if ( ! 
parsed.ok ) { + throw new Error( parsed.error || 'Failed to fetch remote file tree' ); + } + + const entries: RemoteFileEntry[] = []; + for ( const [ name, rawItem ] of Object.entries( parsed.contents ) ) { + const itemResult = backupLsItemSchema.safeParse( rawItem ); + if ( itemResult.success ) { + const item: BackupLsItem = itemResult.data; + const isDirectory = item.type === 'dir' || item.has_children === true; + entries.push( { + name, + isDirectory, + pathId: item.id, + path: `${ treePath }${ name }${ isDirectory ? '/' : '' }`, + } ); + } + } + + return entries; +} diff --git a/apps/studio/src/modules/sync/lib/sync-support.ts b/tools/common/lib/sync/sync-support.ts similarity index 92% rename from apps/studio/src/modules/sync/lib/sync-support.ts rename to tools/common/lib/sync/sync-support.ts index a253a5cadd..7b8ae7b0ff 100644 --- a/apps/studio/src/modules/sync/lib/sync-support.ts +++ b/tools/common/lib/sync/sync-support.ts @@ -1,5 +1,4 @@ -import type { SyncSupport } from 'src/modules/sync/types'; -import type { SitesEndpointSite } from 'src/stores/sync/wpcom-sites'; +import type { SitesEndpointSite, SyncSupport } from '@studio/common/types/sync'; const STUDIO_SYNC_FEATURE_NAME = 'studio-sync'; diff --git a/tools/common/lib/sync/transform-sites.ts b/tools/common/lib/sync/transform-sites.ts new file mode 100644 index 0000000000..70723787fc --- /dev/null +++ b/tools/common/lib/sync/transform-sites.ts @@ -0,0 +1,68 @@ +import { sitesEndpointSiteSchema } from '@studio/common/types/sync'; +import { getSyncSupport, isPressableSite } from './sync-support'; +import type { SitesEndpointSite, SyncSite, SyncSupport } from '@studio/common/types/sync'; + +export function transformSingleSiteResponse( + site: SitesEndpointSite, + syncSupport: SyncSupport, + isStaging: boolean +): SyncSite { + return { + id: site.ID, + localSiteId: '', + name: site.name, + url: site.URL, + isStaging, + isPressable: isPressableSite( site ), + environmentType: site.environment_type, + 
syncSupport, + lastPullTimestamp: null, + lastPushTimestamp: null, + }; +} + +/** + * Transforms the WordPress.com sites API response into SyncSite objects. + * + * @param sites - Raw site data from the WordPress.com API + * @param options.connectedSiteIds - Optional IDs of sites already connected to the current local site. + * When provided, used to: 1) keep deleted sites in the list if they're connected, and + * 2) determine sync support status (already-connected vs syncable). + * @param options.onParseError - Optional callback for site parse errors (e.g. Sentry.captureException) + */ +export function transformSitesResponse( + sites: unknown[], + options?: { + connectedSiteIds?: number[]; + onParseError?: ( error: unknown ) => void; + } +): SyncSite[] { + const connectedSiteIds = options?.connectedSiteIds ?? []; + + const validatedSites = sites.reduce< SitesEndpointSite[] >( ( acc, rawSite ) => { + try { + return [ ...acc, sitesEndpointSiteSchema.parse( rawSite ) ]; + } catch ( error ) { + options?.onParseError?.( error ); + return acc; + } + }, [] ); + + const allStagingSiteIds = validatedSites.flatMap( + ( site ) => site.options?.wpcom_staging_blog_ids ?? [] + ); + + return validatedSites + .filter( ( site ) => ! site.is_a8c ) + .filter( + ( site ) => + ! 
site.is_deleted || + ( connectedSiteIds.length > 0 && connectedSiteIds.some( ( id ) => id === site.ID ) ) + ) + .map( ( site ) => { + const isStaging = allStagingSiteIds.includes( site.ID ); + const syncSupport = getSyncSupport( site, connectedSiteIds ); + + return transformSingleSiteResponse( site, syncSupport, isStaging ); + } ); +} diff --git a/tools/common/lib/sync/tree-utils.ts b/tools/common/lib/sync/tree-utils.ts new file mode 100644 index 0000000000..28de760284 --- /dev/null +++ b/tools/common/lib/sync/tree-utils.ts @@ -0,0 +1,48 @@ +import { SYNC_EXCLUSIONS } from './constants'; +import type { SyncOption } from '@studio/common/types/sync'; + +export const shouldExcludeFromSync = ( itemName: string ): boolean => { + if ( itemName.startsWith( '.' ) ) { + return true; + } + + if ( SYNC_EXCLUSIONS.includes( itemName ) ) { + return true; + } + + return false; +}; + +export const shouldLimitDepth = ( relativePath: string ): boolean => { + const normalizedPath = relativePath.replace( /^wp-content\//, '' ); + + // Match plugins/plugin-name or plugins/plugin-name/ + if ( normalizedPath.match( /^plugins\/[^/]+\/?$/ ) ) { + return true; + } + + // Match themes/theme-name or themes/theme-name/ + if ( normalizedPath.match( /^themes\/[^/]+\/?$/ ) ) { + return true; + } + + // Match mu-plugins/mu-plugin or mu-plugins/mu-plugin/ + if ( normalizedPath.match( /^mu-plugins\/[^/]+\/?$/ ) ) { + return true; + } + + return false; +}; + +export function categorizePath( relativePath: string ): SyncOption { + if ( relativePath.startsWith( 'plugins/' ) || relativePath === 'plugins' ) { + return 'plugins'; + } + if ( relativePath.startsWith( 'themes/' ) || relativePath === 'themes' ) { + return 'themes'; + } + if ( relativePath.startsWith( 'uploads/' ) || relativePath === 'uploads' ) { + return 'uploads'; + } + return 'contents'; +} diff --git a/tools/common/lib/sync/tus-upload.ts b/tools/common/lib/sync/tus-upload.ts new file mode 100644 index 0000000000..45f0b9d6f7 --- /dev/null 
+++ b/tools/common/lib/sync/tus-upload.ts @@ -0,0 +1,123 @@ +import fs from 'fs'; +import nodePath from 'path'; +import { Upload } from 'tus-js-client'; + +export type TusUploadOptions = { + token: string; + remoteSiteId: number; + archivePath: string; + onProgress?: ( percent: number ) => void; + onNetworkPause?: ( error: string ) => void; + onResume?: () => void; +}; + +export function createTusUpload( options: TusUploadOptions ): { + promise: Promise< string >; + abort: () => void; +} { + const { token, remoteSiteId, archivePath, onProgress, onNetworkPause, onResume } = options; + + let rejectFn: ( error: Error ) => void; + let uploadInstance: Upload | null = null; + let isAborted = false; + let hasUploadStarted = false; + let isNetworkPaused = false; + + const abort = () => { + isAborted = true; + if ( uploadInstance ) { + void uploadInstance.abort(); + } + rejectFn?.( new Error( 'Upload aborted' ) ); + }; + + const promise = ( async () => { + const file = fs.createReadStream( archivePath ); + const fileSize = fs.statSync( archivePath ).size; + const filename = nodePath.basename( archivePath ); + + return new Promise< string >( ( resolve, reject ) => { + rejectFn = reject; + + if ( isAborted ) { + file.destroy(); + reject( new Error( 'Upload aborted' ) ); + return; + } + + const upload = new Upload( file, { + endpoint: `https://public-api.wordpress.com/rest/v1.1/studio-file-uploads/${ remoteSiteId }`, + chunkSize: 500000, + retryDelays: [ 0, 1000, 3000, 5000, 10000, 25000 ], + overridePatchMethod: true, + removeFingerprintOnSuccess: true, + storeFingerprintForResuming: true, + headers: { + Authorization: `Bearer ${ token }`, + }, + metadata: { + filename, + filetype: 'application/gzip', + }, + uploadSize: fileSize, + onBeforeRequest: ( req ) => { + if ( req.getMethod() === 'HEAD' ) { + // @ts-expect-error Override method to get response headers + req._method = 'GET'; + req.setHeader( 'X-HTTP-Method-Override', 'HEAD' ); + } + }, + onError: ( error ) => { + 
file.destroy(); + reject( error ); + }, + onProgress: ( bytesSent: number, bytesTotal: number ) => { + if ( isNetworkPaused ) { + isNetworkPaused = false; + onResume?.(); + } + + if ( ! hasUploadStarted ) { + hasUploadStarted = true; + } + + if ( onProgress && bytesTotal > 0 ) { + onProgress( ( bytesSent / bytesTotal ) * 100 ); + } + }, + onSuccess: ( payload ) => { + file.destroy(); + if ( ! payload.lastResponse ) { + reject( new Error( 'Upload completed but no response received' ) ); + return; + } + + const attachmentId = payload.lastResponse.getHeader( 'x-studio-file-upload-media-id' ); + if ( attachmentId ) { + resolve( attachmentId ); + } else { + reject( new Error( 'Upload completed but attachment ID not found' ) ); + } + }, + onShouldRetry: ( error ) => { + if ( isAborted ) { + return false; + } + + if ( hasUploadStarted ) { + isNetworkPaused = true; + onNetworkPause?.( error.message ); + } + + const status = error.originalResponse ? error.originalResponse.getStatus() : 0; + return status !== 403; + }, + } ); + + uploadInstance = upload; + upload.start(); + } ); + } )(); + + return { promise, abort }; +} diff --git a/apps/studio/src/lib/tests/serialize-plugins.test.ts b/tools/common/lib/tests/serialize-plugins.test.ts similarity index 90% rename from apps/studio/src/lib/tests/serialize-plugins.test.ts rename to tools/common/lib/tests/serialize-plugins.test.ts index 8b7d1fa721..8468bd6113 100644 --- a/apps/studio/src/lib/tests/serialize-plugins.test.ts +++ b/tools/common/lib/tests/serialize-plugins.test.ts @@ -1,4 +1,4 @@ -import { serializePlugins } from 'src/lib/serialize-plugins'; +import { serializePlugins } from '@studio/common/lib/serialize-plugins'; describe( 'serializePlugins', () => { it( 'should correctly serialize an empty array', () => { diff --git a/tools/common/logger-actions.ts b/tools/common/logger-actions.ts index b984786974..6faa46face 100644 --- a/tools/common/logger-actions.ts +++ b/tools/common/logger-actions.ts @@ -39,7 +39,34 @@ export 
enum SiteCommandLoggerAction { SETUP_WORDPRESS = 'setupWordPress', SAVE_SITE = 'saveSite', APPLY_BLUEPRINT = 'applyBlueprint', + EXTRACT_BACKUP = 'extractBackup', + IMPORT_SITE = 'importSite', + IMPORT_DATABASE = 'importDatabase', + IMPORT_WP_CONTENT = 'importWpContent', + IMPORT_META = 'importMeta', + EXPORT_SITE = 'exportSite', + CREATE_BACKUP = 'createBackup', + EXPORT_DATABASE = 'exportDatabase', + EXPORT_WP_CONTENT = 'exportWpContent', + EXPORT_CONFIG = 'exportConfig', DELETE_PREVIEW_SITES = 'deletePreviewSites', DELETE_FILES = 'deleteFiles', CHECKING_DEPENDENCY_UPDATES = 'checkingDependencyUpdates', } + +export enum SyncCommandLoggerAction { + START_DAEMON = 'startDaemon', + STOP_SITE = 'stopSite', + LOAD_SITES = 'loadSites', + INSTALL_SQLITE = 'installSqlite', + FETCH_REMOTE_SITES = 'fetchRemoteSites', + ARCHIVE = 'archive', + UPLOAD = 'upload', + INITIATE_IMPORT = 'initiateImport', + POLL_IMPORT = 'pollImport', + INITIATE_BACKUP = 'initiateBackup', + POLL_BACKUP = 'pollBackup', + START_SITE = 'startSite', + DOWNLOAD = 'download', + IMPORT = 'import', +} diff --git a/tools/common/types/sync-tree.ts b/tools/common/types/sync-tree.ts new file mode 100644 index 0000000000..0bd73423ff --- /dev/null +++ b/tools/common/types/sync-tree.ts @@ -0,0 +1,44 @@ +import { z } from 'zod'; + +export type RawDirectoryEntry = { + name: string; + isDirectory: boolean; + path: string; + children?: RawDirectoryEntry[]; +}; + +export const latestRewindIdResponseSchema = z.object( { + body: z.object( { + success: z.boolean(), + rewind_id: z.string(), + } ), + status: z.number(), +} ); + +export const backupLsItemSchema = z.object( { + type: z.string(), + has_children: z.boolean().optional(), + period: z.string().optional(), + id: z.string(), + manifest_path: z.string().optional(), +} ); + +export const backupLsResponseBodySchema = z.object( { + ok: z.boolean(), + error: z.string(), + contents: z.record( z.string(), z.unknown() ), +} ); + +export const backupLsResponseSchema = 
z.object( { + body: backupLsResponseBodySchema, + status: z.number(), + headers: z.object( { + Allow: z.string(), + } ), +} ); + +export type BackupLsItem = z.infer< typeof backupLsItemSchema >; +export type BackupLsRequest = { + backup_id: string; + path: string; +}; diff --git a/tools/common/types/sync.ts b/tools/common/types/sync.ts new file mode 100644 index 0000000000..9f99b11c4a --- /dev/null +++ b/tools/common/types/sync.ts @@ -0,0 +1,135 @@ +import { z } from 'zod'; + +// WordPress.com /me/sites endpoint response schemas +export const sitesEndpointSiteSchema = z.object( { + ID: z.number(), + is_wpcom_atomic: z.boolean(), + name: z.string(), + URL: z.string(), + jetpack: z.boolean().optional(), + is_deleted: z.boolean(), + hosting_provider_guess: z.string().optional(), + environment_type: z + .enum( [ 'production', 'staging', 'development', 'sandbox', 'local' ] ) + .nullable() + .optional(), + is_a8c: z.boolean().optional(), + options: z + .object( { + created_at: z.string(), + wpcom_staging_blog_ids: z.array( z.number() ), + } ) + .optional(), + capabilities: z + .object( { + manage_options: z.boolean(), + } ) + .optional(), + plan: z + .object( { + expired: z.boolean().optional(), + features: z.object( { + active: z.array( z.string() ), + available: z.record( z.string(), z.array( z.string() ) ).optional(), + } ), + is_free: z.boolean().optional(), + product_id: z.coerce.number(), + product_name_short: z.string(), + product_slug: z.string(), + user_is_owner: z.boolean().optional(), + } ) + .optional(), +} ); + +export type SitesEndpointSite = z.infer< typeof sitesEndpointSiteSchema >; + +// Permissive wrapper for the /me/sites response (to fail gracefully per-site) +export const sitesEndpointResponseSchema = z.object( { + sites: z.array( z.unknown() ), +} ); + +// Sync support types +export const syncSupportValues = [ + 'unsupported', + 'syncable', + 'needs-transfer', + 'already-connected', + 'needs-upgrade', + 'deleted', + 'missing-permissions', +] as 
const; + +export type SyncSupport = ( typeof syncSupportValues )[ number ]; + +// Sync site representation +export type SyncSite = { + id: number; + localSiteId: string; + name: string; + url: string; + isStaging: boolean; + isPressable: boolean; + environmentType?: string | null; + syncSupport: SyncSupport; + lastPullTimestamp: string | null; + lastPushTimestamp: string | null; +}; + +// Pull backup API schemas +export const pullSiteResponseSchema = z.object( { + success: z.boolean(), + backup_id: z.number(), +} ); + +export const syncBackupResponseSchema = z.object( { + status: z.enum( [ 'in-progress', 'finished', 'failed' ] ), + download_url: z.string().nullable().optional(), + percent: z.number(), +} ); + +// Push import API schemas +export const importFailedResponseSchema = z.object( { + status: z.literal( 'failed' ), + success: z.boolean(), + error: z.string(), + error_data: z + .object( { + vp_restore_status: z.string().nullable(), + vp_restore_message: z.string().nullable(), + vp_rewind_id: z.string().nullable(), + } ) + .nullable(), +} ); + +export const importWorkingResponseSchema = z.object( { + status: z.enum( [ + 'started', + 'initial_backup_started', + 'initial_backup_finished', + 'archive_import_started', + 'archive_import_finished', + 'finished', + ] ), + success: z.boolean(), + backup_progress: z.number().nullable(), + import_progress: z.number().nullable(), +} ); + +export const importResponseSchema = z.discriminatedUnion( 'status', [ + importWorkingResponseSchema, + importFailedResponseSchema, +] ); + +export type ImportResponse = z.infer< typeof importResponseSchema >; + +// Sync option types (shared between push/pull) +export const syncOptionSchema = z.enum( [ + 'all', + 'sqls', + 'paths', + 'uploads', + 'plugins', + 'themes', + 'contents', +] ); +export type SyncOption = z.infer< typeof syncOptionSchema >;