diff --git a/backend/Dockerfile b/backend/Dockerfile
index 3ab3bf6..9122ded 100644
--- a/backend/Dockerfile
+++ b/backend/Dockerfile
@@ -2,8 +2,8 @@ FROM openjdk:17-jdk-slim
 
 WORKDIR /app
 
-# Install Maven
-RUN apt-get update && apt-get install -y maven && rm -rf /var/lib/apt/lists/*
+# Install Maven and PostgreSQL client tools
+RUN apt-get update && apt-get install -y maven postgresql-client && rm -rf /var/lib/apt/lists/*
 
 # Copy source code
 COPY . .
diff --git a/backend/src/main/java/com/storycove/service/DatabaseManagementService.java b/backend/src/main/java/com/storycove/service/DatabaseManagementService.java
index 3040bbf..44446f0 100644
--- a/backend/src/main/java/com/storycove/service/DatabaseManagementService.java
+++ b/backend/src/main/java/com/storycove/service/DatabaseManagementService.java
@@ -70,6 +70,75 @@ public class DatabaseManagementService implements ApplicationContextAware {
         this.applicationContext = applicationContext;
     }
 
+    // Helper methods to extract database connection details
+    private String extractDatabaseUrl() {
+        try (Connection connection = getDataSource().getConnection()) {
+            return connection.getMetaData().getURL();
+        } catch (SQLException e) {
+            throw new RuntimeException("Failed to extract database URL", e);
+        }
+    }
+
+    private String extractDatabaseHost() {
+        String url = extractDatabaseUrl();
+        // Extract host from jdbc:postgresql://host:port/database
+        if (url.startsWith("jdbc:postgresql://")) {
+            String hostPort = url.substring("jdbc:postgresql://".length());
+            if (hostPort.contains("/")) {
+                hostPort = hostPort.substring(0, hostPort.indexOf("/"));
+            }
+            if (hostPort.contains(":")) {
+                return hostPort.substring(0, hostPort.indexOf(":"));
+            }
+            return hostPort;
+        }
+        return "localhost"; // fallback
+    }
+
+    private String extractDatabasePort() {
+        String url = extractDatabaseUrl();
+        // Extract port from jdbc:postgresql://host:port/database
+        if (url.startsWith("jdbc:postgresql://")) {
+            String hostPort = url.substring("jdbc:postgresql://".length());
+            if (hostPort.contains("/")) {
+                hostPort = hostPort.substring(0, hostPort.indexOf("/"));
+            }
+            if (hostPort.contains(":")) {
+                return hostPort.substring(hostPort.indexOf(":") + 1);
+            }
+        }
+        return "5432"; // default PostgreSQL port
+    }
+
+    private String extractDatabaseName() {
+        String url = extractDatabaseUrl();
+        // Extract database name from jdbc:postgresql://host:port/database
+        if (url.startsWith("jdbc:postgresql://")) {
+            String remaining = url.substring("jdbc:postgresql://".length());
+            if (remaining.contains("/")) {
+                String dbPart = remaining.substring(remaining.indexOf("/") + 1);
+                // Remove any query parameters
+                if (dbPart.contains("?")) {
+                    dbPart = dbPart.substring(0, dbPart.indexOf("?"));
+                }
+                return dbPart;
+            }
+        }
+        return "storycove"; // fallback
+    }
+
+    private String extractDatabaseUsername() {
+        // Get from environment variable or default
+        return System.getenv("SPRING_DATASOURCE_USERNAME") != null ?
+                System.getenv("SPRING_DATASOURCE_USERNAME") : "storycove";
+    }
+
+    private String extractDatabasePassword() {
+        // Get from environment variable or default
+        return System.getenv("SPRING_DATASOURCE_PASSWORD") != null ?
+                System.getenv("SPRING_DATASOURCE_PASSWORD") : "password";
+    }
+
     /**
      * Create a comprehensive backup including database and files in ZIP format
      */
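The helpers above parse the JDBC URL by hand. For reference, a minimal sketch of the same extraction done with java.net.URI (an alternative, not what this patch does), assuming a standard jdbc:postgresql://host:port/database URL; the example URL is made up:

```java
import java.net.URI;

// Sketch only: strip the "jdbc:" prefix so java.net.URI can parse the remainder.
public class JdbcUrlParseSketch {
    public static void main(String[] args) {
        String url = "jdbc:postgresql://db:5432/storycove?sslmode=disable"; // illustrative URL
        URI uri = URI.create(url.substring("jdbc:".length()));
        System.out.println(uri.getHost());              // db
        System.out.println(uri.getPort());              // 5432
        System.out.println(uri.getPath().substring(1)); // storycove (query parameters are not part of the path)
    }
}
```

Note that the "storycove"/"password" fallbacks only apply when the SPRING_DATASOURCE_* environment variables are unset.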
@@ -172,175 +241,177 @@ public class DatabaseManagementService implements ApplicationContextAware {
     }
 
     public Resource createBackup() throws SQLException, IOException {
-        StringBuilder sqlDump = new StringBuilder();
-
-        try (Connection connection = getDataSource().getConnection()) {
-            // Add header
-            sqlDump.append("-- StoryCove Database Backup\n");
-            sqlDump.append("-- Generated at: ").append(new java.util.Date()).append("\n\n");
-
-            // Disable foreign key checks during restore (PostgreSQL syntax)
-            sqlDump.append("SET session_replication_role = replica;\n\n");
-
-            // List of tables in dependency order (parents first for insertion)
-            List<String> insertTables = Arrays.asList(
-                "authors", "series", "tags", "collections",
-                "stories", "story_tags", "author_urls", "collection_stories"
-            );
-
-            // TRUNCATE in reverse order (children first)
-            List<String> truncateTables = Arrays.asList(
-                "collection_stories", "author_urls", "story_tags",
-                "stories", "collections", "tags", "series", "authors"
-            );
-
-            // Generate DELETE statements for each table (safer than TRUNCATE CASCADE)
-            for (String tableName : truncateTables) {
-                sqlDump.append("-- Clear Table: ").append(tableName).append("\n");
-                sqlDump.append("DELETE FROM \"").append(tableName).append("\";\n");
+        // Use PostgreSQL's native pg_dump for reliable backup
+        String dbHost = extractDatabaseHost();
+        String dbPort = extractDatabasePort();
+        String dbName = extractDatabaseName();
+        String dbUser = extractDatabaseUsername();
+        String dbPassword = extractDatabasePassword();
 
-                // Reset auto-increment sequences for tables with ID columns
-                if (Arrays.asList("authors", "series", "tags", "collections", "stories").contains(tableName)) {
-                    sqlDump.append("SELECT setval(pg_get_serial_sequence('\"").append(tableName).append("\"', 'id'), 1, false);\n");
+        // Create temporary file for backup
+        Path tempBackupFile = Files.createTempFile("storycove_backup_", ".sql");
+
+        try {
+            // Build pg_dump command
+            ProcessBuilder pb = new ProcessBuilder(
+                "pg_dump",
+                "--host=" + dbHost,
+                "--port=" + dbPort,
+                "--username=" + dbUser,
+                "--dbname=" + dbName,
+                "--no-password",
+                "--verbose",
+                "--clean",
+                "--if-exists",
+                "--create",
+                "--file=" + tempBackupFile.toString()
+            );
+
+            // Set PGPASSWORD environment variable
+            Map<String, String> env = pb.environment();
+            env.put("PGPASSWORD", dbPassword);
+
+            System.err.println("Starting PostgreSQL backup using pg_dump...");
+            Process process = pb.start();
+
+            // Capture output
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
+                String line;
+                while ((line = reader.readLine()) != null) {
+                    System.err.println("pg_dump: " + line);
                 }
             }
-            sqlDump.append("\n");
-
-            // Generate INSERT statements in dependency order
-            for (String tableName : insertTables) {
-                sqlDump.append("-- Data for Table: ").append(tableName).append("\n");
-
-                // Get table data
-                try (PreparedStatement stmt = connection.prepareStatement("SELECT * FROM \"" + tableName + "\"");
-                     ResultSet rs = stmt.executeQuery()) {
-
-                    ResultSetMetaData metaData = rs.getMetaData();
-                    int columnCount = metaData.getColumnCount();
-
-                    // Build column names for INSERT statement
-                    StringBuilder columnNames = new StringBuilder();
-                    for (int i = 1; i <= columnCount; i++) {
-                        if (i > 1) columnNames.append(", ");
-                        columnNames.append("\"").append(metaData.getColumnName(i)).append("\"");
-                    }
-
-                    while (rs.next()) {
-                        sqlDump.append("INSERT INTO \"").append(tableName).append("\" (")
-                               .append(columnNames).append(") VALUES (");
-
-                        for (int i = 1; i <= columnCount; i++) {
-                            if (i > 1) sqlDump.append(", ");
-
-                            Object value = rs.getObject(i);
-                            sqlDump.append(formatSqlValue(value));
-                        }
-
-                        sqlDump.append(");\n");
-                    }
-                }
-
-                sqlDump.append("\n");
+
+            int exitCode = process.waitFor();
+            if (exitCode != 0) {
+                throw new RuntimeException("pg_dump failed with exit code: " + exitCode);
+            }
+
+            System.err.println("PostgreSQL backup completed successfully");
+
+            // Read the backup file into memory
+            byte[] backupData = Files.readAllBytes(tempBackupFile);
+            return new ByteArrayResource(backupData);
+
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException("Backup process was interrupted", e);
+        } finally {
+            // Clean up temporary file
+            try {
+                Files.deleteIfExists(tempBackupFile);
+            } catch (IOException e) {
+                System.err.println("Warning: Could not delete temporary backup file: " + e.getMessage());
             }
-
-            // Re-enable foreign key checks (PostgreSQL syntax)
-            sqlDump.append("SET session_replication_role = DEFAULT;\n");
         }
-
-        byte[] backupData = sqlDump.toString().getBytes(StandardCharsets.UTF_8);
-        return new ByteArrayResource(backupData);
     }
 
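createBackup now shells out to pg_dump and waits for it with no upper bound. If a bound is wanted, Process.waitFor also takes a timeout; a minimal sketch of that variant, reusing the ProcessBuilder set up above (the 30-minute figure is an assumption, not something the patch specifies):

```java
// Sketch: bound the pg_dump run instead of waiting indefinitely.
// Assumes stderr is still being drained as in the surrounding code,
// otherwise a chatty --verbose run could block on a full pipe buffer.
Process process = pb.start();
if (!process.waitFor(30, java.util.concurrent.TimeUnit.MINUTES)) {
    process.destroyForcibly();
    throw new RuntimeException("pg_dump did not finish within 30 minutes");
}
if (process.exitValue() != 0) {
    throw new RuntimeException("pg_dump failed with exit code: " + process.exitValue());
}
```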
     @Transactional(timeout = 1800) // 30 minutes timeout for large backup restores
     public void restoreFromBackup(InputStream backupStream) throws IOException, SQLException {
-        // Read the SQL file
-        StringBuilder sqlContent = new StringBuilder();
-        try (BufferedReader reader = new BufferedReader(new InputStreamReader(backupStream, StandardCharsets.UTF_8))) {
-            String line;
-            while ((line = reader.readLine()) != null) {
-                // Skip comments and empty lines
-                if (!line.trim().startsWith("--") && !line.trim().isEmpty()) {
-                    sqlContent.append(line).append("\n");
+        // Use PostgreSQL's native psql for reliable restore
+        String dbHost = extractDatabaseHost();
+        String dbPort = extractDatabasePort();
+        String dbName = extractDatabaseName();
+        String dbUser = extractDatabaseUsername();
+        String dbPassword = extractDatabasePassword();
+
+        // Create temporary file for the backup
+        Path tempBackupFile = Files.createTempFile("storycove_restore_", ".sql");
+
+        try {
+            // Write backup stream to temporary file
+            System.err.println("Writing backup data to temporary file...");
+            try (InputStream input = backupStream;
+                 OutputStream output = Files.newOutputStream(tempBackupFile)) {
+                byte[] buffer = new byte[8192];
+                int bytesRead;
+                while ((bytesRead = input.read(buffer)) != -1) {
+                    output.write(buffer, 0, bytesRead);
                 }
             }
-        }
 
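The manual 8 KB copy loop above can also be expressed as a single call; a one-line equivalent shown for reference (same effect, not what the patch uses):

```java
// Copies the uploaded backup stream straight into the already-created temp file.
Files.copy(backupStream, tempBackupFile, StandardCopyOption.REPLACE_EXISTING);
```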
-        // Execute the SQL statements
-        try (Connection connection = getDataSource().getConnection()) {
-            connection.setAutoCommit(false);
-
-            try {
-                // Ensure database schema exists before restoring data
-                ensureDatabaseSchemaExists(connection);
-
-                // Parse SQL statements properly (handle semicolons inside string literals)
-                List<String> statements = parseStatements(sqlContent.toString());
-                System.err.println("Parsed " + statements.size() + " SQL statements. Starting execution...");
+            System.err.println("Starting PostgreSQL restore using psql...");
 
-                int successCount = 0;
-                for (String statement : statements) {
-                    String trimmedStatement = statement.trim();
-                    if (!trimmedStatement.isEmpty()) {
-                        try (PreparedStatement stmt = connection.prepareStatement(trimmedStatement)) {
-                            stmt.setQueryTimeout(300); // 5 minute timeout per statement
-                            stmt.executeUpdate();
-                            successCount++;
+            // Build psql command to restore the backup
+            ProcessBuilder pb = new ProcessBuilder(
+                "psql",
+                "--host=" + dbHost,
+                "--port=" + dbPort,
+                "--username=" + dbUser,
+                "--dbname=" + dbName,
+                "--no-password",
+                "--echo-errors",
+                "--file=" + tempBackupFile.toString()
+            );
 
-                            // Progress logging and batch commits for large restores
-                            if (successCount % 100 == 0) {
-                                System.err.println("Executed " + successCount + "/" + statements.size() + " statements...");
-                            }
+            // Set PGPASSWORD environment variable
+            Map<String, String> env = pb.environment();
+            env.put("PGPASSWORD", dbPassword);
 
-                            // Commit every 500 statements to avoid huge transactions
-                            if (successCount % 500 == 0) {
-                                connection.commit();
-                                System.err.println("Committed batch at " + successCount + " statements");
-                            }
-                        } catch (SQLException e) {
-                            // Log detailed error information for failed statements
-                            System.err.println("ERROR: Failed to execute SQL statement #" + (successCount + 1));
-                            System.err.println("Error: " + e.getMessage());
-                            System.err.println("SQL State: " + e.getSQLState());
-                            System.err.println("Error Code: " + e.getErrorCode());
-
-                            // Show the problematic statement (first 500 chars)
-                            String statementPreview = trimmedStatement.length() > 500 ?
-                                trimmedStatement.substring(0, 500) + "..." : trimmedStatement;
-                            System.err.println("Statement: " + statementPreview);
-
-                            throw e; // Re-throw to trigger rollback
+            Process process = pb.start();
+
+            // Capture output
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+                 BufferedReader outputReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
+
+                // Read stderr in a separate thread
+                Thread errorThread = new Thread(() -> {
+                    try {
+                        String line;
+                        while ((line = reader.readLine()) != null) {
+                            System.err.println("psql stderr: " + line);
                         }
+                    } catch (IOException e) {
+                        System.err.println("Error reading psql stderr: " + e.getMessage());
                     }
+                });
+                errorThread.start();
+
+                // Read stdout
+                String line;
+                while ((line = outputReader.readLine()) != null) {
+                    System.err.println("psql stdout: " + line);
                 }
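The two readers above (a helper thread for stderr, the calling thread for stdout) keep psql from blocking on a full pipe buffer. A sketch of a smaller variant that merges the two streams so a single loop drains both (an alternative, not what the patch does):

```java
// Sketch: merge psql's stderr into stdout so one reader is enough.
pb.redirectErrorStream(true);
Process process = pb.start();
try (BufferedReader merged = new BufferedReader(
        new InputStreamReader(process.getInputStream()))) {
    String line;
    while ((line = merged.readLine()) != null) {
        System.err.println("psql: " + line);
    }
}
int exitCode = process.waitFor();
```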
-
-                connection.commit();
-                System.err.println("Restore completed successfully. Executed " + successCount + " SQL statements.");
-
-                // Reindex search after successful restore
-                try {
-                    String currentLibraryId = libraryService.getCurrentLibraryId();
-                    System.err.println("Starting search reindex after successful restore for library: " + currentLibraryId);
-                    if (currentLibraryId == null) {
-                        System.err.println("ERROR: No current library set during restore - cannot reindex search!");
-                        throw new IllegalStateException("No current library active during restore");
-                    }
-
-                    // Manually trigger reindexing using the correct database connection
-                    System.err.println("Triggering manual reindex from library-specific database for library: " + currentLibraryId);
-                    reindexStoriesAndAuthorsFromCurrentDatabase();
-
-                    // Note: Collections collection will be recreated when needed by the service
-                    System.err.println("Search reindex completed successfully for library: " + currentLibraryId);
-                } catch (Exception e) {
-                    // Log the error but don't fail the restore
-                    System.err.println("Warning: Failed to reindex search after restore: " + e.getMessage());
-                    e.printStackTrace();
+
+                errorThread.join();
+            }
+
+            int exitCode = process.waitFor();
+            if (exitCode != 0) {
+                throw new RuntimeException("psql restore failed with exit code: " + exitCode);
+            }
+
+            System.err.println("PostgreSQL restore completed successfully");
+
+            // Reindex search after successful restore
+            try {
+                String currentLibraryId = libraryService.getCurrentLibraryId();
+                System.err.println("Starting search reindex after successful restore for library: " + currentLibraryId);
+                if (currentLibraryId == null) {
+                    System.err.println("ERROR: No current library set during restore - cannot reindex search!");
+                    throw new IllegalStateException("No current library active during restore");
                 }
-
-            } catch (SQLException e) {
-                connection.rollback();
-                throw e;
-            } finally {
-                connection.setAutoCommit(true);
+
+                // Manually trigger reindexing using the correct database connection
+                System.err.println("Triggering manual reindex from library-specific database for library: " + currentLibraryId);
+                reindexStoriesAndAuthorsFromCurrentDatabase();
+
+                // Note: Collections collection will be recreated when needed by the service
+                System.err.println("Search reindex completed successfully for library: " + currentLibraryId);
+            } catch (Exception e) {
+                // Log the error but don't fail the restore
+                System.err.println("Warning: Failed to reindex search after restore: " + e.getMessage());
+                e.printStackTrace();
+            }
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException("Restore process was interrupted", e);
+        } finally {
+            // Clean up temporary file
+            try {
+                Files.deleteIfExists(tempBackupFile);
+            } catch (IOException e) {
+                System.err.println("Warning: Could not delete temporary restore file: " + e.getMessage());
             }
         }
     }
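For reference, a hypothetical caller of the two reworked methods; the databaseManagementService variable and the target path are illustrative assumptions, not part of this patch:

```java
// Hypothetical usage sketch (imports assumed: java.io.InputStream, java.nio.file.*,
// org.springframework.core.io.Resource).
Resource backup = databaseManagementService.createBackup();
try (InputStream in = backup.getInputStream()) {
    Files.copy(in, Path.of("/tmp/storycove-backup.sql"), StandardCopyOption.REPLACE_EXISTING);
}

// Restoring the same dump later:
try (InputStream in = Files.newInputStream(Path.of("/tmp/storycove-backup.sql"))) {
    databaseManagementService.restoreFromBackup(in);
}
```

Both methods assume pg_dump and psql are available on the container's PATH, which is what the postgresql-client package added in the Dockerfile provides.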