backup / restore improvement
@@ -2,8 +2,8 @@ FROM openjdk:17-jdk-slim
 
 WORKDIR /app
 
-# Install Maven
-RUN apt-get update && apt-get install -y maven && rm -rf /var/lib/apt/lists/*
+# Install Maven and PostgreSQL client tools
+RUN apt-get update && apt-get install -y maven postgresql-client && rm -rf /var/lib/apt/lists/*
 
 # Copy source code
 COPY . .
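The Dockerfile change exists because the service now shells out to pg_dump and psql, which ship in the postgresql-client package. A minimal, hypothetical startup check (not part of this commit) that fails fast when the client tools are missing from the image's PATH could look like this:

import java.io.IOException;

public class PgToolsCheck {
    // Hypothetical helper: verify that a PostgreSQL client tool is on the PATH.
    static void requireTool(String tool) {
        try {
            // "--version" returns immediately and needs no database connection.
            Process p = new ProcessBuilder(tool, "--version").start();
            if (p.waitFor() != 0) {
                throw new IllegalStateException(tool + " exited with code " + p.exitValue());
            }
        } catch (IOException e) {
            throw new IllegalStateException(tool + " is not available on the PATH", e);
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt();
            throw new IllegalStateException("Interrupted while checking " + tool, e);
        }
    }

    public static void main(String[] args) {
        requireTool("pg_dump");
        requireTool("psql");
        System.out.println("PostgreSQL client tools are available");
    }
}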
@@ -70,6 +70,75 @@ public class DatabaseManagementService implements ApplicationContextAware {
         this.applicationContext = applicationContext;
     }
 
+    // Helper methods to extract database connection details
+    private String extractDatabaseUrl() {
+        try (Connection connection = getDataSource().getConnection()) {
+            return connection.getMetaData().getURL();
+        } catch (SQLException e) {
+            throw new RuntimeException("Failed to extract database URL", e);
+        }
+    }
+
+    private String extractDatabaseHost() {
+        String url = extractDatabaseUrl();
+        // Extract host from jdbc:postgresql://host:port/database
+        if (url.startsWith("jdbc:postgresql://")) {
+            String hostPort = url.substring("jdbc:postgresql://".length());
+            if (hostPort.contains("/")) {
+                hostPort = hostPort.substring(0, hostPort.indexOf("/"));
+            }
+            if (hostPort.contains(":")) {
+                return hostPort.substring(0, hostPort.indexOf(":"));
+            }
+            return hostPort;
+        }
+        return "localhost"; // fallback
+    }
+
+    private String extractDatabasePort() {
+        String url = extractDatabaseUrl();
+        // Extract port from jdbc:postgresql://host:port/database
+        if (url.startsWith("jdbc:postgresql://")) {
+            String hostPort = url.substring("jdbc:postgresql://".length());
+            if (hostPort.contains("/")) {
+                hostPort = hostPort.substring(0, hostPort.indexOf("/"));
+            }
+            if (hostPort.contains(":")) {
+                return hostPort.substring(hostPort.indexOf(":") + 1);
+            }
+        }
+        return "5432"; // default PostgreSQL port
+    }
+
+    private String extractDatabaseName() {
+        String url = extractDatabaseUrl();
+        // Extract database name from jdbc:postgresql://host:port/database
+        if (url.startsWith("jdbc:postgresql://")) {
+            String remaining = url.substring("jdbc:postgresql://".length());
+            if (remaining.contains("/")) {
+                String dbPart = remaining.substring(remaining.indexOf("/") + 1);
+                // Remove any query parameters
+                if (dbPart.contains("?")) {
+                    dbPart = dbPart.substring(0, dbPart.indexOf("?"));
+                }
+                return dbPart;
+            }
+        }
+        return "storycove"; // fallback
+    }
+
+    private String extractDatabaseUsername() {
+        // Get from environment variable or default
+        return System.getenv("SPRING_DATASOURCE_USERNAME") != null ?
+                System.getenv("SPRING_DATASOURCE_USERNAME") : "storycove";
+    }
+
+    private String extractDatabasePassword() {
+        // Get from environment variable or default
+        return System.getenv("SPRING_DATASOURCE_PASSWORD") != null ?
+                System.getenv("SPRING_DATASOURCE_PASSWORD") : "password";
+    }
+
     /**
      * Create a comprehensive backup including database and files in ZIP format
      */
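The new helpers derive host, port, and database name from the JDBC URL reported by the connection metadata, assuming the usual jdbc:postgresql://host:port/database?params shape, and fall back to defaults otherwise. A self-contained sketch of the same substring logic run against an illustrative URL (the URL and expected values are examples, not taken from the commit):

public class JdbcUrlParseDemo {
    public static void main(String[] args) {
        // Illustrative URL in the shape the helpers expect:
        // jdbc:postgresql://host:port/database?params
        String url = "jdbc:postgresql://db:5432/storycove?sslmode=disable";

        String prefix = "jdbc:postgresql://";
        int slash = url.indexOf("/", prefix.length());
        String hostPort = url.substring(prefix.length(), slash);
        String host = hostPort.contains(":") ? hostPort.substring(0, hostPort.indexOf(":")) : hostPort;
        String port = hostPort.contains(":") ? hostPort.substring(hostPort.indexOf(":") + 1) : "5432";

        String dbPart = url.substring(slash + 1);
        String database = dbPart.contains("?") ? dbPart.substring(0, dbPart.indexOf("?")) : dbPart;

        // Prints: db 5432 storycove
        System.out.println(host + " " + port + " " + database);
    }
}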
@@ -172,175 +241,177 @@ public class DatabaseManagementService implements ApplicationContextAware {
     }
 
     public Resource createBackup() throws SQLException, IOException {
-        StringBuilder sqlDump = new StringBuilder();
-
-        try (Connection connection = getDataSource().getConnection()) {
-            // Add header
-            sqlDump.append("-- StoryCove Database Backup\n");
-            sqlDump.append("-- Generated at: ").append(new java.util.Date()).append("\n\n");
-
-            // Disable foreign key checks during restore (PostgreSQL syntax)
-            sqlDump.append("SET session_replication_role = replica;\n\n");
-
-            // List of tables in dependency order (parents first for insertion)
-            List<String> insertTables = Arrays.asList(
-                "authors", "series", "tags", "collections",
-                "stories", "story_tags", "author_urls", "collection_stories"
-            );
-
-            // TRUNCATE in reverse order (children first)
-            List<String> truncateTables = Arrays.asList(
-                "collection_stories", "author_urls", "story_tags",
-                "stories", "collections", "tags", "series", "authors"
-            );
-
-            // Generate DELETE statements for each table (safer than TRUNCATE CASCADE)
-            for (String tableName : truncateTables) {
-                sqlDump.append("-- Clear Table: ").append(tableName).append("\n");
-                sqlDump.append("DELETE FROM \"").append(tableName).append("\";\n");
-
-                // Reset auto-increment sequences for tables with ID columns
-                if (Arrays.asList("authors", "series", "tags", "collections", "stories").contains(tableName)) {
-                    sqlDump.append("SELECT setval(pg_get_serial_sequence('\"").append(tableName).append("\"', 'id'), 1, false);\n");
-                }
-            }
-            sqlDump.append("\n");
-
-            // Generate INSERT statements in dependency order
-            for (String tableName : insertTables) {
-                sqlDump.append("-- Data for Table: ").append(tableName).append("\n");
-
-                // Get table data
-                try (PreparedStatement stmt = connection.prepareStatement("SELECT * FROM \"" + tableName + "\"");
-                     ResultSet rs = stmt.executeQuery()) {
-
-                    ResultSetMetaData metaData = rs.getMetaData();
-                    int columnCount = metaData.getColumnCount();
-
-                    // Build column names for INSERT statement
-                    StringBuilder columnNames = new StringBuilder();
-                    for (int i = 1; i <= columnCount; i++) {
-                        if (i > 1) columnNames.append(", ");
-                        columnNames.append("\"").append(metaData.getColumnName(i)).append("\"");
-                    }
-
-                    while (rs.next()) {
-                        sqlDump.append("INSERT INTO \"").append(tableName).append("\" (")
-                                .append(columnNames).append(") VALUES (");
-
-                        for (int i = 1; i <= columnCount; i++) {
-                            if (i > 1) sqlDump.append(", ");
-
-                            Object value = rs.getObject(i);
-                            sqlDump.append(formatSqlValue(value));
-                        }
-
-                        sqlDump.append(");\n");
-                    }
-                }
-
-                sqlDump.append("\n");
-            }
-
-            // Re-enable foreign key checks (PostgreSQL syntax)
-            sqlDump.append("SET session_replication_role = DEFAULT;\n");
-        }
-
-        byte[] backupData = sqlDump.toString().getBytes(StandardCharsets.UTF_8);
-        return new ByteArrayResource(backupData);
+        // Use PostgreSQL's native pg_dump for reliable backup
+        String dbHost = extractDatabaseHost();
+        String dbPort = extractDatabasePort();
+        String dbName = extractDatabaseName();
+        String dbUser = extractDatabaseUsername();
+        String dbPassword = extractDatabasePassword();
+
+        // Create temporary file for backup
+        Path tempBackupFile = Files.createTempFile("storycove_backup_", ".sql");
+
+        try {
+            // Build pg_dump command
+            ProcessBuilder pb = new ProcessBuilder(
+                    "pg_dump",
+                    "--host=" + dbHost,
+                    "--port=" + dbPort,
+                    "--username=" + dbUser,
+                    "--dbname=" + dbName,
+                    "--no-password",
+                    "--verbose",
+                    "--clean",
+                    "--if-exists",
+                    "--create",
+                    "--file=" + tempBackupFile.toString()
+            );
+
+            // Set PGPASSWORD environment variable
+            Map<String, String> env = pb.environment();
+            env.put("PGPASSWORD", dbPassword);
+
+            System.err.println("Starting PostgreSQL backup using pg_dump...");
+            Process process = pb.start();
+
+            // Capture output
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()))) {
+                String line;
+                while ((line = reader.readLine()) != null) {
+                    System.err.println("pg_dump: " + line);
+                }
+            }
+
+            int exitCode = process.waitFor();
+            if (exitCode != 0) {
+                throw new RuntimeException("pg_dump failed with exit code: " + exitCode);
+            }
+
+            System.err.println("PostgreSQL backup completed successfully");
+
+            // Read the backup file into memory
+            byte[] backupData = Files.readAllBytes(tempBackupFile);
+            return new ByteArrayResource(backupData);
+
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException("Backup process was interrupted", e);
+        } finally {
+            // Clean up temporary file
+            try {
+                Files.deleteIfExists(tempBackupFile);
+            } catch (IOException e) {
+                System.err.println("Warning: Could not delete temporary backup file: " + e.getMessage());
+            }
+        }
     }
 
     @Transactional(timeout = 1800) // 30 minutes timeout for large backup restores
     public void restoreFromBackup(InputStream backupStream) throws IOException, SQLException {
-        // Read the SQL file
-        StringBuilder sqlContent = new StringBuilder();
-        try (BufferedReader reader = new BufferedReader(new InputStreamReader(backupStream, StandardCharsets.UTF_8))) {
-            String line;
-            while ((line = reader.readLine()) != null) {
-                // Skip comments and empty lines
-                if (!line.trim().startsWith("--") && !line.trim().isEmpty()) {
-                    sqlContent.append(line).append("\n");
-                }
-            }
-        }
-
-        // Execute the SQL statements
-        try (Connection connection = getDataSource().getConnection()) {
-            connection.setAutoCommit(false);
-
-            try {
-                // Ensure database schema exists before restoring data
-                ensureDatabaseSchemaExists(connection);
-
-                // Parse SQL statements properly (handle semicolons inside string literals)
-                List<String> statements = parseStatements(sqlContent.toString());
-                System.err.println("Parsed " + statements.size() + " SQL statements. Starting execution...");
-
-                int successCount = 0;
-                for (String statement : statements) {
-                    String trimmedStatement = statement.trim();
-                    if (!trimmedStatement.isEmpty()) {
-                        try (PreparedStatement stmt = connection.prepareStatement(trimmedStatement)) {
-                            stmt.setQueryTimeout(300); // 5 minute timeout per statement
-                            stmt.executeUpdate();
-                            successCount++;
-
-                            // Progress logging and batch commits for large restores
-                            if (successCount % 100 == 0) {
-                                System.err.println("Executed " + successCount + "/" + statements.size() + " statements...");
-                            }
-
-                            // Commit every 500 statements to avoid huge transactions
-                            if (successCount % 500 == 0) {
-                                connection.commit();
-                                System.err.println("Committed batch at " + successCount + " statements");
-                            }
-                        } catch (SQLException e) {
-                            // Log detailed error information for failed statements
-                            System.err.println("ERROR: Failed to execute SQL statement #" + (successCount + 1));
-                            System.err.println("Error: " + e.getMessage());
-                            System.err.println("SQL State: " + e.getSQLState());
-                            System.err.println("Error Code: " + e.getErrorCode());
-
-                            // Show the problematic statement (first 500 chars)
-                            String statementPreview = trimmedStatement.length() > 500 ?
-                                    trimmedStatement.substring(0, 500) + "..." : trimmedStatement;
-                            System.err.println("Statement: " + statementPreview);
-
-                            throw e; // Re-throw to trigger rollback
-                        }
-                    }
-                }
-
-                connection.commit();
-                System.err.println("Restore completed successfully. Executed " + successCount + " SQL statements.");
-
-                // Reindex search after successful restore
-                try {
-                    String currentLibraryId = libraryService.getCurrentLibraryId();
-                    System.err.println("Starting search reindex after successful restore for library: " + currentLibraryId);
-                    if (currentLibraryId == null) {
-                        System.err.println("ERROR: No current library set during restore - cannot reindex search!");
-                        throw new IllegalStateException("No current library active during restore");
-                    }
-
-                    // Manually trigger reindexing using the correct database connection
-                    System.err.println("Triggering manual reindex from library-specific database for library: " + currentLibraryId);
-                    reindexStoriesAndAuthorsFromCurrentDatabase();
-
-                    // Note: Collections collection will be recreated when needed by the service
-                    System.err.println("Search reindex completed successfully for library: " + currentLibraryId);
-                } catch (Exception e) {
-                    // Log the error but don't fail the restore
-                    System.err.println("Warning: Failed to reindex search after restore: " + e.getMessage());
-                    e.printStackTrace();
-                }
-
-            } catch (SQLException e) {
-                connection.rollback();
-                throw e;
-            } finally {
-                connection.setAutoCommit(true);
-            }
-        }
+        // Use PostgreSQL's native psql for reliable restore
+        String dbHost = extractDatabaseHost();
+        String dbPort = extractDatabasePort();
+        String dbName = extractDatabaseName();
+        String dbUser = extractDatabaseUsername();
+        String dbPassword = extractDatabasePassword();
+
+        // Create temporary file for the backup
+        Path tempBackupFile = Files.createTempFile("storycove_restore_", ".sql");
+
+        try {
+            // Write backup stream to temporary file
+            System.err.println("Writing backup data to temporary file...");
+            try (InputStream input = backupStream;
+                 OutputStream output = Files.newOutputStream(tempBackupFile)) {
+                byte[] buffer = new byte[8192];
+                int bytesRead;
+                while ((bytesRead = input.read(buffer)) != -1) {
+                    output.write(buffer, 0, bytesRead);
+                }
+            }
+
+            System.err.println("Starting PostgreSQL restore using psql...");
+
+            // Build psql command to restore the backup
+            ProcessBuilder pb = new ProcessBuilder(
+                    "psql",
+                    "--host=" + dbHost,
+                    "--port=" + dbPort,
+                    "--username=" + dbUser,
+                    "--dbname=" + dbName,
+                    "--no-password",
+                    "--echo-errors",
+                    "--file=" + tempBackupFile.toString()
+            );
+
+            // Set PGPASSWORD environment variable
+            Map<String, String> env = pb.environment();
+            env.put("PGPASSWORD", dbPassword);
+
+            Process process = pb.start();
+
+            // Capture output
+            try (BufferedReader reader = new BufferedReader(new InputStreamReader(process.getErrorStream()));
+                 BufferedReader outputReader = new BufferedReader(new InputStreamReader(process.getInputStream()))) {
+
+                // Read stderr in a separate thread
+                Thread errorThread = new Thread(() -> {
+                    try {
+                        String line;
+                        while ((line = reader.readLine()) != null) {
+                            System.err.println("psql stderr: " + line);
+                        }
+                    } catch (IOException e) {
+                        System.err.println("Error reading psql stderr: " + e.getMessage());
+                    }
+                });
+                errorThread.start();
+
+                // Read stdout
+                String line;
+                while ((line = outputReader.readLine()) != null) {
+                    System.err.println("psql stdout: " + line);
+                }
+
+                errorThread.join();
+            }
+
+            int exitCode = process.waitFor();
+            if (exitCode != 0) {
+                throw new RuntimeException("psql restore failed with exit code: " + exitCode);
+            }
+
+            System.err.println("PostgreSQL restore completed successfully");
+
+            // Reindex search after successful restore
+            try {
+                String currentLibraryId = libraryService.getCurrentLibraryId();
+                System.err.println("Starting search reindex after successful restore for library: " + currentLibraryId);
+                if (currentLibraryId == null) {
+                    System.err.println("ERROR: No current library set during restore - cannot reindex search!");
+                    throw new IllegalStateException("No current library active during restore");
+                }
+
+                // Manually trigger reindexing using the correct database connection
+                System.err.println("Triggering manual reindex from library-specific database for library: " + currentLibraryId);
+                reindexStoriesAndAuthorsFromCurrentDatabase();
+
+                // Note: Collections collection will be recreated when needed by the service
+                System.err.println("Search reindex completed successfully for library: " + currentLibraryId);
+            } catch (Exception e) {
+                // Log the error but don't fail the restore
+                System.err.println("Warning: Failed to reindex search after restore: " + e.getMessage());
+                e.printStackTrace();
+            }
+
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+            throw new RuntimeException("Restore process was interrupted", e);
+        } finally {
+            // Clean up temporary file
+            try {
+                Files.deleteIfExists(tempBackupFile);
+            } catch (IOException e) {
+                System.err.println("Warning: Could not delete temporary restore file: " + e.getMessage());
+            }
+        }
     }
 }
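In the rewritten restore, the uploaded stream is spooled to a temporary file with a manual 8 KB buffer loop, and psql's stderr is drained on a separate thread while the main thread reads stdout, so neither pipe buffer can fill up and stall the child process. The copy step could equally be expressed with Files.copy; a sketch of that alternative (assuming the same temp-file naming, not what the commit uses):

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;

public class StreamToTempFile {
    // Equivalent of the manual buffer loop: spool an uploaded backup to a temp file.
    static Path spool(InputStream backupStream) throws IOException {
        Path tempFile = Files.createTempFile("storycove_restore_", ".sql");
        try (InputStream in = backupStream) {
            // REPLACE_EXISTING is needed because createTempFile already created the file.
            Files.copy(in, tempFile, StandardCopyOption.REPLACE_EXISTING);
        }
        return tempFile;
    }
}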
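For completeness, a hypothetical caller-side sketch of the new round trip: createBackup() returns the full pg_dump output as an in-memory ByteArrayResource, and restoreFromBackup() accepts any InputStream, so a saved dump file can be fed straight back in. The file name and wiring below are illustrative only, not part of the commit:

import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardCopyOption;
import java.sql.SQLException;
import org.springframework.core.io.Resource;

public class BackupRoundTripDemo {
    // Hypothetical usage: save a backup to disk, then restore from the saved file.
    static void roundTrip(DatabaseManagementService service) throws IOException, SQLException {
        // createBackup() wraps the pg_dump output in a ByteArrayResource.
        Resource backup = service.createBackup();
        Path target = Path.of("storycove_backup.sql"); // illustrative file name
        try (InputStream in = backup.getInputStream()) {
            Files.copy(in, target, StandardCopyOption.REPLACE_EXISTING);
        }

        // restoreFromBackup() reads any InputStream, so the saved file works as input.
        try (InputStream in = Files.newInputStream(target)) {
            service.restoreFromBackup(in);
        }
    }
}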