Automatic backup
@@ -86,6 +86,9 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
     @Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since")
     long countStoriesCreatedSince(@Param("since") LocalDateTime since);
 
+    @Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since OR s.updatedAt >= :since")
+    long countStoriesModifiedAfter(@Param("since") LocalDateTime since);
+
     @Query("SELECT AVG(s.wordCount) FROM Story s")
     Double findAverageWordCount();
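The added countStoriesModifiedAfter query is what the new backup service below keys its change detection on: unlike countStoriesCreatedSince, the OR on updatedAt also counts edits to existing stories. A minimal, hedged repository-test sketch of exercising it (assuming a @DataJpaTest slice with an embedded test database; no such test is part of this commit):

import static org.assertj.core.api.Assertions.assertThat;

import com.storycove.repository.StoryRepository;
import java.time.LocalDateTime;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.autoconfigure.orm.jpa.DataJpaTest;

// Hedged sketch, not part of the commit: with an empty Story table, no row
// matches either branch of the OR clause, so the count must be zero.
@DataJpaTest
class StoryRepositoryChangeCountTest {

    @Autowired
    private StoryRepository storyRepository;

    @Test
    void returnsZeroWhenNothingChangedAfterCutoff() {
        long changed = storyRepository.countStoriesModifiedAfter(LocalDateTime.now());
        assertThat(changed).isZero();
    }
}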
@@ -0,0 +1,262 @@
package com.storycove.service;

import com.storycove.repository.StoryRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;

import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * Service for automatic daily backups.
 * Runs at 4 AM daily and creates a backup if content has changed since last backup.
 * Keeps maximum of 5 backups, rotating old ones out.
 */
@Service
public class AutomaticBackupService {

    private static final Logger logger = LoggerFactory.getLogger(AutomaticBackupService.class);
    private static final int MAX_BACKUPS = 5;
    private static final DateTimeFormatter FILENAME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");

    @Value("${storycove.automatic-backup.dir:/app/automatic-backups}")
    private String automaticBackupDir;

    @Autowired
    private StoryRepository storyRepository;

    @Autowired
    private DatabaseManagementService databaseManagementService;

    @Autowired
    private LibraryService libraryService;

    private LocalDateTime lastBackupCheck = null;

    /**
     * Scheduled job that runs daily at 4 AM.
     * Creates a backup if content has changed since last backup.
     */
    @Scheduled(cron = "0 0 4 * * ?")
    public void performAutomaticBackup() {
        logger.info("========================================");
        logger.info("Starting automatic backup check at 4 AM");
        logger.info("========================================");

        try {
            // Get current library ID (or default)
            String libraryId = libraryService.getCurrentLibraryId();
            if (libraryId == null) {
                libraryId = "default";
            }

            logger.info("Checking for content changes in library: {}", libraryId);

            // Check if content has changed since last backup
            if (!hasContentChanged()) {
                logger.info("No content changes detected since last backup. Skipping backup.");
                logger.info("========================================");
                return;
            }

            logger.info("Content changes detected! Creating automatic backup...");

            // Create backup directory for this library
            Path backupPath = Paths.get(automaticBackupDir, libraryId);
            Files.createDirectories(backupPath);

            // Create the backup
            String timestamp = LocalDateTime.now().format(FILENAME_FORMATTER);
            String filename = String.format("auto_backup_%s.zip", timestamp);
            Path backupFile = backupPath.resolve(filename);

            logger.info("Creating complete backup to: {}", backupFile);

            Resource backup = databaseManagementService.createCompleteBackup();

            // Write backup to file
            try (var inputStream = backup.getInputStream();
                 var outputStream = Files.newOutputStream(backupFile)) {
                inputStream.transferTo(outputStream);
            }

            long fileSize = Files.size(backupFile);
            logger.info("✅ Automatic backup created successfully");
            logger.info("   File: {}", backupFile.getFileName());
            logger.info("   Size: {} MB", fileSize / 1024 / 1024);

            // Rotate old backups (keep only MAX_BACKUPS)
            rotateBackups(backupPath);

            // Update last backup check time
            lastBackupCheck = LocalDateTime.now();

            logger.info("========================================");
            logger.info("Automatic backup completed successfully");
            logger.info("========================================");

        } catch (Exception e) {
            logger.error("❌ Automatic backup failed", e);
            logger.info("========================================");
        }
    }

    /**
     * Check if content has changed since last backup.
     * Looks for stories created or updated after the last backup time.
     */
    private boolean hasContentChanged() {
        try {
            if (lastBackupCheck == null) {
                // First run - check if there are any stories at all
                long storyCount = storyRepository.count();
                logger.info("First backup check - found {} stories", storyCount);
                return storyCount > 0;
            }

            // Check for stories created or updated since last backup
            long changedCount = storyRepository.countStoriesModifiedAfter(lastBackupCheck);
            logger.info("Found {} stories modified since last backup ({})", changedCount, lastBackupCheck);
            return changedCount > 0;

        } catch (Exception e) {
            logger.error("Error checking for content changes", e);
            // On error, create backup to be safe
            return true;
        }
    }

    /**
     * Rotate backups - keep only MAX_BACKUPS most recent backups.
     * Deletes older backups.
     */
    private void rotateBackups(Path backupPath) throws IOException {
        logger.info("Checking for old backups to rotate...");

        // Find all backup files in the directory
        List<Path> backupFiles;
        try (Stream<Path> stream = Files.list(backupPath)) {
            backupFiles = stream
                    .filter(Files::isRegularFile)
                    .filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
                    .filter(p -> p.getFileName().toString().endsWith(".zip"))
                    .sorted(Comparator.comparing((Path p) -> {
                        try {
                            return Files.getLastModifiedTime(p);
                        } catch (IOException e) {
                            return null;
                        }
                    }).reversed()) // Most recent first
                    .collect(Collectors.toList());
        }

        logger.info("Found {} automatic backups", backupFiles.size());

        // Delete old backups if we exceed MAX_BACKUPS
        if (backupFiles.size() > MAX_BACKUPS) {
            List<Path> toDelete = backupFiles.subList(MAX_BACKUPS, backupFiles.size());
            logger.info("Deleting {} old backups to maintain maximum of {}", toDelete.size(), MAX_BACKUPS);

            for (Path oldBackup : toDelete) {
                try {
                    Files.delete(oldBackup);
                    logger.info("   Deleted old backup: {}", oldBackup.getFileName());
                } catch (IOException e) {
                    logger.warn("Failed to delete old backup: {}", oldBackup, e);
                }
            }
        } else {
            logger.info("Backup count within limit ({}), no rotation needed", MAX_BACKUPS);
        }
    }

    /**
     * Manual trigger for testing - creates backup immediately if content changed.
     */
    public void triggerManualBackup() {
        logger.info("Manual automatic backup triggered");
        performAutomaticBackup();
    }

    /**
     * Get list of automatic backups for the current library.
     */
    public List<BackupInfo> listAutomaticBackups() throws IOException {
        String libraryId = libraryService.getCurrentLibraryId();
        if (libraryId == null) {
            libraryId = "default";
        }

        Path backupPath = Paths.get(automaticBackupDir, libraryId);
        if (!Files.exists(backupPath)) {
            return List.of();
        }

        try (Stream<Path> stream = Files.list(backupPath)) {
            return stream
                    .filter(Files::isRegularFile)
                    .filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
                    .filter(p -> p.getFileName().toString().endsWith(".zip"))
                    .sorted(Comparator.comparing((Path p) -> {
                        try {
                            return Files.getLastModifiedTime(p);
                        } catch (IOException e) {
                            return null;
                        }
                    }).reversed())
                    .map(p -> {
                        try {
                            return new BackupInfo(
                                    p.getFileName().toString(),
                                    Files.size(p),
                                    Files.getLastModifiedTime(p).toInstant().toString()
                            );
                        } catch (IOException e) {
                            return null;
                        }
                    })
                    .filter(info -> info != null)
                    .collect(Collectors.toList());
        }
    }

    /**
     * Simple backup info class.
     */
    public static class BackupInfo {
        private final String filename;
        private final long sizeBytes;
        private final String createdAt;

        public BackupInfo(String filename, long sizeBytes, String createdAt) {
            this.filename = filename;
            this.sizeBytes = sizeBytes;
            this.createdAt = createdAt;
        }

        public String getFilename() {
            return filename;
        }

        public long getSizeBytes() {
            return sizeBytes;
        }

        public String getCreatedAt() {
            return createdAt;
        }
    }
}
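One thing worth noting about the @Scheduled(cron = "0 0 4 * * ?") job above: Spring only runs @Scheduled methods when scheduling is enabled somewhere in the application context (typically via @EnableScheduling). That configuration is not shown in this commit, so the sketch below is an assumption about how it would usually look; the application class name is hypothetical.

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableScheduling;

// Hypothetical main class - the real class name is not part of this commit.
@SpringBootApplication
@EnableScheduling   // without this (or an equivalent @Configuration), the 4 AM cron never fires
public class StoryCoveApplication {
    public static void main(String[] args) {
        SpringApplication.run(StoryCoveApplication.class, args);
    }
}

For reference, the six-field expression "0 0 4 * * ?" reads second 0, minute 0, hour 4, any day of month, any month, any day of week, i.e. once per day at 04:00 server time.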
@@ -89,6 +89,8 @@ storycove:
     enable-metrics: ${SOLR_ENABLE_METRICS:true}
   images:
     storage-path: ${IMAGE_STORAGE_PATH:/app/images}
+  automatic-backup:
+    dir: ${AUTOMATIC_BACKUP_DIR:/app/automatic-backups}
 
 management:
   endpoints:
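With the storycove.automatic-backup.dir property above in place, the service can also be exercised without waiting for the 4 AM cron via its triggerManualBackup() and listAutomaticBackups() methods. This commit does not wire those into any HTTP endpoint; the controller below is a purely hypothetical sketch of how they could be exposed for testing, with an assumed package and request path.

package com.storycove.controller;   // hypothetical package, mirroring com.storycove.service

import com.storycove.service.AutomaticBackupService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.io.IOException;
import java.util.List;

// Hypothetical controller - not part of this commit; shown only to illustrate
// how the manual trigger and backup listing could be reached over HTTP.
@RestController
@RequestMapping("/api/admin/automatic-backups")
public class AutomaticBackupController {

    @Autowired
    private AutomaticBackupService automaticBackupService;

    @PostMapping("/run")
    public ResponseEntity<Void> runNow() {
        automaticBackupService.triggerManualBackup();   // backs up only if content changed
        return ResponseEntity.accepted().build();
    }

    @GetMapping
    public List<AutomaticBackupService.BackupInfo> list() throws IOException {
        return automaticBackupService.listAutomaticBackups();
    }
}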
@@ -44,6 +44,7 @@ services:
     volumes:
       - images_data:/app/images
       - library_config:/app/config
+      - automatic_backups:/app/automatic-backups
     depends_on:
       postgres:
         condition: service_healthy
@@ -106,6 +107,7 @@ volumes:
   solr_data:
   images_data:
   library_config:
+  automatic_backups:
 
 configs:
   nginx_config: