Merge branch 'main' into statistics

Stefan Hardegger
2025-10-21 07:58:25 +02:00
18 changed files with 1417 additions and 72 deletions

View File

@@ -0,0 +1,111 @@
package com.storycove.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
/**
* Runs database migrations on application startup.
* This ensures all library databases have the required schema,
* particularly for tables like backup_jobs that were added after initial deployment.
*/
@Component
@Order(1) // Run early in startup sequence
public class DatabaseMigrationRunner implements CommandLineRunner {
private static final Logger logger = LoggerFactory.getLogger(DatabaseMigrationRunner.class);
@Autowired
private DataSource dataSource;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
// List of all library databases that need migrations
private static final List<String> LIBRARY_DATABASES = Arrays.asList(
"storycove", // default database
"storycove_afterdark",
"storycove_clas",
"storycove_secret"
);
// SQL for backup_jobs table migration (idempotent)
private static final String BACKUP_JOBS_MIGRATION = """
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
""";
@Override
public void run(String... args) throws Exception {
logger.info("🗄️ Starting database migrations...");
for (String database : LIBRARY_DATABASES) {
try {
applyMigrations(database);
logger.info("✅ Successfully applied migrations to database: {}", database);
} catch (Exception e) {
// Log error but don't fail startup if database doesn't exist yet
if (e.getMessage() != null && e.getMessage().contains("does not exist")) {
logger.warn("⚠️ Database {} does not exist yet, skipping migrations", database);
} else {
logger.error("❌ Failed to apply migrations to database: {}", database, e);
// Don't throw - allow application to start even if some migrations fail
}
}
}
logger.info("✅ Database migrations completed");
}
private void applyMigrations(String database) throws Exception {
// We need to connect directly to each database, not through SmartRoutingDataSource
// Build connection URL from the default datasource URL
// Borrow a connection briefly for metadata and close it, instead of leaking it
String originalUrl;
try (Connection metaConn = dataSource.getConnection()) {
originalUrl = metaConn.getMetaData().getURL();
}
String baseUrl = originalUrl.substring(0, originalUrl.lastIndexOf('/'));
String targetUrl = baseUrl + "/" + database;
// Connect directly to target database using credentials from application properties
try (Connection conn = java.sql.DriverManager.getConnection(
targetUrl,
dbUsername,
dbPassword
)) {
// Apply backup_jobs migration
try (Statement stmt = conn.createStatement()) {
stmt.execute(BACKUP_JOBS_MIGRATION);
}
logger.debug("Applied backup_jobs migration to {}", database);
}
}
}
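
Note: because the migration uses CREATE TABLE IF NOT EXISTS and CREATE INDEX IF NOT EXISTS throughout, rerunning it on every startup is safe. A minimal sketch of that property, reusing the BACKUP_JOBS_MIGRATION constant above (the JDBC URL and credentials are placeholders, not values from this commit):

// Sketch: executing the idempotent migration twice must succeed both times
// and leave exactly one backup_jobs table behind.
try (Connection conn = java.sql.DriverManager.getConnection(
        "jdbc:postgresql://localhost:5432/storycove", "user", "password");
     Statement stmt = conn.createStatement()) {
    stmt.execute(BACKUP_JOBS_MIGRATION); // first run creates the table and indexes
    stmt.execute(BACKUP_JOBS_MIGRATION); // second run is a no-op
}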

View File

@@ -1,6 +1,8 @@
package com.storycove.controller;
import com.storycove.service.AsyncBackupService;
import com.storycove.service.DatabaseManagementService;
import com.storycove.service.LibraryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
@@ -12,6 +14,7 @@ import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
@RestController
@@ -21,6 +24,12 @@ public class DatabaseController {
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private AsyncBackupService asyncBackupService;
@Autowired
private LibraryService libraryService;
@PostMapping("/backup")
public ResponseEntity<Resource> backupDatabase() {
try {
@@ -83,19 +92,141 @@ public class DatabaseController {
}
@PostMapping("/backup-complete")
public ResponseEntity<Map<String, Object>> backupCompleteAsync() {
try {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No library selected"));
}
// Start backup job asynchronously
com.storycove.entity.BackupJob job = asyncBackupService.startBackupJob(
libraryId,
com.storycove.entity.BackupJob.BackupType.COMPLETE
);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Backup started",
"jobId", job.getId().toString(),
"status", job.getStatus().toString()
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to start backup: " + e.getMessage()));
}
}
@GetMapping("/backup-status/{jobId}")
public ResponseEntity<Map<String, Object>> getBackupStatus(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
if (jobOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
com.storycove.entity.BackupJob job = jobOpt.get();
return ResponseEntity.ok(Map.of(
"success", true,
"jobId", job.getId().toString(),
"status", job.getStatus().toString(),
"progress", job.getProgressPercent(),
"fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0,
"createdAt", job.getCreatedAt().toString(),
"completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "",
"errorMessage", job.getErrorMessage() != null ? job.getErrorMessage() : ""
));
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid job ID"));
}
}
@GetMapping("/backup-download/{jobId}")
public ResponseEntity<Resource> downloadBackup(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
Resource backup = asyncBackupService.getBackupFile(uuid);
java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
if (jobOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
com.storycove.entity.BackupJob job = jobOpt.get();
String timestamp = job.getCreatedAt().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String extension = job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE ? "zip" : "sql";
String filename = "storycove_backup_" + timestamp + "." + extension;
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.header(HttpHeaders.CONTENT_TYPE, "application/zip")
.header(HttpHeaders.CONTENT_TYPE,
job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE
? "application/zip"
: "application/sql")
.body(backup);
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest().build();
} catch (Exception e) {
throw new RuntimeException("Failed to create complete backup: " + e.getMessage(), e);
throw new RuntimeException("Failed to download backup: " + e.getMessage(), e);
}
}
@GetMapping("/backup-list")
public ResponseEntity<Map<String, Object>> listBackups() {
try {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No library selected"));
}
List<com.storycove.entity.BackupJob> jobs = asyncBackupService.listBackupJobs(libraryId);
List<Map<String, Object>> jobsList = jobs.stream()
.map(job -> {
Map<String, Object> jobMap = new java.util.HashMap<>();
jobMap.put("jobId", job.getId().toString());
jobMap.put("type", job.getType().toString());
jobMap.put("status", job.getStatus().toString());
jobMap.put("progress", job.getProgressPercent());
jobMap.put("fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0L);
jobMap.put("createdAt", job.getCreatedAt().toString());
jobMap.put("completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "");
return jobMap;
})
.collect(java.util.stream.Collectors.toList());
return ResponseEntity.ok(Map.of(
"success", true,
"backups", jobsList
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to list backups: " + e.getMessage()));
}
}
@DeleteMapping("/backup/{jobId}")
public ResponseEntity<Map<String, Object>> deleteBackup(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
asyncBackupService.deleteBackupJob(uuid);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Backup deleted successfully"
));
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid job ID"));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to delete backup: " + e.getMessage()));
}
}
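
The complete-backup flow above changes from a single blocking download to start / poll / download. A client-side sketch of that sequence, assuming the controller is mounted under /api/database (the @RequestMapping line is outside this hunk) and a local instance:

// Hypothetical polling client for the async backup API.
static void runCompleteBackup() throws java.io.IOException, InterruptedException {
    var client = java.net.http.HttpClient.newHttpClient();
    String base = "http://localhost:8080/api/database"; // assumed base path
    // 1. POST /backup-complete -> {"success":true,"jobId":"...","status":"PENDING"}
    var start = client.send(
            java.net.http.HttpRequest.newBuilder(java.net.URI.create(base + "/backup-complete"))
                    .POST(java.net.http.HttpRequest.BodyPublishers.noBody()).build(),
            java.net.http.HttpResponse.BodyHandlers.ofString());
    String jobId = start.body().replaceAll(".*\"jobId\":\"([^\"]+)\".*", "$1"); // crude JSON scrape, fine for a sketch
    // 2. Poll /backup-status/{jobId} until the job leaves PENDING/IN_PROGRESS.
    String status;
    do {
        Thread.sleep(2000);
        status = client.send(
                java.net.http.HttpRequest.newBuilder(java.net.URI.create(base + "/backup-status/" + jobId)).build(),
                java.net.http.HttpResponse.BodyHandlers.ofString()).body();
    } while (status.contains("PENDING") || status.contains("IN_PROGRESS"));
    // 3. On COMPLETED, GET /backup-download/{jobId} streams the archive as an attachment.
}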

View File

@@ -0,0 +1,195 @@
package com.storycove.entity;
import jakarta.persistence.*;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "backup_jobs")
public class BackupJob {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@Column(nullable = false)
private String libraryId;
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private BackupType type;
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private BackupStatus status;
@Column
private String filePath;
@Column
private Long fileSizeBytes;
@Column
private Integer progressPercent;
@Column(length = 1000)
private String errorMessage;
@Column(nullable = false)
private LocalDateTime createdAt;
@Column
private LocalDateTime startedAt;
@Column
private LocalDateTime completedAt;
@Column
private LocalDateTime expiresAt;
@PrePersist
protected void onCreate() {
createdAt = LocalDateTime.now();
// Backups expire after 24 hours
expiresAt = LocalDateTime.now().plusDays(1);
}
// Enums
public enum BackupType {
DATABASE_ONLY,
COMPLETE
}
public enum BackupStatus {
PENDING,
IN_PROGRESS,
COMPLETED,
FAILED,
EXPIRED
}
// Constructors
public BackupJob() {
}
public BackupJob(String libraryId, BackupType type) {
this.libraryId = libraryId;
this.type = type;
this.status = BackupStatus.PENDING;
this.progressPercent = 0;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getLibraryId() {
return libraryId;
}
public void setLibraryId(String libraryId) {
this.libraryId = libraryId;
}
public BackupType getType() {
return type;
}
public void setType(BackupType type) {
this.type = type;
}
public BackupStatus getStatus() {
return status;
}
public void setStatus(BackupStatus status) {
this.status = status;
}
public String getFilePath() {
return filePath;
}
public void setFilePath(String filePath) {
this.filePath = filePath;
}
public Long getFileSizeBytes() {
return fileSizeBytes;
}
public void setFileSizeBytes(Long fileSizeBytes) {
this.fileSizeBytes = fileSizeBytes;
}
public Integer getProgressPercent() {
return progressPercent;
}
public void setProgressPercent(Integer progressPercent) {
this.progressPercent = progressPercent;
}
public String getErrorMessage() {
return errorMessage;
}
public void setErrorMessage(String errorMessage) {
this.errorMessage = errorMessage;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getStartedAt() {
return startedAt;
}
public void setStartedAt(LocalDateTime startedAt) {
this.startedAt = startedAt;
}
public LocalDateTime getCompletedAt() {
return completedAt;
}
public void setCompletedAt(LocalDateTime completedAt) {
this.completedAt = completedAt;
}
public LocalDateTime getExpiresAt() {
return expiresAt;
}
public void setExpiresAt(LocalDateTime expiresAt) {
this.expiresAt = expiresAt;
}
// Helper methods
public boolean isExpired() {
return LocalDateTime.now().isAfter(expiresAt);
}
public boolean isCompleted() {
return status == BackupStatus.COMPLETED;
}
public boolean isFailed() {
return status == BackupStatus.FAILED;
}
public boolean isInProgress() {
return status == BackupStatus.IN_PROGRESS;
}
}

View File

@@ -0,0 +1,25 @@
package com.storycove.repository;
import com.storycove.entity.BackupJob;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
@Repository
public interface BackupJobRepository extends JpaRepository<BackupJob, UUID> {
List<BackupJob> findByLibraryIdOrderByCreatedAtDesc(String libraryId);
@Query("SELECT bj FROM BackupJob bj WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
List<BackupJob> findExpiredJobs(@Param("now") LocalDateTime now);
@Modifying
@Query("UPDATE BackupJob bj SET bj.status = 'EXPIRED' WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
int markExpiredJobs(@Param("now") LocalDateTime now);
}

View File

@@ -86,6 +86,9 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since")
long countStoriesCreatedSince(@Param("since") LocalDateTime since);
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since OR s.updatedAt >= :since")
long countStoriesModifiedAfter(@Param("since") LocalDateTime since);
@Query("SELECT AVG(s.wordCount) FROM Story s")
Double findAverageWordCount();
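
One caveat on the new aggregate query: JPQL AVG returns null rather than 0 when the table is empty, so callers of findAverageWordCount() should guard the result:

// AVG over zero rows yields null, so unbox defensively.
Double avg = storyRepository.findAverageWordCount();
double averageWordCount = (avg != null) ? avg : 0.0;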

View File

@@ -0,0 +1,125 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.Optional;
import java.util.UUID;
/**
* Separate service for async backup execution.
* This is needed because @Async doesn't work when called from within the same class.
*/
@Service
public class AsyncBackupExecutor {
private static final Logger logger = LoggerFactory.getLogger(AsyncBackupExecutor.class);
@Value("${storycove.upload.dir:/app/images}")
private String uploadDir;
@Autowired
private BackupJobRepository backupJobRepository;
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private LibraryService libraryService;
/**
* Execute backup asynchronously.
* This method MUST be in a separate service class for @Async to work properly.
*/
@Async
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void executeBackupAsync(UUID jobId) {
logger.info("Async executor starting for job {}", jobId);
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
logger.error("Backup job not found: {}", jobId);
return;
}
BackupJob job = jobOpt.get();
job.setStatus(BackupJob.BackupStatus.IN_PROGRESS);
job.setStartedAt(LocalDateTime.now());
job.setProgressPercent(0);
backupJobRepository.save(job);
try {
logger.info("Starting backup job {} for library {}", job.getId(), job.getLibraryId());
// Switch to the correct library
if (!job.getLibraryId().equals(libraryService.getCurrentLibraryId())) {
libraryService.switchToLibraryAfterAuthentication(job.getLibraryId());
}
// Create backup file
Path backupDir = Paths.get(uploadDir, "backups", job.getLibraryId());
Files.createDirectories(backupDir);
String filename = String.format("backup_%s_%s.%s",
job.getId().toString(),
LocalDateTime.now().toString().replaceAll(":", "-"),
job.getType() == BackupJob.BackupType.COMPLETE ? "zip" : "sql");
Path backupFile = backupDir.resolve(filename);
job.setProgressPercent(10);
backupJobRepository.save(job);
// Create the backup
Resource backupResource;
if (job.getType() == BackupJob.BackupType.COMPLETE) {
backupResource = databaseManagementService.createCompleteBackup();
} else {
backupResource = databaseManagementService.createBackup();
}
job.setProgressPercent(80);
backupJobRepository.save(job);
// Copy resource to permanent file
try (var inputStream = backupResource.getInputStream();
var outputStream = Files.newOutputStream(backupFile)) {
inputStream.transferTo(outputStream);
}
job.setProgressPercent(95);
backupJobRepository.save(job);
// Set file info
job.setFilePath(backupFile.toString());
job.setFileSizeBytes(Files.size(backupFile));
job.setStatus(BackupJob.BackupStatus.COMPLETED);
job.setCompletedAt(LocalDateTime.now());
job.setProgressPercent(100);
logger.info("Backup job {} completed successfully. File size: {} bytes",
job.getId(), job.getFileSizeBytes());
} catch (Exception e) {
logger.error("Backup job {} failed", job.getId(), e);
job.setStatus(BackupJob.BackupStatus.FAILED);
job.setErrorMessage(e.getMessage());
job.setCompletedAt(LocalDateTime.now());
} finally {
backupJobRepository.save(job);
}
}
}
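
The class comment above is worth unpacking: Spring applies @Async through a proxy around the bean, so a call that never leaves the class never crosses the proxy and runs synchronously on the caller's thread. A sketch of the anti-pattern this split avoids:

// Anti-pattern sketch: self-invocation bypasses the @Async proxy.
@Service
class BrokenBackupService {
    public void start(UUID jobId) {
        executeBackupAsync(jobId); // plain 'this' call: @Async is silently ignored
    }

    @Async
    public void executeBackupAsync(UUID jobId) { /* runs on the caller's thread here */ }
}

Moving the @Async method into AsyncBackupExecutor and injecting it into AsyncBackupService guarantees the call goes through the proxy.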

View File

@@ -0,0 +1,167 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Service
public class AsyncBackupService {
private static final Logger logger = LoggerFactory.getLogger(AsyncBackupService.class);
@Value("${storycove.upload.dir:/app/images}")
private String uploadDir;
@Autowired
private BackupJobRepository backupJobRepository;
@Autowired
private AsyncBackupExecutor asyncBackupExecutor;
/**
* Start a backup job asynchronously.
* This method returns immediately after creating the job record.
*/
@Transactional
public BackupJob startBackupJob(String libraryId, BackupJob.BackupType type) {
logger.info("Creating backup job for library: {}, type: {}", libraryId, type);
BackupJob job = new BackupJob(libraryId, type);
job = backupJobRepository.save(job);
logger.info("Backup job created with ID: {}. Starting async execution...", job.getId());
// Start backup in background using separate service (ensures @Async works properly)
asyncBackupExecutor.executeBackupAsync(job.getId());
logger.info("Async backup execution triggered for job: {}", job.getId());
return job;
}
/**
* Get backup job status
*/
public Optional<BackupJob> getJobStatus(UUID jobId) {
return backupJobRepository.findById(jobId);
}
/**
* Get backup file for download
*/
public Resource getBackupFile(UUID jobId) throws IOException {
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
throw new IOException("Backup job not found");
}
BackupJob job = jobOpt.get();
if (!job.isCompleted()) {
throw new IOException("Backup is not completed yet");
}
if (job.isExpired()) {
throw new IOException("Backup has expired");
}
if (job.getFilePath() == null) {
throw new IOException("Backup file path not set");
}
Path backupPath = Paths.get(job.getFilePath());
if (!Files.exists(backupPath)) {
throw new IOException("Backup file not found");
}
return new FileSystemResource(backupPath);
}
/**
* List backup jobs for a library
*/
public List<BackupJob> listBackupJobs(String libraryId) {
return backupJobRepository.findByLibraryIdOrderByCreatedAtDesc(libraryId);
}
/**
* Clean up expired backup jobs and their files
* Runs daily at 2 AM
*/
@Scheduled(cron = "0 0 2 * * ?")
@Transactional
public void cleanupExpiredBackups() {
logger.info("Starting cleanup of expired backups");
LocalDateTime now = LocalDateTime.now();
// Mark expired jobs
int markedCount = backupJobRepository.markExpiredJobs(now);
logger.info("Marked {} jobs as expired", markedCount);
// Find all expired jobs to delete their files
List<BackupJob> expiredJobs = backupJobRepository.findExpiredJobs(now);
for (BackupJob job : expiredJobs) {
if (job.getFilePath() != null) {
try {
Path filePath = Paths.get(job.getFilePath());
if (Files.exists(filePath)) {
Files.delete(filePath);
logger.info("Deleted expired backup file: {}", filePath);
}
} catch (IOException e) {
logger.warn("Failed to delete expired backup file: {}", job.getFilePath(), e);
}
}
// Delete the job record
backupJobRepository.delete(job);
}
logger.info("Cleanup completed. Deleted {} expired backups", expiredJobs.size());
}
/**
* Delete a specific backup job and its file
*/
@Transactional
public void deleteBackupJob(UUID jobId) throws IOException {
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
throw new IOException("Backup job not found");
}
BackupJob job = jobOpt.get();
// Delete file if it exists
if (job.getFilePath() != null) {
Path filePath = Paths.get(job.getFilePath());
if (Files.exists(filePath)) {
Files.delete(filePath);
logger.info("Deleted backup file: {}", filePath);
}
}
// Delete job record
backupJobRepository.delete(job);
logger.info("Deleted backup job: {}", jobId);
}
}
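
Both @Async (in AsyncBackupExecutor) and the @Scheduled cleanup above are inert unless they are switched on once in configuration. Assuming the application does not already do this elsewhere, a minimal sketch:

// Sketch: required once per application for @Async and @Scheduled to take effect.
@Configuration
@EnableAsync
@EnableScheduling
class AsyncSchedulingConfig {
}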

View File

@@ -0,0 +1,262 @@
package com.storycove.service;
import com.storycove.repository.StoryRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Service for automatic daily backups.
* Runs daily at 4 AM and creates a backup if content has changed since the last backup.
* Keeps a maximum of 5 backups, rotating the oldest ones out.
*/
@Service
public class AutomaticBackupService {
private static final Logger logger = LoggerFactory.getLogger(AutomaticBackupService.class);
private static final int MAX_BACKUPS = 5;
private static final DateTimeFormatter FILENAME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");
@Value("${storycove.automatic-backup.dir:/app/automatic-backups}")
private String automaticBackupDir;
@Autowired
private StoryRepository storyRepository;
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private LibraryService libraryService;
private LocalDateTime lastBackupCheck = null;
/**
* Scheduled job that runs daily at 4 AM.
* Creates a backup if content has changed since last backup.
*/
@Scheduled(cron = "0 0 4 * * ?")
public void performAutomaticBackup() {
logger.info("========================================");
logger.info("Starting automatic backup check at 4 AM");
logger.info("========================================");
try {
// Get current library ID (or default)
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
libraryId = "default";
}
logger.info("Checking for content changes in library: {}", libraryId);
// Check if content has changed since last backup
if (!hasContentChanged()) {
logger.info("No content changes detected since last backup. Skipping backup.");
logger.info("========================================");
return;
}
logger.info("Content changes detected! Creating automatic backup...");
// Create backup directory for this library
Path backupPath = Paths.get(automaticBackupDir, libraryId);
Files.createDirectories(backupPath);
// Create the backup
String timestamp = LocalDateTime.now().format(FILENAME_FORMATTER);
String filename = String.format("auto_backup_%s.zip", timestamp);
Path backupFile = backupPath.resolve(filename);
logger.info("Creating complete backup to: {}", backupFile);
Resource backup = databaseManagementService.createCompleteBackup();
// Write backup to file
try (var inputStream = backup.getInputStream();
var outputStream = Files.newOutputStream(backupFile)) {
inputStream.transferTo(outputStream);
}
long fileSize = Files.size(backupFile);
logger.info("✅ Automatic backup created successfully");
logger.info(" File: {}", backupFile.getFileName());
logger.info(" Size: {} MB", fileSize / 1024 / 1024);
// Rotate old backups (keep only MAX_BACKUPS)
rotateBackups(backupPath);
// Update last backup check time
lastBackupCheck = LocalDateTime.now();
logger.info("========================================");
logger.info("Automatic backup completed successfully");
logger.info("========================================");
} catch (Exception e) {
logger.error("❌ Automatic backup failed", e);
logger.info("========================================");
}
}
/**
* Check if content has changed since last backup.
* Looks for stories created or updated after the last backup time.
*/
private boolean hasContentChanged() {
try {
if (lastBackupCheck == null) {
// First run - check if there are any stories at all
long storyCount = storyRepository.count();
logger.info("First backup check - found {} stories", storyCount);
return storyCount > 0;
}
// Check for stories created or updated since last backup
long changedCount = storyRepository.countStoriesModifiedAfter(lastBackupCheck);
logger.info("Found {} stories modified since last backup ({})", changedCount, lastBackupCheck);
return changedCount > 0;
} catch (Exception e) {
logger.error("Error checking for content changes", e);
// On error, create backup to be safe
return true;
}
}
/**
* Rotate backups - keep only MAX_BACKUPS most recent backups.
* Deletes older backups.
*/
private void rotateBackups(Path backupPath) throws IOException {
logger.info("Checking for old backups to rotate...");
// Find all backup files in the directory
List<Path> backupFiles;
try (Stream<Path> stream = Files.list(backupPath)) {
backupFiles = stream
.filter(Files::isRegularFile)
.filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
.filter(p -> p.getFileName().toString().endsWith(".zip"))
.sorted(Comparator.comparing((Path p) -> {
try {
return Files.getLastModifiedTime(p);
} catch (IOException e) {
// Returning null here would make Comparator.comparing throw a
// NullPointerException during the sort; treat unreadable files as oldest instead
return java.nio.file.attribute.FileTime.fromMillis(0);
}
}).reversed()) // Most recent first
.collect(Collectors.toList());
}
logger.info("Found {} automatic backups", backupFiles.size());
// Delete old backups if we exceed MAX_BACKUPS
if (backupFiles.size() > MAX_BACKUPS) {
List<Path> toDelete = backupFiles.subList(MAX_BACKUPS, backupFiles.size());
logger.info("Deleting {} old backups to maintain maximum of {}", toDelete.size(), MAX_BACKUPS);
for (Path oldBackup : toDelete) {
try {
Files.delete(oldBackup);
logger.info(" Deleted old backup: {}", oldBackup.getFileName());
} catch (IOException e) {
logger.warn("Failed to delete old backup: {}", oldBackup, e);
}
}
} else {
logger.info("Backup count within limit ({}), no rotation needed", MAX_BACKUPS);
}
}
/**
* Manual trigger for testing - creates backup immediately if content changed.
*/
public void triggerManualBackup() {
logger.info("Manual automatic backup triggered");
performAutomaticBackup();
}
/**
* Get list of automatic backups for the current library.
*/
public List<BackupInfo> listAutomaticBackups() throws IOException {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
libraryId = "default";
}
Path backupPath = Paths.get(automaticBackupDir, libraryId);
if (!Files.exists(backupPath)) {
return List.of();
}
try (Stream<Path> stream = Files.list(backupPath)) {
return stream
.filter(Files::isRegularFile)
.filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
.filter(p -> p.getFileName().toString().endsWith(".zip"))
.sorted(Comparator.comparing((Path p) -> {
try {
return Files.getLastModifiedTime(p);
} catch (IOException e) {
// Same null-safety fix as in rotateBackups(): a null sort key would NPE
return java.nio.file.attribute.FileTime.fromMillis(0);
}
}).reversed())
.map(p -> {
try {
return new BackupInfo(
p.getFileName().toString(),
Files.size(p),
Files.getLastModifiedTime(p).toInstant().toString()
);
} catch (IOException e) {
return null;
}
})
.filter(info -> info != null)
.collect(Collectors.toList());
}
}
/**
* Simple backup info class.
*/
public static class BackupInfo {
private final String filename;
private final long sizeBytes;
private final String createdAt;
public BackupInfo(String filename, long sizeBytes, String createdAt) {
this.filename = filename;
this.sizeBytes = sizeBytes;
this.createdAt = createdAt;
}
public String getFilename() {
return filename;
}
public long getSizeBytes() {
return sizeBytes;
}
public String getCreatedAt() {
return createdAt;
}
}
}
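
Because lastBackupCheck is held only in memory, the first scheduled run after a restart creates a backup whenever any story exists; triggerManualBackup() makes that path easy to exercise. A hypothetical admin endpoint for it (not part of this commit; the mapping is illustrative):

// Hypothetical trigger endpoint; runs the backup synchronously in the request thread.
@RestController
class AutomaticBackupController {
    @Autowired
    private AutomaticBackupService automaticBackupService;

    @PostMapping("/api/admin/automatic-backup/trigger")
    public ResponseEntity<Map<String, Object>> trigger() {
        automaticBackupService.triggerManualBackup();
        return ResponseEntity.ok(Map.of("success", true));
    }
}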

View File

@@ -7,7 +7,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -141,26 +140,48 @@ public class DatabaseManagementService implements ApplicationContextAware {
/**
* Create a comprehensive backup including database and files in ZIP format
* Returns a streaming resource to avoid loading large backups into memory
*/
public Resource createCompleteBackup() throws SQLException, IOException {
// Create temp file with deleteOnExit as safety net
Path tempZip = Files.createTempFile("storycove-backup", ".zip");
tempZip.toFile().deleteOnExit();
try (ZipOutputStream zipOut = new ZipOutputStream(Files.newOutputStream(tempZip))) {
// 1. Add database dump
addDatabaseDumpToZip(zipOut);
// 2. Add all image files
addFilesToZip(zipOut);
// 3. Add metadata
addMetadataToZip(zipOut);
}
// Return the ZIP file as a FileSystemResource for streaming
// This avoids loading the entire file into memory
return new org.springframework.core.io.FileSystemResource(tempZip.toFile()) {
@Override
public InputStream getInputStream() throws IOException {
// Wrap the input stream to delete the temp file after it's fully read
return new java.io.FilterInputStream(super.getInputStream()) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Clean up temp file after streaming is complete
try {
Files.deleteIfExists(tempZip);
} catch (IOException e) {
// Log but don't fail - deleteOnExit will handle it
System.err.println("Warning: Could not delete temp backup file: " + e.getMessage());
}
}
}
};
}
};
}
/**
@@ -289,20 +310,34 @@ public class DatabaseManagementService implements ApplicationContextAware {
System.err.println("PostgreSQL backup completed successfully");
// Return the backup file as a streaming resource to avoid memory issues with large databases
tempBackupFile.toFile().deleteOnExit();
return new org.springframework.core.io.FileSystemResource(tempBackupFile.toFile()) {
@Override
public InputStream getInputStream() throws IOException {
// Wrap the input stream to delete the temp file after it's fully read
return new java.io.FilterInputStream(super.getInputStream()) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Clean up temp file after streaming is complete
try {
Files.deleteIfExists(tempBackupFile);
} catch (IOException e) {
// Log but don't fail - deleteOnExit will handle it
System.err.println("Warning: Could not delete temp backup file: " + e.getMessage());
}
}
}
};
}
};
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Backup process was interrupted", e);
}
}
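
One consequence of the self-deleting FileSystemResource worth noting: it is single-use. The temp file disappears when the wrapped stream closes, so reading the resource a second time fails. A sketch of that contract:

// Sketch: the streaming backup resource can be consumed exactly once.
Resource backup = databaseManagementService.createCompleteBackup();
try (InputStream in = backup.getInputStream()) {
    in.transferTo(OutputStream.nullOutputStream()); // first read streams the ZIP
} // close() deletes the temp file
// A second backup.getInputStream() now throws an IOException: the file is gone.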

View File

@@ -89,6 +89,8 @@ storycove:
enable-metrics: ${SOLR_ENABLE_METRICS:true}
images:
storage-path: ${IMAGE_STORAGE_PATH:/app/images}
automatic-backup:
dir: ${AUTOMATIC_BACKUP_DIR:/app/automatic-backups}
management:
endpoints: