maintenance improvements

Stefan Hardegger
2025-09-26 21:41:33 +02:00
parent 74cdd5dc57
commit 5325169495
10 changed files with 377 additions and 65 deletions

View File

@@ -3,27 +3,43 @@ package com.storycove.controller;
import com.storycove.dto.HtmlSanitizationConfigDto;
import com.storycove.service.HtmlSanitizationService;
import com.storycove.service.ImageService;
import com.storycove.service.StoryService;
import com.storycove.entity.Story;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.List;
import java.util.HashMap;
import java.util.Optional;
import java.util.UUID;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
import java.io.IOException;
@RestController
@RequestMapping("/api/config")
public class ConfigController {
private static final Logger logger = LoggerFactory.getLogger(ConfigController.class);
private final HtmlSanitizationService htmlSanitizationService;
private final ImageService imageService;
private final StoryService storyService;
@Value("${app.reading.speed.default:200}")
private int defaultReadingSpeed;
@Autowired
public ConfigController(HtmlSanitizationService htmlSanitizationService, ImageService imageService) {
public ConfigController(HtmlSanitizationService htmlSanitizationService, ImageService imageService, StoryService storyService) {
this.htmlSanitizationService = htmlSanitizationService;
this.imageService = imageService;
this.storyService = storyService;
}
/**
@@ -61,27 +77,55 @@ public class ConfigController {
@PostMapping("/cleanup/images/preview")
public ResponseEntity<Map<String, Object>> previewImageCleanup() {
try {
logger.info("Starting image cleanup preview");
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(true);
Map<String, Object> response = Map.of(
"success", true,
"orphanedCount", result.getOrphanedImages().size(),
"totalSizeBytes", result.getTotalSizeBytes(),
"formattedSize", result.getFormattedSize(),
"foldersToDelete", result.getFoldersToDelete(),
"referencedImagesCount", result.getTotalReferencedImages(),
"errors", result.getErrors(),
"hasErrors", result.hasErrors(),
"dryRun", true
);
// Create detailed file information with story relationships
logger.info("Processing {} orphaned files for detailed information", result.getOrphanedImages().size());
List<Map<String, Object>> orphanedFiles = result.getOrphanedImages().stream()
.map(filePath -> {
try {
return createFileInfo(filePath);
} catch (Exception e) {
logger.error("Error processing file {}: {}", filePath, e.getMessage());
// Return a basic error entry instead of failing completely
Map<String, Object> errorEntry = new HashMap<>();
errorEntry.put("filePath", filePath);
errorEntry.put("fileName", Paths.get(filePath).getFileName().toString());
errorEntry.put("fileSize", 0L);
errorEntry.put("formattedSize", "0 B");
errorEntry.put("storyId", "error");
errorEntry.put("storyTitle", null);
errorEntry.put("storyExists", false);
errorEntry.put("canAccessStory", false);
errorEntry.put("error", e.getMessage());
return errorEntry;
}
})
.toList();
// Use HashMap to avoid Map.of() null value issues
Map<String, Object> response = new HashMap<>();
response.put("success", true);
response.put("orphanedCount", result.getOrphanedImages().size());
response.put("totalSizeBytes", result.getTotalSizeBytes());
response.put("formattedSize", result.getFormattedSize());
response.put("foldersToDelete", result.getFoldersToDelete());
response.put("referencedImagesCount", result.getTotalReferencedImages());
response.put("errors", result.getErrors());
response.put("hasErrors", result.hasErrors());
response.put("dryRun", true);
response.put("orphanedFiles", orphanedFiles);
logger.info("Image cleanup preview completed successfully");
return ResponseEntity.ok(response);
} catch (Exception e) {
return ResponseEntity.status(500).body(Map.of(
"success", false,
"error", "Failed to preview image cleanup: " + e.getMessage()
));
logger.error("Failed to preview image cleanup", e);
Map<String, Object> errorResponse = new HashMap<>();
errorResponse.put("success", false);
errorResponse.put("error", "Failed to preview image cleanup: " + (e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName()));
return ResponseEntity.status(500).body(errorResponse);
}
}
@@ -114,4 +158,89 @@ public class ConfigController {
));
}
}
/**
* Create detailed file information for an orphaned image, including its story relationship
*/
private Map<String, Object> createFileInfo(String filePath) {
try {
Path path = Paths.get(filePath);
String fileName = path.getFileName().toString();
long fileSize = Files.exists(path) ? Files.size(path) : 0;
// Extract story UUID from the path (content images are stored in /content/{storyId}/)
String storyId = extractStoryIdFromPath(filePath);
// Look up the story if we have a valid UUID
Story relatedStory = null;
if (storyId != null) {
try {
UUID storyUuid = UUID.fromString(storyId);
relatedStory = storyService.findById(storyUuid);
} catch (Exception e) {
logger.debug("Could not find story with ID {}: {}", storyId, e.getMessage());
}
}
Map<String, Object> fileInfo = new HashMap<>();
fileInfo.put("filePath", filePath);
fileInfo.put("fileName", fileName);
fileInfo.put("fileSize", fileSize);
fileInfo.put("formattedSize", formatBytes(fileSize));
fileInfo.put("storyId", storyId != null ? storyId : "unknown");
fileInfo.put("storyTitle", relatedStory != null ? relatedStory.getTitle() : null);
fileInfo.put("storyExists", relatedStory != null);
fileInfo.put("canAccessStory", relatedStory != null);
return fileInfo;
} catch (Exception e) {
logger.error("Error creating file info for {}: {}", filePath, e.getMessage());
Map<String, Object> errorInfo = new HashMap<>();
errorInfo.put("filePath", filePath);
errorInfo.put("fileName", Paths.get(filePath).getFileName().toString());
errorInfo.put("fileSize", 0L);
errorInfo.put("formattedSize", "0 B");
errorInfo.put("storyId", "error");
errorInfo.put("storyTitle", null);
errorInfo.put("storyExists", false);
errorInfo.put("canAccessStory", false);
errorInfo.put("error", e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName());
return errorInfo;
}
}
/**
* Extract story ID from content image file path
*/
private String extractStoryIdFromPath(String filePath) {
try {
// Content images are stored in: /path/to/uploads/content/{storyId}/filename.ext
Path path = Paths.get(filePath);
Path parent = path.getParent();
if (parent != null) {
String potentialUuid = parent.getFileName().toString();
// Basic UUID validation (36 characters with dashes in the right places)
if (potentialUuid.length() == 36 &&
potentialUuid.charAt(8) == '-' &&
potentialUuid.charAt(13) == '-' &&
potentialUuid.charAt(18) == '-' &&
potentialUuid.charAt(23) == '-') {
return potentialUuid;
}
}
} catch (Exception e) {
// Invalid path or other error
}
return null;
}
/**
* Format a file size in human-readable form
*/
private String formatBytes(long bytes) {
if (bytes < 1024) return bytes + " B";
if (bytes < 1024 * 1024) return String.format("%.1f KB", bytes / 1024.0);
if (bytes < 1024 * 1024 * 1024) return String.format("%.1f MB", bytes / (1024.0 * 1024.0));
return String.format("%.1f GB", bytes / (1024.0 * 1024.0 * 1024.0));
}
}
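
The helper methods added above (extractStoryIdFromPath, formatBytes) are pure functions and can be sanity-checked outside the controller. Below is a minimal standalone sketch that mirrors their logic; the class name and the sample paths are hypothetical and not part of this commit.

import java.nio.file.Path;
import java.nio.file.Paths;

// Standalone sketch mirroring the helpers added to ConfigController above.
// The class name and the sample paths in main() are illustrative only.
public class OrphanedImagePathSketch {

    // Same dash-position check as extractStoryIdFromPath: the parent directory
    // of a content image is expected to be the owning story's UUID.
    static String extractStoryIdFromPath(String filePath) {
        Path parent = Paths.get(filePath).getParent();
        if (parent == null) {
            return null;
        }
        String candidate = parent.getFileName().toString();
        if (candidate.length() == 36
                && candidate.charAt(8) == '-'
                && candidate.charAt(13) == '-'
                && candidate.charAt(18) == '-'
                && candidate.charAt(23) == '-') {
            return candidate;
        }
        return null;
    }

    // Same thresholds as formatBytes: B, KB, MB, GB with one decimal place.
    static String formatBytes(long bytes) {
        if (bytes < 1024) return bytes + " B";
        if (bytes < 1024 * 1024) return String.format("%.1f KB", bytes / 1024.0);
        if (bytes < 1024 * 1024 * 1024) return String.format("%.1f MB", bytes / (1024.0 * 1024.0));
        return String.format("%.1f GB", bytes / (1024.0 * 1024.0 * 1024.0));
    }

    public static void main(String[] args) {
        // Parent folder is a UUID-shaped name, so the story ID is returned.
        System.out.println(extractStoryIdFromPath(
                "/data/uploads/content/0b84f3a2-6d1e-4c7b-9a52-3f0e8d2c1a77/cover.jpg"));
        // Parent folder is not UUID-shaped, so null is returned.
        System.out.println(extractStoryIdFromPath("/data/uploads/content/misc/cover.jpg"));
        // Prints "1.5 KB" (decimal separator depends on the default locale).
        System.out.println(formatBytes(1536));
    }
}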

View File

@@ -97,6 +97,7 @@ public class DatabaseManagementService implements ApplicationContextAware {
/**
* Restore from complete backup (ZIP format)
*/
@Transactional(timeout = 1800) // 30 minutes timeout for large backup restores
public void restoreFromCompleteBackup(InputStream backupStream) throws IOException, SQLException {
String currentLibraryId = libraryService.getCurrentLibraryId();
System.err.println("Starting complete backup restore for library: " + currentLibraryId);
@@ -193,10 +194,15 @@ public class DatabaseManagementService implements ApplicationContextAware {
"stories", "collections", "tags", "series", "authors"
);
// Generate TRUNCATE statements for each table (assuming tables already exist)
// Generate DELETE statements for each table (safer than TRUNCATE CASCADE)
for (String tableName : truncateTables) {
sqlDump.append("-- Truncate Table: ").append(tableName).append("\n");
sqlDump.append("TRUNCATE TABLE \"").append(tableName).append("\" CASCADE;\n");
sqlDump.append("-- Clear Table: ").append(tableName).append("\n");
sqlDump.append("DELETE FROM \"").append(tableName).append("\";\n");
// Reset auto-increment sequences for tables with ID columns
if (Arrays.asList("authors", "series", "tags", "collections", "stories").contains(tableName)) {
sqlDump.append("SELECT setval(pg_get_serial_sequence('\"").append(tableName).append("\"', 'id'), 1, false);\n");
}
}
sqlDump.append("\n");
@@ -244,7 +250,7 @@ public class DatabaseManagementService implements ApplicationContextAware {
return new ByteArrayResource(backupData);
}
@Transactional
@Transactional(timeout = 1800) // 30 minutes timeout for large backup restores
public void restoreFromBackup(InputStream backupStream) throws IOException, SQLException {
// Read the SQL file
StringBuilder sqlContent = new StringBuilder();
@@ -268,14 +274,27 @@ public class DatabaseManagementService implements ApplicationContextAware {
// Parse SQL statements properly (handle semicolons inside string literals)
List<String> statements = parseStatements(sqlContent.toString());
System.err.println("Parsed " + statements.size() + " SQL statements. Starting execution...");
int successCount = 0;
for (String statement : statements) {
String trimmedStatement = statement.trim();
if (!trimmedStatement.isEmpty()) {
try (PreparedStatement stmt = connection.prepareStatement(trimmedStatement)) {
stmt.setQueryTimeout(300); // 5 minute timeout per statement
stmt.executeUpdate();
successCount++;
// Progress logging and batch commits for large restores
if (successCount % 100 == 0) {
System.err.println("Executed " + successCount + "/" + statements.size() + " statements...");
}
// Commit every 500 statements to avoid huge transactions
if (successCount % 500 == 0) {
connection.commit();
System.err.println("Committed batch at " + successCount + " statements");
}
} catch (SQLException e) {
// Log detailed error information for failed statements
System.err.println("ERROR: Failed to execute SQL statement #" + (successCount + 1));
@@ -449,7 +468,7 @@ public class DatabaseManagementService implements ApplicationContextAware {
/**
* Clear all data AND files (for complete restore)
*/
@Transactional
@Transactional(timeout = 600) // 10 minutes timeout for clearing large datasets
public int clearAllDataAndFiles() {
// First clear the database
int totalDeleted = clearAllData();
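
The restore changes above add per-statement timeouts, progress logging, and periodic commits. A condensed standalone sketch of that pattern with plain JDBC follows; the class name and DataSource wiring are illustrative, while the 100/500 intervals and the 300-second statement timeout match the commit.

import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;
import javax.sql.DataSource;

// Sketch of the batched restore loop: execute parsed statements one by one,
// log progress every 100 statements, and commit every 500 so one huge
// transaction is avoided. Wiring and names are illustrative, not the commit's API.
class BatchedSqlRestoreSketch {

    void restore(DataSource dataSource, List<String> statements) throws SQLException {
        try (Connection connection = dataSource.getConnection()) {
            connection.setAutoCommit(false); // commits are issued per batch below
            int successCount = 0;
            for (String statement : statements) {
                String trimmed = statement.trim();
                if (trimmed.isEmpty()) {
                    continue;
                }
                try (PreparedStatement stmt = connection.prepareStatement(trimmed)) {
                    stmt.setQueryTimeout(300); // 5-minute timeout per statement
                    stmt.executeUpdate();
                    successCount++;
                }
                if (successCount % 100 == 0) {
                    System.err.println("Executed " + successCount + "/" + statements.size() + " statements...");
                }
                if (successCount % 500 == 0) {
                    connection.commit(); // bound the transaction size for large restores
                }
            }
            connection.commit(); // commit any trailing partial batch
        }
    }
}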

View File

@@ -4,6 +4,11 @@ spring:
username: ${SPRING_DATASOURCE_USERNAME:storycove}
password: ${SPRING_DATASOURCE_PASSWORD:password}
driver-class-name: org.postgresql.Driver
hikari:
connection-timeout: 60000 # 60 seconds
idle-timeout: 300000 # 5 minutes
max-lifetime: 1800000 # 30 minutes
maximum-pool-size: 20
jpa:
hibernate:
@@ -16,8 +21,8 @@ spring:
servlet:
multipart:
max-file-size: 256MB # Increased for backup restore
max-request-size: 260MB # Slightly higher to account for form data
max-file-size: 600MB # Increased for large backup restore (425MB+)
max-request-size: 610MB # Slightly higher to account for form data
jackson:
serialization:
@@ -27,6 +32,8 @@ spring:
server:
port: 8080
tomcat:
max-http-request-size: 650MB # Tomcat HTTP request size limit (separate from multipart)
storycove:
app:
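
For reference, the new Hikari pool settings map one-to-one onto HikariCP's programmatic configuration. The sketch below shows the equivalent HikariConfig calls; the JDBC URL and credentials are placeholders (matching the YAML defaults), not values introduced by this commit.

import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;

// Sketch: the spring.datasource.hikari values above expressed through
// HikariCP's programmatic API. URL and credentials are placeholders.
class HikariPoolSettingsSketch {

    HikariDataSource buildPool() {
        HikariConfig config = new HikariConfig();
        config.setJdbcUrl("jdbc:postgresql://localhost:5432/storycove");
        config.setUsername("storycove");
        config.setPassword("password");
        config.setConnectionTimeout(60_000);  // connection-timeout: 60 seconds
        config.setIdleTimeout(300_000);       // idle-timeout: 5 minutes
        config.setMaxLifetime(1_800_000);     // max-lifetime: 30 minutes
        config.setMaximumPoolSize(20);        // maximum-pool-size: 20
        return new HikariDataSource(config);
    }
}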