18 Commits

Author SHA1 Message Date
Stefan Hardegger
77aec8a849 PDF & ZIP IMPORT 2025-12-05 10:21:03 +01:00
Stefan Hardegger
b1b5bbbccd Fix epub import 2025-11-22 14:29:15 +01:00
Stefan Hardegger
75768855e2 show reading progress in author page. Allow deletion of tags, even if assigned to story. 2025-10-31 09:54:04 +01:00
Stefan Hardegger
7a4dd567dc editing a story goes to detail view 2025-10-31 09:13:25 +01:00
Stefan Hardegger
715fb4e48a Display and correct calculation of reading progress of a story 2025-10-31 08:07:12 +01:00
Stefan Hardegger
0e1ed7c92e show story progress and reset last read when resetting progress. 2025-10-30 13:44:54 +01:00
Stefan Hardegger
a3bc83db8a file size limits, keep active filters in session 2025-10-30 13:11:40 +01:00
Stefan Hardegger
924ae12b5b statistics 2025-10-21 10:53:33 +02:00
Stefan Hardegger
16983fd871 Merge branch 'main' into statistics 2025-10-21 07:58:25 +02:00
Stefan Hardegger
ff49589f32 Automatic backup 2025-10-20 14:51:27 +02:00
Stefan Hardegger
4abb442c50 fix async 2025-10-20 14:34:26 +02:00
Stefan Hardegger
1c004eb7d6 fix backup async 2025-10-20 14:25:12 +02:00
Stefan Hardegger
32544d4f4a different approach to migration 2025-10-20 14:13:45 +02:00
Stefan Hardegger
1ee9af8f28 deployment fix? 2025-10-20 12:55:56 +02:00
Stefan Hardegger
70599083b8 db migration 2025-10-20 12:43:58 +02:00
Stefan Hardegger
6a38189ef0 fix images 2025-10-20 12:30:28 +02:00
Stefan Hardegger
c9d58173f3 improved backup creation 2025-10-20 09:23:34 +02:00
Stefan Hardegger
3dd2ff50d8 Fix for memory issue during backup 2025-10-20 08:58:09 +02:00
64 changed files with 6820 additions and 234 deletions

45
apply_migration_production.sh Executable file
View File

@@ -0,0 +1,45 @@
#!/bin/bash
# Run this script on your production server to apply the backup_jobs table migration
# to all library databases.
#
# The migration is idempotent (CREATE ... IF NOT EXISTS throughout), so it is
# safe to re-run.
#
# Note: only -e/-u strict mode is enabled; pipefail is deliberately omitted
# because the verify pipeline below relies on grep's exit status, not psql's.
set -eu

# Single source of truth for the library databases (the original duplicated
# this list in the apply loop and the verify loop).
DATABASES=(storycove storycove_afterdark storycove_clas storycove_secret)

echo "Applying backup_jobs table migration to all databases..."
echo ""

# Apply the migration to each database. Under 'set -e' a failing
# docker-compose/psql invocation aborts the script immediately.
for DB in "${DATABASES[@]}"; do
  echo "Applying to $DB..."
  docker-compose exec -T postgres psql -U storycove -d "$DB" <<'SQL'
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
SQL
  echo "✓ Done with $DB"
  echo ""
done

echo "Migration complete! Verifying..."
echo ""

# Verify the table exists in every database.
for DB in "${DATABASES[@]}"; do
  echo "Checking $DB:"
  docker-compose exec -T postgres psql -U storycove -d "$DB" -c "\d backup_jobs" 2>&1 | grep -E "Table|does not exist" || echo " ✓ Table exists"
  echo ""
done

View File

@@ -1,11 +1,11 @@
FROM openjdk:17-jdk-slim
FROM eclipse-temurin:17-jdk-jammy
WORKDIR /app
# Install Maven and PostgreSQL 15 client tools
RUN apt-get update && apt-get install -y wget ca-certificates gnupg maven && \
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt/ bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg && \
echo "deb http://apt.postgresql.org/pub/repos/apt/ jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y postgresql-client-15 && \
rm -rf /var/lib/apt/lists/*

View File

@@ -0,0 +1,54 @@
#!/bin/bash
# Script to apply the backup_jobs table migration to all library databases.
# This should be run from the backend directory (it expects
# create_backup_jobs_table.sql in the current working directory).
#
# Note: pipefail is deliberately not enabled; the existence checks below rely
# on grep's exit status, and grep -q may close the pipe before psql finishes.
set -eu

# Use full docker path so the script also works with a restricted PATH (cron).
DOCKER="/usr/local/bin/docker"

echo "Applying backup_jobs table migration..."

# Get database connection details from environment or use defaults.
# NOTE(review): only DB_USER is actually used below; host/port/password are
# kept for compatibility with existing invocations but are currently unused.
DB_HOST="${POSTGRES_HOST:-postgres}"
DB_PORT="${POSTGRES_PORT:-5432}"
DB_USER="${POSTGRES_USER:-storycove}"
DB_PASSWORD="${POSTGRES_PASSWORD:-password}"

# List of databases to update
DATABASES=("storycove" "storycove_afterdark")

for DB_NAME in "${DATABASES[@]}"; do
  echo ""
  echo "Applying migration to database: $DB_NAME"

  # Check if the database exists before trying to migrate it.
  if "$DOCKER" exec storycove-postgres-1 psql -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
    echo "Database $DB_NAME exists, applying migration..."

    # Apply the migration. The explicit if/else replaces the original
    # 'cmd; if [ $? -eq 0 ]' pattern, which was dead code under 'set -e'
    # (the script would have exited before the status check ever ran).
    if "$DOCKER" exec -i storycove-postgres-1 psql -U "$DB_USER" -d "$DB_NAME" < create_backup_jobs_table.sql; then
      echo "✓ Migration applied successfully to $DB_NAME"
    else
      echo "✗ Failed to apply migration to $DB_NAME"
      exit 1
    fi
  else
    echo "⚠ Database $DB_NAME does not exist, skipping..."
  fi
done

echo ""
echo "Migration complete!"
echo ""
echo "Verifying table creation..."

for DB_NAME in "${DATABASES[@]}"; do
  if "$DOCKER" exec storycove-postgres-1 psql -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
    echo ""
    echo "Checking $DB_NAME:"
    "$DOCKER" exec storycove-postgres-1 psql -U "$DB_USER" -d "$DB_NAME" -c "\d backup_jobs" 2>/dev/null || echo " Table not found in $DB_NAME"
  fi
done

View File

@@ -0,0 +1,29 @@
-- Create backup_jobs table for async backup job tracking.
-- This should be run on all library databases (default and afterdark).
-- Idempotent: uses IF NOT EXISTS throughout, so re-running is safe.
-- The CHECK constraints on type/status mirror the string values used by the
-- Java BackupJob entity (e.g. BackupJob.BackupType.COMPLETE).
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
-- file_path/file_size_bytes are populated once the backup artifact exists.
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
-- Create index on library_id for faster lookups
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
-- Create index on status for cleanup queries
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
-- Create index on expires_at for cleanup queries
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
-- Create index on created_at for ordering (newest first)
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);

View File

@@ -117,7 +117,12 @@
<artifactId>epublib-core</artifactId>
<version>3.1</version>
</dependency>
<dependency>
<groupId>org.apache.pdfbox</groupId>
<artifactId>pdfbox</artifactId>
<version>3.0.3</version>
</dependency>
<!-- Test dependencies -->
<dependency>
<groupId>org.springframework.boot</groupId>

View File

@@ -0,0 +1,111 @@
package com.storycove.config;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;

import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;

/**
 * Runs database migrations on application startup.
 * This ensures all library databases have the required schema,
 * particularly for tables like backup_jobs that were added after initial deployment.
 *
 * Migrations are idempotent (CREATE ... IF NOT EXISTS), and a missing
 * database is skipped rather than failing startup.
 */
@Component
@Order(1) // Run early in startup sequence
public class DatabaseMigrationRunner implements CommandLineRunner {

    private static final Logger logger = LoggerFactory.getLogger(DatabaseMigrationRunner.class);

    @Autowired
    private DataSource dataSource;

    @Value("${spring.datasource.username}")
    private String dbUsername;

    @Value("${spring.datasource.password}")
    private String dbPassword;

    // List of all library databases that need migrations
    private static final List<String> LIBRARY_DATABASES = Arrays.asList(
            "storycove", // default database
            "storycove_afterdark",
            "storycove_clas",
            "storycove_secret"
    );

    // SQL for backup_jobs table migration (idempotent: IF NOT EXISTS throughout)
    private static final String BACKUP_JOBS_MIGRATION = """
            CREATE TABLE IF NOT EXISTS backup_jobs (
                id UUID PRIMARY KEY,
                library_id VARCHAR(255) NOT NULL,
                type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
                status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
                file_path VARCHAR(1000),
                file_size_bytes BIGINT,
                progress_percent INTEGER,
                error_message VARCHAR(1000),
                created_at TIMESTAMP NOT NULL,
                started_at TIMESTAMP,
                completed_at TIMESTAMP,
                expires_at TIMESTAMP
            );
            CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
            CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
            CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
            CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
            """;

    /**
     * Applies the known migrations to every library database. Failures are
     * logged but never abort startup; a "does not exist" error merely means
     * the library database has not been created yet.
     */
    @Override
    public void run(String... args) throws Exception {
        logger.info("🗄️ Starting database migrations...");
        for (String database : LIBRARY_DATABASES) {
            try {
                applyMigrations(database);
                logger.info("✅ Successfully applied migrations to database: {}", database);
            } catch (Exception e) {
                // Log error but don't fail startup if database doesn't exist yet
                if (e.getMessage() != null && e.getMessage().contains("does not exist")) {
                    logger.warn("⚠️ Database {} does not exist yet, skipping migrations", database);
                } else {
                    logger.error("❌ Failed to apply migrations to database: {}", database, e);
                    // Don't throw - allow application to start even if some migrations fail
                }
            }
        }
        logger.info("✅ Database migrations completed");
    }

    /**
     * Applies the backup_jobs migration to a single database by connecting
     * directly (bypassing SmartRoutingDataSource, which routes by library).
     *
     * @param database name of the target database
     * @throws Exception if the connection or the DDL execution fails
     */
    private void applyMigrations(String database) throws Exception {
        // BUGFIX: the original chained dataSource.getConnection().getMetaData()
        // without ever closing the connection, leaking one pooled connection
        // per database on every startup. Use try-with-resources instead.
        String originalUrl;
        try (Connection metadataConn = dataSource.getConnection()) {
            originalUrl = metadataConn.getMetaData().getURL();
        }

        // Rewrite the JDBC URL to point at the target database.
        String baseUrl = originalUrl.substring(0, originalUrl.lastIndexOf('/'));
        String targetUrl = baseUrl + "/" + database;

        // Connect directly to target database using credentials from application properties
        try (Connection conn = DriverManager.getConnection(targetUrl, dbUsername, dbPassword)) {
            try (Statement stmt = conn.createStatement()) {
                stmt.execute(BACKUP_JOBS_MIGRATION);
            }
            logger.debug("Applied backup_jobs migration to {}", database);
        }
    }
}

View File

@@ -1,6 +1,8 @@
package com.storycove.controller;
import com.storycove.service.AsyncBackupService;
import com.storycove.service.DatabaseManagementService;
import com.storycove.service.LibraryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
@@ -12,6 +14,7 @@ import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
@RestController
@@ -21,6 +24,12 @@ public class DatabaseController {
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private AsyncBackupService asyncBackupService;
@Autowired
private LibraryService libraryService;
@PostMapping("/backup")
public ResponseEntity<Resource> backupDatabase() {
try {
@@ -83,19 +92,141 @@ public class DatabaseController {
}
@PostMapping("/backup-complete")
public ResponseEntity<Resource> backupComplete() {
public ResponseEntity<Map<String, Object>> backupCompleteAsync() {
try {
Resource backup = databaseManagementService.createCompleteBackup();
String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String filename = "storycove_complete_backup_" + timestamp + ".zip";
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No library selected"));
}
// Start backup job asynchronously
com.storycove.entity.BackupJob job = asyncBackupService.startBackupJob(
libraryId,
com.storycove.entity.BackupJob.BackupType.COMPLETE
);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Backup started",
"jobId", job.getId().toString(),
"status", job.getStatus().toString()
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to start backup: " + e.getMessage()));
}
}
/**
 * Poll the state of an async backup job.
 * Responds 400 for a malformed UUID, 404 for an unknown job, otherwise 200
 * with status/progress fields. Nullable job fields (file size, completion
 * time, error message) are mapped to 0 / "" because Map.of rejects nulls.
 */
@GetMapping("/backup-status/{jobId}")
public ResponseEntity<Map<String, Object>> getBackupStatus(@PathVariable String jobId) {
try {
// UUID.fromString throws IllegalArgumentException on bad input -> 400 below.
java.util.UUID uuid = java.util.UUID.fromString(jobId);
java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
if (jobOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
com.storycove.entity.BackupJob job = jobOpt.get();
return ResponseEntity.ok(Map.of(
"success", true,
"jobId", job.getId().toString(),
"status", job.getStatus().toString(),
"progress", job.getProgressPercent(),
"fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0,
"createdAt", job.getCreatedAt().toString(),
"completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "",
"errorMessage", job.getErrorMessage() != null ? job.getErrorMessage() : ""
));
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid job ID"));
}
}
@GetMapping("/backup-download/{jobId}")
public ResponseEntity<Resource> downloadBackup(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
Resource backup = asyncBackupService.getBackupFile(uuid);
java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
if (jobOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
com.storycove.entity.BackupJob job = jobOpt.get();
String timestamp = job.getCreatedAt().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String extension = job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE ? "zip" : "sql";
String filename = "storycove_backup_" + timestamp + "." + extension;
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.header(HttpHeaders.CONTENT_TYPE, "application/zip")
.header(HttpHeaders.CONTENT_TYPE,
job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE
? "application/zip"
: "application/sql")
.body(backup);
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest().build();
} catch (Exception e) {
throw new RuntimeException("Failed to create complete backup: " + e.getMessage(), e);
throw new RuntimeException("Failed to download backup: " + e.getMessage(), e);
}
}
/**
 * List all backup jobs for the currently selected library.
 * Responds 400 when no library is selected, otherwise 200 with a
 * "backups" array of job summaries (id, type, status, progress, size, times).
 */
@GetMapping("/backup-list")
public ResponseEntity<Map<String, Object>> listBackups() {
try {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No library selected"));
}
List<com.storycove.entity.BackupJob> jobs = asyncBackupService.listBackupJobs(libraryId);
// HashMap (not Map.of) so nullable fields can be defaulted incrementally.
List<Map<String, Object>> jobsList = jobs.stream()
.map(job -> {
Map<String, Object> jobMap = new java.util.HashMap<>();
jobMap.put("jobId", job.getId().toString());
jobMap.put("type", job.getType().toString());
jobMap.put("status", job.getStatus().toString());
jobMap.put("progress", job.getProgressPercent());
jobMap.put("fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0L);
jobMap.put("createdAt", job.getCreatedAt().toString());
jobMap.put("completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "");
return jobMap;
})
.collect(java.util.stream.Collectors.toList());
return ResponseEntity.ok(Map.of(
"success", true,
"backups", jobsList
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to list backups: " + e.getMessage()));
}
}
/**
 * Delete a backup job (and, presumably, its artifact — handled by
 * AsyncBackupService; confirm there).
 * Responds 400 for a malformed UUID, 500 when deletion fails, 200 otherwise.
 */
@DeleteMapping("/backup/{jobId}")
public ResponseEntity<Map<String, Object>> deleteBackup(@PathVariable String jobId) {
try {
// UUID.fromString throws IllegalArgumentException on bad input -> 400 below.
java.util.UUID uuid = java.util.UUID.fromString(jobId);
asyncBackupService.deleteBackupJob(uuid);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Backup deleted successfully"
));
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid job ID"));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to delete backup: " + e.getMessage()));
}
}

View File

@@ -42,6 +42,132 @@ public class LibraryStatisticsController {
}
}
/**
 * Get top tags statistics.
 *
 * @param libraryId library identifier from the path; unknown ids yield 404
 * @param limit     maximum number of tags to return (defaults to 20)
 * @return 200 with the statistics payload, 404 for an unknown library,
 *         500 with an ErrorResponse when retrieval fails
 */
@GetMapping("/top-tags")
public ResponseEntity<?> getTopTagsStatistics(
@PathVariable String libraryId,
@RequestParam(defaultValue = "20") int limit) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getTopTagsStatistics(libraryId, limit);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get top tags statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
 * Get top authors statistics.
 *
 * @param libraryId library identifier from the path; unknown ids yield 404
 * @param limit     maximum number of authors to return (defaults to 10)
 * @return 200 with the statistics payload, 404 for an unknown library,
 *         500 with an ErrorResponse when retrieval fails
 */
@GetMapping("/top-authors")
public ResponseEntity<?> getTopAuthorsStatistics(
@PathVariable String libraryId,
@RequestParam(defaultValue = "10") int limit) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getTopAuthorsStatistics(libraryId, limit);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get top authors statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
 * Get rating statistics.
 *
 * @param libraryId library identifier from the path; unknown ids yield 404
 * @return 200 with the statistics payload, 404 for an unknown library,
 *         500 with an ErrorResponse when retrieval fails
 */
@GetMapping("/ratings")
public ResponseEntity<?> getRatingStatistics(@PathVariable String libraryId) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getRatingStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get rating statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
 * Get source domain statistics.
 *
 * @param libraryId library identifier from the path; unknown ids yield 404
 * @param limit     maximum number of domains to return (defaults to 10)
 * @return 200 with the statistics payload, 404 for an unknown library,
 *         500 with an ErrorResponse when retrieval fails
 */
@GetMapping("/source-domains")
public ResponseEntity<?> getSourceDomainStatistics(
@PathVariable String libraryId,
@RequestParam(defaultValue = "10") int limit) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getSourceDomainStatistics(libraryId, limit);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get source domain statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
 * Get reading progress statistics.
 *
 * @param libraryId library identifier from the path; unknown ids yield 404
 * @return 200 with the statistics payload, 404 for an unknown library,
 *         500 with an ErrorResponse when retrieval fails
 */
@GetMapping("/reading-progress")
public ResponseEntity<?> getReadingProgressStatistics(@PathVariable String libraryId) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getReadingProgressStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get reading progress statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
 * Get reading activity statistics (last week).
 *
 * @param libraryId library identifier from the path; unknown ids yield 404
 * @return 200 with the statistics payload, 404 for an unknown library,
 *         500 with an ErrorResponse when retrieval fails
 */
@GetMapping("/reading-activity")
public ResponseEntity<?> getReadingActivityStatistics(@PathVariable String libraryId) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getReadingActivityStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get reading activity statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
// Error response DTO
private static class ErrorResponse {
private String error;

View File

@@ -44,12 +44,14 @@ public class StoryController {
private final ReadingTimeService readingTimeService;
private final EPUBImportService epubImportService;
private final EPUBExportService epubExportService;
private final PDFImportService pdfImportService;
private final ZIPImportService zipImportService;
private final AsyncImageProcessingService asyncImageProcessingService;
private final ImageProcessingProgressService progressService;
public StoryController(StoryService storyService,
public StoryController(StoryService storyService,
AuthorService authorService,
SeriesService seriesService,
SeriesService seriesService,
HtmlSanitizationService sanitizationService,
ImageService imageService,
CollectionService collectionService,
@@ -57,6 +59,8 @@ public class StoryController {
ReadingTimeService readingTimeService,
EPUBImportService epubImportService,
EPUBExportService epubExportService,
PDFImportService pdfImportService,
ZIPImportService zipImportService,
AsyncImageProcessingService asyncImageProcessingService,
ImageProcessingProgressService progressService) {
this.storyService = storyService;
@@ -69,6 +73,8 @@ public class StoryController {
this.readingTimeService = readingTimeService;
this.epubImportService = epubImportService;
this.epubExportService = epubExportService;
this.pdfImportService = pdfImportService;
this.zipImportService = zipImportService;
this.asyncImageProcessingService = asyncImageProcessingService;
this.progressService = progressService;
}
@@ -591,10 +597,11 @@ public class StoryController {
dto.setVolume(story.getVolume());
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setReadingProgressPercentage(calculateReadingProgressPercentage(story));
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
@@ -613,7 +620,27 @@ public class StoryController {
return dto;
}
/**
 * Derive a 0-100 reading-progress percentage from the stored character
 * offset (readingPosition) relative to the HTML content length.
 * Returns 0 when there is no recorded position or no content to measure.
 */
private Integer calculateReadingProgressPercentage(Story story) {
if (story.getReadingPosition() == null || story.getReadingPosition() == 0) {
return 0;
}
// ALWAYS use contentHtml for consistency (frontend uses contentHtml for position tracking)
int totalLength = 0;
if (story.getContentHtml() != null && !story.getContentHtml().isEmpty()) {
totalLength = story.getContentHtml().length();
}
if (totalLength == 0) {
return 0;
}
// Calculate percentage and round to nearest integer; clamp at 100 in case
// the stored position exceeds the current content length.
int percentage = Math.round((float) story.getReadingPosition() * 100 / totalLength);
return Math.min(100, percentage);
}
private StoryReadingDto convertToReadingDto(Story story) {
StoryReadingDto dto = new StoryReadingDto();
dto.setId(story.getId());
@@ -628,10 +655,11 @@ public class StoryController {
dto.setVolume(story.getVolume());
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setReadingProgressPercentage(calculateReadingProgressPercentage(story));
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
@@ -669,8 +697,9 @@ public class StoryController {
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setReadingProgressPercentage(calculateReadingProgressPercentage(story));
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
@@ -884,26 +913,147 @@ public class StoryController {
/**
 * Validate an uploaded EPUB without importing it.
 * Returns a map with "valid" (true when no errors), the list of validation
 * "errors", and the upload's original "filename" and "size".
 */
@PostMapping("/epub/validate")
public ResponseEntity<Map<String, Object>> validateEPUBFile(@RequestParam("file") MultipartFile file) {
logger.info("Validating EPUB file: {}", file.getOriginalFilename());
try {
List<String> errors = epubImportService.validateEPUBFile(file);
Map<String, Object> response = Map.of(
"valid", errors.isEmpty(),
"errors", errors,
"filename", file.getOriginalFilename(),
"size", file.getSize()
);
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Error validating EPUB file: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to validate EPUB file"));
}
}
/**
 * PDF Import endpoint: import a single uploaded PDF as a story.
 * Author/series may be referenced by id or by name; with the
 * createMissing* flags (default true) missing ones are created —
 * resolution happens inside PDFImportService.
 * Returns 200 with the import result, 400 when the service reports a
 * failure, or 500 on unexpected errors.
 */
@PostMapping("/pdf/import")
public ResponseEntity<FileImportResponse> importPDF(
@RequestParam("file") MultipartFile file,
@RequestParam(required = false) UUID authorId,
@RequestParam(required = false) String authorName,
@RequestParam(required = false) UUID seriesId,
@RequestParam(required = false) String seriesName,
@RequestParam(required = false) Integer seriesVolume,
@RequestParam(required = false) List<String> tags,
@RequestParam(defaultValue = "true") Boolean createMissingAuthor,
@RequestParam(defaultValue = "true") Boolean createMissingSeries,
@RequestParam(defaultValue = "true") Boolean extractImages) {
logger.info("Importing PDF file: {}", file.getOriginalFilename());
// Package the flat request parameters into the service-layer DTO.
PDFImportRequest request = new PDFImportRequest();
request.setPdfFile(file);
request.setAuthorId(authorId);
request.setAuthorName(authorName);
request.setSeriesId(seriesId);
request.setSeriesName(seriesName);
request.setSeriesVolume(seriesVolume);
request.setTags(tags);
request.setCreateMissingAuthor(createMissingAuthor);
request.setCreateMissingSeries(createMissingSeries);
request.setExtractImages(extractImages);
try {
FileImportResponse response = pdfImportService.importPDF(request);
if (response.isSuccess()) {
logger.info("Successfully imported PDF: {} (Story ID: {})",
response.getStoryTitle(), response.getStoryId());
return ResponseEntity.ok(response);
} else {
logger.warn("PDF import failed: {}", response.getMessage());
return ResponseEntity.badRequest().body(response);
}
} catch (Exception e) {
logger.error("Error importing PDF: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(FileImportResponse.error("Internal server error: " + e.getMessage(), file.getOriginalFilename()));
}
}
/**
 * Validate an uploaded PDF without importing it.
 * Mirrors the EPUB validation endpoint: returns "valid", "errors",
 * "filename" and "size".
 */
@PostMapping("/pdf/validate")
public ResponseEntity<Map<String, Object>> validatePDFFile(@RequestParam("file") MultipartFile file) {
logger.info("Validating PDF file: {}", file.getOriginalFilename());
try {
List<String> errors = pdfImportService.validatePDFFile(file);
Map<String, Object> response = Map.of(
"valid", errors.isEmpty(),
"errors", errors,
"filename", file.getOriginalFilename(),
"size", file.getSize()
);
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Error validating PDF file: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to validate PDF file"));
}
}
/**
 * ZIP Analysis endpoint - Step 1: Upload and analyze ZIP contents.
 * The actual import happens afterwards via /zip/import, which references
 * the analyzed archive by session id (ZIPImportRequest.getZipSessionId()).
 */
@PostMapping("/zip/analyze")
public ResponseEntity<ZIPAnalysisResponse> analyzeZIPFile(@RequestParam("file") MultipartFile file) {
logger.info("Analyzing ZIP file: {}", file.getOriginalFilename());
try {
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(file);
if (response.isSuccess()) {
logger.info("Successfully analyzed ZIP file: {} ({} files found)",
file.getOriginalFilename(), response.getTotalFiles());
return ResponseEntity.ok(response);
} else {
logger.warn("ZIP analysis failed: {}", response.getMessage());
return ResponseEntity.badRequest().body(response);
}
} catch (Exception e) {
logger.error("Error analyzing ZIP file: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(ZIPAnalysisResponse.error("Internal server error: " + e.getMessage()));
}
}
/**
 * ZIP Import endpoint - Step 2: Import selected files from an analyzed ZIP.
 * The request carries the session id produced by /zip/analyze. Returns 200
 * when the batch succeeds overall, 400 when the service reports failure,
 * 500 on unexpected errors; per-file counts are logged either way.
 */
@PostMapping("/zip/import")
public ResponseEntity<ZIPImportResponse> importFromZIP(@Valid @RequestBody ZIPImportRequest request) {
logger.info("Importing files from ZIP session: {}", request.getZipSessionId());
try {
ZIPImportResponse response = zipImportService.importFromZIP(request);
logger.info("ZIP import completed: {} total, {} successful, {} failed",
response.getTotalFiles(), response.getSuccessfulImports(), response.getFailedImports());
if (response.isSuccess()) {
return ResponseEntity.ok(response);
} else {
return ResponseEntity.badRequest().body(response);
}
} catch (Exception e) {
logger.error("Error importing from ZIP: {}", e.getMessage(), e);
ZIPImportResponse errorResponse = new ZIPImportResponse();
errorResponse.setSuccess(false);
errorResponse.setMessage("Internal server error: " + e.getMessage());
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(errorResponse);
}
}
// Request DTOs
public static class CreateStoryRequest {
private String title;

View File

@@ -0,0 +1,132 @@
package com.storycove.dto;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
/**
 * Result payload returned by the file-based import endpoints (EPUB, PDF).
 * Carries the overall outcome flag, the created story's id/title, and any
 * warnings or errors collected while processing the upload.
 */
public class FileImportResponse {

    private boolean success;
    private String message;
    private UUID storyId;
    private String storyTitle;
    private String fileName;
    private String fileType; // "EPUB" or "PDF"
    private Integer wordCount;
    private Integer extractedImages;
    private List<String> warnings;
    private List<String> errors;

    public FileImportResponse() {
        warnings = new ArrayList<>();
        errors = new ArrayList<>();
    }

    public FileImportResponse(boolean success, String message) {
        this();
        this.success = success;
        this.message = message;
    }

    /** Build a successful response for a newly created story. */
    public static FileImportResponse success(UUID storyId, String storyTitle, String fileType) {
        FileImportResponse response = new FileImportResponse(true, "File imported successfully");
        response.storyId = storyId;
        response.storyTitle = storyTitle;
        response.fileType = fileType;
        return response;
    }

    /** Build a failure response carrying the offending file name. */
    public static FileImportResponse error(String message, String fileName) {
        FileImportResponse response = new FileImportResponse(false, message);
        response.fileName = fileName;
        return response;
    }

    /** Record a non-fatal issue encountered during import. */
    public void addWarning(String warning) {
        warnings.add(warning);
    }

    /** Record an error encountered during import. */
    public void addError(String error) {
        errors.add(error);
    }

    // --- accessors ------------------------------------------------------

    public boolean isSuccess() { return success; }

    public void setSuccess(boolean value) { success = value; }

    public String getMessage() { return message; }

    public void setMessage(String value) { message = value; }

    public UUID getStoryId() { return storyId; }

    public void setStoryId(UUID value) { storyId = value; }

    public String getStoryTitle() { return storyTitle; }

    public void setStoryTitle(String value) { storyTitle = value; }

    public String getFileName() { return fileName; }

    public void setFileName(String value) { fileName = value; }

    public String getFileType() { return fileType; }

    public void setFileType(String value) { fileType = value; }

    public Integer getWordCount() { return wordCount; }

    public void setWordCount(Integer value) { wordCount = value; }

    public Integer getExtractedImages() { return extractedImages; }

    public void setExtractedImages(Integer value) { extractedImages = value; }

    public List<String> getWarnings() { return warnings; }

    public void setWarnings(List<String> value) { warnings = value; }

    public List<String> getErrors() { return errors; }

    public void setErrors(List<String> value) { errors = value; }
}

View File

@@ -0,0 +1,76 @@
package com.storycove.dto;
/**
 * Descriptor for a single file discovered inside an uploaded archive,
 * including any metadata that could be extracted during analysis.
 */
public class FileInfoDto {

    private String fileName;
    private String fileType; // "EPUB" or "PDF"
    // File size — presumably bytes; TODO confirm against the producer.
    private Long fileSize;
    private String extractedTitle;
    private String extractedAuthor;
    // True when title/author could be extracted from the file itself.
    private boolean hasMetadata;
    private String error; // If file couldn't be analyzed

    public FileInfoDto() {}

    public FileInfoDto(String fileName, String fileType, Long fileSize) {
        this.fileName = fileName;
        this.fileType = fileType;
        this.fileSize = fileSize;
    }

    // --- plain getters and setters ---

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFileType() {
        return fileType;
    }

    public void setFileType(String fileType) {
        this.fileType = fileType;
    }

    public Long getFileSize() {
        return fileSize;
    }

    public void setFileSize(Long fileSize) {
        this.fileSize = fileSize;
    }

    public String getExtractedTitle() {
        return extractedTitle;
    }

    public void setExtractedTitle(String extractedTitle) {
        this.extractedTitle = extractedTitle;
    }

    public String getExtractedAuthor() {
        return extractedAuthor;
    }

    public void setExtractedAuthor(String extractedAuthor) {
        this.extractedAuthor = extractedAuthor;
    }

    public boolean isHasMetadata() {
        return hasMetadata;
    }

    public void setHasMetadata(boolean hasMetadata) {
        this.hasMetadata = hasMetadata;
    }

    public String getError() {
        return error;
    }

    public void setError(String error) {
        this.error = error;
    }
}

View File

@@ -0,0 +1,113 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
import org.springframework.web.multipart.MultipartFile;
import java.util.List;
import java.util.UUID;
/**
 * Request payload for importing a single PDF as a story.
 * Author/series may be referenced by id or by name; the create-missing flags
 * and image extraction default to true.
 */
public class PDFImportRequest {

    @NotNull(message = "PDF file is required")
    private MultipartFile pdfFile;

    // Either an existing author id or a name to resolve/create.
    private UUID authorId;
    private String authorName;

    // Either an existing series id or a name to resolve/create.
    private UUID seriesId;
    private String seriesName;
    private Integer seriesVolume;

    private List<String> tags;

    // Behavior flags — all default to enabled.
    private Boolean createMissingAuthor = true;
    private Boolean createMissingSeries = true;
    private Boolean extractImages = true;

    public PDFImportRequest() {}

    // --- plain getters and setters ---

    public MultipartFile getPdfFile() {
        return pdfFile;
    }

    public void setPdfFile(MultipartFile pdfFile) {
        this.pdfFile = pdfFile;
    }

    public UUID getAuthorId() {
        return authorId;
    }

    public void setAuthorId(UUID authorId) {
        this.authorId = authorId;
    }

    public String getAuthorName() {
        return authorName;
    }

    public void setAuthorName(String authorName) {
        this.authorName = authorName;
    }

    public UUID getSeriesId() {
        return seriesId;
    }

    public void setSeriesId(UUID seriesId) {
        this.seriesId = seriesId;
    }

    public String getSeriesName() {
        return seriesName;
    }

    public void setSeriesName(String seriesName) {
        this.seriesName = seriesName;
    }

    public Integer getSeriesVolume() {
        return seriesVolume;
    }

    public void setSeriesVolume(Integer seriesVolume) {
        this.seriesVolume = seriesVolume;
    }

    public List<String> getTags() {
        return tags;
    }

    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    public Boolean getCreateMissingAuthor() {
        return createMissingAuthor;
    }

    public void setCreateMissingAuthor(Boolean createMissingAuthor) {
        this.createMissingAuthor = createMissingAuthor;
    }

    public Boolean getCreateMissingSeries() {
        return createMissingSeries;
    }

    public void setCreateMissingSeries(Boolean createMissingSeries) {
        this.createMissingSeries = createMissingSeries;
    }

    public Boolean getExtractImages() {
        return extractImages;
    }

    public void setExtractImages(Boolean extractImages) {
        this.extractImages = extractImages;
    }
}

View File

@@ -0,0 +1,45 @@
package com.storycove.dto;
import java.util.Map;
/**
 * Aggregate rating statistics for the library: average rating, counts of
 * rated/unrated stories, and a per-rating histogram.
 */
public class RatingStatsDto {

    private double averageRating;
    private long totalRatedStories;
    private long totalUnratedStories;
    private Map<Integer, Long> ratingDistribution; // rating (1-5) -> count

    public RatingStatsDto() {
    }

    // --- plain getters and setters ---

    public double getAverageRating() {
        return averageRating;
    }

    public void setAverageRating(double averageRating) {
        this.averageRating = averageRating;
    }

    public long getTotalRatedStories() {
        return totalRatedStories;
    }

    public void setTotalRatedStories(long totalRatedStories) {
        this.totalRatedStories = totalRatedStories;
    }

    public long getTotalUnratedStories() {
        return totalUnratedStories;
    }

    public void setTotalUnratedStories(long totalUnratedStories) {
        this.totalUnratedStories = totalUnratedStories;
    }

    public Map<Integer, Long> getRatingDistribution() {
        return ratingDistribution;
    }

    public void setRatingDistribution(Map<Integer, Long> ratingDistribution) {
        this.ratingDistribution = ratingDistribution;
    }
}

View File

@@ -0,0 +1,84 @@
package com.storycove.dto;
import java.util.List;
/**
 * Reading-activity statistics for the last week, plus a day-by-day breakdown.
 */
public class ReadingActivityStatsDto {

    private long storiesReadLastWeek;
    private long wordsReadLastWeek;
    private long readingTimeMinutesLastWeek;
    private List<DailyActivityDto> dailyActivity;

    public ReadingActivityStatsDto() {
    }

    // --- plain getters and setters ---

    public long getStoriesReadLastWeek() {
        return storiesReadLastWeek;
    }

    public void setStoriesReadLastWeek(long storiesReadLastWeek) {
        this.storiesReadLastWeek = storiesReadLastWeek;
    }

    public long getWordsReadLastWeek() {
        return wordsReadLastWeek;
    }

    public void setWordsReadLastWeek(long wordsReadLastWeek) {
        this.wordsReadLastWeek = wordsReadLastWeek;
    }

    public long getReadingTimeMinutesLastWeek() {
        return readingTimeMinutesLastWeek;
    }

    public void setReadingTimeMinutesLastWeek(long readingTimeMinutesLastWeek) {
        this.readingTimeMinutesLastWeek = readingTimeMinutesLastWeek;
    }

    public List<DailyActivityDto> getDailyActivity() {
        return dailyActivity;
    }

    public void setDailyActivity(List<DailyActivityDto> dailyActivity) {
        this.dailyActivity = dailyActivity;
    }

    /** One day's worth of reading activity. */
    public static class DailyActivityDto {

        private String date; // YYYY-MM-DD format
        private long storiesRead;
        private long wordsRead;

        public DailyActivityDto() {
        }

        public DailyActivityDto(String date, long storiesRead, long wordsRead) {
            this.date = date;
            this.storiesRead = storiesRead;
            this.wordsRead = wordsRead;
        }

        public String getDate() {
            return date;
        }

        public void setDate(String date) {
            this.date = date;
        }

        public long getStoriesRead() {
            return storiesRead;
        }

        public void setStoriesRead(long storiesRead) {
            this.storiesRead = storiesRead;
        }

        public long getWordsRead() {
            return wordsRead;
        }

        public void setWordsRead(long wordsRead) {
            this.wordsRead = wordsRead;
        }
    }
}

View File

@@ -0,0 +1,61 @@
package com.storycove.dto;
/**
 * Library-wide reading progress: read/unread counts, percentage read,
 * and word totals for each side.
 */
public class ReadingProgressStatsDto {

    private long totalStories;
    private long readStories;
    private long unreadStories;
    private double percentageRead;
    private long totalWordsRead;
    private long totalWordsUnread;

    public ReadingProgressStatsDto() {
    }

    // --- plain getters and setters ---

    public long getTotalStories() {
        return totalStories;
    }

    public void setTotalStories(long totalStories) {
        this.totalStories = totalStories;
    }

    public long getReadStories() {
        return readStories;
    }

    public void setReadStories(long readStories) {
        this.readStories = readStories;
    }

    public long getUnreadStories() {
        return unreadStories;
    }

    public void setUnreadStories(long unreadStories) {
        this.unreadStories = unreadStories;
    }

    public double getPercentageRead() {
        return percentageRead;
    }

    public void setPercentageRead(double percentageRead) {
        this.percentageRead = percentageRead;
    }

    public long getTotalWordsRead() {
        return totalWordsRead;
    }

    public void setTotalWordsRead(long totalWordsRead) {
        this.totalWordsRead = totalWordsRead;
    }

    public long getTotalWordsUnread() {
        return totalWordsUnread;
    }

    public void setTotalWordsUnread(long totalWordsUnread) {
        this.totalWordsUnread = totalWordsUnread;
    }
}

View File

@@ -0,0 +1,65 @@
package com.storycove.dto;
import java.util.List;
/**
 * Statistics about where stories were sourced from: the most common domains
 * and how many stories do/don't carry a source URL.
 */
public class SourceDomainStatsDto {

    private List<DomainStatsDto> topDomains;
    private long storiesWithSource;
    private long storiesWithoutSource;

    public SourceDomainStatsDto() {
    }

    // --- plain getters and setters ---

    public List<DomainStatsDto> getTopDomains() {
        return topDomains;
    }

    public void setTopDomains(List<DomainStatsDto> topDomains) {
        this.topDomains = topDomains;
    }

    public long getStoriesWithSource() {
        return storiesWithSource;
    }

    public void setStoriesWithSource(long storiesWithSource) {
        this.storiesWithSource = storiesWithSource;
    }

    public long getStoriesWithoutSource() {
        return storiesWithoutSource;
    }

    public void setStoriesWithoutSource(long storiesWithoutSource) {
        this.storiesWithoutSource = storiesWithoutSource;
    }

    /** Story count for a single source domain. */
    public static class DomainStatsDto {

        private String domain;
        private long storyCount;

        public DomainStatsDto() {
        }

        public DomainStatsDto(String domain, long storyCount) {
            this.domain = domain;
            this.storyCount = storyCount;
        }

        public String getDomain() {
            return domain;
        }

        public void setDomain(String domain) {
            this.domain = domain;
        }

        public long getStoryCount() {
            return storyCount;
        }

        public void setStoryCount(long storyCount) {
            this.storyCount = storyCount;
        }
    }
}

View File

@@ -31,6 +31,7 @@ public class StoryDto {
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
private LocalDateTime lastReadAt;
// Related entities as simple references
@@ -146,7 +147,15 @@ public class StoryDto {
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
/** Pre-calculated reading progress in percent (0-100); null when not computed. */
public Integer getReadingProgressPercentage() {
    return readingProgressPercentage;
}

public void setReadingProgressPercentage(Integer readingProgressPercentage) {
    this.readingProgressPercentage = readingProgressPercentage;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}

View File

@@ -25,6 +25,7 @@ public class StoryReadingDto {
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
private LocalDateTime lastReadAt;
// Related entities as simple references
@@ -135,7 +136,15 @@ public class StoryReadingDto {
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
/** Pre-calculated reading progress in percent (0-100); null when not computed. */
public Integer getReadingProgressPercentage() {
    return readingProgressPercentage;
}

public void setReadingProgressPercentage(Integer readingProgressPercentage) {
    this.readingProgressPercentage = readingProgressPercentage;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}

View File

@@ -18,6 +18,7 @@ public class StorySearchDto {
// Reading status
private Boolean isRead;
private Integer readingPosition;
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
private LocalDateTime lastReadAt;
// Author info
@@ -132,7 +133,15 @@ public class StorySearchDto {
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
/** Pre-calculated reading progress in percent (0-100); null when not computed. */
public Integer getReadingProgressPercentage() {
    return readingProgressPercentage;
}

public void setReadingProgressPercentage(Integer readingProgressPercentage) {
    this.readingProgressPercentage = readingProgressPercentage;
}
public UUID getAuthorId() {
return authorId;
}

View File

@@ -23,6 +23,7 @@ public class StorySummaryDto {
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
private LocalDateTime lastReadAt;
// Related entities as simple references
@@ -122,11 +123,19 @@ public class StorySummaryDto {
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
/** Pre-calculated reading progress in percent (0-100); null when not computed. */
public Integer getReadingProgressPercentage() {
    return readingProgressPercentage;
}

public void setReadingProgressPercentage(Integer readingProgressPercentage) {
    this.readingProgressPercentage = readingProgressPercentage;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}

View File

@@ -0,0 +1,76 @@
package com.storycove.dto;
import java.util.List;
/**
 * "Top authors" leaderboards, ranked once by story count and once by word count.
 */
public class TopAuthorsStatsDto {

    private List<AuthorStatsDto> topAuthorsByStories;
    private List<AuthorStatsDto> topAuthorsByWords;

    public TopAuthorsStatsDto() {
    }

    public List<AuthorStatsDto> getTopAuthorsByStories() {
        return topAuthorsByStories;
    }

    public void setTopAuthorsByStories(List<AuthorStatsDto> topAuthorsByStories) {
        this.topAuthorsByStories = topAuthorsByStories;
    }

    public List<AuthorStatsDto> getTopAuthorsByWords() {
        return topAuthorsByWords;
    }

    public void setTopAuthorsByWords(List<AuthorStatsDto> topAuthorsByWords) {
        this.topAuthorsByWords = topAuthorsByWords;
    }

    /** Per-author aggregate counts used in the leaderboards. */
    public static class AuthorStatsDto {

        // Author id as a string — presumably a UUID; TODO confirm producer.
        private String authorId;
        private String authorName;
        private long storyCount;
        private long totalWords;

        public AuthorStatsDto() {
        }

        public AuthorStatsDto(String authorId, String authorName, long storyCount, long totalWords) {
            this.authorId = authorId;
            this.authorName = authorName;
            this.storyCount = storyCount;
            this.totalWords = totalWords;
        }

        public String getAuthorId() {
            return authorId;
        }

        public void setAuthorId(String authorId) {
            this.authorId = authorId;
        }

        public String getAuthorName() {
            return authorName;
        }

        public void setAuthorName(String authorName) {
            this.authorName = authorName;
        }

        public long getStoryCount() {
            return storyCount;
        }

        public void setStoryCount(long storyCount) {
            this.storyCount = storyCount;
        }

        public long getTotalWords() {
            return totalWords;
        }

        public void setTotalWords(long totalWords) {
            this.totalWords = totalWords;
        }
    }
}

View File

@@ -0,0 +1,51 @@
package com.storycove.dto;
import java.util.List;
/**
 * "Top tags" statistics: the most-used tags with their story counts.
 */
public class TopTagsStatsDto {

    private List<TagStatsDto> topTags;

    public TopTagsStatsDto() {
    }

    public TopTagsStatsDto(List<TagStatsDto> topTags) {
        this.topTags = topTags;
    }

    public List<TagStatsDto> getTopTags() {
        return topTags;
    }

    public void setTopTags(List<TagStatsDto> topTags) {
        this.topTags = topTags;
    }

    /** Story count for a single tag. */
    public static class TagStatsDto {

        private String tagName;
        private long storyCount;

        public TagStatsDto() {
        }

        public TagStatsDto(String tagName, long storyCount) {
            this.tagName = tagName;
            this.storyCount = storyCount;
        }

        public String getTagName() {
            return tagName;
        }

        public void setTagName(String tagName) {
            this.tagName = tagName;
        }

        public long getStoryCount() {
            return storyCount;
        }

        public void setStoryCount(long storyCount) {
            this.storyCount = storyCount;
        }
    }
}

View File

@@ -0,0 +1,98 @@
package com.storycove.dto;
import java.util.ArrayList;
import java.util.List;
/**
 * Response for the "analyze uploaded ZIP" step: reports the files discovered,
 * how many are importable, and any non-fatal warnings.
 */
public class ZIPAnalysisResponse {

    private boolean success;
    private String message;
    private String zipFileName;
    private int totalFiles;
    private int validFiles;
    private List<FileInfoDto> files;
    private List<String> warnings;

    /** Initializes the collections so callers never see null lists. */
    public ZIPAnalysisResponse() {
        this.files = new ArrayList<>();
        this.warnings = new ArrayList<>();
    }

    /**
     * Builds a successful analysis result.
     *
     * @param zipFileName name of the analyzed archive
     * @param files       per-file analysis results; a null list is treated as empty
     * @return populated response; validFiles counts entries whose error is null
     */
    public static ZIPAnalysisResponse success(String zipFileName, List<FileInfoDto> files) {
        ZIPAnalysisResponse response = new ZIPAnalysisResponse();
        // Robustness fix: tolerate a null file list instead of NPE-ing on
        // files.size() / files.stream() below.
        List<FileInfoDto> safeFiles = (files != null) ? files : new ArrayList<>();
        response.setSuccess(true);
        response.setMessage("ZIP file analyzed successfully");
        response.setZipFileName(zipFileName);
        response.setFiles(safeFiles);
        response.setTotalFiles(safeFiles.size());
        response.setValidFiles((int) safeFiles.stream().filter(f -> f.getError() == null).count());
        return response;
    }

    /** Builds a failed analysis result carrying only an error message. */
    public static ZIPAnalysisResponse error(String message) {
        ZIPAnalysisResponse response = new ZIPAnalysisResponse();
        response.setSuccess(false);
        response.setMessage(message);
        return response;
    }

    /** Records a non-fatal issue encountered while analyzing the archive. */
    public void addWarning(String warning) {
        this.warnings.add(warning);
    }

    // --- plain getters and setters ---

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String getZipFileName() {
        return zipFileName;
    }

    public void setZipFileName(String zipFileName) {
        this.zipFileName = zipFileName;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public int getValidFiles() {
        return validFiles;
    }

    public void setValidFiles(int validFiles) {
        this.validFiles = validFiles;
    }

    public List<FileInfoDto> getFiles() {
        return files;
    }

    public void setFiles(List<FileInfoDto> files) {
        this.files = files;
    }

    public List<String> getWarnings() {
        return warnings;
    }

    public void setWarnings(List<String> warnings) {
        this.warnings = warnings;
    }
}

View File

@@ -0,0 +1,177 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
 * Request payload for importing selected files from a previously uploaded ZIP.
 * Metadata can be overridden per file (keyed by file name) or supplied as
 * defaults that apply to every selected file.
 */
public class ZIPImportRequest {

    @NotNull(message = "ZIP session ID is required")
    private String zipSessionId; // Temporary ID for the uploaded ZIP file

    @NotNull(message = "Selected files are required")
    private List<String> selectedFiles; // List of file names to import

    // Per-file metadata overrides (key = fileName)
    private Map<String, FileImportMetadata> fileMetadata;

    // Default metadata for all files (if not specified per file)
    private UUID defaultAuthorId;
    private String defaultAuthorName;
    private UUID defaultSeriesId;
    private String defaultSeriesName;
    private List<String> defaultTags;

    // Behavior flags — all default to enabled.
    private Boolean createMissingAuthor = true;
    private Boolean createMissingSeries = true;
    private Boolean extractImages = true;

    public ZIPImportRequest() {}

    /** Per-file metadata that overrides the request-level defaults. */
    public static class FileImportMetadata {

        private UUID authorId;
        private String authorName;
        private UUID seriesId;
        private String seriesName;
        private Integer seriesVolume;
        private List<String> tags;

        public UUID getAuthorId() {
            return authorId;
        }

        public void setAuthorId(UUID authorId) {
            this.authorId = authorId;
        }

        public String getAuthorName() {
            return authorName;
        }

        public void setAuthorName(String authorName) {
            this.authorName = authorName;
        }

        public UUID getSeriesId() {
            return seriesId;
        }

        public void setSeriesId(UUID seriesId) {
            this.seriesId = seriesId;
        }

        public String getSeriesName() {
            return seriesName;
        }

        public void setSeriesName(String seriesName) {
            this.seriesName = seriesName;
        }

        public Integer getSeriesVolume() {
            return seriesVolume;
        }

        public void setSeriesVolume(Integer seriesVolume) {
            this.seriesVolume = seriesVolume;
        }

        public List<String> getTags() {
            return tags;
        }

        public void setTags(List<String> tags) {
            this.tags = tags;
        }
    }

    // --- plain getters and setters ---

    public String getZipSessionId() {
        return zipSessionId;
    }

    public void setZipSessionId(String zipSessionId) {
        this.zipSessionId = zipSessionId;
    }

    public List<String> getSelectedFiles() {
        return selectedFiles;
    }

    public void setSelectedFiles(List<String> selectedFiles) {
        this.selectedFiles = selectedFiles;
    }

    public Map<String, FileImportMetadata> getFileMetadata() {
        return fileMetadata;
    }

    public void setFileMetadata(Map<String, FileImportMetadata> fileMetadata) {
        this.fileMetadata = fileMetadata;
    }

    public UUID getDefaultAuthorId() {
        return defaultAuthorId;
    }

    public void setDefaultAuthorId(UUID defaultAuthorId) {
        this.defaultAuthorId = defaultAuthorId;
    }

    public String getDefaultAuthorName() {
        return defaultAuthorName;
    }

    public void setDefaultAuthorName(String defaultAuthorName) {
        this.defaultAuthorName = defaultAuthorName;
    }

    public UUID getDefaultSeriesId() {
        return defaultSeriesId;
    }

    public void setDefaultSeriesId(UUID defaultSeriesId) {
        this.defaultSeriesId = defaultSeriesId;
    }

    public String getDefaultSeriesName() {
        return defaultSeriesName;
    }

    public void setDefaultSeriesName(String defaultSeriesName) {
        this.defaultSeriesName = defaultSeriesName;
    }

    public List<String> getDefaultTags() {
        return defaultTags;
    }

    public void setDefaultTags(List<String> defaultTags) {
        this.defaultTags = defaultTags;
    }

    public Boolean getCreateMissingAuthor() {
        return createMissingAuthor;
    }

    public void setCreateMissingAuthor(Boolean createMissingAuthor) {
        this.createMissingAuthor = createMissingAuthor;
    }

    public Boolean getCreateMissingSeries() {
        return createMissingSeries;
    }

    public void setCreateMissingSeries(Boolean createMissingSeries) {
        this.createMissingSeries = createMissingSeries;
    }

    public Boolean getExtractImages() {
        return extractImages;
    }

    public void setExtractImages(Boolean extractImages) {
        this.extractImages = extractImages;
    }
}

View File

@@ -0,0 +1,101 @@
package com.storycove.dto;
import java.util.ArrayList;
import java.util.List;
/**
 * Aggregate response for a multi-file ZIP import: per-file results plus
 * overall success/failure counts and a summary message.
 */
public class ZIPImportResponse {

    private boolean success;
    private String message;
    private int totalFiles;
    private int successfulImports;
    private int failedImports;
    private List<FileImportResponse> results;
    private List<String> warnings;

    /** Initializes the collections so callers never see null lists. */
    public ZIPImportResponse() {
        this.results = new ArrayList<>();
        this.warnings = new ArrayList<>();
    }

    /**
     * Summarizes a batch of per-file import results.
     *
     * @param results per-file outcomes; a null list is treated as empty
     * @return response with counts and an overall success flag/message;
     *         an empty batch reports "All files imported successfully"
     */
    public static ZIPImportResponse create(List<FileImportResponse> results) {
        ZIPImportResponse response = new ZIPImportResponse();
        // Robustness fix: tolerate a null result list instead of NPE-ing below.
        List<FileImportResponse> safeResults = (results != null) ? results : new ArrayList<>();
        response.setResults(safeResults);
        int total = safeResults.size();
        int succeeded = (int) safeResults.stream().filter(FileImportResponse::isSuccess).count();
        response.setTotalFiles(total);
        response.setSuccessfulImports(succeeded);
        // Single pass suffices: everything that is not a success is a failure.
        response.setFailedImports(total - succeeded);
        if (response.getFailedImports() == 0) {
            response.setSuccess(true);
            response.setMessage("All files imported successfully");
        } else if (response.getSuccessfulImports() == 0) {
            response.setSuccess(false);
            response.setMessage("All file imports failed");
        } else {
            response.setSuccess(true);
            response.setMessage("Partial success: " + response.getSuccessfulImports() + " imported, " + response.getFailedImports() + " failed");
        }
        return response;
    }

    /** Records a non-fatal issue that applies to the batch as a whole. */
    public void addWarning(String warning) {
        this.warnings.add(warning);
    }

    // --- plain getters and setters ---

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public int getSuccessfulImports() {
        return successfulImports;
    }

    public void setSuccessfulImports(int successfulImports) {
        this.successfulImports = successfulImports;
    }

    public int getFailedImports() {
        return failedImports;
    }

    public void setFailedImports(int failedImports) {
        this.failedImports = failedImports;
    }

    public List<FileImportResponse> getResults() {
        return results;
    }

    public void setResults(List<FileImportResponse> results) {
        this.results = results;
    }

    public List<String> getWarnings() {
        return warnings;
    }

    public void setWarnings(List<String> warnings) {
        this.warnings = warnings;
    }
}

View File

@@ -0,0 +1,195 @@
package com.storycove.entity;
import jakarta.persistence.*;
import java.time.LocalDateTime;
import java.util.UUID;
/**
 * Persistent record of a single backup run for a library: what kind of backup,
 * its lifecycle status, where the resulting file lives, and when it expires.
 */
@Entity
@Table(name = "backup_jobs")
public class BackupJob {

    @Id
    @GeneratedValue(strategy = GenerationType.UUID)
    private UUID id;

    // Identifier of the library this backup belongs to.
    @Column(nullable = false)
    private String libraryId;

    @Column(nullable = false)
    @Enumerated(EnumType.STRING)
    private BackupType type;

    @Column(nullable = false)
    @Enumerated(EnumType.STRING)
    private BackupStatus status;

    // Absolute path of the finished backup file; null until completion.
    @Column
    private String filePath;

    @Column
    private Long fileSizeBytes;

    // Coarse progress indicator, 0-100.
    @Column
    private Integer progressPercent;

    @Column(length = 1000)
    private String errorMessage;

    @Column(nullable = false)
    private LocalDateTime createdAt;

    @Column
    private LocalDateTime startedAt;

    @Column
    private LocalDateTime completedAt;

    @Column
    private LocalDateTime expiresAt;

    /** Stamps creation time and a 24-hour expiry just before the first INSERT. */
    @PrePersist
    protected void onCreate() {
        createdAt = LocalDateTime.now();
        // Backups expire after 24 hours
        expiresAt = LocalDateTime.now().plusDays(1);
    }

    // Enums
    public enum BackupType {
        DATABASE_ONLY,
        COMPLETE
    }

    public enum BackupStatus {
        PENDING,
        IN_PROGRESS,
        COMPLETED,
        FAILED,
        EXPIRED
    }

    // Constructors
    public BackupJob() {
    }

    public BackupJob(String libraryId, BackupType type) {
        this.libraryId = libraryId;
        this.type = type;
        this.status = BackupStatus.PENDING;
        this.progressPercent = 0;
    }

    // Getters and Setters
    public UUID getId() {
        return id;
    }

    public void setId(UUID id) {
        this.id = id;
    }

    public String getLibraryId() {
        return libraryId;
    }

    public void setLibraryId(String libraryId) {
        this.libraryId = libraryId;
    }

    public BackupType getType() {
        return type;
    }

    public void setType(BackupType type) {
        this.type = type;
    }

    public BackupStatus getStatus() {
        return status;
    }

    public void setStatus(BackupStatus status) {
        this.status = status;
    }

    public String getFilePath() {
        return filePath;
    }

    public void setFilePath(String filePath) {
        this.filePath = filePath;
    }

    public Long getFileSizeBytes() {
        return fileSizeBytes;
    }

    public void setFileSizeBytes(Long fileSizeBytes) {
        this.fileSizeBytes = fileSizeBytes;
    }

    public Integer getProgressPercent() {
        return progressPercent;
    }

    public void setProgressPercent(Integer progressPercent) {
        this.progressPercent = progressPercent;
    }

    public String getErrorMessage() {
        return errorMessage;
    }

    public void setErrorMessage(String errorMessage) {
        this.errorMessage = errorMessage;
    }

    public LocalDateTime getCreatedAt() {
        return createdAt;
    }

    public void setCreatedAt(LocalDateTime createdAt) {
        this.createdAt = createdAt;
    }

    public LocalDateTime getStartedAt() {
        return startedAt;
    }

    public void setStartedAt(LocalDateTime startedAt) {
        this.startedAt = startedAt;
    }

    public LocalDateTime getCompletedAt() {
        return completedAt;
    }

    public void setCompletedAt(LocalDateTime completedAt) {
        this.completedAt = completedAt;
    }

    public LocalDateTime getExpiresAt() {
        return expiresAt;
    }

    public void setExpiresAt(LocalDateTime expiresAt) {
        this.expiresAt = expiresAt;
    }

    // Helper methods

    /**
     * Whether the retention window has passed.
     * Fix: expiresAt is only assigned in the @PrePersist callback, so it is null
     * on a not-yet-persisted instance; treat that as "not expired" instead of
     * throwing a NullPointerException.
     */
    public boolean isExpired() {
        return expiresAt != null && LocalDateTime.now().isAfter(expiresAt);
    }

    public boolean isCompleted() {
        return status == BackupStatus.COMPLETED;
    }

    public boolean isFailed() {
        return status == BackupStatus.FAILED;
    }

    public boolean isInProgress() {
        return status == BackupStatus.IN_PROGRESS;
    }
}

View File

@@ -287,10 +287,17 @@ public class Story {
/**
 * Records a new reading position.
 * A position of 0 or null clears lastReadAt so the story drops out of
 * "last read" ordering; any real position stamps the current time.
 */
public void updateReadingProgress(Integer position) {
    this.readingPosition = position;
    boolean hasProgress = position != null && position != 0;
    this.lastReadAt = hasProgress ? LocalDateTime.now() : null;
}
/**

View File

@@ -0,0 +1,25 @@
package com.storycove.repository;
import com.storycove.entity.BackupJob;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
/**
 * Spring Data repository for {@link BackupJob} rows, with queries for
 * listing a library's jobs and handling expiry of completed backups.
 */
@Repository
public interface BackupJobRepository extends JpaRepository<BackupJob, UUID> {

    // All jobs of one library, newest first.
    List<BackupJob> findByLibraryIdOrderByCreatedAtDesc(String libraryId);

    // COMPLETED jobs whose expiry timestamp is already in the past.
    @Query("SELECT bj FROM BackupJob bj WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
    List<BackupJob> findExpiredJobs(@Param("now") LocalDateTime now);

    // Bulk-flip past-expiry COMPLETED jobs to EXPIRED; returns affected row count.
    @Modifying
    @Query("UPDATE BackupJob bj SET bj.status = 'EXPIRED' WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
    int markExpiredJobs(@Param("now") LocalDateTime now);
}

View File

@@ -86,6 +86,9 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
// Count of stories created on/after the given instant.
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since")
long countStoriesCreatedSince(@Param("since") LocalDateTime since);

// Count of stories created OR updated on/after the given instant.
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since OR s.updatedAt >= :since")
long countStoriesModifiedAfter(@Param("since") LocalDateTime since);

// Mean word count across all stories; null when there are no stories.
@Query("SELECT AVG(s.wordCount) FROM Story s")
Double findAverageWordCount();

View File

@@ -0,0 +1,125 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.Optional;
import java.util.UUID;
/**
 * Separate service for async backup execution.
 * This is needed because @Async doesn't work when called from within the same class.
 * (Spring's @Async relies on proxying, so self-invocation bypasses it.)
 */
@Service
public class AsyncBackupExecutor {

    private static final Logger logger = LoggerFactory.getLogger(AsyncBackupExecutor.class);

    // Root directory for uploads; backups are written under <uploadDir>/backups/<libraryId>.
    @Value("${storycove.upload.dir:/app/images}")
    private String uploadDir;

    @Autowired
    private BackupJobRepository backupJobRepository;

    @Autowired
    private DatabaseManagementService databaseManagementService;

    @Autowired
    private LibraryService libraryService;

    /**
     * Execute backup asynchronously.
     * This method MUST be in a separate service class for @Async to work properly.
     *
     * Lifecycle: marks the job IN_PROGRESS, produces the backup resource,
     * copies it to a permanent file, then records COMPLETED (or FAILED with the
     * exception message). Progress is persisted at coarse checkpoints
     * (0/10/80/95/100) so pollers can display it.
     *
     * NOTE(review): this runs in a REQUIRES_NEW transaction and looks the job up
     * by id immediately — if the caller's transaction that inserted the job has
     * not committed yet, the lookup can miss and the backup silently never runs.
     * Verify the caller triggers this only after commit.
     */
    @Async
    @Transactional(propagation = Propagation.REQUIRES_NEW)
    public void executeBackupAsync(UUID jobId) {
        logger.info("Async executor starting for job {}", jobId);
        Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
        if (jobOpt.isEmpty()) {
            logger.error("Backup job not found: {}", jobId);
            return;
        }
        BackupJob job = jobOpt.get();
        // Transition PENDING -> IN_PROGRESS before doing any work.
        job.setStatus(BackupJob.BackupStatus.IN_PROGRESS);
        job.setStartedAt(LocalDateTime.now());
        job.setProgressPercent(0);
        backupJobRepository.save(job);
        try {
            logger.info("Starting backup job {} for library {}", job.getId(), job.getLibraryId());
            // Switch to the correct library
            // NOTE(review): this appears to mutate process-wide library state —
            // confirm concurrent backups of different libraries are safe.
            if (!job.getLibraryId().equals(libraryService.getCurrentLibraryId())) {
                libraryService.switchToLibraryAfterAuthentication(job.getLibraryId());
            }
            // Create backup file
            Path backupDir = Paths.get(uploadDir, "backups", job.getLibraryId());
            Files.createDirectories(backupDir);
            // File name embeds job id + timestamp; ':' replaced so the name is
            // valid on filesystems that forbid colons. Extension follows type.
            String filename = String.format("backup_%s_%s.%s",
                    job.getId().toString(),
                    LocalDateTime.now().toString().replaceAll(":", "-"),
                    job.getType() == BackupJob.BackupType.COMPLETE ? "zip" : "sql");
            Path backupFile = backupDir.resolve(filename);
            job.setProgressPercent(10);
            backupJobRepository.save(job);
            // Create the backup
            Resource backupResource;
            if (job.getType() == BackupJob.BackupType.COMPLETE) {
                backupResource = databaseManagementService.createCompleteBackup();
            } else {
                backupResource = databaseManagementService.createBackup();
            }
            job.setProgressPercent(80);
            backupJobRepository.save(job);
            // Copy resource to permanent file (streamed, so large backups don't
            // need to fit in memory here).
            try (var inputStream = backupResource.getInputStream();
                 var outputStream = Files.newOutputStream(backupFile)) {
                inputStream.transferTo(outputStream);
            }
            job.setProgressPercent(95);
            backupJobRepository.save(job);
            // Set file info
            job.setFilePath(backupFile.toString());
            job.setFileSizeBytes(Files.size(backupFile));
            job.setStatus(BackupJob.BackupStatus.COMPLETED);
            job.setCompletedAt(LocalDateTime.now());
            job.setProgressPercent(100);
            logger.info("Backup job {} completed successfully. File size: {} bytes",
                    job.getId(), job.getFileSizeBytes());
        } catch (Exception e) {
            logger.error("Backup job {} failed", job.getId(), e);
            job.setStatus(BackupJob.BackupStatus.FAILED);
            job.setErrorMessage(e.getMessage());
            job.setCompletedAt(LocalDateTime.now());
        } finally {
            // Persist the terminal state (COMPLETED or FAILED) in every path.
            backupJobRepository.save(job);
        }
    }
}

View File

@@ -0,0 +1,167 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Service
public class AsyncBackupService {

    private static final Logger logger = LoggerFactory.getLogger(AsyncBackupService.class);

    @Value("${storycove.upload.dir:/app/images}")
    private String uploadDir;

    @Autowired
    private BackupJobRepository backupJobRepository;

    @Autowired
    private AsyncBackupExecutor asyncBackupExecutor;

    /**
     * Create and persist a new backup job record, then hand it off for
     * background execution. Returns immediately; callers poll
     * {@link #getJobStatus(UUID)} to track progress.
     */
    @Transactional
    public BackupJob startBackupJob(String libraryId, BackupJob.BackupType type) {
        logger.info("Creating backup job for library: {}, type: {}", libraryId, type);
        BackupJob saved = backupJobRepository.save(new BackupJob(libraryId, type));
        logger.info("Backup job created with ID: {}. Starting async execution...", saved.getId());
        // Delegated to a separate bean so the Spring @Async proxy is honoured
        // (self-invocation would bypass it).
        // NOTE(review): this fires while the surrounding transaction is still open;
        // confirm the executor tolerates reading the job id before commit.
        asyncBackupExecutor.executeBackupAsync(saved.getId());
        logger.info("Async backup execution triggered for job: {}", saved.getId());
        return saved;
    }

    /** Look up the current state of a backup job by id. */
    public Optional<BackupJob> getJobStatus(UUID jobId) {
        return backupJobRepository.findById(jobId);
    }

    /**
     * Resolve a completed job's backup archive for download.
     *
     * @throws IOException if the job is unknown, unfinished, expired,
     *                     has no file path, or its file no longer exists
     */
    public Resource getBackupFile(UUID jobId) throws IOException {
        BackupJob job = backupJobRepository.findById(jobId)
                .orElseThrow(() -> new IOException("Backup job not found"));
        if (!job.isCompleted()) {
            throw new IOException("Backup is not completed yet");
        }
        if (job.isExpired()) {
            throw new IOException("Backup has expired");
        }
        if (job.getFilePath() == null) {
            throw new IOException("Backup file path not set");
        }
        Path archive = Paths.get(job.getFilePath());
        if (!Files.exists(archive)) {
            throw new IOException("Backup file not found");
        }
        return new FileSystemResource(archive);
    }

    /** All backup jobs for a library, newest first. */
    public List<BackupJob> listBackupJobs(String libraryId) {
        return backupJobRepository.findByLibraryIdOrderByCreatedAtDesc(libraryId);
    }

    /**
     * Remove expired backup jobs and their files.
     * Scheduled daily at 2 AM.
     */
    @Scheduled(cron = "0 0 2 * * ?")
    @Transactional
    public void cleanupExpiredBackups() {
        logger.info("Starting cleanup of expired backups");
        LocalDateTime now = LocalDateTime.now();
        // Flag jobs whose expiry time has passed, then delete them and their files.
        int markedCount = backupJobRepository.markExpiredJobs(now);
        logger.info("Marked {} jobs as expired", markedCount);
        List<BackupJob> expiredJobs = backupJobRepository.findExpiredJobs(now);
        for (BackupJob expired : expiredJobs) {
            deleteExpiredFileQuietly(expired.getFilePath());
            backupJobRepository.delete(expired);
        }
        logger.info("Cleanup completed. Deleted {} expired backups", expiredJobs.size());
    }

    /** Best-effort removal of an expired job's archive; failures are logged, never thrown. */
    private void deleteExpiredFileQuietly(String filePath) {
        if (filePath == null) {
            return;
        }
        try {
            Path file = Paths.get(filePath);
            if (Files.exists(file)) {
                Files.delete(file);
                logger.info("Deleted expired backup file: {}", file);
            }
        } catch (IOException e) {
            logger.warn("Failed to delete expired backup file: {}", filePath, e);
        }
    }

    /**
     * Delete a specific backup job and its archive file.
     *
     * @throws IOException if the job is unknown or the file cannot be deleted
     */
    @Transactional
    public void deleteBackupJob(UUID jobId) throws IOException {
        BackupJob job = backupJobRepository.findById(jobId)
                .orElseThrow(() -> new IOException("Backup job not found"));
        if (job.getFilePath() != null) {
            Path file = Paths.get(job.getFilePath());
            if (Files.exists(file)) {
                Files.delete(file);
                logger.info("Deleted backup file: {}", file);
            }
        }
        backupJobRepository.delete(job);
        logger.info("Deleted backup job: {}", jobId);
    }
}

View File

@@ -0,0 +1,262 @@
package com.storycove.service;
import com.storycove.repository.StoryRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Service for automatic daily backups.
* Runs at 4 AM daily and creates a backup if content has changed since last backup.
* Keeps maximum of 5 backups, rotating old ones out.
*/
@Service
public class AutomaticBackupService {

    private static final Logger logger = LoggerFactory.getLogger(AutomaticBackupService.class);

    /** Maximum number of rotated automatic backups kept per library. */
    private static final int MAX_BACKUPS = 5;

    /** Timestamp embedded in file names, e.g. auto_backup_2025-01-31_04-00-00.zip */
    private static final DateTimeFormatter FILENAME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");

    @Value("${storycove.automatic-backup.dir:/app/automatic-backups}")
    private String automaticBackupDir;

    @Autowired
    private StoryRepository storyRepository;

    @Autowired
    private DatabaseManagementService databaseManagementService;

    @Autowired
    private LibraryService libraryService;

    // In-memory only: resets to null on restart, so the first scheduled run after
    // a restart creates a backup whenever any stories exist at all.
    // NOTE(review): persist this timestamp if backup-on-every-restart is unwanted.
    private LocalDateTime lastBackupCheck = null;

    /**
     * Scheduled job that runs daily at 4 AM.
     * Creates a complete backup if content has changed since the last backup,
     * then rotates old backups down to {@link #MAX_BACKUPS}.
     */
    @Scheduled(cron = "0 0 4 * * ?")
    public void performAutomaticBackup() {
        logger.info("========================================");
        logger.info("Starting automatic backup check at 4 AM");
        logger.info("========================================");
        try {
            // Get current library ID (or default)
            String libraryId = libraryService.getCurrentLibraryId();
            if (libraryId == null) {
                libraryId = "default";
            }
            logger.info("Checking for content changes in library: {}", libraryId);
            // Skip entirely when nothing changed since the last backup
            if (!hasContentChanged()) {
                logger.info("No content changes detected since last backup. Skipping backup.");
                logger.info("========================================");
                return;
            }
            logger.info("Content changes detected! Creating automatic backup...");
            // One subdirectory per library
            Path backupPath = Paths.get(automaticBackupDir, libraryId);
            Files.createDirectories(backupPath);
            // Timestamped file name so backups sort chronologically
            String timestamp = LocalDateTime.now().format(FILENAME_FORMATTER);
            String filename = String.format("auto_backup_%s.zip", timestamp);
            Path backupFile = backupPath.resolve(filename);
            logger.info("Creating complete backup to: {}", backupFile);
            Resource backup = databaseManagementService.createCompleteBackup();
            // Stream the backup resource to disk
            try (var inputStream = backup.getInputStream();
                 var outputStream = Files.newOutputStream(backupFile)) {
                inputStream.transferTo(outputStream);
            }
            long fileSize = Files.size(backupFile);
            logger.info("✅ Automatic backup created successfully");
            logger.info(" File: {}", backupFile.getFileName());
            // Integer division: sizes under 1 MiB are reported as 0 MB
            logger.info(" Size: {} MB", fileSize / 1024 / 1024);
            // Rotate old backups (keep only MAX_BACKUPS)
            rotateBackups(backupPath);
            // Only advance the change marker after a successful backup
            lastBackupCheck = LocalDateTime.now();
            logger.info("========================================");
            logger.info("Automatic backup completed successfully");
            logger.info("========================================");
        } catch (Exception e) {
            logger.error("❌ Automatic backup failed", e);
            logger.info("========================================");
        }
    }

    /**
     * Check whether content has changed since the last backup.
     * First run (no previous check recorded) counts as "changed" when any
     * stories exist; on lookup errors it errs on the side of backing up.
     */
    private boolean hasContentChanged() {
        try {
            if (lastBackupCheck == null) {
                // First run - check if there are any stories at all
                long storyCount = storyRepository.count();
                logger.info("First backup check - found {} stories", storyCount);
                return storyCount > 0;
            }
            // Check for stories created or updated since last backup
            long changedCount = storyRepository.countStoriesModifiedAfter(lastBackupCheck);
            logger.info("Found {} stories modified since last backup ({})", changedCount, lastBackupCheck);
            return changedCount > 0;
        } catch (Exception e) {
            logger.error("Error checking for content changes", e);
            // On error, create backup to be safe
            return true;
        }
    }

    /**
     * Rotate backups - keep only the MAX_BACKUPS most recent files and delete
     * the rest. Deletion failures are logged but do not abort the rotation.
     */
    private void rotateBackups(Path backupPath) throws IOException {
        logger.info("Checking for old backups to rotate...");
        // Find all backup files in the directory, most recent first
        List<Path> backupFiles;
        try (Stream<Path> stream = Files.list(backupPath)) {
            backupFiles = stream
                    .filter(Files::isRegularFile)
                    .filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
                    .filter(p -> p.getFileName().toString().endsWith(".zip"))
                    // BUGFIX: the previous key extractor returned null on IOException,
                    // and Comparator.comparing throws NullPointerException on null keys
                    // during the sort. Unreadable files now sort as oldest (epoch 0).
                    .sorted(Comparator.comparingLong(AutomaticBackupService::lastModifiedMillis).reversed())
                    .collect(Collectors.toList());
        }
        logger.info("Found {} automatic backups", backupFiles.size());
        // Delete old backups if we exceed MAX_BACKUPS
        if (backupFiles.size() > MAX_BACKUPS) {
            List<Path> toDelete = backupFiles.subList(MAX_BACKUPS, backupFiles.size());
            logger.info("Deleting {} old backups to maintain maximum of {}", toDelete.size(), MAX_BACKUPS);
            for (Path oldBackup : toDelete) {
                try {
                    Files.delete(oldBackup);
                    logger.info(" Deleted old backup: {}", oldBackup.getFileName());
                } catch (IOException e) {
                    logger.warn("Failed to delete old backup: {}", oldBackup, e);
                }
            }
        } else {
            logger.info("Backup count within limit ({}), no rotation needed", MAX_BACKUPS);
        }
    }

    /**
     * Last-modified time of a file in epoch millis; 0 (treated as oldest)
     * when the attribute cannot be read. Never returns null, so it is safe
     * as a Comparator key extractor.
     */
    private static long lastModifiedMillis(Path p) {
        try {
            return Files.getLastModifiedTime(p).toMillis();
        } catch (IOException e) {
            return 0L;
        }
    }

    /**
     * Manual trigger for testing - creates backup immediately if content changed.
     */
    public void triggerManualBackup() {
        logger.info("Manual automatic backup triggered");
        performAutomaticBackup();
    }

    /**
     * List automatic backups for the current library, most recent first.
     * Returns an empty list when the library's backup directory does not exist.
     */
    public List<BackupInfo> listAutomaticBackups() throws IOException {
        String libraryId = libraryService.getCurrentLibraryId();
        if (libraryId == null) {
            libraryId = "default";
        }
        Path backupPath = Paths.get(automaticBackupDir, libraryId);
        if (!Files.exists(backupPath)) {
            return List.of();
        }
        try (Stream<Path> stream = Files.list(backupPath)) {
            return stream
                    .filter(Files::isRegularFile)
                    .filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
                    .filter(p -> p.getFileName().toString().endsWith(".zip"))
                    // BUGFIX: same null-key NPE as in rotateBackups — use the
                    // null-safe epoch-millis extractor instead.
                    .sorted(Comparator.comparingLong(AutomaticBackupService::lastModifiedMillis).reversed())
                    .map(p -> {
                        try {
                            return new BackupInfo(
                                    p.getFileName().toString(),
                                    Files.size(p),
                                    Files.getLastModifiedTime(p).toInstant().toString()
                            );
                        } catch (IOException e) {
                            // Unreadable entries are dropped by the filter below
                            return null;
                        }
                    })
                    .filter(info -> info != null)
                    .collect(Collectors.toList());
        }
    }

    /**
     * Simple immutable backup descriptor (file name, size, creation instant).
     */
    public static class BackupInfo {
        private final String filename;
        private final long sizeBytes;
        private final String createdAt;

        public BackupInfo(String filename, long sizeBytes, String createdAt) {
            this.filename = filename;
            this.sizeBytes = sizeBytes;
            this.createdAt = createdAt;
        }

        public String getFilename() {
            return filename;
        }

        public long getSizeBytes() {
            return sizeBytes;
        }

        public String getCreatedAt() {
            return createdAt;
        }
    }
}

View File

@@ -7,7 +7,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -141,26 +140,48 @@ public class DatabaseManagementService implements ApplicationContextAware {
/**
* Create a comprehensive backup including database and files in ZIP format
* Returns a streaming resource to avoid loading large backups into memory
*/
public Resource createCompleteBackup() throws SQLException, IOException {
// Create temp file with deleteOnExit as safety net
Path tempZip = Files.createTempFile("storycove-backup", ".zip");
tempZip.toFile().deleteOnExit();
try (ZipOutputStream zipOut = new ZipOutputStream(Files.newOutputStream(tempZip))) {
// 1. Add database dump
addDatabaseDumpToZip(zipOut);
// 2. Add all image files
addFilesToZip(zipOut);
// 3. Add metadata
addMetadataToZip(zipOut);
}
// Return the ZIP file as a resource
byte[] zipData = Files.readAllBytes(tempZip);
Files.deleteIfExists(tempZip);
return new ByteArrayResource(zipData);
// Return the ZIP file as a FileSystemResource for streaming
// This avoids loading the entire file into memory
return new org.springframework.core.io.FileSystemResource(tempZip.toFile()) {
@Override
public InputStream getInputStream() throws IOException {
// Wrap the input stream to delete the temp file after it's fully read
return new java.io.FilterInputStream(super.getInputStream()) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Clean up temp file after streaming is complete
try {
Files.deleteIfExists(tempZip);
} catch (IOException e) {
// Log but don't fail - deleteOnExit will handle it
System.err.println("Warning: Could not delete temp backup file: " + e.getMessage());
}
}
}
};
}
};
}
/**
@@ -289,20 +310,34 @@ public class DatabaseManagementService implements ApplicationContextAware {
System.err.println("PostgreSQL backup completed successfully");
// Read the backup file into memory
byte[] backupData = Files.readAllBytes(tempBackupFile);
return new ByteArrayResource(backupData);
// Return the backup file as a streaming resource to avoid memory issues with large databases
tempBackupFile.toFile().deleteOnExit();
return new org.springframework.core.io.FileSystemResource(tempBackupFile.toFile()) {
@Override
public InputStream getInputStream() throws IOException {
// Wrap the input stream to delete the temp file after it's fully read
return new java.io.FilterInputStream(super.getInputStream()) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Clean up temp file after streaming is complete
try {
Files.deleteIfExists(tempBackupFile);
} catch (IOException e) {
// Log but don't fail - deleteOnExit will handle it
System.err.println("Warning: Could not delete temp backup file: " + e.getMessage());
}
}
}
};
}
};
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
throw new RuntimeException("Backup process was interrupted", e);
} finally {
// Clean up temporary file
try {
Files.deleteIfExists(tempBackupFile);
} catch (IOException e) {
System.err.println("Warning: Could not delete temporary backup file: " + e.getMessage());
}
}
}
@@ -319,14 +354,24 @@ public class DatabaseManagementService implements ApplicationContextAware {
Path tempBackupFile = Files.createTempFile("storycove_restore_", ".sql");
try {
// Write backup stream to temporary file
// Write backup stream to temporary file, filtering out incompatible commands
System.err.println("Writing backup data to temporary file...");
try (InputStream input = backupStream;
OutputStream output = Files.newOutputStream(tempBackupFile)) {
byte[] buffer = new byte[8192];
int bytesRead;
while ((bytesRead = input.read(buffer)) != -1) {
output.write(buffer, 0, bytesRead);
BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8));
BufferedWriter writer = Files.newBufferedWriter(tempBackupFile, StandardCharsets.UTF_8)) {
String line;
while ((line = reader.readLine()) != null) {
// Skip DROP DATABASE and CREATE DATABASE commands - we're already connected to the DB
// Also skip database connection commands as we're already connected
if (line.trim().startsWith("DROP DATABASE") ||
line.trim().startsWith("CREATE DATABASE") ||
line.trim().startsWith("\\connect")) {
System.err.println("Skipping incompatible command: " + line.substring(0, Math.min(50, line.length())));
continue;
}
writer.write(line);
writer.newLine();
}
}

View File

@@ -62,64 +62,74 @@ public class EPUBImportService {
public EPUBImportResponse importEPUB(EPUBImportRequest request) {
try {
MultipartFile epubFile = request.getEpubFile();
if (epubFile == null || epubFile.isEmpty()) {
return EPUBImportResponse.error("EPUB file is required");
}
if (!isValidEPUBFile(epubFile)) {
return EPUBImportResponse.error("Invalid EPUB file format");
}
log.info("Parsing EPUB file: {}", epubFile.getOriginalFilename());
Book book = parseEPUBFile(epubFile);
log.info("Creating story entity from EPUB metadata");
Story story = createStoryFromEPUB(book, request);
log.info("Saving story to database: {}", story.getTitle());
Story savedStory = storyService.create(story);
log.info("Story saved successfully with ID: {}", savedStory.getId());
// Process embedded images if content contains any
String originalContent = story.getContentHtml();
if (originalContent != null && originalContent.contains("<img")) {
try {
log.info("Processing embedded images for story: {}", savedStory.getId());
ImageService.ContentImageProcessingResult imageResult =
imageService.processContentImages(originalContent, savedStory.getId());
// Update story content with processed images if changed
if (!imageResult.getProcessedContent().equals(originalContent)) {
log.info("Updating story content with processed images");
savedStory.setContentHtml(imageResult.getProcessedContent());
savedStory = storyService.update(savedStory.getId(), savedStory);
// Log the image processing results
log.debug("EPUB Import - Image processing completed for story {}. Downloaded {} images.",
log.info("EPUB Import - Image processing completed for story {}. Downloaded {} images.",
savedStory.getId(), imageResult.getDownloadedImages().size());
if (imageResult.hasWarnings()) {
log.debug("EPUB Import - Image processing warnings: {}",
log.warn("EPUB Import - Image processing warnings: {}",
String.join(", ", imageResult.getWarnings()));
}
}
} catch (Exception e) {
// Log error but don't fail the import
System.err.println("EPUB Import - Failed to process embedded images for story " +
savedStory.getId() + ": " + e.getMessage());
log.error("EPUB Import - Failed to process embedded images for story {}: {}",
savedStory.getId(), e.getMessage(), e);
}
}
log.info("Building import response for story: {}", savedStory.getId());
EPUBImportResponse response = EPUBImportResponse.success(savedStory.getId(), savedStory.getTitle());
response.setWordCount(savedStory.getWordCount());
response.setTotalChapters(book.getSpine().size());
if (request.getPreserveReadingPosition() != null && request.getPreserveReadingPosition()) {
log.info("Extracting and saving reading position");
ReadingPosition readingPosition = extractReadingPosition(book, savedStory);
if (readingPosition != null) {
ReadingPosition savedPosition = readingPositionRepository.save(readingPosition);
response.setReadingPosition(convertToDto(savedPosition));
}
}
log.info("EPUB import completed successfully for: {}", savedStory.getTitle());
return response;
} catch (Exception e) {
log.error("EPUB import failed with exception: {}", e.getMessage(), e);
return EPUBImportResponse.error("Failed to import EPUB: " + e.getMessage());
}
}
@@ -147,77 +157,119 @@ public class EPUBImportService {
private Story createStoryFromEPUB(Book book, EPUBImportRequest request) {
Metadata metadata = book.getMetadata();
log.info("Extracting EPUB metadata");
String title = extractTitle(metadata);
String authorName = extractAuthorName(metadata, request);
String description = extractDescription(metadata);
log.info("Extracting and sanitizing content from {} chapters", book.getSpine().size());
String content = extractContent(book);
Story story = new Story();
story.setTitle(title);
story.setDescription(description);
story.setContentHtml(sanitizationService.sanitize(content));
// Extract and process cover image
if (request.getExtractCover() == null || request.getExtractCover()) {
log.info("Extracting cover image");
String coverPath = extractAndSaveCoverImage(book);
if (coverPath != null) {
log.info("Cover image saved at: {}", coverPath);
story.setCoverPath(coverPath);
}
}
if (request.getAuthorId() != null) {
try {
Author author = authorService.findById(request.getAuthorId());
// Handle author assignment
try {
if (request.getAuthorId() != null) {
log.info("Looking up author by ID: {}", request.getAuthorId());
try {
Author author = authorService.findById(request.getAuthorId());
story.setAuthor(author);
log.info("Author found and assigned: {}", author.getName());
} catch (ResourceNotFoundException e) {
log.warn("Author ID {} not found", request.getAuthorId());
if (request.getCreateMissingAuthor()) {
log.info("Creating new author: {}", authorName);
Author newAuthor = createAuthor(authorName);
story.setAuthor(newAuthor);
log.info("New author created with ID: {}", newAuthor.getId());
}
}
} else if (authorName != null && request.getCreateMissingAuthor()) {
log.info("Finding or creating author: {}", authorName);
Author author = findOrCreateAuthor(authorName);
story.setAuthor(author);
} catch (ResourceNotFoundException e) {
if (request.getCreateMissingAuthor()) {
Author newAuthor = createAuthor(authorName);
story.setAuthor(newAuthor);
}
log.info("Author assigned: {} (ID: {})", author.getName(), author.getId());
}
} else if (authorName != null && request.getCreateMissingAuthor()) {
Author author = findOrCreateAuthor(authorName);
story.setAuthor(author);
} catch (Exception e) {
log.error("Error handling author assignment: {}", e.getMessage(), e);
throw e;
}
if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
try {
Series series = seriesService.findById(request.getSeriesId());
story.setSeries(series);
story.setVolume(request.getSeriesVolume());
} catch (ResourceNotFoundException e) {
if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
Series newSeries = createSeries(request.getSeriesName());
story.setSeries(newSeries);
// Handle series assignment
try {
if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
log.info("Looking up series by ID: {}", request.getSeriesId());
try {
Series series = seriesService.findById(request.getSeriesId());
story.setSeries(series);
story.setVolume(request.getSeriesVolume());
log.info("Series found and assigned: {} (volume {})", series.getName(), request.getSeriesVolume());
} catch (ResourceNotFoundException e) {
log.warn("Series ID {} not found", request.getSeriesId());
if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
log.info("Creating new series: {}", request.getSeriesName());
Series newSeries = createSeries(request.getSeriesName());
story.setSeries(newSeries);
story.setVolume(request.getSeriesVolume());
log.info("New series created with ID: {}", newSeries.getId());
}
}
}
} catch (Exception e) {
log.error("Error handling series assignment: {}", e.getMessage(), e);
throw e;
}
// Handle tags from request or extract from EPUB metadata
List<String> allTags = new ArrayList<>();
if (request.getTags() != null && !request.getTags().isEmpty()) {
allTags.addAll(request.getTags());
try {
List<String> allTags = new ArrayList<>();
if (request.getTags() != null && !request.getTags().isEmpty()) {
allTags.addAll(request.getTags());
}
// Extract subjects/keywords from EPUB metadata
List<String> epubTags = extractTags(metadata);
if (epubTags != null && !epubTags.isEmpty()) {
allTags.addAll(epubTags);
}
log.info("Processing {} tags for story", allTags.size());
// Remove duplicates and create tags
allTags.stream()
.distinct()
.forEach(tagName -> {
try {
log.debug("Finding or creating tag: {}", tagName);
Tag tag = tagService.findOrCreate(tagName.trim());
story.addTag(tag);
} catch (Exception e) {
log.error("Error creating tag '{}': {}", tagName, e.getMessage(), e);
throw e;
}
});
} catch (Exception e) {
log.error("Error handling tags: {}", e.getMessage(), e);
throw e;
}
// Extract subjects/keywords from EPUB metadata
List<String> epubTags = extractTags(metadata);
if (epubTags != null && !epubTags.isEmpty()) {
allTags.addAll(epubTags);
}
// Remove duplicates and create tags
allTags.stream()
.distinct()
.forEach(tagName -> {
Tag tag = tagService.findOrCreate(tagName.trim());
story.addTag(tag);
});
// Extract additional metadata for potential future use
extractAdditionalMetadata(metadata, story);
log.info("Story entity created successfully: {}", title);
return story;
}
@@ -244,7 +296,13 @@ public class EPUBImportService {
private String extractDescription(Metadata metadata) {
List<String> descriptions = metadata.getDescriptions();
if (descriptions != null && !descriptions.isEmpty()) {
return descriptions.get(0);
String description = descriptions.get(0);
// Truncate to 1000 characters if necessary
if (description != null && description.length() > 1000) {
log.info("Description exceeds 1000 characters ({}), truncating...", description.length());
return description.substring(0, 997) + "...";
}
return description;
}
return null;
}

View File

@@ -188,13 +188,13 @@ public class HtmlSanitizationService {
return "";
}
logger.info("Content before sanitization: "+html);
logger.debug("Sanitizing HTML content (length: {} characters)", html.length());
// Preprocess to extract images from figure tags
String preprocessed = preprocessFigureTags(html);
String saniztedHtml = Jsoup.clean(preprocessed, allowlist.preserveRelativeLinks(true));
logger.info("Content after sanitization: "+saniztedHtml);
logger.debug("Sanitization complete (output length: {} characters)", saniztedHtml.length());
return saniztedHtml;
}

View File

@@ -1,8 +1,9 @@
package com.storycove.service;
import com.storycove.config.SolrProperties;
import com.storycove.dto.LibraryOverviewStatsDto;
import com.storycove.dto.*;
import com.storycove.dto.LibraryOverviewStatsDto.StoryWordCountDto;
import com.storycove.repository.CollectionRepository;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
@@ -17,7 +18,12 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.Map;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
@Service
@ConditionalOnProperty(
@@ -39,6 +45,9 @@ public class LibraryStatisticsService {
@Autowired
private LibraryService libraryService;
@Autowired
private CollectionRepository collectionRepository;
/**
* Get overview statistics for a library
*/
@@ -133,13 +142,9 @@ public class LibraryStatisticsService {
/**
* Get total number of collections
*/
private long getTotalCollections(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
QueryResponse response = solrClient.query(properties.getCores().getCollections(), query);
return response.getResults().getNumFound();
private long getTotalCollections(String libraryId) {
// Collections are stored in the database, not indexed in Solr
return collectionRepository.countByIsArchivedFalse();
}
/**
@@ -254,4 +259,385 @@ public class LibraryStatisticsService {
long sum = 0;
double mean = 0.0;
}
/**
 * Get top tags statistics.
 *
 * Facets the stories core on the "tagNames" field and returns the
 * {@code limit} most frequent tags for the given library.
 * NOTE(review): {@code libraryId} is interpolated into the filter query
 * unescaped — fine for internal ids, but confirm it can never contain
 * Solr query syntax.
 */
public TopTagsStatsDto getTopTagsStatistics(String libraryId, int limit) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0); // facet counts only — no documents needed
query.setFacet(true);
query.addFacetField("tagNames");
query.setFacetLimit(limit);
query.setFacetSort("count"); // Sort by count (most popular first)
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField tagsFacet = response.getFacetField("tagNames");
List<TopTagsStatsDto.TagStatsDto> topTags = new ArrayList<>();
// Facet field (or its value list) can be null when the core has no matches
if (tagsFacet != null && tagsFacet.getValues() != null) {
for (FacetField.Count count : tagsFacet.getValues()) {
topTags.add(new TopTagsStatsDto.TagStatsDto(count.getName(), count.getCount()));
}
}
return new TopTagsStatsDto(topTags);
}
/**
 * Get top authors statistics.
 *
 * Combines two rankings over the stories core: authors ordered by number
 * of stories and authors ordered by total word count.
 */
public TopAuthorsStatsDto getTopAuthorsStatistics(String libraryId, int limit) throws IOException, SolrServerException {
TopAuthorsStatsDto stats = new TopAuthorsStatsDto();
// Top authors by story count
stats.setTopAuthorsByStories(getTopAuthorsByStoryCount(libraryId, limit));
// Top authors by total words
stats.setTopAuthorsByWords(getTopAuthorsByWordCount(libraryId, limit));
return stats;
}
/**
 * Rank authors by number of stories in the library.
 *
 * Uses a facet on "authorId" for the counts, then issues one extra Solr
 * query per facet bucket to resolve the author's display name and another
 * (via getAuthorTotalWords) for the word total.
 * NOTE(review): that is 2 queries per author (N+1 pattern) — acceptable for
 * small {@code limit}, but worth batching if limits grow.
 */
private List<TopAuthorsStatsDto.AuthorStatsDto> getTopAuthorsByStoryCount(String libraryId, int limit)
throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0); // only facet counts are needed
query.setFacet(true);
query.addFacetField("authorId");
query.setFacetLimit(limit);
query.setFacetSort("count");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField authorFacet = response.getFacetField("authorId");
List<TopAuthorsStatsDto.AuthorStatsDto> topAuthors = new ArrayList<>();
if (authorFacet != null && authorFacet.getValues() != null) {
for (FacetField.Count count : authorFacet.getValues()) {
String authorId = count.getName();
long storyCount = count.getCount();
// Get author name and total words
SolrQuery authorQuery = new SolrQuery("authorId:" + authorId);
authorQuery.addFilterQuery("libraryId:" + libraryId);
authorQuery.setRows(1); // one sample doc is enough to read the name
authorQuery.setFields("authorName");
QueryResponse authorResponse = solrClient.query(properties.getCores().getStories(), authorQuery);
String authorName = ""; // falls back to empty when no doc carries the name
if (!authorResponse.getResults().isEmpty()) {
authorName = (String) authorResponse.getResults().get(0).getFieldValue("authorName");
}
// Get total words for this author
long totalWords = getAuthorTotalWords(libraryId, authorId);
topAuthors.add(new TopAuthorsStatsDto.AuthorStatsDto(authorId, authorName, storyCount, totalWords));
}
}
return topAuthors;
}
/**
 * Rank authors by total word count.
 *
 * Facet buckets cannot be sorted by a stats sum here, so this fetches ALL
 * authors (facet limit -1), computes each author's word total with a
 * separate stats query, then sorts in memory and keeps the top {@code limit}.
 * NOTE(review): cost grows linearly with the number of authors (2 Solr
 * queries each) — consider the JSON facet API's nested aggregations for
 * large libraries.
 */
private List<TopAuthorsStatsDto.AuthorStatsDto> getTopAuthorsByWordCount(String libraryId, int limit)
throws IOException, SolrServerException {
// First get all unique authors
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setFacet(true);
query.addFacetField("authorId");
query.setFacetLimit(-1); // Get all authors
query.setFacetSort("count");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField authorFacet = response.getFacetField("authorId");
List<TopAuthorsStatsDto.AuthorStatsDto> allAuthors = new ArrayList<>();
if (authorFacet != null && authorFacet.getValues() != null) {
for (FacetField.Count count : authorFacet.getValues()) {
String authorId = count.getName();
long storyCount = count.getCount();
// Get author name
SolrQuery authorQuery = new SolrQuery("authorId:" + authorId);
authorQuery.addFilterQuery("libraryId:" + libraryId);
authorQuery.setRows(1); // one sample doc is enough to read the name
authorQuery.setFields("authorName");
QueryResponse authorResponse = solrClient.query(properties.getCores().getStories(), authorQuery);
String authorName = "";
if (!authorResponse.getResults().isEmpty()) {
authorName = (String) authorResponse.getResults().get(0).getFieldValue("authorName");
}
// Get total words for this author
long totalWords = getAuthorTotalWords(libraryId, authorId);
allAuthors.add(new TopAuthorsStatsDto.AuthorStatsDto(authorId, authorName, storyCount, totalWords));
}
}
// Sort by total words and return top N
return allAuthors.stream()
.sorted(Comparator.comparingLong(TopAuthorsStatsDto.AuthorStatsDto::getTotalWords).reversed())
.limit(limit)
.collect(Collectors.toList());
}
/**
 * Sum of "wordCount" across one author's stories, computed server-side via
 * Solr's stats component. Returns 0 when the author has no stories or the
 * stats response carries no "wordCount" entry.
 */
private long getAuthorTotalWords(String libraryId, String authorId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("authorId:" + authorId);
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0); // stats only — no documents needed
query.setParam(StatsParams.STATS, true);
query.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
var fieldStatsInfo = response.getFieldStatsInfo();
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
var fieldStat = fieldStatsInfo.get("wordCount");
Object sumObj = fieldStat.getSum(); // Object: may be Double or Long depending on field type
return (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
}
return 0L;
}
/**
 * Rating statistics for a library: average rating, rated/unrated counts,
 * and a per-value rating distribution.
 */
public RatingStatsDto getRatingStatistics(String libraryId) throws IOException, SolrServerException {
    RatingStatsDto stats = new RatingStatsDto();

    // Average rating over rated stories, via the Solr stats component.
    SolrQuery avgQuery = new SolrQuery("*:*");
    avgQuery.addFilterQuery("libraryId:" + libraryId);
    avgQuery.addFilterQuery("rating:[* TO *]"); // restrict to stories that carry a rating
    avgQuery.setRows(0);
    avgQuery.setParam(StatsParams.STATS, true);
    avgQuery.setParam(StatsParams.STATS_FIELD, "rating");
    QueryResponse avgResponse = solrClient.query(properties.getCores().getStories(), avgQuery);

    long ratedCount = avgResponse.getResults().getNumFound();
    var statsInfo = avgResponse.getFieldStatsInfo();
    if (statsInfo != null && statsInfo.get("rating") != null) {
        Object mean = statsInfo.get("rating").getMean();
        stats.setAverageRating(mean == null ? 0.0 : ((Number) mean).doubleValue());
    }
    stats.setTotalRatedStories(ratedCount);
    // Unrated = everything in the library minus the rated ones.
    stats.setTotalUnratedStories(getTotalStories(libraryId) - ratedCount);

    // Rating distribution via faceting on the rating field.
    SolrQuery facetQuery = new SolrQuery("*:*");
    facetQuery.addFilterQuery("libraryId:" + libraryId);
    facetQuery.addFilterQuery("rating:[* TO *]");
    facetQuery.setRows(0);
    facetQuery.setFacet(true);
    facetQuery.addFacetField("rating");
    facetQuery.setFacetLimit(-1);
    QueryResponse facetResponse = solrClient.query(properties.getCores().getStories(), facetQuery);

    Map<Integer, Long> distribution = new HashMap<>();
    FacetField ratingFacet = facetResponse.getFacetField("rating");
    if (ratingFacet != null && ratingFacet.getValues() != null) {
        for (FacetField.Count bucket : ratingFacet.getValues()) {
            try {
                distribution.put(Integer.parseInt(bucket.getName()), bucket.getCount());
            } catch (NumberFormatException ignored) {
                // Non-integer facet value — silently skipped, as before.
            }
        }
    }
    stats.setRatingDistribution(distribution);
    return stats;
}
/**
 * Source-domain statistics: the top {@code limit} domains by story count,
 * plus how many stories do / don't record a source at all.
 */
public SourceDomainStatsDto getSourceDomainStatistics(String libraryId, int limit) throws IOException, SolrServerException {
    SourceDomainStatsDto stats = new SourceDomainStatsDto();

    // A single faceted query yields both the with-source total (numFound)
    // and the top domains (facet counts).
    SolrQuery facetQuery = new SolrQuery("*:*");
    facetQuery.addFilterQuery("libraryId:" + libraryId);
    facetQuery.addFilterQuery("sourceDomain:[* TO *]"); // only stories that record a source
    facetQuery.setRows(0);
    facetQuery.setFacet(true);
    facetQuery.addFacetField("sourceDomain");
    facetQuery.setFacetLimit(limit);
    facetQuery.setFacetSort("count");
    QueryResponse response = solrClient.query(properties.getCores().getStories(), facetQuery);

    List<SourceDomainStatsDto.DomainStatsDto> topDomains = new ArrayList<>();
    FacetField domainFacet = response.getFacetField("sourceDomain");
    if (domainFacet != null && domainFacet.getValues() != null) {
        for (FacetField.Count bucket : domainFacet.getValues()) {
            topDomains.add(new SourceDomainStatsDto.DomainStatsDto(bucket.getName(), bucket.getCount()));
        }
    }

    long withSource = response.getResults().getNumFound();
    stats.setTopDomains(topDomains);
    stats.setStoriesWithSource(withSource);
    stats.setStoriesWithoutSource(getTotalStories(libraryId) - withSource);
    return stats;
}
/**
 * Reading-progress statistics: read/unread story counts, percentage read,
 * and word totals split by read state.
 */
public ReadingProgressStatsDto getReadingProgressStatistics(String libraryId) throws IOException, SolrServerException {
    ReadingProgressStatsDto stats = new ReadingProgressStatsDto();

    long totalStories = getTotalStories(libraryId);
    stats.setTotalStories(totalStories);

    // Number of stories flagged as read.
    SolrQuery countQuery = new SolrQuery("*:*");
    countQuery.addFilterQuery("libraryId:" + libraryId);
    countQuery.addFilterQuery("isRead:true");
    countQuery.setRows(0);
    long readCount = solrClient.query(properties.getCores().getStories(), countQuery)
            .getResults().getNumFound();

    stats.setReadStories(readCount);
    stats.setUnreadStories(totalStories - readCount);
    if (totalStories > 0) {
        stats.setPercentageRead(readCount * 100.0 / totalStories);
    }

    // Word totals for read vs. unread stories, one stats query per state.
    for (boolean readState : new boolean[]{true, false}) {
        SolrQuery wordsQuery = new SolrQuery("*:*");
        wordsQuery.addFilterQuery("libraryId:" + libraryId);
        wordsQuery.addFilterQuery("isRead:" + readState);
        wordsQuery.setRows(0);
        wordsQuery.setParam(StatsParams.STATS, true);
        wordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
        QueryResponse wordsResponse = solrClient.query(properties.getCores().getStories(), wordsQuery);
        var fieldStats = wordsResponse.getFieldStatsInfo();
        if (fieldStats != null && fieldStats.get("wordCount") != null) {
            Object sum = fieldStats.get("wordCount").getSum();
            long words = (sum != null) ? ((Number) sum).longValue() : 0L;
            if (readState) {
                stats.setTotalWordsRead(words);
            } else {
                stats.setTotalWordsUnread(words);
            }
        }
    }
    return stats;
}
/**
 * Reading activity statistics for the last week: story count, word count,
 * estimated reading minutes, and a per-day breakdown (oldest first).
 *
 * All date math is done in UTC. The previous implementation combined
 * {@code LocalDateTime.now()} (system default zone) with
 * {@code toInstant(ZoneOffset.UTC)}, which skewed every Solr range filter
 * by the server's UTC offset whenever the server did not run in UTC.
 */
public ReadingActivityStatsDto getReadingActivityStatistics(String libraryId) throws IOException, SolrServerException {
    ReadingActivityStatsDto stats = new ReadingActivityStatsDto();

    String oneWeekAgoStr = LocalDateTime.now(ZoneOffset.UTC).minusWeeks(1)
            .toInstant(ZoneOffset.UTC).toString();

    // One query serves both numbers: numFound is the story count, the
    // stats component supplies the word sum (previously two identical
    // queries were issued).
    SolrQuery weekQuery = new SolrQuery("*:*");
    weekQuery.addFilterQuery("libraryId:" + libraryId);
    weekQuery.addFilterQuery("lastReadAt:[" + oneWeekAgoStr + " TO *]");
    weekQuery.setRows(0);
    weekQuery.setParam(StatsParams.STATS, true);
    weekQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
    QueryResponse weekResponse = solrClient.query(properties.getCores().getStories(), weekQuery);

    stats.setStoriesReadLastWeek(weekResponse.getResults().getNumFound());
    long wordsReadLastWeek = readWordCountSum(weekResponse);
    stats.setWordsReadLastWeek(wordsReadLastWeek);
    stats.setReadingTimeMinutesLastWeek(wordsReadLastWeek / WORDS_PER_MINUTE);

    // Daily activity for the last 7 days. Each day covers
    // [00:00:00 TO next-day 00:00:00} — Solr's exclusive upper bound ("}")
    // closes the one-second gap the old [00:00:00 TO 23:59:59] range left
    // at the end of every day, without double-counting midnight.
    List<ReadingActivityStatsDto.DailyActivityDto> dailyActivity = new ArrayList<>();
    for (int i = 6; i >= 0; i--) {
        LocalDate date = LocalDate.now(ZoneOffset.UTC).minusDays(i);
        String dayStartStr = date.atStartOfDay().toInstant(ZoneOffset.UTC).toString();
        String nextDayStr = date.plusDays(1).atStartOfDay().toInstant(ZoneOffset.UTC).toString();

        SolrQuery dayQuery = new SolrQuery("*:*");
        dayQuery.addFilterQuery("libraryId:" + libraryId);
        dayQuery.addFilterQuery("lastReadAt:[" + dayStartStr + " TO " + nextDayStr + "}");
        dayQuery.setRows(0);
        dayQuery.setParam(StatsParams.STATS, true);
        dayQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
        QueryResponse dayResponse = solrClient.query(properties.getCores().getStories(), dayQuery);

        dailyActivity.add(new ReadingActivityStatsDto.DailyActivityDto(
                date.format(DateTimeFormatter.ISO_LOCAL_DATE),
                dayResponse.getResults().getNumFound(),
                readWordCountSum(dayResponse)
        ));
    }
    stats.setDailyActivity(dailyActivity);
    return stats;
}

/**
 * Extract the wordCount stats sum from a response; 0 when stats are absent.
 */
private long readWordCountSum(QueryResponse response) {
    var fieldStatsInfo = response.getFieldStatsInfo();
    if (fieldStatsInfo == null || fieldStatsInfo.get("wordCount") == null) {
        return 0L;
    }
    Object sum = fieldStatsInfo.get("wordCount").getSum();
    return (sum != null) ? ((Number) sum).longValue() : 0L;
}
}

View File

@@ -0,0 +1,683 @@
package com.storycove.service;
import com.storycove.dto.FileImportResponse;
import com.storycove.dto.PDFImportRequest;
import com.storycove.entity.*;
import com.storycove.service.exception.InvalidFileException;
import com.storycove.service.exception.ResourceNotFoundException;
import org.apache.pdfbox.Loader;
import org.apache.pdfbox.pdmodel.PDDocument;
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
import org.apache.pdfbox.pdmodel.PDPage;
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
import org.apache.pdfbox.text.PDFTextStripper;
import org.apache.pdfbox.text.TextPosition;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import javax.imageio.ImageIO;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.*;
import java.util.regex.Pattern;
@Service
@Transactional
public class PDFImportService {
private static final Logger log = LoggerFactory.getLogger(PDFImportService.class);
private static final Pattern PAGE_NUMBER_PATTERN = Pattern.compile("^\\s*\\d+\\s*$");
private static final int MAX_FILE_SIZE = 300 * 1024 * 1024; // 300MB
private final StoryService storyService;
private final AuthorService authorService;
private final SeriesService seriesService;
private final TagService tagService;
private final HtmlSanitizationService sanitizationService;
private final ImageService imageService;
private final LibraryService libraryService;
@Autowired
public PDFImportService(StoryService storyService,
AuthorService authorService,
SeriesService seriesService,
TagService tagService,
HtmlSanitizationService sanitizationService,
ImageService imageService,
LibraryService libraryService) {
this.storyService = storyService;
this.authorService = authorService;
this.seriesService = seriesService;
this.tagService = tagService;
this.sanitizationService = sanitizationService;
this.imageService = imageService;
this.libraryService = libraryService;
}
/**
 * Import an uploaded PDF as a new story.
 *
 * Flow: validate the upload, parse it with PDFBox, extract metadata and
 * (optionally) embedded images, persist the story, then store the images
 * and rewrite their HTML placeholders. All failures are reported through
 * {@link FileImportResponse#error} rather than thrown.
 *
 * @param request upload plus author/series/tag options
 * @return success response carrying the new story id, or an error response
 */
public FileImportResponse importPDF(PDFImportRequest request) {
    try {
        MultipartFile pdfFile = request.getPdfFile();
        if (pdfFile == null || pdfFile.isEmpty()) {
            return FileImportResponse.error("PDF file is required", null);
        }
        if (!isValidPDFFile(pdfFile)) {
            return FileImportResponse.error("Invalid PDF file format", pdfFile.getOriginalFilename());
        }
        log.info("Parsing PDF file: {}", pdfFile.getOriginalFilename());
        // try-with-resources guarantees the document is closed on every path.
        try (PDDocument document = parsePDFFile(pdfFile)) {
            log.info("Extracting metadata from PDF");
            PDFMetadata metadata = extractMetadata(document, pdfFile.getOriginalFilename());
            // Author is mandatory — resolved from request or PDF metadata.
            String authorName = determineAuthorName(request, metadata);
            if (authorName == null || authorName.trim().isEmpty()) {
                return FileImportResponse.error("Author name is required for PDF import. No author found in PDF metadata.", pdfFile.getOriginalFilename());
            }
            // getExtractImages() returns a nullable Boolean; the previous direct
            // unboxing ("request.getExtractImages() && ...") threw an NPE when
            // the flag was omitted from the request.
            boolean extractImages = Boolean.TRUE.equals(request.getExtractImages());
            log.info("Extracting content and images from PDF");
            PDFContent content = extractContentWithImages(document, extractImages);
            log.info("Creating story entity from PDF");
            Story story = createStoryFromPDF(metadata, content, request, authorName);
            log.info("Saving story to database: {}", story.getTitle());
            Story savedStory = storyService.create(story);
            log.info("Story saved successfully with ID: {}", savedStory.getId());
            // Store embedded images and swap the data-pdf-image-id placeholders
            // for real URLs. Best-effort: the story itself is already saved.
            if (extractImages && !content.getImages().isEmpty()) {
                try {
                    log.info("Processing {} embedded images for story: {}", content.getImages().size(), savedStory.getId());
                    String updatedContent = processAndSaveImages(content, savedStory.getId());
                    if (!updatedContent.equals(savedStory.getContentHtml())) {
                        savedStory.setContentHtml(updatedContent);
                        savedStory = storyService.update(savedStory.getId(), savedStory);
                        log.info("Story content updated with processed images");
                    }
                } catch (Exception e) {
                    log.error("Failed to process embedded images for story {}: {}", savedStory.getId(), e.getMessage(), e);
                }
            }
            log.info("PDF import completed successfully for: {}", savedStory.getTitle());
            FileImportResponse response = FileImportResponse.success(savedStory.getId(), savedStory.getTitle(), "PDF");
            response.setFileName(pdfFile.getOriginalFilename());
            response.setWordCount(savedStory.getWordCount());
            response.setExtractedImages(content.getImages().size());
            return response;
        }
    } catch (Exception e) {
        log.error("PDF import failed with exception: {}", e.getMessage(), e);
        return FileImportResponse.error("Failed to import PDF: " + e.getMessage(),
                request.getPdfFile() != null ? request.getPdfFile().getOriginalFilename() : null);
    }
}
/**
 * Cheap upload validation: ".pdf" extension, size within MAX_FILE_SIZE,
 * and a PDF (or absent) content type.
 */
private boolean isValidPDFFile(MultipartFile file) {
    String name = file.getOriginalFilename();
    boolean hasPdfExtension = name != null && name.toLowerCase().endsWith(".pdf");
    if (!hasPdfExtension) {
        return false;
    }
    if (file.getSize() > MAX_FILE_SIZE) {
        log.warn("PDF file size {} exceeds maximum {}", file.getSize(), MAX_FILE_SIZE);
        return false;
    }
    // Some clients omit the content type entirely, so null is accepted.
    String contentType = file.getContentType();
    return contentType == null || "application/pdf".equals(contentType);
}
/**
 * Read the whole upload into memory and parse it with PDFBox.
 * Any read/parse failure is surfaced as InvalidFileException.
 */
private PDDocument parsePDFFile(MultipartFile pdfFile) throws IOException {
    try (InputStream in = pdfFile.getInputStream()) {
        byte[] bytes = in.readAllBytes();
        return Loader.loadPDF(bytes);
    } catch (Exception e) {
        throw new InvalidFileException("Failed to parse PDF file: " + e.getMessage());
    }
}
/**
 * Pull title/author/subject/keywords/creator from the PDF information
 * dictionary, falling back to a filename-derived title, and record the
 * page count.
 */
private PDFMetadata extractMetadata(PDDocument document, String fileName) {
    PDFMetadata metadata = new PDFMetadata();
    PDDocumentInformation docInfo = document.getDocumentInformation();
    if (docInfo != null) {
        metadata.setTitle(docInfo.getTitle());
        metadata.setAuthor(docInfo.getAuthor());
        metadata.setSubject(docInfo.getSubject());
        metadata.setKeywords(docInfo.getKeywords());
        metadata.setCreator(docInfo.getCreator());
    }
    boolean titleMissing = metadata.getTitle() == null || metadata.getTitle().trim().isEmpty();
    if (titleMissing) {
        // Derive a readable title from the file name: drop the ".pdf"
        // suffix, turn underscores and dashes into spaces.
        metadata.setTitle(fileName.replaceAll("\\.pdf$", "").replaceAll("[_-]", " "));
    }
    metadata.setPageCount(document.getNumberOfPages());
    return metadata;
}
/**
 * Extract the PDF's text as simple HTML paragraphs and, optionally, its
 * embedded images.
 *
 * Images are pulled out first (so their page positions are known), then
 * each page's text is emitted as &lt;p&gt; blocks followed by &lt;img&gt;
 * placeholder tags (data-pdf-image-id) for the images found on that page.
 * The placeholders are swapped for real URLs later in processAndSaveImages().
 *
 * @param extractImages nullable flag; only Boolean.TRUE enables extraction
 */
private PDFContent extractContentWithImages(PDDocument document, Boolean extractImages) throws IOException {
    PDFContent content = new PDFContent();
    StringBuilder htmlContent = new StringBuilder();
    List<PDFImage> images = new ArrayList<>();
    boolean shouldExtractImages = extractImages != null && extractImages;
    // Extract images first so each one's page number is known before the
    // page-by-page text pass below.
    if (shouldExtractImages) {
        images = extractImagesFromPDF(document);
        log.info("Extracted {} images from PDF", images.size());
    }
    // Extract text with the custom stripper; sort-by-position yields
    // reading order rather than raw content-stream order.
    CustomPDFTextStripper stripper = new CustomPDFTextStripper();
    stripper.setSortByPosition(true);
    // Process page by page so image placeholders land after that page's text.
    for (int pageNum = 0; pageNum < document.getNumberOfPages(); pageNum++) {
        stripper.setStartPage(pageNum + 1);
        stripper.setEndPage(pageNum + 1);
        String pageText = stripper.getText(document);
        // Drop a leading header line / trailing footer line (page numbers etc.).
        pageText = filterHeadersFooters(pageText, pageNum + 1);
        if (pageText != null && !pageText.trim().isEmpty()) {
            // Blank-line-separated runs of text become HTML paragraphs.
            String[] paragraphs = pageText.split("\\n\\s*\\n");
            for (String para : paragraphs) {
                String trimmed = para.trim();
                // A whole paragraph can still be a header/footer fragment —
                // filter again at paragraph granularity.
                if (!trimmed.isEmpty() && !isLikelyHeaderFooter(trimmed)) {
                    htmlContent.append("<p>").append(escapeHtml(trimmed)).append("</p>\n");
                }
            }
        }
        // Append placeholders for this page's images; replaced with actual
        // URLs after the images have been saved.
        if (shouldExtractImages) {
            for (PDFImage image : images) {
                if (image.getPageNumber() == pageNum) {
                    htmlContent.append("<img data-pdf-image-id=\"")
                            .append(image.getImageId())
                            .append("\" alt=\"Image from PDF\" />\n");
                }
            }
        }
    }
    content.setHtmlContent(htmlContent.toString());
    content.setImages(images);
    return content;
}
/**
 * Collect embedded raster images from every page of the document.
 *
 * Each image is re-encoded as PNG and tagged with a synthetic id
 * ("pdf-img-N") plus the zero-based page number it came from. Images
 * smaller than 50x50 px are skipped as likely decoration. Extraction is
 * best-effort: failures on an individual image or page are logged and
 * skipped, never propagated.
 */
private List<PDFImage> extractImagesFromPDF(PDDocument document) {
    List<PDFImage> images = new ArrayList<>();
    int imageCounter = 0;
    for (int pageNum = 0; pageNum < document.getNumberOfPages(); pageNum++) {
        try {
            PDPage page = document.getPage(pageNum);
            // Walk the page's XObject resources; images are the PDImageXObject subset.
            Iterable<org.apache.pdfbox.cos.COSName> names = page.getResources().getXObjectNames();
            for (org.apache.pdfbox.cos.COSName name : names) {
                try {
                    org.apache.pdfbox.pdmodel.graphics.PDXObject xObject = page.getResources().getXObject(name);
                    if (xObject instanceof PDImageXObject) {
                        PDImageXObject imageObj = (PDImageXObject) xObject;
                        BufferedImage bImage = imageObj.getImage();
                        // Skip very small images (likely decorative or icons).
                        if (bImage.getWidth() < 50 || bImage.getHeight() < 50) {
                            continue;
                        }
                        // Normalize to PNG bytes regardless of the original encoding.
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        ImageIO.write(bImage, "png", baos);
                        byte[] imageBytes = baos.toByteArray();
                        PDFImage pdfImage = new PDFImage();
                        pdfImage.setImageId("pdf-img-" + imageCounter);
                        pdfImage.setPageNumber(pageNum);
                        pdfImage.setImageData(imageBytes);
                        pdfImage.setWidth(bImage.getWidth());
                        pdfImage.setHeight(bImage.getHeight());
                        images.add(pdfImage);
                        imageCounter++;
                    }
                } catch (Exception e) {
                    log.warn("Failed to extract image '{}' from page {}: {}", name, pageNum, e.getMessage());
                }
            }
        } catch (Exception e) {
            log.warn("Failed to process images on page {}: {}", pageNum, e.getMessage());
        }
    }
    return images;
}
/**
 * Persist every extracted PDF image through ImageService and rewrite the
 * story HTML, replacing each data-pdf-image-id placeholder attribute with
 * a src URL of the form /api/files/images/{libraryId}/{path}.
 *
 * If an individual image fails to save, its placeholder &lt;img&gt; tag is
 * removed from the HTML instead; other images are unaffected.
 *
 * @return the rewritten HTML content
 */
private String processAndSaveImages(PDFContent content, UUID storyId) throws IOException {
    String htmlContent = content.getHtmlContent();
    // Get current library ID for constructing image URLs.
    String currentLibraryId = libraryService.getCurrentLibraryId();
    if (currentLibraryId == null || currentLibraryId.trim().isEmpty()) {
        log.warn("Current library ID is null or empty when processing PDF images for story: {}", storyId);
        currentLibraryId = "default";
    }
    for (PDFImage image : content.getImages()) {
        try {
            // Wrap the raw PNG bytes so ImageService can treat them as an upload.
            MultipartFile imageFile = new PDFImageMultipartFile(
                    image.getImageData(),
                    "pdf-image-" + image.getImageId() + ".png",
                    "image/png"
            );
            // ImageType.CONTENT stores under the content directory;
            // imagePath comes back like "content/uuid.png".
            String imagePath = imageService.uploadImage(imageFile, ImageService.ImageType.CONTENT);
            // Construct the full URL with library ID.
            String imageUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
            // Swap the placeholder attribute for a real src attribute.
            String placeholder = "data-pdf-image-id=\"" + image.getImageId() + "\"";
            String replacement = "src=\"" + imageUrl + "\"";
            htmlContent = htmlContent.replace(placeholder, replacement);
            log.debug("Saved PDF image {} to path: {} (URL: {})", image.getImageId(), imagePath, imageUrl);
        } catch (Exception e) {
            log.error("Failed to save PDF image {}: {}", image.getImageId(), e.getMessage());
            // Remove the whole placeholder tag if the image could not be saved.
            htmlContent = htmlContent.replaceAll(
                    "<img data-pdf-image-id=\"" + image.getImageId() + "\"[^>]*>",
                    ""
            );
        }
    }
    return htmlContent;
}
/**
 * Strip a leading header line and/or trailing footer line from one page of
 * extracted text. Pages with two or fewer lines are returned untouched.
 *
 * NOTE(review): pageNumber is currently unused; kept for signature stability.
 */
private String filterHeadersFooters(String text, int pageNumber) {
    if (text == null) return "";
    String[] lines = text.split("\\n");
    if (lines.length <= 2) {
        return text; // too short to contain separate header/footer lines
    }
    int firstKept = isLikelyHeaderFooter(lines[0]) ? 1 : 0;
    int lastExclusive = isLikelyHeaderFooter(lines[lines.length - 1])
            ? lines.length - 1
            : lines.length;
    StringBuilder kept = new StringBuilder();
    for (int i = firstKept; i < lastExclusive; i++) {
        kept.append(lines[i]).append("\n");
    }
    return kept.toString();
}
/**
 * Heuristic for header/footer lines: bare page numbers, very short
 * fragments, and common "Page N" / "N of M" / "Chapter N" patterns.
 */
private boolean isLikelyHeaderFooter(String line) {
    String candidate = line.trim();
    // A line that is nothing but digits (optionally padded) is a page number.
    if (PAGE_NUMBER_PATTERN.matcher(candidate).matches()) {
        return true;
    }
    // Very short fragments are almost never body text.
    if (candidate.length() < 3) {
        return true;
    }
    String lower = candidate.toLowerCase();
    return lower.matches(".*page \\d+.*")
            || lower.matches(".*\\d+ of \\d+.*")
            || lower.matches("chapter \\d+")
            || lower.matches("\\d+");
}
/**
 * Resolve the author name with precedence:
 * request.authorName &gt; request.authorId (looked up) &gt; PDF metadata author.
 *
 * @return the trimmed name, or null when no source yields a non-blank name
 */
private String determineAuthorName(PDFImportRequest request, PDFMetadata metadata) {
    String requested = request.getAuthorName();
    if (requested != null && !requested.trim().isEmpty()) {
        return requested.trim();
    }
    if (request.getAuthorId() != null) {
        try {
            return authorService.findById(request.getAuthorId()).getName();
        } catch (ResourceNotFoundException e) {
            // Fall through to the metadata author below.
            log.warn("Author ID {} not found", request.getAuthorId());
        }
    }
    String fromMetadata = metadata.getAuthor();
    if (fromMetadata != null && !fromMetadata.trim().isEmpty()) {
        return fromMetadata.trim();
    }
    return null;
}
/**
 * Build the Story entity from extracted PDF data: title/description/content,
 * author, optional series, and tags (request tags plus PDF keyword metadata).
 *
 * Author/series/tag handling each log-and-rethrow so a failure is traceable
 * to its phase; tag creation alone is per-tag best-effort.
 *
 * NOTE(review): getCreateMissingAuthor()/getCreateMissingSeries() are unboxed
 * directly — presumably non-null Booleans on the request DTO; confirm, since
 * a null would NPE here.
 */
private Story createStoryFromPDF(PDFMetadata metadata, PDFContent content,
        PDFImportRequest request, String authorName) {
    Story story = new Story();
    story.setTitle(metadata.getTitle() != null ? metadata.getTitle() : "Untitled PDF");
    story.setDescription(metadata.getSubject());
    // PDF subject doubles as the description; content is sanitized before storage.
    story.setContentHtml(sanitizationService.sanitize(content.getHtmlContent()));
    // Handle author assignment: explicit id wins; a missing id may fall back
    // to creating the resolved author name when the request allows it.
    try {
        if (request.getAuthorId() != null) {
            try {
                Author author = authorService.findById(request.getAuthorId());
                story.setAuthor(author);
            } catch (ResourceNotFoundException e) {
                if (request.getCreateMissingAuthor()) {
                    Author newAuthor = createAuthor(authorName);
                    story.setAuthor(newAuthor);
                }
            }
        } else if (authorName != null && request.getCreateMissingAuthor()) {
            Author author = findOrCreateAuthor(authorName);
            story.setAuthor(author);
        }
    } catch (Exception e) {
        log.error("Error handling author assignment: {}", e.getMessage(), e);
        throw e;
    }
    // Handle series assignment — only when both a series id and a volume are given.
    try {
        if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
            try {
                Series series = seriesService.findById(request.getSeriesId());
                story.setSeries(series);
                story.setVolume(request.getSeriesVolume());
            } catch (ResourceNotFoundException e) {
                if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
                    Series newSeries = createSeries(request.getSeriesName());
                    story.setSeries(newSeries);
                    story.setVolume(request.getSeriesVolume());
                }
            }
        }
    } catch (Exception e) {
        log.error("Error handling series assignment: {}", e.getMessage(), e);
        throw e;
    }
    // Handle tags: request-supplied tags plus keywords from PDF metadata.
    try {
        List<String> allTags = new ArrayList<>();
        if (request.getTags() != null && !request.getTags().isEmpty()) {
            allTags.addAll(request.getTags());
        }
        // PDF keyword metadata is comma/semicolon separated.
        if (metadata.getKeywords() != null && !metadata.getKeywords().trim().isEmpty()) {
            String[] keywords = metadata.getKeywords().split("[,;]");
            for (String keyword : keywords) {
                String trimmed = keyword.trim();
                if (!trimmed.isEmpty()) {
                    allTags.add(trimmed);
                }
            }
        }
        // Deduplicate, then create/attach each tag; individual failures are
        // logged and skipped rather than aborting the import.
        allTags.stream()
                .distinct()
                .forEach(tagName -> {
                    try {
                        Tag tag = tagService.findOrCreate(tagName.trim());
                        story.addTag(tag);
                    } catch (Exception e) {
                        log.error("Error creating tag '{}': {}", tagName, e.getMessage(), e);
                    }
                });
    } catch (Exception e) {
        log.error("Error handling tags: {}", e.getMessage(), e);
        throw e;
    }
    return story;
}
/** Return the existing author with this name, or create and persist a new one. */
private Author findOrCreateAuthor(String authorName) {
    return authorService.findByNameOptional(authorName)
            .orElseGet(() -> createAuthor(authorName));
}
/** Persist a brand-new author with the given name. */
private Author createAuthor(String authorName) {
    Author newAuthor = new Author();
    newAuthor.setName(authorName);
    return authorService.create(newAuthor);
}
/** Persist a brand-new series with the given name. */
private Series createSeries(String seriesName) {
    Series newSeries = new Series();
    newSeries.setName(seriesName);
    return seriesService.create(newSeries);
}
/**
 * Minimal HTML escaping for text extracted from the PDF; newlines become
 * &lt;br/&gt; so intra-paragraph line breaks survive in the output.
 */
private String escapeHtml(String text) {
    StringBuilder out = new StringBuilder(text.length());
    for (int i = 0; i < text.length(); i++) {
        char c = text.charAt(i);
        switch (c) {
            case '&':  out.append("&amp;");  break;
            case '<':  out.append("&lt;");   break;
            case '>':  out.append("&gt;");   break;
            case '"':  out.append("&quot;"); break;
            case '\'': out.append("&#39;");  break;
            case '\n': out.append("<br/>");  break;
            default:   out.append(c);
        }
    }
    return out.toString();
}
/**
 * Validate a PDF upload without importing it.
 *
 * Returns an empty list when the file is acceptable; otherwise one message
 * per problem. Fixes two issues in the old version: an oversize-but-valid
 * PDF no longer also reports the misleading "Invalid PDF file format"
 * message (isValidPDFFile fails on size too), and the file — up to 300MB —
 * is only loaded and parsed once the cheap checks have passed.
 *
 * @param file the uploaded file (may be null)
 * @return list of human-readable validation errors, empty when valid
 */
public List<String> validatePDFFile(MultipartFile file) {
    List<String> errors = new ArrayList<>();
    if (file == null || file.isEmpty()) {
        errors.add("PDF file is required");
        return errors;
    }
    // Extension / content-type check, decoupled from the size check so each
    // failure produces an accurate message.
    String filename = file.getOriginalFilename();
    boolean badName = filename == null || !filename.toLowerCase().endsWith(".pdf");
    String contentType = file.getContentType();
    boolean badType = contentType != null && !"application/pdf".equals(contentType);
    if (badName || badType) {
        errors.add("Invalid PDF file format. Only .pdf files are supported");
    }
    if (file.getSize() > MAX_FILE_SIZE) {
        errors.add("PDF file size exceeds " + (MAX_FILE_SIZE / 1024 / 1024) + "MB limit");
    }
    if (!errors.isEmpty()) {
        return errors; // don't load a file that already failed cheap validation
    }
    try (PDDocument document = parsePDFFile(file)) {
        if (document.getNumberOfPages() == 0) {
            errors.add("PDF file contains no pages");
        }
    } catch (Exception e) {
        errors.add("Failed to parse PDF file: " + e.getMessage());
    }
    return errors;
}
// Inner classes for data structures
/**
 * Value holder for metadata pulled from the PDF document information
 * dictionary, plus the page count. The title falls back to a cleaned-up
 * file name when the PDF carries none (see extractMetadata).
 */
private static class PDFMetadata {
    private String title;    // document title, or filename-derived fallback
    private String author;   // may be null; import then requires an explicit author
    private String subject;  // used as the story description
    private String keywords; // comma/semicolon separated; split into tags
    private String creator;  // producing application; informational only
    private int pageCount;
    public String getTitle() { return title; }
    public void setTitle(String title) { this.title = title; }
    public String getAuthor() { return author; }
    public void setAuthor(String author) { this.author = author; }
    public String getSubject() { return subject; }
    public void setSubject(String subject) { this.subject = subject; }
    public String getKeywords() { return keywords; }
    public void setKeywords(String keywords) { this.keywords = keywords; }
    public String getCreator() { return creator; }
    public void setCreator(String creator) { this.creator = creator; }
    public int getPageCount() { return pageCount; }
    public void setPageCount(int pageCount) { this.pageCount = pageCount; }
}
/**
 * Result of content extraction: the generated HTML (with image placeholders)
 * and the list of embedded images pulled from the document.
 */
private static class PDFContent {
    private String htmlContent;
    private List<PDFImage> images = new ArrayList<>(); // never null; empty when extraction is off
    public String getHtmlContent() { return htmlContent; }
    public void setHtmlContent(String htmlContent) { this.htmlContent = htmlContent; }
    public List<PDFImage> getImages() { return images; }
    public void setImages(List<PDFImage> images) { this.images = images; }
}
/**
 * One embedded image extracted from the PDF: PNG-encoded bytes plus the
 * synthetic id used in HTML placeholders and the zero-based source page.
 */
private static class PDFImage {
    private String imageId;   // synthetic "pdf-img-N" id, referenced by placeholders
    private int pageNumber;   // zero-based page the image was found on
    private byte[] imageData; // PNG-encoded bytes (re-encoded from the PDF)
    private int width;        // pixel width of the decoded image
    private int height;       // pixel height of the decoded image
    public String getImageId() { return imageId; }
    public void setImageId(String imageId) { this.imageId = imageId; }
    public int getPageNumber() { return pageNumber; }
    public void setPageNumber(int pageNumber) { this.pageNumber = pageNumber; }
    public byte[] getImageData() { return imageData; }
    public void setImageData(byte[] imageData) { this.imageData = imageData; }
    public int getWidth() { return width; }
    public void setWidth(int width) { this.width = width; }
    public int getHeight() { return height; }
    public void setHeight(int height) { this.height = height; }
}
/**
 * Custom PDF text stripper, subclassed as an extension point for
 * position-based header/footer filtering.
 *
 * NOTE(review): the writeString override is currently a straight
 * pass-through — actual header/footer removal happens afterwards in
 * filterHeadersFooters()/isLikelyHeaderFooter().
 */
private static class CustomPDFTextStripper extends PDFTextStripper {
    public CustomPDFTextStripper() throws IOException {
        super();
    }
    @Override
    protected void writeString(String text, List<TextPosition> textPositions) throws IOException {
        // Intentionally delegates unchanged; TextPosition data is available
        // here should positional filtering be needed later.
        super.writeString(text, textPositions);
    }
}
/**
 * In-memory MultipartFile wrapper around extracted PDF image bytes, so the
 * images can be handed to ImageService.uploadImage() like a normal upload.
 */
private static class PDFImageMultipartFile implements MultipartFile {
    private final byte[] data;        // raw PNG bytes; may be null → treated as empty
    private final String filename;
    private final String contentType; // always "image/png" for PDF images
    public PDFImageMultipartFile(byte[] data, String filename, String contentType) {
        this.data = data;
        this.filename = filename;
        this.contentType = contentType;
    }
    @Override
    public String getName() {
        // Fixed form-field name; not used for storage decisions.
        return "image";
    }
    @Override
    public String getOriginalFilename() {
        return filename;
    }
    @Override
    public String getContentType() {
        return contentType;
    }
    @Override
    public boolean isEmpty() {
        return data == null || data.length == 0;
    }
    @Override
    public long getSize() {
        return data != null ? data.length : 0;
    }
    @Override
    public byte[] getBytes() {
        // Returns the backing array directly (no defensive copy).
        return data;
    }
    @Override
    public InputStream getInputStream() {
        return new ByteArrayInputStream(data);
    }
    @Override
    public void transferTo(java.io.File dest) throws IOException {
        try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
            fos.write(data);
        }
    }
    @Override
    public void transferTo(java.nio.file.Path dest) throws IOException {
        java.nio.file.Files.write(dest, data);
    }
}
}

View File

@@ -347,6 +347,7 @@ public class SolrService {
doc.addField("volume", story.getVolume());
doc.addField("isRead", story.getIsRead());
doc.addField("readingPosition", story.getReadingPosition());
doc.addField("readingProgressPercentage", calculateReadingProgressPercentage(story));
if (story.getLastReadAt() != null) {
doc.addField("lastReadAt", formatDateTime(story.getLastReadAt()));
@@ -544,6 +545,26 @@ public class SolrService {
return dateTime.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + "Z";
}
/**
 * Percentage (0-100) of the story that has been read, derived from the raw
 * character offset in readingPosition measured against contentHtml length.
 *
 * ALWAYS uses contentHtml for consistency — the frontend tracks the
 * reading position against contentHtml as well.
 *
 * @return 0 when there is no position or no content; otherwise the clamped percentage
 */
private Integer calculateReadingProgressPercentage(Story story) {
    if (story.getReadingPosition() == null || story.getReadingPosition() == 0) {
        return 0;
    }
    int totalLength = 0;
    if (story.getContentHtml() != null && !story.getContentHtml().isEmpty()) {
        totalLength = story.getContentHtml().length();
    }
    if (totalLength == 0) {
        return 0;
    }
    int percentage = Math.round((float) story.getReadingPosition() * 100 / totalLength);
    // Clamp to [0, 100]: positions past the end round above 100, and a
    // corrupt negative position would otherwise yield a negative percentage
    // (the old Math.min(100, ...) alone let negatives through).
    return Math.max(0, Math.min(100, percentage));
}
// ===============================
// UTILITY METHODS
// ===============================
@@ -1039,6 +1060,7 @@ public class SolrService {
story.setVolume((Integer) doc.getFieldValue("volume"));
story.setIsRead((Boolean) doc.getFieldValue("isRead"));
story.setReadingPosition((Integer) doc.getFieldValue("readingPosition"));
story.setReadingProgressPercentage((Integer) doc.getFieldValue("readingProgressPercentage"));
// Handle dates
story.setLastReadAt(parseDateTimeFromSolr(doc.getFieldValue("lastReadAt")));

View File

@@ -28,11 +28,12 @@ import java.util.UUID;
@Validated
@Transactional
public class TagService {
private static final Logger logger = LoggerFactory.getLogger(TagService.class);
private final TagRepository tagRepository;
private final TagAliasRepository tagAliasRepository;
private SolrService solrService;
@Autowired
public TagService(TagRepository tagRepository, TagAliasRepository tagAliasRepository) {
@@ -40,6 +41,11 @@ public class TagService {
this.tagAliasRepository = tagAliasRepository;
}
@Autowired(required = false)
public void setSolrService(SolrService solrService) {
this.solrService = solrService;
}
@Transactional(readOnly = true)
public List<Tag> findAll() {
return tagRepository.findAll();
@@ -142,13 +148,39 @@ public class TagService {
public void delete(UUID id) {
Tag tag = findById(id);
// Check if tag is used by any stories
// Remove tag from all stories before deletion and track for reindexing
List<Story> storiesToReindex = new ArrayList<>();
if (!tag.getStories().isEmpty()) {
throw new IllegalStateException("Cannot delete tag that is used by stories. Remove tag from all stories first.");
// Create a copy to avoid ConcurrentModificationException
List<Story> storiesToUpdate = new ArrayList<>(tag.getStories());
storiesToUpdate.forEach(story -> {
story.removeTag(tag);
storiesToReindex.add(story);
});
logger.info("Removed tag '{}' from {} stories before deletion", tag.getName(), storiesToUpdate.size());
}
// Remove tag from all collections before deletion
if (tag.getCollections() != null && !tag.getCollections().isEmpty()) {
tag.getCollections().forEach(collection -> collection.getTags().remove(tag));
logger.info("Removed tag '{}' from {} collections before deletion", tag.getName(), tag.getCollections().size());
}
tagRepository.delete(tag);
logger.info("Deleted tag '{}'", tag.getName());
// Reindex affected stories in Solr
if (solrService != null && !storiesToReindex.isEmpty()) {
try {
for (Story story : storiesToReindex) {
solrService.indexStory(story);
}
logger.info("Reindexed {} stories after tag deletion", storiesToReindex.size());
} catch (Exception e) {
logger.error("Failed to reindex stories after tag deletion", e);
}
}
}
public List<Tag> deleteUnusedTags() {

View File

@@ -0,0 +1,521 @@
package com.storycove.service;
import com.storycove.dto.*;
import com.storycove.service.exception.InvalidFileException;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;

import java.io.*;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.Stream;
import java.util.zip.ZipEntry;
import java.util.zip.ZipInputStream;
@Service
public class ZIPImportService {

    private static final Logger log = LoggerFactory.getLogger(ZIPImportService.class);

    private static final long MAX_ZIP_SIZE = 1024L * 1024 * 1024; // 1GB
    private static final int MAX_FILES_IN_ZIP = 30;
    private static final long ZIP_SESSION_TIMEOUT_MS = 30 * 60 * 1000; // 30 minutes

    // Single shared daemon timer for all session cleanups. The previous
    // implementation created one Timer (and thus one background thread) per
    // analyzed ZIP and never cancelled it, leaking a thread per upload.
    private static final Timer CLEANUP_TIMER = new Timer("zip-session-cleanup", true);

    // Temporary storage for extracted ZIP files (sessionId -> session data)
    private final Map<String, ZIPSession> activeSessions = new ConcurrentHashMap<>();

    private final EPUBImportService epubImportService;
    private final PDFImportService pdfImportService;

    @Autowired
    public ZIPImportService(EPUBImportService epubImportService,
                            PDFImportService pdfImportService) {
        this.epubImportService = epubImportService;
        this.pdfImportService = pdfImportService;
    }

    /**
     * Analyze a ZIP file and return information about its contents.
     * <p>
     * Extracts root-level EPUB/PDF entries into a temp directory, validates
     * each, and registers a 30-minute session whose ID the caller passes back
     * to {@link #importFromZIP(ZIPImportRequest)}.
     *
     * @param zipFile uploaded ZIP archive (required, max {@value #MAX_ZIP_SIZE} bytes)
     * @return success response listing the discovered files, or an error response
     */
    public ZIPAnalysisResponse analyzeZIPFile(MultipartFile zipFile) {
        try {
            // Validate ZIP file
            if (zipFile == null || zipFile.isEmpty()) {
                return ZIPAnalysisResponse.error("ZIP file is required");
            }
            if (!isValidZIPFile(zipFile)) {
                return ZIPAnalysisResponse.error("Invalid ZIP file format");
            }
            if (zipFile.getSize() > MAX_ZIP_SIZE) {
                return ZIPAnalysisResponse.error("ZIP file size exceeds " + (MAX_ZIP_SIZE / 1024 / 1024) + "MB limit");
            }

            log.info("Analyzing ZIP file: {} (size: {} bytes)", zipFile.getOriginalFilename(), zipFile.getSize());

            // Create temporary directory for extraction
            String sessionId = UUID.randomUUID().toString();
            Path tempDir = Files.createTempDirectory("storycove-zip-" + sessionId);

            // Extract ZIP contents
            List<FileInfoDto> files = extractAndAnalyzeZIP(zipFile, tempDir, sessionId);

            if (files.isEmpty()) {
                cleanupSession(sessionId);
                return ZIPAnalysisResponse.error("No valid EPUB or PDF files found in ZIP");
            }

            // extractAndAnalyzeZIP extracts at most MAX_FILES_IN_ZIP + 1 entries,
            // so this check fires exactly when the archive holds too many files.
            if (files.size() > MAX_FILES_IN_ZIP) {
                cleanupSession(sessionId);
                return ZIPAnalysisResponse.error("ZIP contains too many files (max " + MAX_FILES_IN_ZIP + ")");
            }

            // Store session data
            ZIPSession session = new ZIPSession(sessionId, tempDir, files);
            activeSessions.put(sessionId, session);

            // Schedule cleanup
            scheduleSessionCleanup(sessionId);

            ZIPAnalysisResponse response = ZIPAnalysisResponse.success(zipFile.getOriginalFilename(), files);
            response.addWarning("Session ID: " + sessionId + " (valid for 30 minutes)");

            log.info("ZIP analysis completed. Session ID: {}, Files found: {}", sessionId, files.size());
            return response;
        } catch (Exception e) {
            log.error("Failed to analyze ZIP file: {}", e.getMessage(), e);
            return ZIPAnalysisResponse.error("Failed to analyze ZIP file: " + e.getMessage());
        }
    }

    /**
     * Import selected files from a previously analyzed ZIP session.
     * <p>
     * Each selected file is imported independently; per-file failures are
     * reported in the aggregate response rather than aborting the batch.
     * The session (and its temp directory) is always cleaned up afterwards.
     *
     * @param request session ID, selected file names and import metadata
     * @return aggregate result with one {@link FileImportResponse} per file
     */
    public ZIPImportResponse importFromZIP(ZIPImportRequest request) {
        try {
            // Validate session
            ZIPSession session = activeSessions.get(request.getZipSessionId());
            if (session == null) {
                return createErrorResponse("Invalid or expired session ID");
            }
            if (session.isExpired()) {
                cleanupSession(request.getZipSessionId());
                return createErrorResponse("Session has expired. Please re-upload the ZIP file");
            }

            List<String> selectedFiles = request.getSelectedFiles();
            if (selectedFiles == null || selectedFiles.isEmpty()) {
                return createErrorResponse("No files selected for import");
            }

            log.info("Importing {} files from ZIP session: {}", selectedFiles.size(), request.getZipSessionId());

            List<FileImportResponse> results = new ArrayList<>();

            // Import each selected file
            for (String fileName : selectedFiles) {
                try {
                    FileInfoDto fileInfo = session.getFileInfo(fileName);
                    if (fileInfo == null) {
                        FileImportResponse errorResult = FileImportResponse.error("File not found in session: " + fileName, fileName);
                        results.add(errorResult);
                        continue;
                    }
                    if (fileInfo.getError() != null) {
                        FileImportResponse errorResult = FileImportResponse.error("File has errors: " + fileInfo.getError(), fileName);
                        results.add(errorResult);
                        continue;
                    }

                    // Get file-specific or default metadata
                    ZIPImportRequest.FileImportMetadata metadata = getFileMetadata(request, fileName);

                    // Import based on file type
                    FileImportResponse result;
                    if ("EPUB".equals(fileInfo.getFileType())) {
                        result = importEPUBFromSession(session, fileName, metadata, request);
                    } else if ("PDF".equals(fileInfo.getFileType())) {
                        result = importPDFFromSession(session, fileName, metadata, request);
                    } else {
                        result = FileImportResponse.error("Unsupported file type: " + fileInfo.getFileType(), fileName);
                    }
                    results.add(result);

                    if (result.isSuccess()) {
                        log.info("Successfully imported file: {} (Story ID: {})", fileName, result.getStoryId());
                    } else {
                        log.warn("Failed to import file: {} - {}", fileName, result.getMessage());
                    }
                } catch (Exception e) {
                    log.error("Failed to import file {}: {}", fileName, e.getMessage(), e);
                    FileImportResponse errorResult = FileImportResponse.error("Import failed: " + e.getMessage(), fileName);
                    results.add(errorResult);
                }
            }

            // Cleanup session after import
            cleanupSession(request.getZipSessionId());

            log.info("ZIP import completed. Total: {}, Success: {}, Failed: {}",
                    results.size(),
                    results.stream().filter(FileImportResponse::isSuccess).count(),
                    results.stream().filter(r -> !r.isSuccess()).count());

            return ZIPImportResponse.create(results);
        } catch (Exception e) {
            log.error("ZIP import failed: {}", e.getMessage(), e);
            return createErrorResponse("Import failed: " + e.getMessage());
        }
    }

    /**
     * Lightweight upfront validation: ".zip" extension and a plausible (or
     * absent) content type. Real structural validation happens on extraction.
     */
    private boolean isValidZIPFile(MultipartFile file) {
        String filename = file.getOriginalFilename();
        if (filename == null || !filename.toLowerCase().endsWith(".zip")) {
            return false;
        }
        String contentType = file.getContentType();
        return "application/zip".equals(contentType) ||
                "application/x-zip-compressed".equals(contentType) ||
                contentType == null;
    }

    /**
     * Stream the archive, extracting root-level .epub/.pdf entries into
     * {@code tempDir} and analyzing each one.
     * <p>
     * Extracts at most MAX_FILES_IN_ZIP + 1 entries: one past the limit so the
     * caller's size check can reject the archive (previously the loop broke
     * before adding the extra entry, making the "too many files" error
     * unreachable and silently truncating the import).
     */
    private List<FileInfoDto> extractAndAnalyzeZIP(MultipartFile zipFile, Path tempDir, String sessionId) throws IOException {
        List<FileInfoDto> files = new ArrayList<>();

        try (ZipInputStream zis = new ZipInputStream(zipFile.getInputStream())) {
            ZipEntry entry;
            while ((entry = zis.getNextEntry()) != null) {
                // Skip directories
                if (entry.isDirectory()) {
                    continue;
                }

                // Only process root-level files
                String entryName = entry.getName();
                if (entryName.contains("/") || entryName.contains("\\")) {
                    log.debug("Skipping nested file: {}", entryName);
                    continue;
                }

                // Check if it's an EPUB or PDF
                String lowerName = entryName.toLowerCase();
                if (!lowerName.endsWith(".epub") && !lowerName.endsWith(".pdf")) {
                    log.debug("Skipping non-EPUB/PDF file: {}", entryName);
                    continue;
                }

                // Zip-slip guard: never write outside the session temp directory.
                Path extractedFile = tempDir.resolve(entryName).normalize();
                if (!extractedFile.startsWith(tempDir)) {
                    log.warn("Skipping suspicious ZIP entry: {}", entryName);
                    continue;
                }

                // Extract file to temp directory
                Files.copy(zis, extractedFile);

                // Analyze the extracted file
                files.add(analyzeExtractedFile(extractedFile, entryName));
                zis.closeEntry();

                if (files.size() > MAX_FILES_IN_ZIP) {
                    log.warn("ZIP contains more than {} files, stopping extraction", MAX_FILES_IN_ZIP);
                    break;
                }
            }
        }

        return files;
    }

    /**
     * Validate a single extracted file via the matching import service and
     * build its {@link FileInfoDto}. Validation failures are recorded on the
     * DTO (not thrown) so the UI can show per-file errors.
     */
    private FileInfoDto analyzeExtractedFile(Path filePath, String fileName) {
        try {
            long fileSize = Files.size(filePath);
            String fileType;
            String extractedTitle = null;
            String extractedAuthor = null;
            boolean hasMetadata = false;

            if (fileName.toLowerCase().endsWith(".epub")) {
                fileType = "EPUB";
                // Try to extract EPUB metadata
                try {
                    // Create a temporary MultipartFile for validation
                    byte[] fileBytes = Files.readAllBytes(filePath);
                    MultipartFile tempFile = new TempMultipartFile(fileBytes, fileName, "application/epub+zip");

                    // Use EPUBImportService to extract metadata
                    // For now, we'll just validate the file
                    List<String> errors = epubImportService.validateEPUBFile(tempFile);
                    if (!errors.isEmpty()) {
                        FileInfoDto errorInfo = new FileInfoDto(fileName, fileType, fileSize);
                        errorInfo.setError(String.join(", ", errors));
                        return errorInfo;
                    }
                    hasMetadata = true;
                    // We could extract more metadata here if needed
                } catch (Exception e) {
                    log.warn("Failed to extract EPUB metadata for {}: {}", fileName, e.getMessage());
                }
            } else if (fileName.toLowerCase().endsWith(".pdf")) {
                fileType = "PDF";
                // Try to extract PDF metadata
                try {
                    byte[] fileBytes = Files.readAllBytes(filePath);
                    MultipartFile tempFile = new TempMultipartFile(fileBytes, fileName, "application/pdf");

                    // Use PDFImportService to validate
                    List<String> errors = pdfImportService.validatePDFFile(tempFile);
                    if (!errors.isEmpty()) {
                        FileInfoDto errorInfo = new FileInfoDto(fileName, fileType, fileSize);
                        errorInfo.setError(String.join(", ", errors));
                        return errorInfo;
                    }
                    hasMetadata = true;
                    // We could extract more metadata here if needed
                } catch (Exception e) {
                    log.warn("Failed to extract PDF metadata for {}: {}", fileName, e.getMessage());
                }
            } else {
                FileInfoDto errorInfo = new FileInfoDto(fileName, "UNKNOWN", fileSize);
                errorInfo.setError("Unsupported file type");
                return errorInfo;
            }

            FileInfoDto fileInfo = new FileInfoDto(fileName, fileType, fileSize);
            fileInfo.setExtractedTitle(extractedTitle);
            fileInfo.setExtractedAuthor(extractedAuthor);
            fileInfo.setHasMetadata(hasMetadata);
            return fileInfo;
        } catch (Exception e) {
            log.error("Failed to analyze file {}: {}", fileName, e.getMessage(), e);
            FileInfoDto errorInfo = new FileInfoDto(fileName, "UNKNOWN", 0L);
            errorInfo.setError("Failed to analyze file: " + e.getMessage());
            return errorInfo;
        }
    }

    /**
     * Resolve per-file metadata, falling back to the request's defaults when
     * no file-specific entry exists.
     */
    private ZIPImportRequest.FileImportMetadata getFileMetadata(ZIPImportRequest request, String fileName) {
        // Check for file-specific metadata first
        if (request.getFileMetadata() != null && request.getFileMetadata().containsKey(fileName)) {
            return request.getFileMetadata().get(fileName);
        }

        // Return default metadata
        ZIPImportRequest.FileImportMetadata metadata = new ZIPImportRequest.FileImportMetadata();
        metadata.setAuthorId(request.getDefaultAuthorId());
        metadata.setAuthorName(request.getDefaultAuthorName());
        metadata.setSeriesId(request.getDefaultSeriesId());
        metadata.setSeriesName(request.getDefaultSeriesName());
        metadata.setTags(request.getDefaultTags());
        return metadata;
    }

    /** Delegate a single extracted EPUB to {@link EPUBImportService} and adapt the response. */
    private FileImportResponse importEPUBFromSession(ZIPSession session, String fileName,
                                                     ZIPImportRequest.FileImportMetadata metadata,
                                                     ZIPImportRequest request) throws IOException {
        Path filePath = session.getTempDir().resolve(fileName);
        byte[] fileBytes = Files.readAllBytes(filePath);
        MultipartFile epubFile = new TempMultipartFile(fileBytes, fileName, "application/epub+zip");

        EPUBImportRequest epubRequest = new EPUBImportRequest();
        epubRequest.setEpubFile(epubFile);
        epubRequest.setAuthorId(metadata.getAuthorId());
        epubRequest.setAuthorName(metadata.getAuthorName());
        epubRequest.setSeriesId(metadata.getSeriesId());
        epubRequest.setSeriesName(metadata.getSeriesName());
        epubRequest.setSeriesVolume(metadata.getSeriesVolume());
        epubRequest.setTags(metadata.getTags());
        epubRequest.setCreateMissingAuthor(request.getCreateMissingAuthor());
        epubRequest.setCreateMissingSeries(request.getCreateMissingSeries());
        epubRequest.setExtractCover(true);

        EPUBImportResponse epubResponse = epubImportService.importEPUB(epubRequest);

        // Convert EPUBImportResponse to FileImportResponse
        if (epubResponse.isSuccess()) {
            FileImportResponse response = FileImportResponse.success(epubResponse.getStoryId(), epubResponse.getStoryTitle(), "EPUB");
            response.setFileName(fileName);
            response.setWordCount(epubResponse.getWordCount());
            return response;
        } else {
            return FileImportResponse.error(epubResponse.getMessage(), fileName);
        }
    }

    /** Delegate a single extracted PDF to {@link PDFImportService}. */
    private FileImportResponse importPDFFromSession(ZIPSession session, String fileName,
                                                    ZIPImportRequest.FileImportMetadata metadata,
                                                    ZIPImportRequest request) throws IOException {
        Path filePath = session.getTempDir().resolve(fileName);
        byte[] fileBytes = Files.readAllBytes(filePath);
        MultipartFile pdfFile = new TempMultipartFile(fileBytes, fileName, "application/pdf");

        PDFImportRequest pdfRequest = new PDFImportRequest();
        pdfRequest.setPdfFile(pdfFile);
        pdfRequest.setAuthorId(metadata.getAuthorId());
        pdfRequest.setAuthorName(metadata.getAuthorName());
        pdfRequest.setSeriesId(metadata.getSeriesId());
        pdfRequest.setSeriesName(metadata.getSeriesName());
        pdfRequest.setSeriesVolume(metadata.getSeriesVolume());
        pdfRequest.setTags(metadata.getTags());
        pdfRequest.setCreateMissingAuthor(request.getCreateMissingAuthor());
        pdfRequest.setCreateMissingSeries(request.getCreateMissingSeries());
        pdfRequest.setExtractImages(request.getExtractImages());

        return pdfImportService.importPDF(pdfRequest);
    }

    /** Schedule expiry cleanup for a session on the shared daemon timer. */
    private void scheduleSessionCleanup(String sessionId) {
        CLEANUP_TIMER.schedule(new TimerTask() {
            @Override
            public void run() {
                cleanupSession(sessionId);
            }
        }, ZIP_SESSION_TIMEOUT_MS);
    }

    /** Remove a session and best-effort delete its temp directory. Safe to call twice. */
    private void cleanupSession(String sessionId) {
        ZIPSession session = activeSessions.remove(sessionId);
        if (session != null) {
            try {
                deleteDirectory(session.getTempDir());
                log.info("Cleaned up ZIP session: {}", sessionId);
            } catch (Exception e) {
                log.error("Failed to cleanup ZIP session {}: {}", sessionId, e.getMessage(), e);
            }
        }
    }

    /**
     * Recursively delete a directory tree, children first.
     * The {@link Files#walk} stream is opened in try-with-resources: it holds
     * open directory handles and must be closed (previously it leaked).
     */
    private void deleteDirectory(Path directory) throws IOException {
        if (Files.exists(directory)) {
            try (Stream<Path> walk = Files.walk(directory)) {
                walk.sorted(Comparator.reverseOrder()) // Delete files before directories
                        .forEach(path -> {
                            try {
                                Files.delete(path);
                            } catch (IOException e) {
                                log.warn("Failed to delete file {}: {}", path, e.getMessage());
                            }
                        });
            }
        }
    }

    /** Build a failed ZIPImportResponse with the given message. */
    private ZIPImportResponse createErrorResponse(String message) {
        ZIPImportResponse response = new ZIPImportResponse();
        response.setSuccess(false);
        response.setMessage(message);
        return response;
    }

    // Inner classes

    /** Per-upload state: extracted files indexed by name, plus expiry bookkeeping. */
    private static class ZIPSession {
        private final String sessionId;
        private final Path tempDir;
        private final Map<String, FileInfoDto> files;
        private final long createdAt;

        public ZIPSession(String sessionId, Path tempDir, List<FileInfoDto> fileList) {
            this.sessionId = sessionId;
            this.tempDir = tempDir;
            this.files = new HashMap<>();
            for (FileInfoDto file : fileList) {
                this.files.put(file.getFileName(), file);
            }
            this.createdAt = System.currentTimeMillis();
        }

        public Path getTempDir() {
            return tempDir;
        }

        public FileInfoDto getFileInfo(String fileName) {
            return files.get(fileName);
        }

        public boolean isExpired() {
            return System.currentTimeMillis() - createdAt > ZIP_SESSION_TIMEOUT_MS;
        }
    }

    /**
     * Temporary MultipartFile implementation for extracted files
     * (in-memory byte array; used to feed extracted entries back into the
     * EPUB/PDF import services which expect MultipartFile).
     */
    private static class TempMultipartFile implements MultipartFile {
        private final byte[] data;
        private final String filename;
        private final String contentType;

        public TempMultipartFile(byte[] data, String filename, String contentType) {
            this.data = data;
            this.filename = filename;
            this.contentType = contentType;
        }

        @Override
        public String getName() {
            return "file";
        }

        @Override
        public String getOriginalFilename() {
            return filename;
        }

        @Override
        public String getContentType() {
            return contentType;
        }

        @Override
        public boolean isEmpty() {
            return data == null || data.length == 0;
        }

        @Override
        public long getSize() {
            return data != null ? data.length : 0;
        }

        @Override
        public byte[] getBytes() {
            return data;
        }

        @Override
        public InputStream getInputStream() {
            return new ByteArrayInputStream(data);
        }

        @Override
        public void transferTo(java.io.File dest) throws IOException {
            try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
                fos.write(data);
            }
        }

        @Override
        public void transferTo(java.nio.file.Path dest) throws IOException {
            Files.write(dest, data);
        }
    }
}

View File

@@ -21,8 +21,8 @@ spring:
servlet:
multipart:
max-file-size: 600MB # Increased for large backup restore (425MB+)
max-request-size: 610MB # Slightly higher to account for form data
max-file-size: 4096MB # 4GB for large backup restore
max-request-size: 4150MB # Slightly higher to account for form data
jackson:
serialization:
@@ -33,7 +33,7 @@ spring:
server:
port: 8080
tomcat:
max-http-request-size: 650MB # Tomcat HTTP request size limit (separate from multipart)
max-http-request-size: 4200MB # Tomcat HTTP request size limit (4GB + overhead)
storycove:
app:
@@ -89,6 +89,8 @@ storycove:
enable-metrics: ${SOLR_ENABLE_METRICS:true}
images:
storage-path: ${IMAGE_STORAGE_PATH:/app/images}
automatic-backup:
dir: ${AUTOMATIC_BACKUP_DIR:/app/automatic-backups}
management:
endpoints:

View File

@@ -0,0 +1,296 @@
package com.storycove.service;
import com.storycove.dto.FileImportResponse;
import com.storycove.dto.PDFImportRequest;
import com.storycove.entity.*;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.mock.web.MockMultipartFile;
import java.util.*;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
/**
* Tests for PDFImportService.
* Note: These tests mock the PDF parsing since Apache PDFBox is complex to test.
* Integration tests should be added separately to test actual PDF file parsing.
*/
@ExtendWith(MockitoExtension.class)
class PDFImportServiceTest {

    // All collaborators are mocked; only PDFImportService's own validation
    // and request-handling logic is exercised here.
    @Mock
    private StoryService storyService;
    @Mock
    private AuthorService authorService;
    @Mock
    private SeriesService seriesService;
    @Mock
    private TagService tagService;
    @Mock
    private HtmlSanitizationService sanitizationService;
    @Mock
    private ImageService imageService;
    @Mock
    private LibraryService libraryService;

    @InjectMocks
    private PDFImportService pdfImportService;

    // Shared fixtures, rebuilt before every test in setUp().
    private PDFImportRequest testRequest;
    private Story testStory;
    private Author testAuthor;
    private Series testSeries;
    private UUID storyId;

    @BeforeEach
    void setUp() {
        storyId = UUID.randomUUID();

        testStory = new Story();
        testStory.setId(storyId);
        testStory.setTitle("Test Story");
        testStory.setWordCount(1000);

        testAuthor = new Author();
        testAuthor.setId(UUID.randomUUID());
        testAuthor.setName("Test Author");

        testSeries = new Series();
        testSeries.setId(UUID.randomUUID());
        testSeries.setName("Test Series");

        // Each test populates this request with the fields it needs.
        testRequest = new PDFImportRequest();
    }

    // ========================================
    // File Validation Tests
    // ========================================

    @Test
    @DisplayName("Should reject null PDF file")
    void testNullPDFFile() {
        // Arrange
        testRequest.setPdfFile(null);
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert — rejected before any story is created
        assertFalse(response.isSuccess());
        assertEquals("PDF file is required", response.getMessage());
        verify(storyService, never()).create(any(Story.class));
    }

    @Test
    @DisplayName("Should reject empty PDF file")
    void testEmptyPDFFile() {
        // Arrange — zero-byte upload
        MockMultipartFile emptyFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf", new byte[0]
        );
        testRequest.setPdfFile(emptyFile);
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert
        assertFalse(response.isSuccess());
        assertEquals("PDF file is required", response.getMessage());
        verify(storyService, never()).create(any(Story.class));
    }

    @Test
    @DisplayName("Should reject non-PDF file by extension")
    void testInvalidFileExtension() {
        // Arrange — wrong extension and content type
        MockMultipartFile invalidFile = new MockMultipartFile(
                "file", "test.txt", "text/plain", "test content".getBytes()
        );
        testRequest.setPdfFile(invalidFile);
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("Invalid PDF file format"));
        verify(storyService, never()).create(any(Story.class));
    }

    @Test
    @DisplayName("Should reject file exceeding 300MB size limit")
    void testFileSizeExceedsLimit() {
        // Arrange — override getSize() to simulate a 301MB upload without
        // allocating 301MB of heap.
        long fileSize = 301L * 1024 * 1024; // 301 MB
        MockMultipartFile largeFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf", new byte[(int)Math.min(fileSize, 1000)]
        ) {
            @Override
            public long getSize() {
                return fileSize;
            }
        };
        testRequest.setPdfFile(largeFile);
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("Invalid PDF file format"));
        verify(storyService, never()).create(any(Story.class));
    }

    // ========================================
    // Author Handling Tests
    // ========================================

    @Test
    @DisplayName("Should require author name when not in metadata")
    void testRequiresAuthorName() {
        // Arrange - Create a minimal valid PDF (will fail parsing but test validation)
        MockMultipartFile pdfFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf",
                "%PDF-1.4\n%%EOF".getBytes()
        );
        testRequest.setPdfFile(pdfFile);
        testRequest.setAuthorName(null);
        testRequest.setAuthorId(null);
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert
        assertFalse(response.isSuccess());
        // Should fail during import because author is required
        verify(storyService, never()).create(any(Story.class));
    }

    // ========================================
    // Validation Method Tests
    // ========================================

    @Test
    @DisplayName("Should validate PDF file successfully")
    void testValidatePDFFile_Valid() {
        // Arrange — not a real PDF; only exercises the validation entry point
        MockMultipartFile pdfFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf",
                new byte[100]
        );
        // Act
        List<String> errors = pdfImportService.validatePDFFile(pdfFile);
        // Assert - Will have errors because it's not a real PDF, but tests the method exists
        assertNotNull(errors);
    }

    @Test
    @DisplayName("Should return errors for null file in validation")
    void testValidatePDFFile_Null() {
        // Act
        List<String> errors = pdfImportService.validatePDFFile(null);
        // Assert
        assertNotNull(errors);
        assertFalse(errors.isEmpty());
        assertTrue(errors.get(0).contains("required"));
    }

    @Test
    @DisplayName("Should return errors for empty file in validation")
    void testValidatePDFFile_Empty() {
        // Arrange
        MockMultipartFile emptyFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf", new byte[0]
        );
        // Act
        List<String> errors = pdfImportService.validatePDFFile(emptyFile);
        // Assert
        assertNotNull(errors);
        assertFalse(errors.isEmpty());
        assertTrue(errors.get(0).contains("required"));
    }

    @Test
    @DisplayName("Should return errors for oversized file in validation")
    void testValidatePDFFile_Oversized() {
        // Arrange — simulated 301MB size via getSize() override
        long fileSize = 301L * 1024 * 1024; // 301 MB
        MockMultipartFile largeFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf", new byte[1000]
        ) {
            @Override
            public long getSize() {
                return fileSize;
            }
        };
        // Act
        List<String> errors = pdfImportService.validatePDFFile(largeFile);
        // Assert — the limit should be surfaced in the error text
        assertNotNull(errors);
        assertFalse(errors.isEmpty());
        assertTrue(errors.stream().anyMatch(e -> e.contains("300MB")));
    }

    // ========================================
    // Integration Tests (Mocked)
    // ========================================

    @Test
    @DisplayName("Should handle extraction images flag")
    void testExtractImagesFlag() {
        // Arrange
        MockMultipartFile pdfFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf",
                "%PDF-1.4\n%%EOF".getBytes()
        );
        testRequest.setPdfFile(pdfFile);
        testRequest.setAuthorName("Test Author");
        testRequest.setExtractImages(false);
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert - Will fail parsing but tests that the flag is accepted
        assertNotNull(response);
    }

    @Test
    @DisplayName("Should accept tags in request")
    void testAcceptTags() {
        // Arrange
        MockMultipartFile pdfFile = new MockMultipartFile(
                "file", "test.pdf", "application/pdf",
                "%PDF-1.4\n%%EOF".getBytes()
        );
        testRequest.setPdfFile(pdfFile);
        testRequest.setAuthorName("Test Author");
        testRequest.setTags(Arrays.asList("tag1", "tag2"));
        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);
        // Assert - Will fail parsing but tests that tags are accepted
        assertNotNull(response);
    }
}

View File

@@ -85,7 +85,8 @@ class StoryServiceTest {
Story result = storyService.updateReadingProgress(testId, position);
assertEquals(0, result.getReadingPosition());
assertNotNull(result.getLastReadAt());
// When position is 0, lastReadAt should be reset to null so the story doesn't appear in "last read" sorting
assertNull(result.getLastReadAt());
verify(storyRepository).save(testStory);
}
@@ -111,7 +112,8 @@ class StoryServiceTest {
Story result = storyService.updateReadingProgress(testId, position);
assertNull(result.getReadingPosition());
assertNotNull(result.getLastReadAt());
// When position is null, lastReadAt should be reset to null so the story doesn't appear in "last read" sorting
assertNull(result.getLastReadAt());
verify(storyRepository).save(testStory);
}

View File

@@ -0,0 +1,310 @@
package com.storycove.service;
import com.storycove.dto.*;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.mock.web.MockMultipartFile;
import java.util.*;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.*;
import static org.mockito.Mockito.*;
/**
* Tests for ZIPImportService.
*/
@ExtendWith(MockitoExtension.class)
class ZIPImportServiceTest {
@Mock
private EPUBImportService epubImportService;
@Mock
private PDFImportService pdfImportService;
@InjectMocks
private ZIPImportService zipImportService;
private ZIPImportRequest testImportRequest;
@BeforeEach
void setUp() {
testImportRequest = new ZIPImportRequest();
}
// ========================================
// File Validation Tests
// ========================================
@Test
@DisplayName("Should reject null ZIP file")
void testNullZIPFile() {
// Act
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(null);
// Assert
assertFalse(response.isSuccess());
assertEquals("ZIP file is required", response.getMessage());
}
@Test
@DisplayName("Should reject empty ZIP file")
void testEmptyZIPFile() {
// Arrange
MockMultipartFile emptyFile = new MockMultipartFile(
"file", "test.zip", "application/zip", new byte[0]
);
// Act
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(emptyFile);
// Assert
assertFalse(response.isSuccess());
assertEquals("ZIP file is required", response.getMessage());
}
@Test
@DisplayName("Should reject non-ZIP file")
void testInvalidFileType() {
// Arrange
MockMultipartFile invalidFile = new MockMultipartFile(
"file", "test.txt", "text/plain", "test content".getBytes()
);
// Act
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(invalidFile);
// Assert
assertFalse(response.isSuccess());
assertTrue(response.getMessage().contains("Invalid ZIP file format"));
}
@Test
@DisplayName("Should reject oversized ZIP file")
void testOversizedZIPFile() {
// Arrange
long fileSize = 1025L * 1024 * 1024; // 1025 MB (> 1GB limit)
MockMultipartFile largeFile = new MockMultipartFile(
"file", "test.zip", "application/zip", new byte[1000]
) {
@Override
public long getSize() {
return fileSize;
}
};
// Act
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(largeFile);
// Assert
assertFalse(response.isSuccess());
assertTrue(response.getMessage().contains("exceeds"));
assertTrue(response.getMessage().contains("1024MB") || response.getMessage().contains("1GB"));
}
// ========================================
// Import Request Validation Tests
// ========================================
@Test
@DisplayName("Should reject import with invalid session ID")
void testInvalidSessionId() {
// Arrange
testImportRequest.setZipSessionId("invalid-session-id");
testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
// Act
ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);
// Assert
assertFalse(response.isSuccess());
assertTrue(response.getMessage().contains("Invalid") || response.getMessage().contains("expired"));
}
@Test
@DisplayName("Should reject import with no selected files")
void testNoSelectedFiles() {
// Arrange
testImportRequest.setZipSessionId("some-session-id");
testImportRequest.setSelectedFiles(Collections.emptyList());
// Act
ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);
// Assert
assertFalse(response.isSuccess());
assertTrue(response.getMessage().contains("No files selected") || response.getMessage().contains("Invalid"));
}
@Test
@DisplayName("Should reject import with null selected files")
void testNullSelectedFiles() {
// Arrange
testImportRequest.setZipSessionId("some-session-id");
testImportRequest.setSelectedFiles(null);
// Act
ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);
// Assert
assertFalse(response.isSuccess());
assertTrue(response.getMessage().contains("No files selected") || response.getMessage().contains("Invalid"));
}
// ========================================
// ZIP Analysis Tests
// ========================================
@Test
@DisplayName("Should handle corrupted ZIP file gracefully")
void testCorruptedZIPFile() {
// Arrange
MockMultipartFile corruptedFile = new MockMultipartFile(
"file", "test.zip", "application/zip",
"PK\3\4corrupted data".getBytes()
);
// Act
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(corruptedFile);
// Assert
assertFalse(response.isSuccess());
assertNotNull(response.getMessage());
}
// ========================================
// Helper Method Tests
// ========================================
@Test
@DisplayName("Should accept default metadata in import request")
void testDefaultMetadata() {
// Arrange
testImportRequest.setZipSessionId("test-session");
testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
testImportRequest.setDefaultAuthorName("Default Author");
testImportRequest.setDefaultTags(Arrays.asList("tag1", "tag2"));
// Act - will fail due to invalid session, but tests that metadata is accepted
ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);
// Assert
assertNotNull(response);
assertFalse(response.isSuccess()); // Expected to fail due to invalid session
}
@Test
@DisplayName("Should accept per-file metadata in import request")
void testPerFileMetadata() {
// Arrange
Map<String, ZIPImportRequest.FileImportMetadata> fileMetadata = new HashMap<>();
ZIPImportRequest.FileImportMetadata metadata = new ZIPImportRequest.FileImportMetadata();
metadata.setAuthorName("Specific Author");
metadata.setTags(Arrays.asList("tag1"));
fileMetadata.put("file1.epub", metadata);
testImportRequest.setZipSessionId("test-session");
testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
testImportRequest.setFileMetadata(fileMetadata);
// Act - will fail due to invalid session, but tests that metadata is accepted
ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);
// Assert
assertNotNull(response);
assertFalse(response.isSuccess()); // Expected to fail due to invalid session
}
@Test
@DisplayName("Should accept createMissing flags")
void testCreateMissingFlags() {
    // Arrange: explicitly disable auto-creation of author/series and
    // image extraction to verify the flags are accepted by the request.
    testImportRequest.setZipSessionId("test-session");
    testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
    testImportRequest.setCreateMissingAuthor(false);
    testImportRequest.setCreateMissingSeries(false);
    testImportRequest.setExtractImages(false);

    // Act - will fail due to invalid session, but tests that flags are accepted
    ZIPImportResponse outcome = zipImportService.importFromZIP(testImportRequest);

    // Assert
    assertNotNull(outcome);
}
// ========================================
// Response Object Tests
// ========================================
@Test
@DisplayName("ZIPImportResponse should calculate statistics correctly")
void testZIPImportResponseStatistics() {
    // Arrange: a mixed batch — two successful imports plus one failure.
    List<FileImportResponse> outcomes = new ArrayList<>();
    outcomes.add(FileImportResponse.success(UUID.randomUUID(), "Story 1", "EPUB"));
    outcomes.add(FileImportResponse.success(UUID.randomUUID(), "Story 2", "PDF"));
    outcomes.add(FileImportResponse.error("Import failed", "story3.epub"));

    // Act
    ZIPImportResponse aggregated = ZIPImportResponse.create(outcomes);

    // Assert: counts reflect the mix, and a partial success still counts as success.
    assertNotNull(aggregated);
    assertEquals(3, aggregated.getTotalFiles());
    assertEquals(2, aggregated.getSuccessfulImports());
    assertEquals(1, aggregated.getFailedImports());
    assertTrue(aggregated.isSuccess()); // Partial success
    assertTrue(aggregated.getMessage().contains("2 imported"));
}
@Test
@DisplayName("ZIPImportResponse should handle all failures")
void testZIPImportResponseAllFailures() {
    // Arrange: every file in the batch failed to import.
    List<FileImportResponse> outcomes = new ArrayList<>();
    outcomes.add(FileImportResponse.error("Error 1", "file1.epub"));
    outcomes.add(FileImportResponse.error("Error 2", "file2.pdf"));

    // Act
    ZIPImportResponse aggregated = ZIPImportResponse.create(outcomes);

    // Assert: zero successes means the overall response is a failure.
    assertNotNull(aggregated);
    assertEquals(2, aggregated.getTotalFiles());
    assertEquals(0, aggregated.getSuccessfulImports());
    assertEquals(2, aggregated.getFailedImports());
    assertFalse(aggregated.isSuccess());
    assertTrue(aggregated.getMessage().contains("failed"));
}
@Test
@DisplayName("ZIPImportResponse should handle all successes")
void testZIPImportResponseAllSuccesses() {
    // Arrange: every file in the batch imported cleanly.
    List<FileImportResponse> outcomes = new ArrayList<>();
    outcomes.add(FileImportResponse.success(UUID.randomUUID(), "Story 1", "EPUB"));
    outcomes.add(FileImportResponse.success(UUID.randomUUID(), "Story 2", "PDF"));

    // Act
    ZIPImportResponse aggregated = ZIPImportResponse.create(outcomes);

    // Assert: full success is reported with the dedicated message.
    assertNotNull(aggregated);
    assertEquals(2, aggregated.getTotalFiles());
    assertEquals(2, aggregated.getSuccessfulImports());
    assertEquals(0, aggregated.getFailedImports());
    assertTrue(aggregated.isSuccess());
    assertTrue(aggregated.getMessage().contains("All files imported successfully"));
}
}

View File

@@ -55,6 +55,11 @@ if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
exit 1
fi
# Apply database migrations
echo -e "${YELLOW}🗄️ Applying database migrations...${NC}"
docker-compose run --rm migrations
echo -e "${GREEN}✅ Database migrations applied${NC}"
# Check if Solr is ready
echo -e "${YELLOW}🔍 Checking Solr health...${NC}"
RETRY_COUNT=0

View File

@@ -44,9 +44,10 @@ services:
volumes:
- images_data:/app/images
- library_config:/app/config
- automatic_backups:/app/automatic-backups
depends_on:
postgres:
condition: service_started
condition: service_healthy
solr:
condition: service_started
networks:
@@ -65,6 +66,11 @@ services:
- postgres_data:/var/lib/postgresql/data
networks:
- storycove-network
healthcheck:
test: ["CMD-SHELL", "pg_isready -U storycove -d storycove"]
interval: 5s
timeout: 5s
retries: 5
solr:
@@ -101,6 +107,7 @@ volumes:
solr_data:
images_data:
library_config:
automatic_backups:
configs:
nginx_config:
@@ -117,7 +124,7 @@ configs:
}
server {
listen 80;
client_max_body_size 600M;
client_max_body_size 2048M;
location / {
proxy_pass http://frontend;
proxy_http_version 1.1;
@@ -138,8 +145,8 @@ configs:
proxy_connect_timeout 900s;
proxy_send_timeout 900s;
proxy_read_timeout 900s;
# Large upload settings
client_max_body_size 600M;
# Large upload settings (2GB for backups)
client_max_body_size 2048M;
proxy_request_buffering off;
proxy_max_temp_file_size 0;
}

View File

@@ -0,0 +1,829 @@
'use client';
import { useState } from 'react';
import { useRouter } from 'next/navigation';
import { DocumentArrowUpIcon, CheckCircleIcon, XCircleIcon } from '@heroicons/react/24/outline';
import Button from '@/components/ui/Button';
import { Input } from '@/components/ui/Input';
import ImportLayout from '@/components/layout/ImportLayout';
import AuthorSelector from '@/components/stories/AuthorSelector';
import SeriesSelector from '@/components/stories/SeriesSelector';
// Import categories supported by this page; null marks an unsupported extension.
type FileType = 'epub' | 'pdf' | 'zip' | null;

// Result of importing a single EPUB/PDF (also used per-entry in ZIP results).
interface ImportResponse {
  success: boolean;
  message: string;
  storyId?: string;         // present on success; used to link to the new story
  storyTitle?: string;
  fileName?: string;
  fileType?: string;
  wordCount?: number;
  extractedImages?: number; // number of images pulled out of the file
  warnings?: string[];
  errors?: string[];
}

// Server response for the ZIP analysis step (lists importable entries).
interface ZIPAnalysisResponse {
  success: boolean;
  message: string;
  zipFileName?: string;
  totalFiles?: number;      // all entries found in the archive
  validFiles?: number;      // entries that are importable EPUB/PDF files
  files?: FileInfo[];
  warnings?: string[];      // NOTE(review): also carries a "Session ID: …" token parsed by analyzeZipFile — confirm with backend contract
}

// One entry inside an analyzed ZIP archive.
interface FileInfo {
  fileName: string;
  fileType: string;
  fileSize: number;         // size in bytes
  extractedTitle?: string;  // title parsed from the file's own metadata, if any
  extractedAuthor?: string;
  hasMetadata: boolean;
  error?: string;           // set when this entry cannot be imported
}

// Aggregated result of importing the selected ZIP entries.
interface ZIPImportResponse {
  success: boolean;
  message: string;
  totalFiles: number;
  successfulImports: number;
  failedImports: number;
  results: ImportResponse[]; // per-file outcomes
  warnings?: string[];
}
export default function FileImportPage() {
const router = useRouter();
const [selectedFile, setSelectedFile] = useState<File | null>(null);
const [fileType, setFileType] = useState<FileType>(null);
const [isLoading, setIsLoading] = useState(false);
const [isValidating, setIsValidating] = useState(false);
const [validationResult, setValidationResult] = useState<any>(null);
const [importResult, setImportResult] = useState<ImportResponse | null>(null);
const [error, setError] = useState<string | null>(null);
// ZIP-specific state
const [zipAnalysis, setZipAnalysis] = useState<ZIPAnalysisResponse | null>(null);
const [zipSessionId, setZipSessionId] = useState<string | null>(null);
const [selectedZipFiles, setSelectedZipFiles] = useState<Set<string>>(new Set());
const [fileMetadata, setFileMetadata] = useState<Map<string, any>>(new Map());
const [zipImportResult, setZipImportResult] = useState<ZIPImportResponse | null>(null);
// Import options
const [authorName, setAuthorName] = useState<string>('');
const [authorId, setAuthorId] = useState<string | undefined>(undefined);
const [seriesName, setSeriesName] = useState<string>('');
const [seriesId, setSeriesId] = useState<string | undefined>(undefined);
const [seriesVolume, setSeriesVolume] = useState<string>('');
const [tags, setTags] = useState<string>('');
const [createMissingAuthor, setCreateMissingAuthor] = useState(true);
const [createMissingSeries, setCreateMissingSeries] = useState(true);
const [extractImages, setExtractImages] = useState(true);
const [preserveReadingPosition, setPreserveReadingPosition] = useState(true);
// Map a selected file to a supported import type by extension (case-insensitive);
// returns null for anything this page cannot import.
const detectFileType = (file: File): FileType => {
  const lowered = file.name.toLowerCase();
  if (lowered.endsWith('.epub')) return 'epub';
  if (lowered.endsWith('.pdf')) return 'pdf';
  return lowered.endsWith('.zip') ? 'zip' : null;
};
// Handles a new selection in the <input type="file"> element: clears all
// state left from a previous attempt, detects the file type from its
// extension, then routes ZIP files to server-side analysis and EPUB/PDF
// files to validation.
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
  const file = e.target.files?.[0];
  if (file) {
    setSelectedFile(file);
    // Reset every result/error from a prior selection before processing.
    setValidationResult(null);
    setImportResult(null);
    setZipAnalysis(null);
    setZipSessionId(null);
    setSelectedZipFiles(new Set());
    setZipImportResult(null);
    setError(null);
    const detectedType = detectFileType(file);
    setFileType(detectedType);
    if (!detectedType) {
      setError('Unsupported file type. Please select an EPUB, PDF, or ZIP file.');
      return;
    }
    if (detectedType === 'zip') {
      // ZIPs are analyzed on the server to enumerate their contents.
      await analyzeZipFile(file);
    } else {
      await validateFile(file, detectedType);
    }
  }
};
// Validates an EPUB or PDF against the matching server-side endpoint before
// import. ZIP files are excluded here (they use analyzeZipFile instead).
// Stores the validation result on success and a user-facing error otherwise.
const validateFile = async (file: File, type: FileType) => {
  if (type === 'zip') return; // ZIP has its own analysis flow
  setIsValidating(true);
  try {
    const token = localStorage.getItem('auth-token');
    const formData = new FormData();
    formData.append('file', file);
    // Endpoint differs per type; both accept multipart form data.
    const endpoint = type === 'epub' ? '/api/stories/epub/validate' : '/api/stories/pdf/validate';
    const response = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Authorization': token ? `Bearer ${token}` : '',
      },
      body: formData,
    });
    if (response.ok) {
      const result = await response.json();
      setValidationResult(result);
      if (!result.valid) {
        setError(`${type?.toUpperCase() || 'File'} validation failed: ` + result.errors.join(', '));
      }
    } else if (response.status === 401 || response.status === 403) {
      setError('Authentication required. Please log in.');
    } else {
      setError(`Failed to validate ${type?.toUpperCase() || 'file'}`);
    }
  } catch (err) {
    setError(`Error validating ${type?.toUpperCase() || 'file'}: ` + (err as Error).message);
  } finally {
    setIsValidating(false);
  }
};
// Uploads a ZIP to the server for analysis. On success the response lists
// the importable EPUB/PDF entries. The session id required by the later
// import call is carried inside the warnings array as a "Session ID: <uuid>"
// string and parsed out here.
// NOTE(review): transporting the session id via a warning string is a fragile
// contract — confirm against the backend API and consider a dedicated field.
const analyzeZipFile = async (file: File) => {
  setIsLoading(true);
  try {
    const token = localStorage.getItem('auth-token');
    const formData = new FormData();
    formData.append('file', file);
    const response = await fetch('/api/stories/zip/analyze', {
      method: 'POST',
      headers: {
        'Authorization': token ? `Bearer ${token}` : '',
      },
      body: formData,
    });
    if (response.ok) {
      const result: ZIPAnalysisResponse = await response.json();
      setZipAnalysis(result);
      if (result.success && result.warnings && result.warnings.length > 0) {
        // Extract session ID from warnings
        const sessionWarning = result.warnings.find(w => w.includes('Session ID:'));
        if (sessionWarning) {
          const match = sessionWarning.match(/Session ID: ([a-f0-9-]+)/);
          if (match) {
            setZipSessionId(match[1]);
          }
        }
      }
      if (!result.success) {
        setError(result.message);
      } else if (result.files && result.files.length === 0) {
        setError('No valid EPUB or PDF files found in ZIP');
      }
    } else if (response.status === 401 || response.status === 403) {
      setError('Authentication required. Please log in.');
    } else {
      setError('Failed to analyze ZIP file');
    }
  } catch (err) {
    setError('Error analyzing ZIP file: ' + (err as Error).message);
  } finally {
    setIsLoading(false);
  }
};
// Form submit handler. ZIP imports are delegated to handleZipImport; for
// EPUB/PDF it enforces the validation result (and the PDF author
// requirement), builds a multipart request from the chosen metadata, and
// posts it to the type-specific import endpoint.
const handleSubmit = async (e: React.FormEvent) => {
  e.preventDefault();
  if (!selectedFile) {
    setError('Please select a file');
    return;
  }
  if (fileType === 'zip') {
    await handleZipImport();
    return;
  }
  if (validationResult && !validationResult.valid) {
    setError(`Cannot import invalid ${fileType?.toUpperCase()} file`);
    return;
  }
  // Check PDF requires author
  if (fileType === 'pdf' && !authorName.trim()) {
    setError('PDF import requires an author name. Please provide an author name or ensure the PDF has author metadata.');
    return;
  }
  setIsLoading(true);
  setError(null);
  try {
    const token = localStorage.getItem('auth-token');
    const formData = new FormData();
    formData.append('file', selectedFile);
    // Prefer a resolved id over free-text for author and series.
    if (authorId) {
      formData.append('authorId', authorId);
    } else if (authorName) {
      formData.append('authorName', authorName);
    }
    if (seriesId) {
      formData.append('seriesId', seriesId);
    } else if (seriesName) {
      formData.append('seriesName', seriesName);
    }
    if (seriesVolume) formData.append('seriesVolume', seriesVolume);
    if (tags) {
      // Tags arrive as one comma-separated string; send each as its own field.
      const tagList = tags.split(',').map(t => t.trim()).filter(t => t.length > 0);
      tagList.forEach(tag => formData.append('tags', tag));
    }
    formData.append('createMissingAuthor', createMissingAuthor.toString());
    formData.append('createMissingSeries', createMissingSeries.toString());
    // Type-specific options.
    if (fileType === 'epub') {
      formData.append('preserveReadingPosition', preserveReadingPosition.toString());
    } else if (fileType === 'pdf') {
      formData.append('extractImages', extractImages.toString());
    }
    const endpoint = fileType === 'epub' ? '/api/stories/epub/import' : '/api/stories/pdf/import';
    const response = await fetch(endpoint, {
      method: 'POST',
      headers: {
        'Authorization': token ? `Bearer ${token}` : '',
      },
      body: formData,
    });
    const result = await response.json();
    if (response.ok && result.success) {
      setImportResult(result);
    } else if (response.status === 401 || response.status === 403) {
      setError('Authentication required. Please log in.');
    } else {
      setError(result.message || `Failed to import ${fileType?.toUpperCase()}`);
    }
  } catch (err) {
    setError(`Error importing ${fileType?.toUpperCase()}: ` + (err as Error).message);
  } finally {
    setIsLoading(false);
  }
};
// Imports the selected entries from a previously analyzed ZIP. Requires the
// session id captured during analysis; posts the default metadata plus any
// per-file overrides as JSON to the ZIP import endpoint.
const handleZipImport = async () => {
  if (!zipSessionId) {
    setError('ZIP session expired. Please re-upload the ZIP file.');
    return;
  }
  if (selectedZipFiles.size === 0) {
    setError('Please select at least one file to import');
    return;
  }
  setIsLoading(true);
  setError(null);
  try {
    const token = localStorage.getItem('auth-token');
    const requestBody: any = {
      zipSessionId: zipSessionId,
      selectedFiles: Array.from(selectedZipFiles),
      defaultAuthorId: authorId || undefined,
      defaultAuthorName: authorName || undefined,
      defaultSeriesId: seriesId || undefined,
      defaultSeriesName: seriesName || undefined,
      defaultTags: tags ? tags.split(',').map(t => t.trim()).filter(t => t.length > 0) : undefined,
      createMissingAuthor,
      createMissingSeries,
      extractImages,
    };
    // Add per-file metadata if any
    if (fileMetadata.size > 0) {
      // Convert the Map to a plain object so JSON.stringify serializes it.
      const metadata: any = {};
      fileMetadata.forEach((value, key) => {
        metadata[key] = value;
      });
      requestBody.fileMetadata = metadata;
    }
    const response = await fetch('/api/stories/zip/import', {
      method: 'POST',
      headers: {
        'Authorization': token ? `Bearer ${token}` : '',
        'Content-Type': 'application/json',
      },
      body: JSON.stringify(requestBody),
    });
    const result: ZIPImportResponse = await response.json();
    if (response.ok) {
      setZipImportResult(result);
    } else if (response.status === 401 || response.status === 403) {
      setError('Authentication required. Please log in.');
    } else {
      setError(result.message || 'Failed to import files from ZIP');
    }
  } catch (err) {
    setError('Error importing from ZIP: ' + (err as Error).message);
  } finally {
    setIsLoading(false);
  }
};
// Toggle one ZIP entry's membership in the selection set. A fresh Set is
// created so React sees a new reference and re-renders.
const toggleZipFileSelection = (fileName: string) => {
  const updated = new Set(selectedZipFiles);
  // Set.delete returns false when the entry was absent — add it in that case.
  if (!updated.delete(fileName)) {
    updated.add(fileName);
  }
  setSelectedZipFiles(updated);
};
// Select every analyzed ZIP entry that did not fail analysis.
const selectAllZipFiles = () => {
  if (!zipAnalysis?.files) return;
  const importable = zipAnalysis.files
    .filter((entry) => !entry.error)
    .map((entry) => entry.fileName);
  setSelectedZipFiles(new Set(importable));
};
// Empty the ZIP file selection entirely.
const deselectAllZipFiles = () => {
  setSelectedZipFiles(new Set<string>());
};
// Restore the page to its pristine state so another import can begin.
const resetForm = () => {
  // File selection and detected type.
  setSelectedFile(null);
  setFileType(null);
  // Validation / import / ZIP results and any error banner.
  setValidationResult(null);
  setImportResult(null);
  setZipAnalysis(null);
  setZipSessionId(null);
  setSelectedZipFiles(new Set());
  setFileMetadata(new Map());
  setZipImportResult(null);
  setError(null);
  // User-entered metadata fields.
  setAuthorName('');
  setAuthorId(undefined);
  setSeriesName('');
  setSeriesId(undefined);
  setSeriesVolume('');
  setTags('');
};
// Keep the author free-text value and the resolved author id in sync with
// the AuthorSelector; id is undefined when the name matches no known author.
const handleAuthorChange = (name: string, id?: string) => {
  setAuthorId(id);
  setAuthorName(name);
};
// Keep the series free-text value and the resolved series id in sync with
// the SeriesSelector; id is undefined when the name matches no known series.
const handleSeriesChange = (name: string, id?: string) => {
  setSeriesId(id);
  setSeriesName(name);
};
// Show success screen for single file import
if (importResult?.success) {
return (
<ImportLayout
title="Import Successful"
description="Your file has been successfully imported into StoryCove"
>
<div className="space-y-6">
<div className="bg-green-50 dark:bg-green-900/20 border border-green-200 dark:border-green-800 rounded-lg p-6">
<h2 className="text-xl font-semibold text-green-600 dark:text-green-400 mb-2">Import Completed</h2>
<p className="theme-text">
Your {importResult.fileType || fileType?.toUpperCase()} file has been successfully imported.
</p>
</div>
<div className="theme-card theme-shadow rounded-lg p-6">
<div className="space-y-4">
<div>
<span className="font-semibold theme-header">Story Title:</span>
<p className="theme-text">{importResult.storyTitle}</p>
</div>
{importResult.wordCount && (
<div>
<span className="font-semibold theme-header">Word Count:</span>
<p className="theme-text">{importResult.wordCount.toLocaleString()} words</p>
</div>
)}
{importResult.extractedImages !== undefined && importResult.extractedImages > 0 && (
<div>
<span className="font-semibold theme-header">Extracted Images:</span>
<p className="theme-text">{importResult.extractedImages}</p>
</div>
)}
{importResult.warnings && importResult.warnings.length > 0 && (
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
<strong className="text-yellow-800 dark:text-yellow-200">Warnings:</strong>
<ul className="list-disc list-inside mt-2 text-yellow-700 dark:text-yellow-300">
{importResult.warnings.map((warning, index) => (
<li key={index}>{warning}</li>
))}
</ul>
</div>
)}
<div className="flex gap-4 mt-6">
<Button
onClick={() => router.push(`/stories/${importResult.storyId}`)}
>
View Story
</Button>
<Button
onClick={resetForm}
variant="secondary"
>
Import Another File
</Button>
</div>
</div>
</div>
</div>
</ImportLayout>
);
}
// Show success screen for ZIP import
if (zipImportResult) {
return (
<ImportLayout
title="ZIP Import Complete"
description="Import results from your ZIP file"
>
<div className="space-y-6">
<div className={`border rounded-lg p-6 ${
zipImportResult.failedImports === 0
? 'bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800'
: 'bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800'
}`}>
<h2 className={`text-xl font-semibold mb-2 ${
zipImportResult.failedImports === 0
? 'text-green-600 dark:text-green-400'
: 'text-yellow-600 dark:text-yellow-400'
}`}>
{zipImportResult.message}
</h2>
<p className="theme-text">
{zipImportResult.successfulImports} of {zipImportResult.totalFiles} files imported successfully
</p>
</div>
<div className="theme-card theme-shadow rounded-lg p-6">
<h3 className="text-lg font-semibold theme-header mb-4">Import Results</h3>
<div className="space-y-3">
{zipImportResult.results.map((result, index) => (
<div key={index} className={`p-4 rounded-lg border ${
result.success
? 'bg-green-50 dark:bg-green-900/10 border-green-200 dark:border-green-800'
: 'bg-red-50 dark:bg-red-900/10 border-red-200 dark:border-red-800'
}`}>
<div className="flex items-start gap-3">
{result.success ? (
<CheckCircleIcon className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0 mt-0.5" />
) : (
<XCircleIcon className="h-5 w-5 text-red-600 dark:text-red-400 flex-shrink-0 mt-0.5" />
)}
<div className="flex-1">
<p className="font-medium theme-header">
{result.fileName || result.storyTitle || 'Unknown file'}
</p>
{result.success && result.storyTitle && (
<p className="text-sm theme-text">
Imported as: {result.storyTitle}
{result.storyId && (
<button
onClick={() => router.push(`/stories/${result.storyId}`)}
className="ml-2 text-xs text-blue-600 dark:text-blue-400 hover:underline"
>
View
</button>
)}
</p>
)}
{!result.success && (
<p className="text-sm text-red-600 dark:text-red-400">{result.message}</p>
)}
</div>
</div>
</div>
))}
</div>
<div className="flex gap-4 mt-6">
<Button
onClick={() => router.push('/library')}
>
Go to Library
</Button>
<Button
onClick={resetForm}
variant="secondary"
>
Import Another File
</Button>
</div>
</div>
</div>
</ImportLayout>
);
}
return (
<ImportLayout
title="Import from File"
description="Upload an EPUB, PDF, or ZIP file to import stories into your library"
>
{error && (
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4 mb-6">
<p className="text-red-800 dark:text-red-200">{error}</p>
</div>
)}
<form onSubmit={handleSubmit} className="space-y-6">
{/* File Upload */}
<div className="theme-card theme-shadow rounded-lg p-6">
<div className="mb-4">
<h3 className="text-lg font-semibold theme-header mb-2">Select File</h3>
<p className="theme-text">
Choose an EPUB, PDF, or ZIP file from your device to import.
</p>
</div>
<div className="space-y-4">
<div>
<label htmlFor="import-file" className="block text-sm font-medium theme-header mb-1">
File (EPUB, PDF, or ZIP)
</label>
<Input
id="import-file"
type="file"
accept=".epub,.pdf,.zip,application/epub+zip,application/pdf,application/zip"
onChange={handleFileChange}
disabled={isLoading || isValidating}
/>
</div>
{selectedFile && (
<div className="flex items-center gap-2">
<DocumentArrowUpIcon className="h-5 w-5 theme-text" />
<span className="text-sm theme-text">
{selectedFile.name} ({(selectedFile.size / 1024 / 1024).toFixed(2)} MB)
{fileType && <span className="ml-2 inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-blue-100 dark:bg-blue-900/20 text-blue-800 dark:text-blue-200">
{fileType.toUpperCase()}
</span>}
</span>
</div>
)}
{isValidating && (
<div className="text-sm theme-accent">
Validating file...
</div>
)}
{validationResult && fileType !== 'zip' && (
<div className="text-sm">
{validationResult.valid ? (
<span className="inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-green-100 dark:bg-green-900/20 text-green-800 dark:text-green-200">
Valid {fileType?.toUpperCase()}
</span>
) : (
<span className="inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-red-100 dark:bg-red-900/20 text-red-800 dark:text-red-200">
Invalid {fileType?.toUpperCase()}
</span>
)}
</div>
)}
</div>
</div>
{/* ZIP File Selection */}
{fileType === 'zip' && zipAnalysis?.success && zipAnalysis.files && (
<div className="theme-card theme-shadow rounded-lg p-6">
<div className="mb-4 flex items-center justify-between">
<div>
<h3 className="text-lg font-semibold theme-header mb-2">Select Files to Import</h3>
<p className="theme-text">
{zipAnalysis.validFiles} valid files found in ZIP ({zipAnalysis.totalFiles} total)
</p>
</div>
<div className="flex gap-2">
<Button
type="button"
variant="secondary"
size="sm"
onClick={selectAllZipFiles}
>
Select All
</Button>
<Button
type="button"
variant="ghost"
size="sm"
onClick={deselectAllZipFiles}
>
Deselect All
</Button>
</div>
</div>
<div className="space-y-2 max-h-96 overflow-y-auto">
{zipAnalysis.files.map((file, index) => (
<div
key={index}
className={`p-3 rounded-lg border ${
file.error
? 'bg-red-50 dark:bg-red-900/10 border-red-200 dark:border-red-800 opacity-50'
: selectedZipFiles.has(file.fileName)
? 'bg-blue-50 dark:bg-blue-900/10 border-blue-300 dark:border-blue-700'
: 'theme-card border-gray-200 dark:border-gray-700'
}`}
>
<div className="flex items-start gap-3">
{!file.error && (
<input
type="checkbox"
checked={selectedZipFiles.has(file.fileName)}
onChange={() => toggleZipFileSelection(file.fileName)}
className="mt-1"
/>
)}
<div className="flex-1">
<p className="font-medium theme-header">{file.fileName}</p>
<p className="text-xs theme-text mt-1">
{file.fileType} {(file.fileSize / 1024).toFixed(2)} KB
{file.extractedTitle && `${file.extractedTitle}`}
</p>
{file.error && (
<p className="text-xs text-red-600 dark:text-red-400 mt-1">{file.error}</p>
)}
</div>
</div>
</div>
))}
</div>
</div>
)}
{/* Import Options - Show for all file types */}
{fileType && (!zipAnalysis || (zipAnalysis && selectedZipFiles.size > 0)) && (
<div className="theme-card theme-shadow rounded-lg p-6">
<div className="mb-4">
<h3 className="text-lg font-semibold theme-header mb-2">Import Options</h3>
<p className="theme-text">
Configure how the {fileType === 'zip' ? 'files' : 'file'} should be imported.
{fileType === 'zip' && ' These settings apply to all selected files.'}
</p>
</div>
<div className="space-y-4">
<AuthorSelector
value={authorName}
onChange={handleAuthorChange}
placeholder={fileType === 'epub' ? 'Leave empty to use file metadata' : 'Required for PDF import'}
required={fileType === 'pdf'}
label={`Author${fileType === 'pdf' ? ' *' : ''}${fileType === 'zip' ? ' (Default)' : ''}`}
error={fileType === 'pdf' && !authorName ? 'PDF import requires an author name. Select an existing author or enter a new one.' : undefined}
/>
<SeriesSelector
value={seriesName}
onChange={handleSeriesChange}
placeholder="Optional: Add to a series"
label={`Series${fileType === 'zip' ? ' (Default)' : ''}`}
authorId={authorId}
/>
{seriesName && (
<div>
<label htmlFor="series-volume" className="block text-sm font-medium theme-header mb-1">Series Volume</label>
<Input
id="series-volume"
type="number"
value={seriesVolume}
onChange={(e) => setSeriesVolume(e.target.value)}
placeholder="Volume number in series"
/>
</div>
)}
<div>
<label htmlFor="tags" className="block text-sm font-medium theme-header mb-1">
Tags {fileType === 'zip' && '(Default)'}
</label>
<Input
id="tags"
value={tags}
onChange={(e) => setTags(e.target.value)}
placeholder="Comma-separated tags (e.g., fantasy, adventure, romance)"
/>
</div>
<div className="space-y-3">
{fileType === 'epub' && (
<div className="flex items-center">
<input
type="checkbox"
id="preserve-reading-position"
checked={preserveReadingPosition}
onChange={(e) => setPreserveReadingPosition(e.target.checked)}
className="mr-2"
/>
<label htmlFor="preserve-reading-position" className="text-sm theme-text">
Preserve reading position from EPUB metadata
</label>
</div>
)}
{(fileType === 'pdf' || fileType === 'zip') && (
<div className="flex items-center">
<input
type="checkbox"
id="extract-images"
checked={extractImages}
onChange={(e) => setExtractImages(e.target.checked)}
className="mr-2"
/>
<label htmlFor="extract-images" className="text-sm theme-text">
Extract and store embedded images from PDFs
</label>
</div>
)}
<div className="flex items-center">
<input
type="checkbox"
id="create-missing-author"
checked={createMissingAuthor}
onChange={(e) => setCreateMissingAuthor(e.target.checked)}
className="mr-2"
/>
<label htmlFor="create-missing-author" className="text-sm theme-text">
Create author if not found
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
id="create-missing-series"
checked={createMissingSeries}
onChange={(e) => setCreateMissingSeries(e.target.checked)}
className="mr-2"
/>
<label htmlFor="create-missing-series" className="text-sm theme-text">
Create series if not found
</label>
</div>
</div>
</div>
</div>
)}
{/* Submit Button */}
{fileType && fileType !== 'zip' && (
<div className="flex justify-end">
<Button
type="submit"
disabled={!selectedFile || isLoading || isValidating || (validationResult && !validationResult.valid)}
loading={isLoading}
>
{isLoading ? 'Importing...' : `Import ${fileType.toUpperCase()}`}
</Button>
</div>
)}
{fileType === 'zip' && zipAnalysis?.success && (
<div className="flex justify-end">
<Button
type="submit"
disabled={selectedZipFiles.size === 0 || isLoading}
loading={isLoading}
>
{isLoading ? 'Importing...' : `Import ${selectedZipFiles.size} File${selectedZipFiles.size !== 1 ? 's' : ''}`}
</Button>
</div>
)}
</form>
</ImportLayout>
);
}

View File

@@ -13,6 +13,7 @@ import SidebarLayout from '../../components/library/SidebarLayout';
import ToolbarLayout from '../../components/library/ToolbarLayout';
import MinimalLayout from '../../components/library/MinimalLayout';
import { useLibraryLayout } from '../../hooks/useLibraryLayout';
import { useLibraryFilters, clearLibraryFilters } from '../../hooks/useLibraryFilters';
type ViewMode = 'grid' | 'list';
type SortOption = 'createdAt' | 'title' | 'authorName' | 'rating' | 'wordCount' | 'lastReadAt';
@@ -26,17 +27,21 @@ export default function LibraryContent() {
const [loading, setLoading] = useState(false);
const [searchLoading, setSearchLoading] = useState(false);
const [randomLoading, setRandomLoading] = useState(false);
const [searchQuery, setSearchQuery] = useState('');
const [selectedTags, setSelectedTags] = useState<string[]>([]);
const [viewMode, setViewMode] = useState<ViewMode>('list');
const [sortOption, setSortOption] = useState<SortOption>('lastReadAt');
const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>('desc');
// Persisted filter state (survives navigation within session)
const [searchQuery, setSearchQuery] = useLibraryFilters<string>('searchQuery', '');
const [selectedTags, setSelectedTags] = useLibraryFilters<string[]>('selectedTags', []);
const [viewMode, setViewMode] = useLibraryFilters<ViewMode>('viewMode', 'list');
const [sortOption, setSortOption] = useLibraryFilters<SortOption>('sortOption', 'lastReadAt');
const [sortDirection, setSortDirection] = useLibraryFilters<'asc' | 'desc'>('sortDirection', 'desc');
const [advancedFilters, setAdvancedFilters] = useLibraryFilters<AdvancedFilters>('advancedFilters', {});
// Non-persisted state (resets on navigation)
const [page, setPage] = useState(0);
const [totalPages, setTotalPages] = useState(1);
const [totalElements, setTotalElements] = useState(0);
const [refreshTrigger, setRefreshTrigger] = useState(0);
const [urlParamsProcessed, setUrlParamsProcessed] = useState(false);
const [advancedFilters, setAdvancedFilters] = useState<AdvancedFilters>({});
// Initialize filters from URL parameters
useEffect(() => {
@@ -209,11 +214,15 @@ export default function LibraryContent() {
}
};
const clearFilters = () => {
const handleClearFilters = () => {
// Clear state
setSearchQuery('');
setSelectedTags([]);
setAdvancedFilters({});
setPage(0);
// Clear sessionStorage
clearLibraryFilters();
// Trigger refresh
setRefreshTrigger(prev => prev + 1);
};
@@ -266,7 +275,7 @@ export default function LibraryContent() {
onSortDirectionToggle: handleSortDirectionToggle,
onAdvancedFiltersChange: handleAdvancedFiltersChange,
onRandomStory: handleRandomStory,
onClearFilters: clearFilters,
onClearFilters: handleClearFilters,
};
const renderContent = () => {
@@ -280,7 +289,7 @@ export default function LibraryContent() {
}
</p>
{searchQuery || selectedTags.length > 0 || Object.values(advancedFilters).some(v => v !== undefined && v !== '' && v !== 'all' && v !== false) ? (
<Button variant="ghost" onClick={clearFilters}>
<Button variant="ghost" onClick={handleClearFilters}>
Clear Filters
</Button>
) : (

View File

@@ -120,26 +120,27 @@ export default function TagMaintenancePage() {
const handleDeleteSelected = async () => {
if (selectedTagIds.size === 0) return;
const confirmation = confirm(
`Are you sure you want to delete ${selectedTagIds.size} selected tag(s)? This action cannot be undone.`
);
if (!confirmation) return;
try {
const deletePromises = Array.from(selectedTagIds).map(tagId =>
const deletePromises = Array.from(selectedTagIds).map(tagId =>
tagApi.deleteTag(tagId)
);
await Promise.all(deletePromises);
// Reload tags and reset selection
await loadTags();
setSelectedTagIds(new Set());
} catch (error) {
} catch (error: any) {
console.error('Failed to delete tags:', error);
alert('Failed to delete some tags. Please try again.');
const errorMessage = error.response?.data?.error || error.message || 'Failed to delete some tags. Please try again.';
alert(errorMessage);
}
};

View File

@@ -0,0 +1,491 @@
'use client';
import { useState, useEffect } from 'react';
import { useRouter } from 'next/navigation';
import AppLayout from '@/components/layout/AppLayout';
import { statisticsApi, getCurrentLibraryId } from '@/lib/api';
import {
LibraryOverviewStats,
TopTagsStats,
TopAuthorsStats,
RatingStats,
SourceDomainStats,
ReadingProgressStats,
ReadingActivityStats
} from '@/types/api';
function StatisticsContent() {
const router = useRouter();
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
// Statistics state
const [overviewStats, setOverviewStats] = useState<LibraryOverviewStats | null>(null);
const [topTags, setTopTags] = useState<TopTagsStats | null>(null);
const [topAuthors, setTopAuthors] = useState<TopAuthorsStats | null>(null);
const [ratingStats, setRatingStats] = useState<RatingStats | null>(null);
const [sourceDomains, setSourceDomains] = useState<SourceDomainStats | null>(null);
const [readingProgress, setReadingProgress] = useState<ReadingProgressStats | null>(null);
const [readingActivity, setReadingActivity] = useState<ReadingActivityStats | null>(null);
useEffect(() => {
loadStatistics();
}, []);
// Fetch every statistics panel for the current library in one parallel
// batch; redirects to the library picker when no library is selected.
// Any single failed request fails the whole load (Promise.all semantics).
const loadStatistics = async () => {
  try {
    setLoading(true);
    setError(null);
    const libraryId = getCurrentLibraryId();
    if (!libraryId) {
      router.push('/library');
      return;
    }
    // Load all statistics in parallel
    const [overview, tags, authors, ratings, domains, progress, activity] = await Promise.all([
      statisticsApi.getOverviewStatistics(libraryId),
      statisticsApi.getTopTags(libraryId, 20),
      statisticsApi.getTopAuthors(libraryId, 10),
      statisticsApi.getRatingStats(libraryId),
      statisticsApi.getSourceDomainStats(libraryId, 10),
      statisticsApi.getReadingProgress(libraryId),
      statisticsApi.getReadingActivity(libraryId),
    ]);
    setOverviewStats(overview);
    setTopTags(tags);
    setTopAuthors(authors);
    setRatingStats(ratings);
    setSourceDomains(domains);
    setReadingProgress(progress);
    setReadingActivity(activity);
  } catch (err) {
    console.error('Failed to load statistics:', err);
    setError('Failed to load statistics. Please try again.');
  } finally {
    setLoading(false);
  }
};
const formatNumber = (num: number): string => {
return num.toLocaleString();
};
const formatTime = (minutes: number): string => {
const hours = Math.floor(minutes / 60);
const mins = Math.round(minutes % 60);
if (hours > 24) {
const days = Math.floor(hours / 24);
const remainingHours = hours % 24;
return `${days}d ${remainingHours}h`;
}
if (hours > 0) {
return `${hours}h ${mins}m`;
}
return `${mins}m`;
};
if (loading) {
return (
<div className="container mx-auto px-4 py-8">
<div className="flex items-center justify-center min-h-[400px]">
<div className="text-center">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto mb-4"></div>
<p className="text-gray-600 dark:text-gray-400">Loading statistics...</p>
</div>
</div>
</div>
);
}
if (error) {
return (
<div className="container mx-auto px-4 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-6">
<h3 className="text-lg font-semibold text-red-800 dark:text-red-200 mb-2">Error</h3>
<p className="text-red-600 dark:text-red-400">{error}</p>
<button
onClick={loadStatistics}
className="mt-4 px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition-colors"
>
Try Again
</button>
</div>
</div>
);
}
return (
<div className="container mx-auto px-4 py-8">
<div className="mb-8">
<h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">Library Statistics</h1>
<p className="text-gray-600 dark:text-gray-400">
Insights and analytics for your story collection
</p>
</div>
{/* Collection Overview */}
{overviewStats && (
<section className="mb-8">
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Collection Overview</h2>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
<StatCard title="Total Stories" value={formatNumber(overviewStats.totalStories)} />
<StatCard title="Total Authors" value={formatNumber(overviewStats.totalAuthors)} />
<StatCard title="Total Series" value={formatNumber(overviewStats.totalSeries)} />
<StatCard title="Total Tags" value={formatNumber(overviewStats.totalTags)} />
<StatCard title="Total Collections" value={formatNumber(overviewStats.totalCollections)} />
<StatCard title="Source Domains" value={formatNumber(overviewStats.uniqueSourceDomains)} />
</div>
</section>
)}
{/* Content Metrics */}
{overviewStats && (
<section className="mb-8">
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Content Metrics</h2>
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
<StatCard
title="Total Words"
value={formatNumber(overviewStats.totalWordCount)}
subtitle={`${formatTime(overviewStats.totalReadingTimeMinutes)} reading time`}
/>
<StatCard
title="Average Words per Story"
value={formatNumber(Math.round(overviewStats.averageWordsPerStory))}
subtitle={`${formatTime(overviewStats.averageReadingTimeMinutes)} avg reading time`}
/>
{overviewStats.longestStory && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">Longest Story</h3>
<p className="text-2xl font-bold text-gray-900 dark:text-white mb-1">
{formatNumber(overviewStats.longestStory.wordCount)} words
</p>
<p className="text-sm text-gray-600 dark:text-gray-400 truncate" title={overviewStats.longestStory.title}>
{overviewStats.longestStory.title}
</p>
<p className="text-xs text-gray-500 dark:text-gray-500">
by {overviewStats.longestStory.authorName}
</p>
</div>
)}
{overviewStats.shortestStory && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">Shortest Story</h3>
<p className="text-2xl font-bold text-gray-900 dark:text-white mb-1">
{formatNumber(overviewStats.shortestStory.wordCount)} words
</p>
<p className="text-sm text-gray-600 dark:text-gray-400 truncate" title={overviewStats.shortestStory.title}>
{overviewStats.shortestStory.title}
</p>
<p className="text-xs text-gray-500 dark:text-gray-500">
by {overviewStats.shortestStory.authorName}
</p>
</div>
)}
</div>
</section>
)}
{/* Reading Progress & Activity - Side by side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
{/* Reading Progress */}
{readingProgress && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Reading Progress</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="mb-6">
<div className="flex justify-between items-center mb-2">
<span className="text-sm font-medium text-gray-600 dark:text-gray-400">
{formatNumber(readingProgress.readStories)} of {formatNumber(readingProgress.totalStories)} stories read
</span>
<span className="text-sm font-semibold text-blue-600 dark:text-blue-400">
{readingProgress.percentageRead.toFixed(1)}%
</span>
</div>
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-3">
<div
className="bg-blue-600 h-3 rounded-full transition-all duration-500"
style={{ width: `${readingProgress.percentageRead}%` }}
></div>
</div>
</div>
<div className="grid grid-cols-2 gap-4">
<div>
<p className="text-sm text-gray-500 dark:text-gray-400">Words Read</p>
<p className="text-xl font-semibold text-green-600 dark:text-green-400">
{formatNumber(readingProgress.totalWordsRead)}
</p>
</div>
<div>
<p className="text-sm text-gray-500 dark:text-gray-400">Words Remaining</p>
<p className="text-xl font-semibold text-orange-600 dark:text-orange-400">
{formatNumber(readingProgress.totalWordsUnread)}
</p>
</div>
</div>
</div>
</section>
)}
{/* Reading Activity - Last Week */}
{readingActivity && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Last Week Activity</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="grid grid-cols-3 gap-4 mb-6">
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">Stories</p>
<p className="text-2xl font-bold text-gray-900 dark:text-white">
{formatNumber(readingActivity.storiesReadLastWeek)}
</p>
</div>
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">Words</p>
<p className="text-2xl font-bold text-gray-900 dark:text-white">
{formatNumber(readingActivity.wordsReadLastWeek)}
</p>
</div>
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">Time</p>
<p className="text-2xl font-bold text-gray-900 dark:text-white">
{formatTime(readingActivity.readingTimeMinutesLastWeek)}
</p>
</div>
</div>
{/* Daily Activity Chart */}
<div className="space-y-2">
<p className="text-sm font-medium text-gray-600 dark:text-gray-400 mb-3">Daily Breakdown</p>
{readingActivity.dailyActivity.map((day) => {
const maxWords = Math.max(...readingActivity.dailyActivity.map(d => d.wordsRead), 1);
const percentage = (day.wordsRead / maxWords) * 100;
return (
<div key={day.date} className="flex items-center gap-3">
<span className="text-xs text-gray-500 dark:text-gray-400 w-20">
{new Date(day.date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' })}
</span>
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-6 relative">
<div
className="bg-blue-500 h-6 rounded-full transition-all duration-300"
style={{ width: `${percentage}%` }}
></div>
{day.storiesRead > 0 && (
<span className="absolute inset-0 flex items-center justify-center text-xs font-medium text-gray-700 dark:text-gray-300">
{day.storiesRead} {day.storiesRead === 1 ? 'story' : 'stories'}
</span>
)}
</div>
</div>
);
})}
</div>
</div>
</section>
)}
</div>
{/* Ratings & Source Domains - Side by side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
{/* Rating Statistics */}
{ratingStats && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Rating Statistics</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="text-center mb-6">
<p className="text-sm text-gray-500 dark:text-gray-400 mb-1">Average Rating</p>
<p className="text-4xl font-bold text-yellow-500">
{ratingStats.averageRating.toFixed(1)}
</p>
<p className="text-sm text-gray-600 dark:text-gray-400 mt-2">
{formatNumber(ratingStats.totalRatedStories)} rated {formatNumber(ratingStats.totalUnratedStories)} unrated
</p>
</div>
{/* Rating Distribution */}
<div className="space-y-2">
{[5, 4, 3, 2, 1].map(rating => {
const count = ratingStats.ratingDistribution[rating] || 0;
const percentage = ratingStats.totalRatedStories > 0
? (count / ratingStats.totalRatedStories) * 100
: 0;
return (
<div key={rating} className="flex items-center gap-2">
<span className="text-sm font-medium text-gray-600 dark:text-gray-400 w-12">
{rating}
</span>
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-4">
<div
className="bg-yellow-500 h-4 rounded-full transition-all duration-300"
style={{ width: `${percentage}%` }}
></div>
</div>
<span className="text-sm text-gray-600 dark:text-gray-400 w-16 text-right">
{formatNumber(count)}
</span>
</div>
);
})}
</div>
</div>
</section>
)}
{/* Source Domains */}
{sourceDomains && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Source Domains</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="grid grid-cols-2 gap-4 mb-6">
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">With Source</p>
<p className="text-2xl font-bold text-green-600 dark:text-green-400">
{formatNumber(sourceDomains.storiesWithSource)}
</p>
</div>
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">No Source</p>
<p className="text-2xl font-bold text-gray-500 dark:text-gray-400">
{formatNumber(sourceDomains.storiesWithoutSource)}
</p>
</div>
</div>
<div className="space-y-3">
<p className="text-sm font-medium text-gray-600 dark:text-gray-400">Top Domains</p>
{sourceDomains.topDomains.slice(0, 5).map((domain, index) => (
<div key={domain.domain} className="flex items-center justify-between">
<div className="flex items-center gap-2 flex-1 min-w-0">
<span className="text-sm font-medium text-gray-500 dark:text-gray-400 w-5">
{index + 1}.
</span>
<span className="text-sm text-gray-700 dark:text-gray-300 truncate" title={domain.domain}>
{domain.domain}
</span>
</div>
<span className="text-sm font-semibold text-blue-600 dark:text-blue-400 ml-2">
{formatNumber(domain.storyCount)}
</span>
</div>
))}
</div>
</div>
</section>
)}
</div>
{/* Top Tags & Top Authors - Side by side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
{/* Top Tags */}
{topTags && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Most Used Tags</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="space-y-3">
{topTags.topTags.slice(0, 10).map((tag, index) => {
const maxCount = topTags.topTags[0]?.storyCount || 1;
const percentage = (tag.storyCount / maxCount) * 100;
return (
<div key={tag.tagName} className="flex items-center gap-3">
<span className="text-sm font-medium text-gray-500 dark:text-gray-400 w-6">
{index + 1}
</span>
<div className="flex-1">
<div className="flex items-center justify-between mb-1">
<span className="text-sm font-medium text-gray-700 dark:text-gray-300">
{tag.tagName}
</span>
<span className="text-sm text-gray-600 dark:text-gray-400">
{formatNumber(tag.storyCount)}
</span>
</div>
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-2">
<div
className="bg-purple-500 h-2 rounded-full transition-all duration-300"
style={{ width: `${percentage}%` }}
></div>
</div>
</div>
</div>
);
})}
</div>
</div>
</section>
)}
{/* Top Authors */}
{topAuthors && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Top Authors</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
{/* Tab switcher */}
<div className="flex gap-2 mb-4">
<button
onClick={() => {/* Could add tab switching if needed */}}
className="flex-1 px-4 py-2 text-sm font-medium bg-blue-100 dark:bg-blue-900/30 text-blue-700 dark:text-blue-300 rounded-lg"
>
By Stories
</button>
<button
onClick={() => {/* Could add tab switching if needed */}}
className="flex-1 px-4 py-2 text-sm font-medium text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg"
>
By Words
</button>
</div>
<div className="space-y-3">
{topAuthors.topAuthorsByStories.slice(0, 5).map((author, index) => (
<div key={author.authorId} className="flex items-center justify-between p-3 bg-gray-50 dark:bg-gray-700/50 rounded-lg">
<div className="flex items-center gap-3 flex-1 min-w-0">
<span className="text-lg font-bold text-gray-400 dark:text-gray-500 w-6">
{index + 1}
</span>
<div className="flex-1 min-w-0">
<p className="text-sm font-medium text-gray-900 dark:text-white truncate" title={author.authorName}>
{author.authorName}
</p>
<p className="text-xs text-gray-500 dark:text-gray-400">
{formatNumber(author.storyCount)} stories {formatNumber(author.totalWords)} words
</p>
</div>
</div>
</div>
))}
</div>
</div>
</section>
)}
</div>
</div>
);
}
/**
 * Route entry point for the statistics page: wraps the dashboard content in
 * the shared application layout.
 */
export default function StatisticsPage() {
  const dashboard = <StatisticsContent />;
  return <AppLayout>{dashboard}</AppLayout>;
}
// Reusable stat card component
/**
 * Presentational card showing a label, a headline value, and an optional
 * subtitle line underneath.
 */
function StatCard(props: { title: string; value: string; subtitle?: string }) {
  const { title, value, subtitle } = props;
  return (
    <div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
      <h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">{title}</h3>
      <p className="text-2xl font-bold text-gray-900 dark:text-white">{value}</p>
      {subtitle ? (
        <p className="text-sm text-gray-600 dark:text-gray-400 mt-1">{subtitle}</p>
      ) : null}
    </div>
  );
}

View File

@@ -186,13 +186,13 @@ export default function EditStoryPage() {
};
const updatedStory = await storyApi.updateStory(storyId, updateData);
// If there's a new cover image, upload it separately
if (coverImage) {
await storyApi.uploadCover(storyId, coverImage);
}
router.push(`/stories/${storyId}`);
router.push(`/stories/${storyId}/detail`);
} catch (error: any) {
console.error('Failed to update story:', error);
const errorMessage = error.response?.data?.message || 'Failed to update story';

View File

@@ -95,20 +95,20 @@ export default function StoryReadingPage() {
// Convert scroll position to approximate character position in the content
const getCharacterPositionFromScroll = useCallback((): number => {
if (!contentRef.current || !story) return 0;
const content = contentRef.current;
const scrolled = window.scrollY;
const contentTop = content.offsetTop;
const contentHeight = content.scrollHeight;
const windowHeight = window.innerHeight;
// Calculate how far through the content we are (0-1)
const scrollRatio = Math.min(1, Math.max(0,
const scrollRatio = Math.min(1, Math.max(0,
(scrolled - contentTop + windowHeight * 0.3) / contentHeight
));
// Convert to character position in the plain text content
const textLength = story.contentPlain?.length || story.contentHtml?.length || 0;
// Convert to character position in the HTML content (ALWAYS use contentHtml for consistency)
const textLength = story.contentHtml?.length || 0;
return Math.floor(scrollRatio * textLength);
}, [story]);
@@ -116,7 +116,8 @@ export default function StoryReadingPage() {
const calculateReadingPercentage = useCallback((currentPosition: number): number => {
if (!story) return 0;
const totalLength = story.contentPlain?.length || story.contentHtml?.length || 0;
// ALWAYS use contentHtml for consistency with position calculation
const totalLength = story.contentHtml?.length || 0;
if (totalLength === 0) return 0;
return Math.round((currentPosition / totalLength) * 100);
@@ -126,7 +127,8 @@ export default function StoryReadingPage() {
const scrollToCharacterPosition = useCallback((position: number) => {
if (!contentRef.current || !story || hasScrolledToPosition) return;
const textLength = story.contentPlain?.length || story.contentHtml?.length || 0;
// ALWAYS use contentHtml for consistency with position calculation
const textLength = story.contentHtml?.length || 0;
if (textLength === 0 || position === 0) return;
const ratio = position / textLength;

View File

@@ -27,9 +27,9 @@ export default function Header() {
description: 'Import a single story from a website'
},
{
href: '/import/epub',
label: 'Import EPUB',
description: 'Import a story from an EPUB file'
href: '/import/file',
label: 'Import from File',
description: 'Import from EPUB, PDF, or ZIP file'
},
{
href: '/import/bulk',
@@ -75,12 +75,18 @@ export default function Header() {
>
Collections
</Link>
<Link
href="/authors"
<Link
href="/authors"
className="theme-text hover:theme-accent transition-colors font-medium"
>
Authors
</Link>
<Link
href="/statistics"
className="theme-text hover:theme-accent transition-colors font-medium"
>
Statistics
</Link>
<Dropdown
trigger="Add Story"
items={addStoryItems}
@@ -146,13 +152,20 @@ export default function Header() {
>
Collections
</Link>
<Link
href="/authors"
<Link
href="/authors"
className="theme-text hover:theme-accent transition-colors font-medium px-2 py-1"
onClick={() => setIsMenuOpen(false)}
>
Authors
</Link>
<Link
href="/statistics"
className="theme-text hover:theme-accent transition-colors font-medium px-2 py-1"
onClick={() => setIsMenuOpen(false)}
>
Statistics
</Link>
<div className="px-2 py-1">
<div className="font-medium theme-text mb-1">Add Story</div>
<div className="pl-4 space-y-1">

View File

@@ -31,10 +31,10 @@ const importTabs: ImportTab[] = [
description: 'Import a single story from a website'
},
{
id: 'epub',
label: 'Import EPUB',
href: '/import/epub',
description: 'Import a story from an EPUB file'
id: 'file',
label: 'Import from File',
href: '/import/file',
description: 'Import from EPUB, PDF, or ZIP file'
},
{
id: 'bulk',

View File

@@ -33,11 +33,18 @@ export default function SystemSettings({}: SystemSettingsProps) {
});
const [databaseStatus, setDatabaseStatus] = useState<{
completeBackup: { loading: boolean; message: string; success?: boolean };
completeBackup: {
loading: boolean;
message: string;
success?: boolean;
jobId?: string;
progress?: number;
downloadReady?: boolean;
};
completeRestore: { loading: boolean; message: string; success?: boolean };
completeClear: { loading: boolean; message: string; success?: boolean };
}>({
completeBackup: { loading: false, message: '' },
completeBackup: { loading: false, message: '', progress: 0 },
completeRestore: { loading: false, message: '' },
completeClear: { loading: false, message: '' }
});
@@ -73,43 +80,117 @@ export default function SystemSettings({}: SystemSettingsProps) {
const handleCompleteBackup = async () => {
setDatabaseStatus(prev => ({
...prev,
completeBackup: { loading: true, message: 'Creating complete backup...', success: undefined }
completeBackup: { loading: true, message: 'Starting backup...', success: undefined, progress: 0, downloadReady: false }
}));
try {
const backupBlob = await databaseApi.backupComplete();
// Create download link
const url = window.URL.createObjectURL(backupBlob);
const link = document.createElement('a');
link.href = url;
const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
link.download = `storycove_complete_backup_${timestamp}.zip`;
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
window.URL.revokeObjectURL(url);
// Start the async backup job
const startResponse = await databaseApi.backupComplete();
const jobId = startResponse.jobId;
setDatabaseStatus(prev => ({
...prev,
completeBackup: { loading: false, message: 'Complete backup downloaded successfully', success: true }
completeBackup: { ...prev.completeBackup, jobId, message: 'Backup in progress...' }
}));
// Poll for progress
const pollInterval = setInterval(async () => {
try {
const status = await databaseApi.getBackupStatus(jobId);
if (status.status === 'COMPLETED') {
clearInterval(pollInterval);
setDatabaseStatus(prev => ({
...prev,
completeBackup: {
loading: false,
message: 'Backup completed! Ready to download.',
success: true,
jobId,
progress: 100,
downloadReady: true
}
}));
// Clear message after 30 seconds (keep download button visible)
setTimeout(() => {
setDatabaseStatus(prev => ({
...prev,
completeBackup: { ...prev.completeBackup, message: '' }
}));
}, 30000);
} else if (status.status === 'FAILED') {
clearInterval(pollInterval);
setDatabaseStatus(prev => ({
...prev,
completeBackup: {
loading: false,
message: `Backup failed: ${status.errorMessage}`,
success: false,
progress: 0,
downloadReady: false
}
}));
} else {
// Update progress
setDatabaseStatus(prev => ({
...prev,
completeBackup: {
...prev.completeBackup,
progress: status.progress,
message: `Creating backup... ${status.progress}%`
}
}));
}
} catch (pollError: any) {
clearInterval(pollInterval);
setDatabaseStatus(prev => ({
...prev,
completeBackup: {
loading: false,
message: `Failed to check backup status: ${pollError.message}`,
success: false,
progress: 0,
downloadReady: false
}
}));
}
}, 2000); // Poll every 2 seconds
} catch (error: any) {
setDatabaseStatus(prev => ({
...prev,
completeBackup: { loading: false, message: error.message || 'Complete backup failed', success: false }
completeBackup: {
loading: false,
message: error.message || 'Failed to start backup',
success: false,
progress: 0,
downloadReady: false
}
}));
}
};
// Clear message after 5 seconds
setTimeout(() => {
setDatabaseStatus(prev => ({
...prev,
completeBackup: { loading: false, message: '', success: undefined }
}));
}, 5000);
const handleDownloadBackup = (jobId: string) => {
const downloadUrl = databaseApi.downloadBackup(jobId);
const link = document.createElement('a');
link.href = downloadUrl;
link.download = ''; // Filename will be set by server
document.body.appendChild(link);
link.click();
document.body.removeChild(link);
// Clear the download ready state after download
setDatabaseStatus(prev => ({
...prev,
completeBackup: {
loading: false,
message: 'Backup downloaded successfully',
success: true,
progress: 100,
downloadReady: false
}
}));
};
const handleCompleteRestore = async (event: React.ChangeEvent<HTMLInputElement>) => {
@@ -792,20 +873,50 @@ export default function SystemSettings({}: SystemSettingsProps) {
<p className="text-sm theme-text mb-3">
Download a complete backup as a ZIP file. This includes your database AND all uploaded files (cover images, avatars). This is a comprehensive backup of your entire StoryCove installation.
</p>
<Button
onClick={handleCompleteBackup}
disabled={databaseStatus.completeBackup.loading}
loading={databaseStatus.completeBackup.loading}
variant="primary"
className="w-full sm:w-auto"
>
{databaseStatus.completeBackup.loading ? 'Creating Backup...' : 'Download Backup'}
</Button>
<div className="space-y-3">
<Button
onClick={handleCompleteBackup}
disabled={databaseStatus.completeBackup.loading || databaseStatus.completeBackup.downloadReady}
loading={databaseStatus.completeBackup.loading}
variant="primary"
className="w-full sm:w-auto"
>
{databaseStatus.completeBackup.loading ? 'Creating Backup...' : 'Create Backup'}
</Button>
{databaseStatus.completeBackup.downloadReady && databaseStatus.completeBackup.jobId && (
<Button
onClick={() => handleDownloadBackup(databaseStatus.completeBackup.jobId!)}
variant="primary"
className="w-full sm:w-auto ml-0 sm:ml-3 bg-green-600 hover:bg-green-700"
>
Download Backup
</Button>
)}
</div>
{databaseStatus.completeBackup.loading && databaseStatus.completeBackup.progress !== undefined && (
<div className="mt-3">
<div className="flex justify-between text-sm theme-text mb-1">
<span>Progress</span>
<span>{databaseStatus.completeBackup.progress}%</span>
</div>
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-2.5">
<div
className="bg-blue-600 dark:bg-blue-500 h-2.5 rounded-full transition-all duration-300"
style={{ width: `${databaseStatus.completeBackup.progress}%` }}
></div>
</div>
</div>
)}
{databaseStatus.completeBackup.message && (
<div className={`text-sm p-2 rounded mt-3 ${
databaseStatus.completeBackup.success
? 'bg-green-50 dark:bg-green-900/20 text-green-800 dark:text-green-200'
: 'bg-red-50 dark:bg-red-900/20 text-red-800 dark:text-red-200'
: databaseStatus.completeBackup.success === false
? 'bg-red-50 dark:bg-red-900/20 text-red-800 dark:text-red-200'
: 'bg-blue-50 dark:bg-blue-900/20 text-blue-800 dark:text-blue-200'
}`}>
{databaseStatus.completeBackup.message}
</div>

View File

@@ -114,9 +114,10 @@ const htmlToSlate = (html: string): Descendant[] => {
const img = element as HTMLImageElement;
results.push({
type: 'image',
src: img.src || img.getAttribute('src') || '',
alt: img.alt || img.getAttribute('alt') || '',
caption: img.title || img.getAttribute('title') || '',
// Use getAttribute to preserve relative URLs instead of .src which converts to absolute
src: img.getAttribute('src') || '',
alt: img.getAttribute('alt') || '',
caption: img.getAttribute('title') || '',
children: [{ text: '' }] // Images need children in Slate
});
break;

View File

@@ -72,16 +72,8 @@ export default function StoryCard({
return new Date(dateString).toLocaleDateString();
};
const calculateReadingPercentage = (story: Story): number => {
if (!story.readingPosition) return 0;
const totalLength = story.contentPlain?.length || story.contentHtml?.length || 0;
if (totalLength === 0) return 0;
return Math.round((story.readingPosition / totalLength) * 100);
};
const readingPercentage = calculateReadingPercentage(story);
// Use the pre-calculated percentage from the backend
const readingPercentage = story.readingProgressPercentage || 0;
if (viewMode === 'list') {
return (

View File

@@ -129,7 +129,8 @@ export default function TagEditModal({ tag, isOpen, onClose, onSave, onDelete }:
onDelete(tag);
onClose();
} catch (error: any) {
setErrors({ submit: error.message });
const errorMessage = error.response?.data?.error || error.message || 'Failed to delete tag';
setErrors({ submit: errorMessage });
} finally {
setSaving(false);
}

View File

@@ -0,0 +1,68 @@
import { useState, useEffect, Dispatch, SetStateAction } from 'react';
/**
 * Persist a piece of library filter state in sessionStorage for the lifetime
 * of the browser tab. Behaves like useState, but hydrates from storage on
 * first render and mirrors every change back.
 *
 * @param key - Unique identifier for the filter value in sessionStorage
 * @param defaultValue - Fallback used when nothing is stored or parsing fails
 * @returns Tuple of [value, setValue] similar to useState
 */
export function useLibraryFilters<T>(
  key: string,
  defaultValue: T
): [T, Dispatch<SetStateAction<T>>] {
  // Lazy initializer: runs once on mount, never on re-renders.
  const readInitial = (): T => {
    // SSR safety: sessionStorage only exists in the browser.
    if (typeof window === 'undefined') {
      return defaultValue;
    }
    try {
      const raw = sessionStorage.getItem(`library_filter_${key}`);
      return raw === null ? defaultValue : (JSON.parse(raw) as T);
    } catch (error) {
      console.warn(`Failed to parse sessionStorage value for library_filter_${key}:`, error);
      return defaultValue;
    }
  };

  const [value, setValue] = useState<T>(readInitial);

  // Write-through: persist every change (and key change) to sessionStorage.
  useEffect(() => {
    if (typeof window === 'undefined') return;
    try {
      sessionStorage.setItem(`library_filter_${key}`, JSON.stringify(value));
    } catch (error) {
      console.warn(`Failed to save to sessionStorage for library_filter_${key}:`, error);
    }
  }, [key, value]);

  return [value, setValue];
}
/**
 * Remove every persisted library filter from sessionStorage.
 * Useful for a "Clear Filters" action or when switching libraries.
 * Keys written by other features are left untouched.
 */
export function clearLibraryFilters(): void {
  if (typeof window === 'undefined') return;
  try {
    // Snapshot the key list first, then delete only our namespaced entries.
    Object.keys(sessionStorage)
      .filter(storageKey => storageKey.startsWith('library_filter_'))
      .forEach(storageKey => sessionStorage.removeItem(storageKey));
  } catch (error) {
    console.warn('Failed to clear library filters from sessionStorage:', error);
  }
}

View File

@@ -1013,10 +1013,47 @@ export const databaseApi = {
return response.data;
},
backupComplete: async (): Promise<Blob> => {
const response = await api.post('/database/backup-complete', {}, {
responseType: 'blob'
});
backupComplete: async (): Promise<{ success: boolean; jobId: string; status: string; message: string }> => {
const response = await api.post('/database/backup-complete');
return response.data;
},
getBackupStatus: async (jobId: string): Promise<{
success: boolean;
jobId: string;
status: string;
progress: number;
fileSizeBytes: number;
createdAt: string;
completedAt: string;
errorMessage: string;
}> => {
const response = await api.get(`/database/backup-status/${jobId}`);
return response.data;
},
downloadBackup: (jobId: string): string => {
return `/api/database/backup-download/${jobId}`;
},
listBackups: async (): Promise<{
success: boolean;
backups: Array<{
jobId: string;
type: string;
status: string;
progress: number;
fileSizeBytes: number;
createdAt: string;
completedAt: string;
}>;
}> => {
const response = await api.get('/database/backup-list');
return response.data;
},
deleteBackup: async (jobId: string): Promise<{ success: boolean; message: string }> => {
const response = await api.delete(`/database/backup/${jobId}`);
return response.data;
},
@@ -1059,6 +1096,42 @@ export const statisticsApi = {
const response = await api.get(`/libraries/${libraryId}/statistics/overview`);
return response.data;
},
getTopTags: async (libraryId: string, limit: number = 20): Promise<import('../types/api').TopTagsStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/top-tags`, {
params: { limit }
});
return response.data;
},
getTopAuthors: async (libraryId: string, limit: number = 10): Promise<import('../types/api').TopAuthorsStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/top-authors`, {
params: { limit }
});
return response.data;
},
getRatingStats: async (libraryId: string): Promise<import('../types/api').RatingStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/ratings`);
return response.data;
},
getSourceDomainStats: async (libraryId: string, limit: number = 10): Promise<import('../types/api').SourceDomainStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/source-domains`, {
params: { limit }
});
return response.data;
},
getReadingProgress: async (libraryId: string): Promise<import('../types/api').ReadingProgressStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/reading-progress`);
return response.data;
},
getReadingActivity: async (libraryId: string): Promise<import('../types/api').ReadingActivityStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/reading-activity`);
return response.data;
},
};
// Image utility - now library-aware

View File

@@ -16,6 +16,7 @@ export interface Story {
tags: Tag[];
tagNames?: string[] | null; // Used in search results
readingPosition?: number;
readingProgressPercentage?: number; // Pre-calculated percentage (0-100) from backend
lastReadAt?: string;
createdAt: string;
updatedAt: string;
@@ -233,4 +234,71 @@ export interface StoryWordCount {
authorName: string;
wordCount: number;
readingTimeMinutes: number;
}
// Top Tags Statistics
/** Response of GET /libraries/:id/statistics/top-tags (see statisticsApi.getTopTags). */
export interface TopTagsStats {
  topTags: TagStats[];
}
/** Usage count for a single tag within the library. */
export interface TagStats {
  tagName: string;
  storyCount: number; // number of stories carrying this tag
}
// Top Authors Statistics
/** Response of GET /libraries/:id/statistics/top-authors (see statisticsApi.getTopAuthors). */
export interface TopAuthorsStats {
  topAuthorsByStories: AuthorStats[]; // ranked by story count
  topAuthorsByWords: AuthorStats[];   // ranked by total word count
}
/** Aggregate figures for one author, used in both top-author rankings. */
export interface AuthorStats {
  authorId: string;
  authorName: string;
  storyCount: number;
  totalWords: number;
}
// Rating Statistics
/** Response of GET /libraries/:id/statistics/ratings (see statisticsApi.getRatingStats). */
export interface RatingStats {
  averageRating: number;
  totalRatedStories: number;
  totalUnratedStories: number;
  ratingDistribution: Record<number, number>; // rating -> count
}
// Source Domain Statistics
/** Response of GET /libraries/:id/statistics/source-domains (see statisticsApi.getSourceDomainStats). */
export interface SourceDomainStats {
  topDomains: DomainStats[];
  storiesWithSource: number;    // stories that have a source URL
  storiesWithoutSource: number; // stories lacking one
}
/** Story count for a single source domain. */
export interface DomainStats {
  domain: string;
  storyCount: number;
}
// Reading Progress Statistics
/** Response of GET /libraries/:id/statistics/reading-progress (see statisticsApi.getReadingProgress). */
export interface ReadingProgressStats {
  totalStories: number;
  readStories: number;
  unreadStories: number;
  percentageRead: number;  // NOTE(review): presumably 0-100 like readingProgressPercentage — confirm with backend
  totalWordsRead: number;
  totalWordsUnread: number;
}
// Reading Activity Statistics
/** Response of GET /libraries/:id/statistics/reading-activity (see statisticsApi.getReadingActivity). */
export interface ReadingActivityStats {
  storiesReadLastWeek: number;
  wordsReadLastWeek: number;
  readingTimeMinutesLastWeek: number;
  dailyActivity: DailyActivity[]; // per-day breakdown
}
/** One day's reading totals within ReadingActivityStats.dailyActivity. */
export interface DailyActivity {
  date: string; // YYYY-MM-DD
  storiesRead: number;
  wordsRead: number;
}

File diff suppressed because one or more lines are too long

View File

@@ -13,7 +13,7 @@ http {
server {
listen 80;
client_max_body_size 600M;
client_max_body_size 4096M; # 4GB for large backup uploads
# Frontend routes
location / {
@@ -55,8 +55,8 @@ http {
proxy_connect_timeout 900s;
proxy_send_timeout 900s;
proxy_read_timeout 900s;
# Large upload settings
client_max_body_size 600M;
# Large upload settings (4GB for backups)
client_max_body_size 4096M;
proxy_request_buffering off;
proxy_max_temp_file_size 0;
}

View File

@@ -86,6 +86,7 @@
<!-- Reading Status Fields -->
<field name="isRead" type="boolean" indexed="true" stored="true"/>
<field name="readingPosition" type="pint" indexed="true" stored="true"/>
<field name="readingProgressPercentage" type="pint" indexed="true" stored="true"/>
<field name="lastReadAt" type="pdate" indexed="true" stored="true"/>
<field name="lastRead" type="pdate" indexed="true" stored="true"/>
@@ -112,6 +113,13 @@
<field name="searchScore" type="pdouble" indexed="false" stored="true"/>
<field name="highlights" type="strings" indexed="false" stored="true"/>
<!-- Statistics-specific Fields -->
<field name="hasDescription" type="boolean" indexed="true" stored="true"/>
<field name="hasCoverImage" type="boolean" indexed="true" stored="true"/>
<field name="hasRating" type="boolean" indexed="true" stored="true"/>
<field name="sourceDomain" type="string" indexed="true" stored="true"/>
<field name="tagCount" type="pint" indexed="true" stored="true"/>
<!-- Combined search field for general queries -->
<field name="text" type="text_general" indexed="true" stored="false" multiValued="true"/>