Compare commits
8 Commits
16983fd871
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
77aec8a849 | ||
|
|
b1b5bbbccd | ||
|
|
75768855e2 | ||
|
|
7a4dd567dc | ||
|
|
715fb4e48a | ||
|
|
0e1ed7c92e | ||
|
|
a3bc83db8a | ||
|
|
924ae12b5b |
@@ -1,11 +1,11 @@
|
||||
FROM openjdk:17-jdk-slim
|
||||
FROM eclipse-temurin:17-jdk-jammy
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install Maven and PostgreSQL 15 client tools
|
||||
RUN apt-get update && apt-get install -y wget ca-certificates gnupg maven && \
|
||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
|
||||
echo "deb http://apt.postgresql.org/pub/repos/apt/ bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
|
||||
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor -o /etc/apt/trusted.gpg.d/postgresql.gpg && \
|
||||
echo "deb http://apt.postgresql.org/pub/repos/apt/ jammy-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
|
||||
apt-get update && \
|
||||
apt-get install -y postgresql-client-15 && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
@@ -117,6 +117,11 @@
|
||||
<artifactId>epublib-core</artifactId>
|
||||
<version>3.1</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.pdfbox</groupId>
|
||||
<artifactId>pdfbox</artifactId>
|
||||
<version>3.0.3</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
<dependency>
|
||||
|
||||
@@ -42,6 +42,132 @@ public class LibraryStatisticsController {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get top tags statistics
|
||||
*/
|
||||
@GetMapping("/top-tags")
|
||||
public ResponseEntity<?> getTopTagsStatistics(
|
||||
@PathVariable String libraryId,
|
||||
@RequestParam(defaultValue = "20") int limit) {
|
||||
try {
|
||||
if (libraryService.getLibraryById(libraryId) == null) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
var stats = statisticsService.getTopTagsStatistics(libraryId, limit);
|
||||
return ResponseEntity.ok(stats);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get top tags statistics for library: {}", libraryId, e);
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get top authors statistics
|
||||
*/
|
||||
@GetMapping("/top-authors")
|
||||
public ResponseEntity<?> getTopAuthorsStatistics(
|
||||
@PathVariable String libraryId,
|
||||
@RequestParam(defaultValue = "10") int limit) {
|
||||
try {
|
||||
if (libraryService.getLibraryById(libraryId) == null) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
var stats = statisticsService.getTopAuthorsStatistics(libraryId, limit);
|
||||
return ResponseEntity.ok(stats);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get top authors statistics for library: {}", libraryId, e);
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get rating statistics
|
||||
*/
|
||||
@GetMapping("/ratings")
|
||||
public ResponseEntity<?> getRatingStatistics(@PathVariable String libraryId) {
|
||||
try {
|
||||
if (libraryService.getLibraryById(libraryId) == null) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
var stats = statisticsService.getRatingStatistics(libraryId);
|
||||
return ResponseEntity.ok(stats);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get rating statistics for library: {}", libraryId, e);
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get source domain statistics
|
||||
*/
|
||||
@GetMapping("/source-domains")
|
||||
public ResponseEntity<?> getSourceDomainStatistics(
|
||||
@PathVariable String libraryId,
|
||||
@RequestParam(defaultValue = "10") int limit) {
|
||||
try {
|
||||
if (libraryService.getLibraryById(libraryId) == null) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
var stats = statisticsService.getSourceDomainStatistics(libraryId, limit);
|
||||
return ResponseEntity.ok(stats);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get source domain statistics for library: {}", libraryId, e);
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get reading progress statistics
|
||||
*/
|
||||
@GetMapping("/reading-progress")
|
||||
public ResponseEntity<?> getReadingProgressStatistics(@PathVariable String libraryId) {
|
||||
try {
|
||||
if (libraryService.getLibraryById(libraryId) == null) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
var stats = statisticsService.getReadingProgressStatistics(libraryId);
|
||||
return ResponseEntity.ok(stats);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get reading progress statistics for library: {}", libraryId, e);
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get reading activity statistics (last week)
|
||||
*/
|
||||
@GetMapping("/reading-activity")
|
||||
public ResponseEntity<?> getReadingActivityStatistics(@PathVariable String libraryId) {
|
||||
try {
|
||||
if (libraryService.getLibraryById(libraryId) == null) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
var stats = statisticsService.getReadingActivityStatistics(libraryId);
|
||||
return ResponseEntity.ok(stats);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get reading activity statistics for library: {}", libraryId, e);
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
// Error response DTO
|
||||
private static class ErrorResponse {
|
||||
private String error;
|
||||
|
||||
@@ -44,6 +44,8 @@ public class StoryController {
|
||||
private final ReadingTimeService readingTimeService;
|
||||
private final EPUBImportService epubImportService;
|
||||
private final EPUBExportService epubExportService;
|
||||
private final PDFImportService pdfImportService;
|
||||
private final ZIPImportService zipImportService;
|
||||
private final AsyncImageProcessingService asyncImageProcessingService;
|
||||
private final ImageProcessingProgressService progressService;
|
||||
|
||||
@@ -57,6 +59,8 @@ public class StoryController {
|
||||
ReadingTimeService readingTimeService,
|
||||
EPUBImportService epubImportService,
|
||||
EPUBExportService epubExportService,
|
||||
PDFImportService pdfImportService,
|
||||
ZIPImportService zipImportService,
|
||||
AsyncImageProcessingService asyncImageProcessingService,
|
||||
ImageProcessingProgressService progressService) {
|
||||
this.storyService = storyService;
|
||||
@@ -69,6 +73,8 @@ public class StoryController {
|
||||
this.readingTimeService = readingTimeService;
|
||||
this.epubImportService = epubImportService;
|
||||
this.epubExportService = epubExportService;
|
||||
this.pdfImportService = pdfImportService;
|
||||
this.zipImportService = zipImportService;
|
||||
this.asyncImageProcessingService = asyncImageProcessingService;
|
||||
this.progressService = progressService;
|
||||
}
|
||||
@@ -595,6 +601,7 @@ public class StoryController {
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setReadingProgressPercentage(calculateReadingProgressPercentage(story));
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
@@ -614,6 +621,26 @@ public class StoryController {
|
||||
return dto;
|
||||
}
|
||||
|
||||
private Integer calculateReadingProgressPercentage(Story story) {
|
||||
if (story.getReadingPosition() == null || story.getReadingPosition() == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// ALWAYS use contentHtml for consistency (frontend uses contentHtml for position tracking)
|
||||
int totalLength = 0;
|
||||
if (story.getContentHtml() != null && !story.getContentHtml().isEmpty()) {
|
||||
totalLength = story.getContentHtml().length();
|
||||
}
|
||||
|
||||
if (totalLength == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Calculate percentage and round to nearest integer
|
||||
int percentage = Math.round((float) story.getReadingPosition() * 100 / totalLength);
|
||||
return Math.min(100, percentage);
|
||||
}
|
||||
|
||||
private StoryReadingDto convertToReadingDto(Story story) {
|
||||
StoryReadingDto dto = new StoryReadingDto();
|
||||
dto.setId(story.getId());
|
||||
@@ -632,6 +659,7 @@ public class StoryController {
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setReadingProgressPercentage(calculateReadingProgressPercentage(story));
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
@@ -669,6 +697,7 @@ public class StoryController {
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setReadingProgressPercentage(calculateReadingProgressPercentage(story));
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
@@ -904,6 +933,127 @@ public class StoryController {
|
||||
}
|
||||
}
|
||||
|
||||
// PDF Import endpoint
|
||||
@PostMapping("/pdf/import")
|
||||
public ResponseEntity<FileImportResponse> importPDF(
|
||||
@RequestParam("file") MultipartFile file,
|
||||
@RequestParam(required = false) UUID authorId,
|
||||
@RequestParam(required = false) String authorName,
|
||||
@RequestParam(required = false) UUID seriesId,
|
||||
@RequestParam(required = false) String seriesName,
|
||||
@RequestParam(required = false) Integer seriesVolume,
|
||||
@RequestParam(required = false) List<String> tags,
|
||||
@RequestParam(defaultValue = "true") Boolean createMissingAuthor,
|
||||
@RequestParam(defaultValue = "true") Boolean createMissingSeries,
|
||||
@RequestParam(defaultValue = "true") Boolean extractImages) {
|
||||
|
||||
logger.info("Importing PDF file: {}", file.getOriginalFilename());
|
||||
|
||||
PDFImportRequest request = new PDFImportRequest();
|
||||
request.setPdfFile(file);
|
||||
request.setAuthorId(authorId);
|
||||
request.setAuthorName(authorName);
|
||||
request.setSeriesId(seriesId);
|
||||
request.setSeriesName(seriesName);
|
||||
request.setSeriesVolume(seriesVolume);
|
||||
request.setTags(tags);
|
||||
request.setCreateMissingAuthor(createMissingAuthor);
|
||||
request.setCreateMissingSeries(createMissingSeries);
|
||||
request.setExtractImages(extractImages);
|
||||
|
||||
try {
|
||||
FileImportResponse response = pdfImportService.importPDF(request);
|
||||
|
||||
if (response.isSuccess()) {
|
||||
logger.info("Successfully imported PDF: {} (Story ID: {})",
|
||||
response.getStoryTitle(), response.getStoryId());
|
||||
return ResponseEntity.ok(response);
|
||||
} else {
|
||||
logger.warn("PDF import failed: {}", response.getMessage());
|
||||
return ResponseEntity.badRequest().body(response);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error importing PDF: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(FileImportResponse.error("Internal server error: " + e.getMessage(), file.getOriginalFilename()));
|
||||
}
|
||||
}
|
||||
|
||||
// Validate PDF file
|
||||
@PostMapping("/pdf/validate")
|
||||
public ResponseEntity<Map<String, Object>> validatePDFFile(@RequestParam("file") MultipartFile file) {
|
||||
logger.info("Validating PDF file: {}", file.getOriginalFilename());
|
||||
|
||||
try {
|
||||
List<String> errors = pdfImportService.validatePDFFile(file);
|
||||
|
||||
Map<String, Object> response = Map.of(
|
||||
"valid", errors.isEmpty(),
|
||||
"errors", errors,
|
||||
"filename", file.getOriginalFilename(),
|
||||
"size", file.getSize()
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error validating PDF file: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(Map.of("error", "Failed to validate PDF file"));
|
||||
}
|
||||
}
|
||||
|
||||
// ZIP Analysis endpoint - Step 1: Upload and analyze ZIP contents
|
||||
@PostMapping("/zip/analyze")
|
||||
public ResponseEntity<ZIPAnalysisResponse> analyzeZIPFile(@RequestParam("file") MultipartFile file) {
|
||||
logger.info("Analyzing ZIP file: {}", file.getOriginalFilename());
|
||||
|
||||
try {
|
||||
ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(file);
|
||||
|
||||
if (response.isSuccess()) {
|
||||
logger.info("Successfully analyzed ZIP file: {} ({} files found)",
|
||||
file.getOriginalFilename(), response.getTotalFiles());
|
||||
return ResponseEntity.ok(response);
|
||||
} else {
|
||||
logger.warn("ZIP analysis failed: {}", response.getMessage());
|
||||
return ResponseEntity.badRequest().body(response);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error analyzing ZIP file: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(ZIPAnalysisResponse.error("Internal server error: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
// ZIP Import endpoint - Step 2: Import selected files from analyzed ZIP
|
||||
@PostMapping("/zip/import")
|
||||
public ResponseEntity<ZIPImportResponse> importFromZIP(@Valid @RequestBody ZIPImportRequest request) {
|
||||
logger.info("Importing files from ZIP session: {}", request.getZipSessionId());
|
||||
|
||||
try {
|
||||
ZIPImportResponse response = zipImportService.importFromZIP(request);
|
||||
|
||||
logger.info("ZIP import completed: {} total, {} successful, {} failed",
|
||||
response.getTotalFiles(), response.getSuccessfulImports(), response.getFailedImports());
|
||||
|
||||
if (response.isSuccess()) {
|
||||
return ResponseEntity.ok(response);
|
||||
} else {
|
||||
return ResponseEntity.badRequest().body(response);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error importing from ZIP: {}", e.getMessage(), e);
|
||||
ZIPImportResponse errorResponse = new ZIPImportResponse();
|
||||
errorResponse.setSuccess(false);
|
||||
errorResponse.setMessage("Internal server error: " + e.getMessage());
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).body(errorResponse);
|
||||
}
|
||||
}
|
||||
|
||||
// Request DTOs
|
||||
public static class CreateStoryRequest {
|
||||
private String title;
|
||||
|
||||
132
backend/src/main/java/com/storycove/dto/FileImportResponse.java
Normal file
132
backend/src/main/java/com/storycove/dto/FileImportResponse.java
Normal file
@@ -0,0 +1,132 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * Result payload returned by the file-import endpoints (EPUB, PDF, ZIP entries).
 *
 * Carries the overall outcome plus optional metadata about the created story
 * (id, title, word count, extracted image count) and accumulated non-fatal
 * {@code warnings} / fatal {@code errors}. The two list fields are initialized
 * to empty lists and are guaranteed never to be null, so {@link #addWarning}
 * and {@link #addError} are always safe to call.
 */
public class FileImportResponse {

    private boolean success;
    private String message;
    private UUID storyId;
    private String storyTitle;
    private String fileName;
    private String fileType; // "EPUB" or "PDF"
    private Integer wordCount;
    private Integer extractedImages;
    private List<String> warnings; // non-fatal issues; never null
    private List<String> errors;   // fatal issues; never null

    public FileImportResponse() {
        this.warnings = new ArrayList<>();
        this.errors = new ArrayList<>();
    }

    public FileImportResponse(boolean success, String message) {
        this();
        this.success = success;
        this.message = message;
    }

    /**
     * Factory for a successful import.
     *
     * @param storyId    id of the story that was created
     * @param storyTitle title of the created story
     * @param fileType   source format, e.g. "EPUB" or "PDF"
     */
    public static FileImportResponse success(UUID storyId, String storyTitle, String fileType) {
        FileImportResponse response = new FileImportResponse(true, "File imported successfully");
        response.setStoryId(storyId);
        response.setStoryTitle(storyTitle);
        response.setFileType(fileType);
        return response;
    }

    /**
     * Factory for a failed import.
     *
     * @param message  human-readable failure reason
     * @param fileName name of the file that failed to import
     */
    public static FileImportResponse error(String message, String fileName) {
        FileImportResponse response = new FileImportResponse(false, message);
        response.setFileName(fileName);
        return response;
    }

    /** Records a non-fatal issue encountered during import. */
    public void addWarning(String warning) {
        this.warnings.add(warning);
    }

    /** Records a fatal issue encountered during import. */
    public void addError(String error) {
        this.errors.add(error);
    }

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public UUID getStoryId() {
        return storyId;
    }

    public void setStoryId(UUID storyId) {
        this.storyId = storyId;
    }

    public String getStoryTitle() {
        return storyTitle;
    }

    public void setStoryTitle(String storyTitle) {
        this.storyTitle = storyTitle;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFileType() {
        return fileType;
    }

    public void setFileType(String fileType) {
        this.fileType = fileType;
    }

    public Integer getWordCount() {
        return wordCount;
    }

    public void setWordCount(Integer wordCount) {
        this.wordCount = wordCount;
    }

    public Integer getExtractedImages() {
        return extractedImages;
    }

    public void setExtractedImages(Integer extractedImages) {
        this.extractedImages = extractedImages;
    }

    public List<String> getWarnings() {
        return warnings;
    }

    public void setWarnings(List<String> warnings) {
        // Null-guard: preserve the class invariant that warnings is never null,
        // so addWarning() cannot NPE after a caller (or a JSON binder) sets null.
        this.warnings = (warnings != null) ? warnings : new ArrayList<>();
    }

    public List<String> getErrors() {
        return errors;
    }

    public void setErrors(List<String> errors) {
        // Null-guard: same invariant as setWarnings — errors is never null.
        this.errors = (errors != null) ? errors : new ArrayList<>();
    }
}
|
||||
76
backend/src/main/java/com/storycove/dto/FileInfoDto.java
Normal file
76
backend/src/main/java/com/storycove/dto/FileInfoDto.java
Normal file
@@ -0,0 +1,76 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
/**
 * Lightweight description of one file discovered inside an uploaded archive,
 * used to let the client choose which entries to import.
 *
 * Pure data carrier: no behavior beyond accessors.
 */
public class FileInfoDto {

    private String fileName;
    private String fileType; // "EPUB" or "PDF"
    private Long fileSize; // size in bytes — TODO confirm unit against producer
    private String extractedTitle;  // title read from the file's metadata, if any
    private String extractedAuthor; // author read from the file's metadata, if any
    private boolean hasMetadata;    // true when title/author could be extracted
    private String error; // If file couldn't be analyzed

    public FileInfoDto() {}

    public FileInfoDto(String fileName, String fileType, Long fileSize) {
        this.fileName = fileName;
        this.fileType = fileType;
        this.fileSize = fileSize;
    }

    public String getFileName() {
        return fileName;
    }

    public void setFileName(String fileName) {
        this.fileName = fileName;
    }

    public String getFileType() {
        return fileType;
    }

    public void setFileType(String fileType) {
        this.fileType = fileType;
    }

    public Long getFileSize() {
        return fileSize;
    }

    public void setFileSize(Long fileSize) {
        this.fileSize = fileSize;
    }

    public String getExtractedTitle() {
        return extractedTitle;
    }

    public void setExtractedTitle(String extractedTitle) {
        this.extractedTitle = extractedTitle;
    }

    public String getExtractedAuthor() {
        return extractedAuthor;
    }

    public void setExtractedAuthor(String extractedAuthor) {
        this.extractedAuthor = extractedAuthor;
    }

    public boolean isHasMetadata() {
        return hasMetadata;
    }

    public void setHasMetadata(boolean hasMetadata) {
        this.hasMetadata = hasMetadata;
    }

    public String getError() {
        return error;
    }

    public void setError(String error) {
        this.error = error;
    }
}
|
||||
113
backend/src/main/java/com/storycove/dto/PDFImportRequest.java
Normal file
113
backend/src/main/java/com/storycove/dto/PDFImportRequest.java
Normal file
@@ -0,0 +1,113 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * Request payload for the PDF import endpoint.
 *
 * Bundles the uploaded file with optional author/series linkage. Author and
 * series may be referenced either by id or by name; the three Boolean flags
 * default to {@code true} (create missing author/series, extract images).
 */
public class PDFImportRequest {

    @NotNull(message = "PDF file is required")
    private MultipartFile pdfFile;

    private UUID authorId;      // existing author to link; takes priority over authorName — TODO confirm precedence in service

    private String authorName;  // author to look up (or create, see createMissingAuthor)

    private UUID seriesId;      // existing series to link

    private String seriesName;  // series to look up (or create, see createMissingSeries)

    private Integer seriesVolume; // position of the story within the series

    private List<String> tags;

    private Boolean createMissingAuthor = true;  // create the author when only a name is given

    private Boolean createMissingSeries = true;  // create the series when only a name is given

    private Boolean extractImages = true;        // pull embedded images out of the PDF

    public PDFImportRequest() {}

    public MultipartFile getPdfFile() {
        return pdfFile;
    }

    public void setPdfFile(MultipartFile pdfFile) {
        this.pdfFile = pdfFile;
    }

    public UUID getAuthorId() {
        return authorId;
    }

    public void setAuthorId(UUID authorId) {
        this.authorId = authorId;
    }

    public String getAuthorName() {
        return authorName;
    }

    public void setAuthorName(String authorName) {
        this.authorName = authorName;
    }

    public UUID getSeriesId() {
        return seriesId;
    }

    public void setSeriesId(UUID seriesId) {
        this.seriesId = seriesId;
    }

    public String getSeriesName() {
        return seriesName;
    }

    public void setSeriesName(String seriesName) {
        this.seriesName = seriesName;
    }

    public Integer getSeriesVolume() {
        return seriesVolume;
    }

    public void setSeriesVolume(Integer seriesVolume) {
        this.seriesVolume = seriesVolume;
    }

    public List<String> getTags() {
        return tags;
    }

    public void setTags(List<String> tags) {
        this.tags = tags;
    }

    public Boolean getCreateMissingAuthor() {
        return createMissingAuthor;
    }

    public void setCreateMissingAuthor(Boolean createMissingAuthor) {
        this.createMissingAuthor = createMissingAuthor;
    }

    public Boolean getCreateMissingSeries() {
        return createMissingSeries;
    }

    public void setCreateMissingSeries(Boolean createMissingSeries) {
        this.createMissingSeries = createMissingSeries;
    }

    public Boolean getExtractImages() {
        return extractImages;
    }

    public void setExtractImages(Boolean extractImages) {
        this.extractImages = extractImages;
    }
}
|
||||
45
backend/src/main/java/com/storycove/dto/RatingStatsDto.java
Normal file
45
backend/src/main/java/com/storycove/dto/RatingStatsDto.java
Normal file
@@ -0,0 +1,45 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
/**
 * Aggregated rating statistics for a library.
 *
 * Pure data carrier produced by the statistics service and serialized to the
 * client; no behavior beyond accessors.
 */
public class RatingStatsDto {
    private double averageRating;       // mean over rated stories only
    private long totalRatedStories;
    private long totalUnratedStories;
    private Map<Integer, Long> ratingDistribution; // rating (1-5) -> count

    public RatingStatsDto() {
    }

    public double getAverageRating() {
        return averageRating;
    }

    public void setAverageRating(double averageRating) {
        this.averageRating = averageRating;
    }

    public long getTotalRatedStories() {
        return totalRatedStories;
    }

    public void setTotalRatedStories(long totalRatedStories) {
        this.totalRatedStories = totalRatedStories;
    }

    public long getTotalUnratedStories() {
        return totalUnratedStories;
    }

    public void setTotalUnratedStories(long totalUnratedStories) {
        this.totalUnratedStories = totalUnratedStories;
    }

    public Map<Integer, Long> getRatingDistribution() {
        return ratingDistribution;
    }

    public void setRatingDistribution(Map<Integer, Long> ratingDistribution) {
        this.ratingDistribution = ratingDistribution;
    }
}
|
||||
@@ -0,0 +1,84 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Reading-activity statistics over the last week, with a per-day breakdown.
 *
 * Pure data carrier for the /reading-activity statistics endpoint.
 */
public class ReadingActivityStatsDto {
    private long storiesReadLastWeek;
    private long wordsReadLastWeek;
    private long readingTimeMinutesLastWeek;
    private List<DailyActivityDto> dailyActivity; // one entry per day of the window

    public ReadingActivityStatsDto() {
    }

    public long getStoriesReadLastWeek() {
        return storiesReadLastWeek;
    }

    public void setStoriesReadLastWeek(long storiesReadLastWeek) {
        this.storiesReadLastWeek = storiesReadLastWeek;
    }

    public long getWordsReadLastWeek() {
        return wordsReadLastWeek;
    }

    public void setWordsReadLastWeek(long wordsReadLastWeek) {
        this.wordsReadLastWeek = wordsReadLastWeek;
    }

    public long getReadingTimeMinutesLastWeek() {
        return readingTimeMinutesLastWeek;
    }

    public void setReadingTimeMinutesLastWeek(long readingTimeMinutesLastWeek) {
        this.readingTimeMinutesLastWeek = readingTimeMinutesLastWeek;
    }

    public List<DailyActivityDto> getDailyActivity() {
        return dailyActivity;
    }

    public void setDailyActivity(List<DailyActivityDto> dailyActivity) {
        this.dailyActivity = dailyActivity;
    }

    /** Activity totals for a single calendar day. */
    public static class DailyActivityDto {
        private String date; // YYYY-MM-DD format
        private long storiesRead;
        private long wordsRead;

        public DailyActivityDto() {
        }

        public DailyActivityDto(String date, long storiesRead, long wordsRead) {
            this.date = date;
            this.storiesRead = storiesRead;
            this.wordsRead = wordsRead;
        }

        public String getDate() {
            return date;
        }

        public void setDate(String date) {
            this.date = date;
        }

        public long getStoriesRead() {
            return storiesRead;
        }

        public void setStoriesRead(long storiesRead) {
            this.storiesRead = storiesRead;
        }

        public long getWordsRead() {
            return wordsRead;
        }

        public void setWordsRead(long wordsRead) {
            this.wordsRead = wordsRead;
        }
    }
}
|
||||
@@ -0,0 +1,61 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
/**
 * Library-wide reading-progress totals: how many stories (and words) have been
 * read vs. remain unread.
 *
 * Pure data carrier for the /reading-progress statistics endpoint.
 */
public class ReadingProgressStatsDto {
    private long totalStories;
    private long readStories;
    private long unreadStories;
    private double percentageRead; // readStories as a share of totalStories — TODO confirm 0-100 vs 0-1 scale in producer
    private long totalWordsRead;
    private long totalWordsUnread;

    public ReadingProgressStatsDto() {
    }

    public long getTotalStories() {
        return totalStories;
    }

    public void setTotalStories(long totalStories) {
        this.totalStories = totalStories;
    }

    public long getReadStories() {
        return readStories;
    }

    public void setReadStories(long readStories) {
        this.readStories = readStories;
    }

    public long getUnreadStories() {
        return unreadStories;
    }

    public void setUnreadStories(long unreadStories) {
        this.unreadStories = unreadStories;
    }

    public double getPercentageRead() {
        return percentageRead;
    }

    public void setPercentageRead(double percentageRead) {
        this.percentageRead = percentageRead;
    }

    public long getTotalWordsRead() {
        return totalWordsRead;
    }

    public void setTotalWordsRead(long totalWordsRead) {
        this.totalWordsRead = totalWordsRead;
    }

    public long getTotalWordsUnread() {
        return totalWordsUnread;
    }

    public void setTotalWordsUnread(long totalWordsUnread) {
        this.totalWordsUnread = totalWordsUnread;
    }
}
|
||||
@@ -0,0 +1,65 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Statistics about where a library's stories were sourced from, grouped by
 * the domain of each story's source URL.
 *
 * Pure data carrier for the /source-domains statistics endpoint.
 */
public class SourceDomainStatsDto {
    private List<DomainStatsDto> topDomains;  // most frequent domains, ordered by the producer
    private long storiesWithSource;
    private long storiesWithoutSource;

    public SourceDomainStatsDto() {
    }

    public List<DomainStatsDto> getTopDomains() {
        return topDomains;
    }

    public void setTopDomains(List<DomainStatsDto> topDomains) {
        this.topDomains = topDomains;
    }

    public long getStoriesWithSource() {
        return storiesWithSource;
    }

    public void setStoriesWithSource(long storiesWithSource) {
        this.storiesWithSource = storiesWithSource;
    }

    public long getStoriesWithoutSource() {
        return storiesWithoutSource;
    }

    public void setStoriesWithoutSource(long storiesWithoutSource) {
        this.storiesWithoutSource = storiesWithoutSource;
    }

    /** Story count for a single source domain. */
    public static class DomainStatsDto {
        private String domain;
        private long storyCount;

        public DomainStatsDto() {
        }

        public DomainStatsDto(String domain, long storyCount) {
            this.domain = domain;
            this.storyCount = storyCount;
        }

        public String getDomain() {
            return domain;
        }

        public void setDomain(String domain) {
            this.domain = domain;
        }

        public long getStoryCount() {
            return storyCount;
        }

        public void setStoryCount(long storyCount) {
            this.storyCount = storyCount;
        }
    }
}
|
||||
@@ -31,6 +31,7 @@ public class StoryDto {
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
@@ -147,6 +148,14 @@ public class StoryDto {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public Integer getReadingProgressPercentage() {
|
||||
return readingProgressPercentage;
|
||||
}
|
||||
|
||||
public void setReadingProgressPercentage(Integer readingProgressPercentage) {
|
||||
this.readingProgressPercentage = readingProgressPercentage;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
@@ -25,6 +25,7 @@ public class StoryReadingDto {
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
@@ -136,6 +137,14 @@ public class StoryReadingDto {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public Integer getReadingProgressPercentage() {
|
||||
return readingProgressPercentage;
|
||||
}
|
||||
|
||||
public void setReadingProgressPercentage(Integer readingProgressPercentage) {
|
||||
this.readingProgressPercentage = readingProgressPercentage;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
@@ -18,6 +18,7 @@ public class StorySearchDto {
|
||||
// Reading status
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Author info
|
||||
@@ -133,6 +134,14 @@ public class StorySearchDto {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public Integer getReadingProgressPercentage() {
|
||||
return readingProgressPercentage;
|
||||
}
|
||||
|
||||
public void setReadingProgressPercentage(Integer readingProgressPercentage) {
|
||||
this.readingProgressPercentage = readingProgressPercentage;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ public class StorySummaryDto {
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private Integer readingProgressPercentage; // Pre-calculated percentage (0-100)
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
@@ -127,6 +128,14 @@ public class StorySummaryDto {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public Integer getReadingProgressPercentage() {
|
||||
return readingProgressPercentage;
|
||||
}
|
||||
|
||||
public void setReadingProgressPercentage(Integer readingProgressPercentage) {
|
||||
this.readingProgressPercentage = readingProgressPercentage;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,76 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
public class TopAuthorsStatsDto {
|
||||
private List<AuthorStatsDto> topAuthorsByStories;
|
||||
private List<AuthorStatsDto> topAuthorsByWords;
|
||||
|
||||
public TopAuthorsStatsDto() {
|
||||
}
|
||||
|
||||
public List<AuthorStatsDto> getTopAuthorsByStories() {
|
||||
return topAuthorsByStories;
|
||||
}
|
||||
|
||||
public void setTopAuthorsByStories(List<AuthorStatsDto> topAuthorsByStories) {
|
||||
this.topAuthorsByStories = topAuthorsByStories;
|
||||
}
|
||||
|
||||
public List<AuthorStatsDto> getTopAuthorsByWords() {
|
||||
return topAuthorsByWords;
|
||||
}
|
||||
|
||||
public void setTopAuthorsByWords(List<AuthorStatsDto> topAuthorsByWords) {
|
||||
this.topAuthorsByWords = topAuthorsByWords;
|
||||
}
|
||||
|
||||
public static class AuthorStatsDto {
|
||||
private String authorId;
|
||||
private String authorName;
|
||||
private long storyCount;
|
||||
private long totalWords;
|
||||
|
||||
public AuthorStatsDto() {
|
||||
}
|
||||
|
||||
public AuthorStatsDto(String authorId, String authorName, long storyCount, long totalWords) {
|
||||
this.authorId = authorId;
|
||||
this.authorName = authorName;
|
||||
this.storyCount = storyCount;
|
||||
this.totalWords = totalWords;
|
||||
}
|
||||
|
||||
public String getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
public void setAuthorId(String authorId) {
|
||||
this.authorId = authorId;
|
||||
}
|
||||
|
||||
public String getAuthorName() {
|
||||
return authorName;
|
||||
}
|
||||
|
||||
public void setAuthorName(String authorName) {
|
||||
this.authorName = authorName;
|
||||
}
|
||||
|
||||
public long getStoryCount() {
|
||||
return storyCount;
|
||||
}
|
||||
|
||||
public void setStoryCount(long storyCount) {
|
||||
this.storyCount = storyCount;
|
||||
}
|
||||
|
||||
public long getTotalWords() {
|
||||
return totalWords;
|
||||
}
|
||||
|
||||
public void setTotalWords(long totalWords) {
|
||||
this.totalWords = totalWords;
|
||||
}
|
||||
}
|
||||
}
|
||||
51
backend/src/main/java/com/storycove/dto/TopTagsStatsDto.java
Normal file
51
backend/src/main/java/com/storycove/dto/TopTagsStatsDto.java
Normal file
@@ -0,0 +1,51 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
|
||||
/**
 * The most frequently used tags in a library, each paired with the number
 * of stories carrying it. Plain mutable DTO for JSON serialization.
 */
public class TopTagsStatsDto {

    /** One tag with its story count. */
    public static class TagStatsDto {

        private String tagName;
        private long storyCount;

        /** No-arg constructor for JSON (de)serialization. */
        public TagStatsDto() {
        }

        /**
         * @param tagName    the tag's name
         * @param storyCount number of stories tagged with it
         */
        public TagStatsDto(String tagName, long storyCount) {
            this.tagName = tagName;
            this.storyCount = storyCount;
        }

        public String getTagName() {
            return tagName;
        }

        public void setTagName(String tagName) {
            this.tagName = tagName;
        }

        public long getStoryCount() {
            return storyCount;
        }

        public void setStoryCount(long storyCount) {
            this.storyCount = storyCount;
        }
    }

    // Tags ordered most-used first.
    private List<TagStatsDto> topTags;

    /** No-arg constructor for JSON (de)serialization. */
    public TopTagsStatsDto() {
    }

    /** @param topTags tags ordered most-used first */
    public TopTagsStatsDto(List<TagStatsDto> topTags) {
        this.topTags = topTags;
    }

    public List<TagStatsDto> getTopTags() {
        return topTags;
    }

    public void setTopTags(List<TagStatsDto> topTags) {
        this.topTags = topTags;
    }
}
|
||||
@@ -0,0 +1,98 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Response returned after analyzing an uploaded ZIP archive prior to
 * import: the files discovered inside, how many of them are importable,
 * and any non-fatal warnings.
 */
public class ZIPAnalysisResponse {

    private boolean success;
    private String message;
    private String zipFileName;
    // Total number of entries discovered in the archive.
    private int totalFiles;
    // Entries whose FileInfoDto carries no error (derived in success()).
    private int validFiles;
    private List<FileInfoDto> files;
    private List<String> warnings;

    public ZIPAnalysisResponse() {
        // Start with empty lists so addWarning() and getters never see null.
        this.files = new ArrayList<>();
        this.warnings = new ArrayList<>();
    }

    /**
     * Builds a success response from the analyzed file list; totalFiles and
     * validFiles are derived from the list (valid = entries without an error).
     * NOTE(review): a null {@code files} argument would NPE here — callers
     * are expected to pass a (possibly empty) list.
     */
    public static ZIPAnalysisResponse success(String zipFileName, List<FileInfoDto> files) {
        ZIPAnalysisResponse response = new ZIPAnalysisResponse();
        response.setSuccess(true);
        response.setMessage("ZIP file analyzed successfully");
        response.setZipFileName(zipFileName);
        response.setFiles(files);
        response.setTotalFiles(files.size());
        response.setValidFiles((int) files.stream().filter(f -> f.getError() == null).count());
        return response;
    }

    /** Builds a failure response carrying only an error message. */
    public static ZIPAnalysisResponse error(String message) {
        ZIPAnalysisResponse response = new ZIPAnalysisResponse();
        response.setSuccess(false);
        response.setMessage(message);
        return response;
    }

    /** Appends a non-fatal warning to be surfaced alongside the result. */
    public void addWarning(String warning) {
        this.warnings.add(warning);
    }

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public String getZipFileName() {
        return zipFileName;
    }

    public void setZipFileName(String zipFileName) {
        this.zipFileName = zipFileName;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public int getValidFiles() {
        return validFiles;
    }

    public void setValidFiles(int validFiles) {
        this.validFiles = validFiles;
    }

    public List<FileInfoDto> getFiles() {
        return files;
    }

    public void setFiles(List<FileInfoDto> files) {
        this.files = files;
    }

    public List<String> getWarnings() {
        return warnings;
    }

    public void setWarnings(List<String> warnings) {
        this.warnings = warnings;
    }
}
|
||||
177
backend/src/main/java/com/storycove/dto/ZIPImportRequest.java
Normal file
177
backend/src/main/java/com/storycove/dto/ZIPImportRequest.java
Normal file
@@ -0,0 +1,177 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
 * Request body for importing selected files out of a previously uploaded
 * and analyzed ZIP archive. Carries the session handle of the uploaded
 * archive, the chosen file names, optional per-file metadata overrides,
 * and library-wide defaults applied when no override is given.
 */
public class ZIPImportRequest {

    @NotNull(message = "ZIP session ID is required")
    private String zipSessionId; // Temporary ID for the uploaded ZIP file

    @NotNull(message = "Selected files are required")
    private List<String> selectedFiles; // List of file names to import

    // Per-file metadata overrides (key = fileName)
    private Map<String, FileImportMetadata> fileMetadata;

    // Default metadata for all files (if not specified per file)
    private UUID defaultAuthorId;
    private String defaultAuthorName;
    private UUID defaultSeriesId;
    private String defaultSeriesName;
    private List<String> defaultTags;

    // Behavior flags — all default to true when the client omits them.
    private Boolean createMissingAuthor = true;
    private Boolean createMissingSeries = true;
    private Boolean extractImages = true;

    public ZIPImportRequest() {}

    /**
     * Metadata overrides for a single file in the archive; any null field
     * falls back to the request-level default.
     */
    public static class FileImportMetadata {
        private UUID authorId;
        private String authorName;
        private UUID seriesId;
        private String seriesName;
        private Integer seriesVolume;
        private List<String> tags;

        public UUID getAuthorId() {
            return authorId;
        }

        public void setAuthorId(UUID authorId) {
            this.authorId = authorId;
        }

        public String getAuthorName() {
            return authorName;
        }

        public void setAuthorName(String authorName) {
            this.authorName = authorName;
        }

        public UUID getSeriesId() {
            return seriesId;
        }

        public void setSeriesId(UUID seriesId) {
            this.seriesId = seriesId;
        }

        public String getSeriesName() {
            return seriesName;
        }

        public void setSeriesName(String seriesName) {
            this.seriesName = seriesName;
        }

        public Integer getSeriesVolume() {
            return seriesVolume;
        }

        public void setSeriesVolume(Integer seriesVolume) {
            this.seriesVolume = seriesVolume;
        }

        public List<String> getTags() {
            return tags;
        }

        public void setTags(List<String> tags) {
            this.tags = tags;
        }
    }

    public String getZipSessionId() {
        return zipSessionId;
    }

    public void setZipSessionId(String zipSessionId) {
        this.zipSessionId = zipSessionId;
    }

    public List<String> getSelectedFiles() {
        return selectedFiles;
    }

    public void setSelectedFiles(List<String> selectedFiles) {
        this.selectedFiles = selectedFiles;
    }

    public Map<String, FileImportMetadata> getFileMetadata() {
        return fileMetadata;
    }

    public void setFileMetadata(Map<String, FileImportMetadata> fileMetadata) {
        this.fileMetadata = fileMetadata;
    }

    public UUID getDefaultAuthorId() {
        return defaultAuthorId;
    }

    public void setDefaultAuthorId(UUID defaultAuthorId) {
        this.defaultAuthorId = defaultAuthorId;
    }

    public String getDefaultAuthorName() {
        return defaultAuthorName;
    }

    public void setDefaultAuthorName(String defaultAuthorName) {
        this.defaultAuthorName = defaultAuthorName;
    }

    public UUID getDefaultSeriesId() {
        return defaultSeriesId;
    }

    public void setDefaultSeriesId(UUID defaultSeriesId) {
        this.defaultSeriesId = defaultSeriesId;
    }

    public String getDefaultSeriesName() {
        return defaultSeriesName;
    }

    public void setDefaultSeriesName(String defaultSeriesName) {
        this.defaultSeriesName = defaultSeriesName;
    }

    public List<String> getDefaultTags() {
        return defaultTags;
    }

    public void setDefaultTags(List<String> defaultTags) {
        this.defaultTags = defaultTags;
    }

    public Boolean getCreateMissingAuthor() {
        return createMissingAuthor;
    }

    public void setCreateMissingAuthor(Boolean createMissingAuthor) {
        this.createMissingAuthor = createMissingAuthor;
    }

    public Boolean getCreateMissingSeries() {
        return createMissingSeries;
    }

    public void setCreateMissingSeries(Boolean createMissingSeries) {
        this.createMissingSeries = createMissingSeries;
    }

    public Boolean getExtractImages() {
        return extractImages;
    }

    public void setExtractImages(Boolean extractImages) {
        this.extractImages = extractImages;
    }
}
|
||||
101
backend/src/main/java/com/storycove/dto/ZIPImportResponse.java
Normal file
101
backend/src/main/java/com/storycove/dto/ZIPImportResponse.java
Normal file
@@ -0,0 +1,101 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
/**
 * Aggregate result of importing multiple files from a ZIP archive:
 * per-file outcomes plus summary counts and non-fatal warnings.
 */
public class ZIPImportResponse {

    private boolean success;
    private String message;
    private int totalFiles;
    private int successfulImports;
    private int failedImports;
    private List<FileImportResponse> results;
    private List<String> warnings;

    public ZIPImportResponse() {
        // Start with empty lists so addWarning() and getters never see null.
        this.results = new ArrayList<>();
        this.warnings = new ArrayList<>();
    }

    /**
     * Builds the aggregate response from per-file results. Counts are derived
     * from the list; success/message depend on the failure count:
     * no failures -> success, all failed -> failure, otherwise partial
     * success (still reported with success = true, message lists both counts).
     * NOTE(review): an empty results list hits the "no failures" branch and
     * reports "All files imported successfully" — confirm that is intended.
     */
    public static ZIPImportResponse create(List<FileImportResponse> results) {
        ZIPImportResponse response = new ZIPImportResponse();
        response.setResults(results);
        response.setTotalFiles(results.size());
        response.setSuccessfulImports((int) results.stream().filter(FileImportResponse::isSuccess).count());
        response.setFailedImports((int) results.stream().filter(r -> !r.isSuccess()).count());

        if (response.getFailedImports() == 0) {
            response.setSuccess(true);
            response.setMessage("All files imported successfully");
        } else if (response.getSuccessfulImports() == 0) {
            response.setSuccess(false);
            response.setMessage("All file imports failed");
        } else {
            response.setSuccess(true);
            response.setMessage("Partial success: " + response.getSuccessfulImports() + " imported, " + response.getFailedImports() + " failed");
        }

        return response;
    }

    /** Appends a non-fatal warning to be surfaced alongside the result. */
    public void addWarning(String warning) {
        this.warnings.add(warning);
    }

    public boolean isSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public int getTotalFiles() {
        return totalFiles;
    }

    public void setTotalFiles(int totalFiles) {
        this.totalFiles = totalFiles;
    }

    public int getSuccessfulImports() {
        return successfulImports;
    }

    public void setSuccessfulImports(int successfulImports) {
        this.successfulImports = successfulImports;
    }

    public int getFailedImports() {
        return failedImports;
    }

    public void setFailedImports(int failedImports) {
        this.failedImports = failedImports;
    }

    public List<FileImportResponse> getResults() {
        return results;
    }

    public void setResults(List<FileImportResponse> results) {
        this.results = results;
    }

    public List<String> getWarnings() {
        return warnings;
    }

    public void setWarnings(List<String> warnings) {
        this.warnings = warnings;
    }
}
|
||||
@@ -287,11 +287,18 @@ public class Story {
|
||||
|
||||
/**
|
||||
* Updates the reading progress and timestamp
|
||||
* When position is 0 or null, resets lastReadAt to null so the story won't appear in "last read" sorting
|
||||
*/
|
||||
public void updateReadingProgress(Integer position) {
|
||||
this.readingPosition = position;
|
||||
// Only update lastReadAt if there's actual reading progress
|
||||
// Reset to null when position is 0 or null to remove from "last read" sorting
|
||||
if (position == null || position == 0) {
|
||||
this.lastReadAt = null;
|
||||
} else {
|
||||
this.lastReadAt = LocalDateTime.now();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the story as read and updates the reading position to the end
|
||||
|
||||
@@ -354,14 +354,24 @@ public class DatabaseManagementService implements ApplicationContextAware {
|
||||
Path tempBackupFile = Files.createTempFile("storycove_restore_", ".sql");
|
||||
|
||||
try {
|
||||
// Write backup stream to temporary file
|
||||
// Write backup stream to temporary file, filtering out incompatible commands
|
||||
System.err.println("Writing backup data to temporary file...");
|
||||
try (InputStream input = backupStream;
|
||||
OutputStream output = Files.newOutputStream(tempBackupFile)) {
|
||||
byte[] buffer = new byte[8192];
|
||||
int bytesRead;
|
||||
while ((bytesRead = input.read(buffer)) != -1) {
|
||||
output.write(buffer, 0, bytesRead);
|
||||
BufferedReader reader = new BufferedReader(new InputStreamReader(input, StandardCharsets.UTF_8));
|
||||
BufferedWriter writer = Files.newBufferedWriter(tempBackupFile, StandardCharsets.UTF_8)) {
|
||||
|
||||
String line;
|
||||
while ((line = reader.readLine()) != null) {
|
||||
// Skip DROP DATABASE and CREATE DATABASE commands - we're already connected to the DB
|
||||
// Also skip database connection commands as we're already connected
|
||||
if (line.trim().startsWith("DROP DATABASE") ||
|
||||
line.trim().startsWith("CREATE DATABASE") ||
|
||||
line.trim().startsWith("\\connect")) {
|
||||
System.err.println("Skipping incompatible command: " + line.substring(0, Math.min(50, line.length())));
|
||||
continue;
|
||||
}
|
||||
writer.write(line);
|
||||
writer.newLine();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -71,45 +71,53 @@ public class EPUBImportService {
|
||||
return EPUBImportResponse.error("Invalid EPUB file format");
|
||||
}
|
||||
|
||||
log.info("Parsing EPUB file: {}", epubFile.getOriginalFilename());
|
||||
Book book = parseEPUBFile(epubFile);
|
||||
|
||||
log.info("Creating story entity from EPUB metadata");
|
||||
Story story = createStoryFromEPUB(book, request);
|
||||
|
||||
log.info("Saving story to database: {}", story.getTitle());
|
||||
Story savedStory = storyService.create(story);
|
||||
log.info("Story saved successfully with ID: {}", savedStory.getId());
|
||||
|
||||
// Process embedded images if content contains any
|
||||
String originalContent = story.getContentHtml();
|
||||
if (originalContent != null && originalContent.contains("<img")) {
|
||||
try {
|
||||
log.info("Processing embedded images for story: {}", savedStory.getId());
|
||||
ImageService.ContentImageProcessingResult imageResult =
|
||||
imageService.processContentImages(originalContent, savedStory.getId());
|
||||
|
||||
// Update story content with processed images if changed
|
||||
if (!imageResult.getProcessedContent().equals(originalContent)) {
|
||||
log.info("Updating story content with processed images");
|
||||
savedStory.setContentHtml(imageResult.getProcessedContent());
|
||||
savedStory = storyService.update(savedStory.getId(), savedStory);
|
||||
|
||||
// Log the image processing results
|
||||
log.debug("EPUB Import - Image processing completed for story {}. Downloaded {} images.",
|
||||
log.info("EPUB Import - Image processing completed for story {}. Downloaded {} images.",
|
||||
savedStory.getId(), imageResult.getDownloadedImages().size());
|
||||
|
||||
if (imageResult.hasWarnings()) {
|
||||
log.debug("EPUB Import - Image processing warnings: {}",
|
||||
log.warn("EPUB Import - Image processing warnings: {}",
|
||||
String.join(", ", imageResult.getWarnings()));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// Log error but don't fail the import
|
||||
System.err.println("EPUB Import - Failed to process embedded images for story " +
|
||||
savedStory.getId() + ": " + e.getMessage());
|
||||
log.error("EPUB Import - Failed to process embedded images for story {}: {}",
|
||||
savedStory.getId(), e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
log.info("Building import response for story: {}", savedStory.getId());
|
||||
EPUBImportResponse response = EPUBImportResponse.success(savedStory.getId(), savedStory.getTitle());
|
||||
response.setWordCount(savedStory.getWordCount());
|
||||
response.setTotalChapters(book.getSpine().size());
|
||||
|
||||
if (request.getPreserveReadingPosition() != null && request.getPreserveReadingPosition()) {
|
||||
log.info("Extracting and saving reading position");
|
||||
ReadingPosition readingPosition = extractReadingPosition(book, savedStory);
|
||||
if (readingPosition != null) {
|
||||
ReadingPosition savedPosition = readingPositionRepository.save(readingPosition);
|
||||
@@ -117,9 +125,11 @@ public class EPUBImportService {
|
||||
}
|
||||
}
|
||||
|
||||
log.info("EPUB import completed successfully for: {}", savedStory.getTitle());
|
||||
return response;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("EPUB import failed with exception: {}", e.getMessage(), e);
|
||||
return EPUBImportResponse.error("Failed to import EPUB: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
@@ -148,9 +158,12 @@ public class EPUBImportService {
|
||||
private Story createStoryFromEPUB(Book book, EPUBImportRequest request) {
|
||||
Metadata metadata = book.getMetadata();
|
||||
|
||||
log.info("Extracting EPUB metadata");
|
||||
String title = extractTitle(metadata);
|
||||
String authorName = extractAuthorName(metadata, request);
|
||||
String description = extractDescription(metadata);
|
||||
|
||||
log.info("Extracting and sanitizing content from {} chapters", book.getSpine().size());
|
||||
String content = extractContent(book);
|
||||
|
||||
Story story = new Story();
|
||||
@@ -160,42 +173,69 @@ public class EPUBImportService {
|
||||
|
||||
// Extract and process cover image
|
||||
if (request.getExtractCover() == null || request.getExtractCover()) {
|
||||
log.info("Extracting cover image");
|
||||
String coverPath = extractAndSaveCoverImage(book);
|
||||
if (coverPath != null) {
|
||||
log.info("Cover image saved at: {}", coverPath);
|
||||
story.setCoverPath(coverPath);
|
||||
}
|
||||
}
|
||||
|
||||
// Handle author assignment
|
||||
try {
|
||||
if (request.getAuthorId() != null) {
|
||||
log.info("Looking up author by ID: {}", request.getAuthorId());
|
||||
try {
|
||||
Author author = authorService.findById(request.getAuthorId());
|
||||
story.setAuthor(author);
|
||||
log.info("Author found and assigned: {}", author.getName());
|
||||
} catch (ResourceNotFoundException e) {
|
||||
log.warn("Author ID {} not found", request.getAuthorId());
|
||||
if (request.getCreateMissingAuthor()) {
|
||||
log.info("Creating new author: {}", authorName);
|
||||
Author newAuthor = createAuthor(authorName);
|
||||
story.setAuthor(newAuthor);
|
||||
log.info("New author created with ID: {}", newAuthor.getId());
|
||||
}
|
||||
}
|
||||
} else if (authorName != null && request.getCreateMissingAuthor()) {
|
||||
log.info("Finding or creating author: {}", authorName);
|
||||
Author author = findOrCreateAuthor(authorName);
|
||||
story.setAuthor(author);
|
||||
log.info("Author assigned: {} (ID: {})", author.getName(), author.getId());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Error handling author assignment: {}", e.getMessage(), e);
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Handle series assignment
|
||||
try {
|
||||
if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
|
||||
log.info("Looking up series by ID: {}", request.getSeriesId());
|
||||
try {
|
||||
Series series = seriesService.findById(request.getSeriesId());
|
||||
story.setSeries(series);
|
||||
story.setVolume(request.getSeriesVolume());
|
||||
log.info("Series found and assigned: {} (volume {})", series.getName(), request.getSeriesVolume());
|
||||
} catch (ResourceNotFoundException e) {
|
||||
log.warn("Series ID {} not found", request.getSeriesId());
|
||||
if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
|
||||
log.info("Creating new series: {}", request.getSeriesName());
|
||||
Series newSeries = createSeries(request.getSeriesName());
|
||||
story.setSeries(newSeries);
|
||||
story.setVolume(request.getSeriesVolume());
|
||||
log.info("New series created with ID: {}", newSeries.getId());
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Error handling series assignment: {}", e.getMessage(), e);
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Handle tags from request or extract from EPUB metadata
|
||||
try {
|
||||
List<String> allTags = new ArrayList<>();
|
||||
if (request.getTags() != null && !request.getTags().isEmpty()) {
|
||||
allTags.addAll(request.getTags());
|
||||
@@ -207,17 +247,29 @@ public class EPUBImportService {
|
||||
allTags.addAll(epubTags);
|
||||
}
|
||||
|
||||
log.info("Processing {} tags for story", allTags.size());
|
||||
// Remove duplicates and create tags
|
||||
allTags.stream()
|
||||
.distinct()
|
||||
.forEach(tagName -> {
|
||||
try {
|
||||
log.debug("Finding or creating tag: {}", tagName);
|
||||
Tag tag = tagService.findOrCreate(tagName.trim());
|
||||
story.addTag(tag);
|
||||
} catch (Exception e) {
|
||||
log.error("Error creating tag '{}': {}", tagName, e.getMessage(), e);
|
||||
throw e;
|
||||
}
|
||||
});
|
||||
} catch (Exception e) {
|
||||
log.error("Error handling tags: {}", e.getMessage(), e);
|
||||
throw e;
|
||||
}
|
||||
|
||||
// Extract additional metadata for potential future use
|
||||
extractAdditionalMetadata(metadata, story);
|
||||
|
||||
log.info("Story entity created successfully: {}", title);
|
||||
return story;
|
||||
}
|
||||
|
||||
@@ -244,7 +296,13 @@ public class EPUBImportService {
|
||||
private String extractDescription(Metadata metadata) {
|
||||
List<String> descriptions = metadata.getDescriptions();
|
||||
if (descriptions != null && !descriptions.isEmpty()) {
|
||||
return descriptions.get(0);
|
||||
String description = descriptions.get(0);
|
||||
// Truncate to 1000 characters if necessary
|
||||
if (description != null && description.length() > 1000) {
|
||||
log.info("Description exceeds 1000 characters ({}), truncating...", description.length());
|
||||
return description.substring(0, 997) + "...";
|
||||
}
|
||||
return description;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
@@ -188,13 +188,13 @@ public class HtmlSanitizationService {
|
||||
return "";
|
||||
}
|
||||
|
||||
logger.info("Content before sanitization: "+html);
|
||||
logger.debug("Sanitizing HTML content (length: {} characters)", html.length());
|
||||
|
||||
// Preprocess to extract images from figure tags
|
||||
String preprocessed = preprocessFigureTags(html);
|
||||
|
||||
String saniztedHtml = Jsoup.clean(preprocessed, allowlist.preserveRelativeLinks(true));
|
||||
logger.info("Content after sanitization: "+saniztedHtml);
|
||||
logger.debug("Sanitization complete (output length: {} characters)", saniztedHtml.length());
|
||||
return saniztedHtml;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,8 +1,9 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.config.SolrProperties;
|
||||
import com.storycove.dto.LibraryOverviewStatsDto;
|
||||
import com.storycove.dto.*;
|
||||
import com.storycove.dto.LibraryOverviewStatsDto.StoryWordCountDto;
|
||||
import com.storycove.repository.CollectionRepository;
|
||||
import org.apache.solr.client.solrj.SolrClient;
|
||||
import org.apache.solr.client.solrj.SolrQuery;
|
||||
import org.apache.solr.client.solrj.SolrServerException;
|
||||
@@ -17,7 +18,12 @@ import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
import java.time.LocalDate;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.ZoneOffset;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.*;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Service
|
||||
@ConditionalOnProperty(
|
||||
@@ -39,6 +45,9 @@ public class LibraryStatisticsService {
|
||||
@Autowired
|
||||
private LibraryService libraryService;
|
||||
|
||||
@Autowired
|
||||
private CollectionRepository collectionRepository;
|
||||
|
||||
/**
|
||||
* Get overview statistics for a library
|
||||
*/
|
||||
@@ -133,13 +142,9 @@ public class LibraryStatisticsService {
|
||||
/**
|
||||
* Get total number of collections
|
||||
*/
|
||||
private long getTotalCollections(String libraryId) throws IOException, SolrServerException {
|
||||
SolrQuery query = new SolrQuery("*:*");
|
||||
query.addFilterQuery("libraryId:" + libraryId);
|
||||
query.setRows(0);
|
||||
|
||||
QueryResponse response = solrClient.query(properties.getCores().getCollections(), query);
|
||||
return response.getResults().getNumFound();
|
||||
/**
 * Counts the library's collections.
 * NOTE(review): the libraryId parameter is ignored — countByIsArchivedFalse()
 * counts non-archived collections across ALL libraries. Confirm whether a
 * per-library count (e.g. countByLibraryIdAndIsArchivedFalse) is intended.
 */
private long getTotalCollections(String libraryId) {
    // Collections are stored in the database, not indexed in Solr
    return collectionRepository.countByIsArchivedFalse();
}
|
||||
|
||||
/**
|
||||
@@ -254,4 +259,385 @@ public class LibraryStatisticsService {
|
||||
long sum = 0;
|
||||
double mean = 0.0;
|
||||
}
|
||||
|
||||
/**
 * Get top tags statistics: facets the Solr stories core on "tagNames",
 * restricted to the given library, and returns the {@code limit} most
 * frequent tags with their story counts.
 *
 * @param libraryId library whose stories are faceted
 * @param limit     maximum number of tags to return
 * @throws IOException         on Solr communication failure
 * @throws SolrServerException on Solr query failure
 */
public TopTagsStatsDto getTopTagsStatistics(String libraryId, int limit) throws IOException, SolrServerException {
    SolrQuery query = new SolrQuery("*:*");
    // NOTE(review): libraryId is interpolated unescaped into the filter
    // query; if it can ever contain Solr query syntax, escape it
    // (e.g. ClientUtils.escapeQueryChars).
    query.addFilterQuery("libraryId:" + libraryId);
    query.setRows(0); // facet counts only — no documents needed
    query.setFacet(true);
    query.addFacetField("tagNames");
    query.setFacetLimit(limit);
    query.setFacetSort("count"); // Sort by count (most popular first)

    QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
    FacetField tagsFacet = response.getFacetField("tagNames");

    List<TopTagsStatsDto.TagStatsDto> topTags = new ArrayList<>();
    if (tagsFacet != null && tagsFacet.getValues() != null) {
        for (FacetField.Count count : tagsFacet.getValues()) {
            topTags.add(new TopTagsStatsDto.TagStatsDto(count.getName(), count.getCount()));
        }
    }

    return new TopTagsStatsDto(topTags);
}
|
||||
|
||||
/**
|
||||
* Get top authors statistics
|
||||
*/
|
||||
public TopAuthorsStatsDto getTopAuthorsStatistics(String libraryId, int limit) throws IOException, SolrServerException {
|
||||
TopAuthorsStatsDto stats = new TopAuthorsStatsDto();
|
||||
|
||||
// Top authors by story count
|
||||
stats.setTopAuthorsByStories(getTopAuthorsByStoryCount(libraryId, limit));
|
||||
|
||||
// Top authors by total words
|
||||
stats.setTopAuthorsByWords(getTopAuthorsByWordCount(libraryId, limit));
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
 * Rank authors by how many stories they have in the library.
 *
 * Facets on {@code authorId} (top {@code limit} buckets, sorted by count),
 * then for each bucket issues two follow-up queries: one to fetch a display
 * name and one (via getAuthorTotalWords) to sum word counts.
 *
 * NOTE(review): this is an N+1 query pattern — 2 extra Solr round-trips per
 * author in the facet. Fine for small limits; consider pivot facets or a
 * stored authorName facet if limits grow.
 */
private List<TopAuthorsStatsDto.AuthorStatsDto> getTopAuthorsByStoryCount(String libraryId, int limit)
        throws IOException, SolrServerException {
    SolrQuery query = new SolrQuery("*:*");
    query.addFilterQuery("libraryId:" + libraryId);
    query.setRows(0);
    query.setFacet(true);
    query.addFacetField("authorId");
    query.setFacetLimit(limit);
    query.setFacetSort("count");

    QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
    FacetField authorFacet = response.getFacetField("authorId");

    List<TopAuthorsStatsDto.AuthorStatsDto> topAuthors = new ArrayList<>();
    if (authorFacet != null && authorFacet.getValues() != null) {
        for (FacetField.Count count : authorFacet.getValues()) {
            String authorId = count.getName();
            long storyCount = count.getCount();

            // Get author name and total words
            // (any one story by this author carries the denormalized name)
            SolrQuery authorQuery = new SolrQuery("authorId:" + authorId);
            authorQuery.addFilterQuery("libraryId:" + libraryId);
            authorQuery.setRows(1);
            authorQuery.setFields("authorName");

            QueryResponse authorResponse = solrClient.query(properties.getCores().getStories(), authorQuery);
            // Falls back to "" if no document is returned for the author.
            String authorName = "";
            if (!authorResponse.getResults().isEmpty()) {
                authorName = (String) authorResponse.getResults().get(0).getFieldValue("authorName");
            }

            // Get total words for this author
            long totalWords = getAuthorTotalWords(libraryId, authorId);

            topAuthors.add(new TopAuthorsStatsDto.AuthorStatsDto(authorId, authorName, storyCount, totalWords));
        }
    }

    return topAuthors;
}
|
||||
|
||||
/**
 * Rank authors by their total word count across all stories in the library.
 *
 * Because Solr faceting here sorts by document count (not by a summed
 * field), this fetches ALL authors (facet limit -1), computes each author's
 * word total with extra queries, then sorts in memory and truncates to
 * {@code limit}.
 *
 * NOTE(review): N+1 query pattern over every author in the library (name
 * lookup + stats query per author) — cost grows linearly with author count.
 * A JSON facet with a sum() sub-aggregation would do this in one request.
 */
private List<TopAuthorsStatsDto.AuthorStatsDto> getTopAuthorsByWordCount(String libraryId, int limit)
        throws IOException, SolrServerException {
    // First get all unique authors
    SolrQuery query = new SolrQuery("*:*");
    query.addFilterQuery("libraryId:" + libraryId);
    query.setRows(0);
    query.setFacet(true);
    query.addFacetField("authorId");
    query.setFacetLimit(-1); // Get all authors
    query.setFacetSort("count");

    QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
    FacetField authorFacet = response.getFacetField("authorId");

    List<TopAuthorsStatsDto.AuthorStatsDto> allAuthors = new ArrayList<>();
    if (authorFacet != null && authorFacet.getValues() != null) {
        for (FacetField.Count count : authorFacet.getValues()) {
            String authorId = count.getName();
            long storyCount = count.getCount();

            // Get author name
            SolrQuery authorQuery = new SolrQuery("authorId:" + authorId);
            authorQuery.addFilterQuery("libraryId:" + libraryId);
            authorQuery.setRows(1);
            authorQuery.setFields("authorName");

            QueryResponse authorResponse = solrClient.query(properties.getCores().getStories(), authorQuery);
            String authorName = "";
            if (!authorResponse.getResults().isEmpty()) {
                authorName = (String) authorResponse.getResults().get(0).getFieldValue("authorName");
            }

            // Get total words for this author
            long totalWords = getAuthorTotalWords(libraryId, authorId);

            allAuthors.add(new TopAuthorsStatsDto.AuthorStatsDto(authorId, authorName, storyCount, totalWords));
        }
    }

    // Sort by total words and return top N
    return allAuthors.stream()
            .sorted(Comparator.comparingLong(TopAuthorsStatsDto.AuthorStatsDto::getTotalWords).reversed())
            .limit(limit)
            .collect(Collectors.toList());
}
|
||||
|
||||
private long getAuthorTotalWords(String libraryId, String authorId) throws IOException, SolrServerException {
|
||||
SolrQuery query = new SolrQuery("authorId:" + authorId);
|
||||
query.addFilterQuery("libraryId:" + libraryId);
|
||||
query.setRows(0);
|
||||
query.setParam(StatsParams.STATS, true);
|
||||
query.setParam(StatsParams.STATS_FIELD, "wordCount");
|
||||
|
||||
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
|
||||
|
||||
var fieldStatsInfo = response.getFieldStatsInfo();
|
||||
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
|
||||
var fieldStat = fieldStatsInfo.get("wordCount");
|
||||
Object sumObj = fieldStat.getSum();
|
||||
return (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
|
||||
}
|
||||
|
||||
return 0L;
|
||||
}
|
||||
|
||||
/**
 * Get rating statistics for a library.
 *
 * Two Solr requests: a stats-component query over {@code rating} for the
 * mean and rated-story count, and a facet query for the per-rating
 * distribution. Unrated count is derived as total - rated.
 */
public RatingStatsDto getRatingStatistics(String libraryId) throws IOException, SolrServerException {
    RatingStatsDto stats = new RatingStatsDto();

    // Get average rating using stats component
    SolrQuery query = new SolrQuery("*:*");
    query.addFilterQuery("libraryId:" + libraryId);
    query.addFilterQuery("rating:[* TO *]"); // Only rated stories
    query.setRows(0);
    query.setParam(StatsParams.STATS, true);
    query.setParam(StatsParams.STATS_FIELD, "rating");

    QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
    long totalRated = response.getResults().getNumFound();

    // averageRating stays at its DTO default when Solr returns no stats.
    var fieldStatsInfo = response.getFieldStatsInfo();
    if (fieldStatsInfo != null && fieldStatsInfo.get("rating") != null) {
        var fieldStat = fieldStatsInfo.get("rating");
        Object meanObj = fieldStat.getMean();
        stats.setAverageRating((meanObj != null) ? ((Number) meanObj).doubleValue() : 0.0);
    }

    stats.setTotalRatedStories(totalRated);

    // Get total stories to calculate unrated
    long totalStories = getTotalStories(libraryId);
    stats.setTotalUnratedStories(totalStories - totalRated);

    // Get rating distribution using faceting
    SolrQuery distQuery = new SolrQuery("*:*");
    distQuery.addFilterQuery("libraryId:" + libraryId);
    distQuery.addFilterQuery("rating:[* TO *]");
    distQuery.setRows(0);
    distQuery.setFacet(true);
    distQuery.addFacetField("rating");
    distQuery.setFacetLimit(-1);

    QueryResponse distResponse = solrClient.query(properties.getCores().getStories(), distQuery);
    FacetField ratingFacet = distResponse.getFacetField("rating");

    // Facet bucket names arrive as strings; only integer-parsable ratings
    // make it into the distribution map.
    Map<Integer, Long> distribution = new HashMap<>();
    if (ratingFacet != null && ratingFacet.getValues() != null) {
        for (FacetField.Count count : ratingFacet.getValues()) {
            try {
                int rating = Integer.parseInt(count.getName());
                distribution.put(rating, count.getCount());
            } catch (NumberFormatException e) {
                // Skip invalid ratings
            }
        }
    }

    stats.setRatingDistribution(distribution);

    return stats;
}
|
||||
|
||||
/**
|
||||
* Get source domain statistics
|
||||
*/
|
||||
public SourceDomainStatsDto getSourceDomainStatistics(String libraryId, int limit) throws IOException, SolrServerException {
|
||||
SourceDomainStatsDto stats = new SourceDomainStatsDto();
|
||||
|
||||
// Get top domains using faceting
|
||||
SolrQuery query = new SolrQuery("*:*");
|
||||
query.addFilterQuery("libraryId:" + libraryId);
|
||||
query.addFilterQuery("sourceDomain:[* TO *]"); // Only stories with source
|
||||
query.setRows(0);
|
||||
query.setFacet(true);
|
||||
query.addFacetField("sourceDomain");
|
||||
query.setFacetLimit(limit);
|
||||
query.setFacetSort("count");
|
||||
|
||||
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
|
||||
long storiesWithSource = response.getResults().getNumFound();
|
||||
|
||||
FacetField domainFacet = response.getFacetField("sourceDomain");
|
||||
|
||||
List<SourceDomainStatsDto.DomainStatsDto> topDomains = new ArrayList<>();
|
||||
if (domainFacet != null && domainFacet.getValues() != null) {
|
||||
for (FacetField.Count count : domainFacet.getValues()) {
|
||||
topDomains.add(new SourceDomainStatsDto.DomainStatsDto(count.getName(), count.getCount()));
|
||||
}
|
||||
}
|
||||
|
||||
stats.setTopDomains(topDomains);
|
||||
stats.setStoriesWithSource(storiesWithSource);
|
||||
|
||||
long totalStories = getTotalStories(libraryId);
|
||||
stats.setStoriesWithoutSource(totalStories - storiesWithSource);
|
||||
|
||||
return stats;
|
||||
}
|
||||
|
||||
/**
 * Get reading progress statistics: read/unread story counts, percentage
 * read, and total words in each bucket (via the Solr stats component).
 *
 * NOTE(review): the word totals filter on isRead:true and isRead:false
 * respectively — stories with no isRead value would fall into neither
 * bucket. Confirm the field is always populated at index time.
 */
public ReadingProgressStatsDto getReadingProgressStatistics(String libraryId) throws IOException, SolrServerException {
    ReadingProgressStatsDto stats = new ReadingProgressStatsDto();

    long totalStories = getTotalStories(libraryId);
    stats.setTotalStories(totalStories);

    // Get read stories count
    SolrQuery readQuery = new SolrQuery("*:*");
    readQuery.addFilterQuery("libraryId:" + libraryId);
    readQuery.addFilterQuery("isRead:true");
    readQuery.setRows(0);

    QueryResponse readResponse = solrClient.query(properties.getCores().getStories(), readQuery);
    long readStories = readResponse.getResults().getNumFound();

    stats.setReadStories(readStories);
    stats.setUnreadStories(totalStories - readStories);

    // Guard against divide-by-zero on empty libraries.
    if (totalStories > 0) {
        stats.setPercentageRead((readStories * 100.0) / totalStories);
    }

    // Get total words read
    SolrQuery readWordsQuery = new SolrQuery("*:*");
    readWordsQuery.addFilterQuery("libraryId:" + libraryId);
    readWordsQuery.addFilterQuery("isRead:true");
    readWordsQuery.setRows(0);
    readWordsQuery.setParam(StatsParams.STATS, true);
    readWordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");

    QueryResponse readWordsResponse = solrClient.query(properties.getCores().getStories(), readWordsQuery);
    var readFieldStats = readWordsResponse.getFieldStatsInfo();
    if (readFieldStats != null && readFieldStats.get("wordCount") != null) {
        var fieldStat = readFieldStats.get("wordCount");
        Object sumObj = fieldStat.getSum();
        stats.setTotalWordsRead((sumObj != null) ? ((Number) sumObj).longValue() : 0L);
    }

    // Get total words unread
    SolrQuery unreadWordsQuery = new SolrQuery("*:*");
    unreadWordsQuery.addFilterQuery("libraryId:" + libraryId);
    unreadWordsQuery.addFilterQuery("isRead:false");
    unreadWordsQuery.setRows(0);
    unreadWordsQuery.setParam(StatsParams.STATS, true);
    unreadWordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");

    QueryResponse unreadWordsResponse = solrClient.query(properties.getCores().getStories(), unreadWordsQuery);
    var unreadFieldStats = unreadWordsResponse.getFieldStatsInfo();
    if (unreadFieldStats != null && unreadFieldStats.get("wordCount") != null) {
        var fieldStat = unreadFieldStats.get("wordCount");
        Object sumObj = fieldStat.getSum();
        stats.setTotalWordsUnread((sumObj != null) ? ((Number) sumObj).longValue() : 0L);
    }

    return stats;
}
|
||||
|
||||
/**
|
||||
* Get reading activity statistics for the last week
|
||||
*/
|
||||
public ReadingActivityStatsDto getReadingActivityStatistics(String libraryId) throws IOException, SolrServerException {
|
||||
ReadingActivityStatsDto stats = new ReadingActivityStatsDto();
|
||||
|
||||
LocalDateTime oneWeekAgo = LocalDateTime.now().minusWeeks(1);
|
||||
String oneWeekAgoStr = oneWeekAgo.toInstant(ZoneOffset.UTC).toString();
|
||||
|
||||
// Get stories read in last week
|
||||
SolrQuery query = new SolrQuery("*:*");
|
||||
query.addFilterQuery("libraryId:" + libraryId);
|
||||
query.addFilterQuery("lastReadAt:[" + oneWeekAgoStr + " TO *]");
|
||||
query.setRows(0);
|
||||
|
||||
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
|
||||
long storiesReadLastWeek = response.getResults().getNumFound();
|
||||
stats.setStoriesReadLastWeek(storiesReadLastWeek);
|
||||
|
||||
// Get words read in last week
|
||||
SolrQuery wordsQuery = new SolrQuery("*:*");
|
||||
wordsQuery.addFilterQuery("libraryId:" + libraryId);
|
||||
wordsQuery.addFilterQuery("lastReadAt:[" + oneWeekAgoStr + " TO *]");
|
||||
wordsQuery.setRows(0);
|
||||
wordsQuery.setParam(StatsParams.STATS, true);
|
||||
wordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
|
||||
|
||||
QueryResponse wordsResponse = solrClient.query(properties.getCores().getStories(), wordsQuery);
|
||||
var fieldStatsInfo = wordsResponse.getFieldStatsInfo();
|
||||
long wordsReadLastWeek = 0L;
|
||||
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
|
||||
var fieldStat = fieldStatsInfo.get("wordCount");
|
||||
Object sumObj = fieldStat.getSum();
|
||||
wordsReadLastWeek = (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
|
||||
}
|
||||
|
||||
stats.setWordsReadLastWeek(wordsReadLastWeek);
|
||||
stats.setReadingTimeMinutesLastWeek(wordsReadLastWeek / WORDS_PER_MINUTE);
|
||||
|
||||
// Get daily activity (last 7 days)
|
||||
List<ReadingActivityStatsDto.DailyActivityDto> dailyActivity = new ArrayList<>();
|
||||
for (int i = 6; i >= 0; i--) {
|
||||
LocalDate date = LocalDate.now().minusDays(i);
|
||||
LocalDateTime dayStart = date.atStartOfDay();
|
||||
LocalDateTime dayEnd = date.atTime(23, 59, 59);
|
||||
|
||||
String dayStartStr = dayStart.toInstant(ZoneOffset.UTC).toString();
|
||||
String dayEndStr = dayEnd.toInstant(ZoneOffset.UTC).toString();
|
||||
|
||||
SolrQuery dayQuery = new SolrQuery("*:*");
|
||||
dayQuery.addFilterQuery("libraryId:" + libraryId);
|
||||
dayQuery.addFilterQuery("lastReadAt:[" + dayStartStr + " TO " + dayEndStr + "]");
|
||||
dayQuery.setRows(0);
|
||||
dayQuery.setParam(StatsParams.STATS, true);
|
||||
dayQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
|
||||
|
||||
QueryResponse dayResponse = solrClient.query(properties.getCores().getStories(), dayQuery);
|
||||
long storiesRead = dayResponse.getResults().getNumFound();
|
||||
|
||||
long wordsRead = 0L;
|
||||
var dayFieldStats = dayResponse.getFieldStatsInfo();
|
||||
if (dayFieldStats != null && dayFieldStats.get("wordCount") != null) {
|
||||
var fieldStat = dayFieldStats.get("wordCount");
|
||||
Object sumObj = fieldStat.getSum();
|
||||
wordsRead = (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
|
||||
}
|
||||
|
||||
dailyActivity.add(new ReadingActivityStatsDto.DailyActivityDto(
|
||||
date.format(DateTimeFormatter.ISO_LOCAL_DATE),
|
||||
storiesRead,
|
||||
wordsRead
|
||||
));
|
||||
}
|
||||
|
||||
stats.setDailyActivity(dailyActivity);
|
||||
|
||||
return stats;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,683 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.FileImportResponse;
|
||||
import com.storycove.dto.PDFImportRequest;
|
||||
import com.storycove.entity.*;
|
||||
import com.storycove.service.exception.InvalidFileException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
|
||||
import org.apache.pdfbox.Loader;
|
||||
import org.apache.pdfbox.pdmodel.PDDocument;
|
||||
import org.apache.pdfbox.pdmodel.PDDocumentInformation;
|
||||
import org.apache.pdfbox.pdmodel.PDPage;
|
||||
import org.apache.pdfbox.pdmodel.graphics.image.PDImageXObject;
|
||||
import org.apache.pdfbox.text.PDFTextStripper;
|
||||
import org.apache.pdfbox.text.TextPosition;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import javax.imageio.ImageIO;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.*;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
@Service
|
||||
@Transactional
|
||||
public class PDFImportService {
|
||||
private static final Logger log = LoggerFactory.getLogger(PDFImportService.class);

// Matches a line consisting solely of digits (a bare page number).
private static final Pattern PAGE_NUMBER_PATTERN = Pattern.compile("^\\s*\\d+\\s*$");
// Upper bound on accepted upload size.
private static final int MAX_FILE_SIZE = 300 * 1024 * 1024; // 300MB

// Collaborators injected via the constructor below.
private final StoryService storyService;
private final AuthorService authorService;
private final SeriesService seriesService;
private final TagService tagService;
private final HtmlSanitizationService sanitizationService;
private final ImageService imageService;
private final LibraryService libraryService;

/**
 * Constructor injection of all collaborating services.
 */
@Autowired
public PDFImportService(StoryService storyService,
                        AuthorService authorService,
                        SeriesService seriesService,
                        TagService tagService,
                        HtmlSanitizationService sanitizationService,
                        ImageService imageService,
                        LibraryService libraryService) {
    this.storyService = storyService;
    this.authorService = authorService;
    this.seriesService = seriesService;
    this.tagService = tagService;
    this.sanitizationService = sanitizationService;
    this.imageService = imageService;
    this.libraryService = libraryService;
}
|
||||
|
||||
public FileImportResponse importPDF(PDFImportRequest request) {
|
||||
try {
|
||||
MultipartFile pdfFile = request.getPdfFile();
|
||||
|
||||
if (pdfFile == null || pdfFile.isEmpty()) {
|
||||
return FileImportResponse.error("PDF file is required", null);
|
||||
}
|
||||
|
||||
if (!isValidPDFFile(pdfFile)) {
|
||||
return FileImportResponse.error("Invalid PDF file format", pdfFile.getOriginalFilename());
|
||||
}
|
||||
|
||||
log.info("Parsing PDF file: {}", pdfFile.getOriginalFilename());
|
||||
PDDocument document = parsePDFFile(pdfFile);
|
||||
|
||||
try {
|
||||
log.info("Extracting metadata from PDF");
|
||||
PDFMetadata metadata = extractMetadata(document, pdfFile.getOriginalFilename());
|
||||
|
||||
// Validate author is provided
|
||||
String authorName = determineAuthorName(request, metadata);
|
||||
if (authorName == null || authorName.trim().isEmpty()) {
|
||||
return FileImportResponse.error("Author name is required for PDF import. No author found in PDF metadata.", pdfFile.getOriginalFilename());
|
||||
}
|
||||
|
||||
log.info("Extracting content and images from PDF");
|
||||
PDFContent content = extractContentWithImages(document, request.getExtractImages());
|
||||
|
||||
log.info("Creating story entity from PDF");
|
||||
Story story = createStoryFromPDF(metadata, content, request, authorName);
|
||||
|
||||
log.info("Saving story to database: {}", story.getTitle());
|
||||
Story savedStory = storyService.create(story);
|
||||
log.info("Story saved successfully with ID: {}", savedStory.getId());
|
||||
|
||||
// Process and save embedded images if any were extracted
|
||||
if (request.getExtractImages() && !content.getImages().isEmpty()) {
|
||||
try {
|
||||
log.info("Processing {} embedded images for story: {}", content.getImages().size(), savedStory.getId());
|
||||
String updatedContent = processAndSaveImages(content, savedStory.getId());
|
||||
|
||||
if (!updatedContent.equals(savedStory.getContentHtml())) {
|
||||
savedStory.setContentHtml(updatedContent);
|
||||
savedStory = storyService.update(savedStory.getId(), savedStory);
|
||||
log.info("Story content updated with processed images");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to process embedded images for story {}: {}", savedStory.getId(), e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
log.info("PDF import completed successfully for: {}", savedStory.getTitle());
|
||||
FileImportResponse response = FileImportResponse.success(savedStory.getId(), savedStory.getTitle(), "PDF");
|
||||
response.setFileName(pdfFile.getOriginalFilename());
|
||||
response.setWordCount(savedStory.getWordCount());
|
||||
response.setExtractedImages(content.getImages().size());
|
||||
|
||||
return response;
|
||||
|
||||
} finally {
|
||||
document.close();
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("PDF import failed with exception: {}", e.getMessage(), e);
|
||||
return FileImportResponse.error("Failed to import PDF: " + e.getMessage(),
|
||||
request.getPdfFile() != null ? request.getPdfFile().getOriginalFilename() : null);
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isValidPDFFile(MultipartFile file) {
|
||||
String filename = file.getOriginalFilename();
|
||||
if (filename == null || !filename.toLowerCase().endsWith(".pdf")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (file.getSize() > MAX_FILE_SIZE) {
|
||||
log.warn("PDF file size {} exceeds maximum {}", file.getSize(), MAX_FILE_SIZE);
|
||||
return false;
|
||||
}
|
||||
|
||||
String contentType = file.getContentType();
|
||||
return "application/pdf".equals(contentType) || contentType == null;
|
||||
}
|
||||
|
||||
private PDDocument parsePDFFile(MultipartFile pdfFile) throws IOException {
|
||||
try (InputStream inputStream = pdfFile.getInputStream()) {
|
||||
return Loader.loadPDF(inputStream.readAllBytes());
|
||||
} catch (Exception e) {
|
||||
throw new InvalidFileException("Failed to parse PDF file: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private PDFMetadata extractMetadata(PDDocument document, String fileName) {
|
||||
PDFMetadata metadata = new PDFMetadata();
|
||||
PDDocumentInformation info = document.getDocumentInformation();
|
||||
|
||||
if (info != null) {
|
||||
metadata.setTitle(info.getTitle());
|
||||
metadata.setAuthor(info.getAuthor());
|
||||
metadata.setSubject(info.getSubject());
|
||||
metadata.setKeywords(info.getKeywords());
|
||||
metadata.setCreator(info.getCreator());
|
||||
}
|
||||
|
||||
// Use filename as fallback title
|
||||
if (metadata.getTitle() == null || metadata.getTitle().trim().isEmpty()) {
|
||||
String titleFromFilename = fileName.replaceAll("\\.pdf$", "").replaceAll("[_-]", " ");
|
||||
metadata.setTitle(titleFromFilename);
|
||||
}
|
||||
|
||||
metadata.setPageCount(document.getNumberOfPages());
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
/**
 * Extract the PDF's text as HTML paragraphs, optionally collecting embedded
 * images.
 *
 * Text is pulled page by page through CustomPDFTextStripper (defined
 * elsewhere in this package), filtered for headers/footers, split into
 * paragraphs on blank lines, and HTML-escaped. Extracted images are emitted
 * as placeholder <img data-pdf-image-id=...> tags that
 * processAndSaveImages later resolves to real URLs.
 *
 * NOTE(review): a page's images are appended AFTER all of that page's text,
 * not at their original position within the text flow.
 *
 * @param extractImages nullable flag; null is treated as false
 */
private PDFContent extractContentWithImages(PDDocument document, Boolean extractImages) throws IOException {
    PDFContent content = new PDFContent();
    StringBuilder htmlContent = new StringBuilder();
    List<PDFImage> images = new ArrayList<>();

    // Null-safe unboxing of the flag.
    boolean shouldExtractImages = extractImages != null && extractImages;

    // Extract images first to know their positions
    if (shouldExtractImages) {
        images = extractImagesFromPDF(document);
        log.info("Extracted {} images from PDF", images.size());
    }

    // Extract text with custom stripper to filter headers/footers
    CustomPDFTextStripper stripper = new CustomPDFTextStripper();
    stripper.setSortByPosition(true);

    // Process page by page to insert images at correct positions
    for (int pageNum = 0; pageNum < document.getNumberOfPages(); pageNum++) {
        // Stripper pages are 1-based.
        stripper.setStartPage(pageNum + 1);
        stripper.setEndPage(pageNum + 1);

        String pageText = stripper.getText(document);

        // Filter out obvious page numbers and headers/footers
        pageText = filterHeadersFooters(pageText, pageNum + 1);

        if (pageText != null && !pageText.trim().isEmpty()) {
            // Convert text to HTML paragraphs (blank-line separated)
            String[] paragraphs = pageText.split("\\n\\s*\\n");

            for (String para : paragraphs) {
                String trimmed = para.trim();
                if (!trimmed.isEmpty() && !isLikelyHeaderFooter(trimmed)) {
                    htmlContent.append("<p>").append(escapeHtml(trimmed)).append("</p>\n");
                }
            }
        }

        // Insert images that belong to this page
        if (shouldExtractImages) {
            for (PDFImage image : images) {
                if (image.getPageNumber() == pageNum) {
                    // Add placeholder for image (will be replaced with actual path after saving)
                    htmlContent.append("<img data-pdf-image-id=\"")
                            .append(image.getImageId())
                            .append("\" alt=\"Image from PDF\" />\n");
                }
            }
        }
    }

    content.setHtmlContent(htmlContent.toString());
    content.setImages(images);

    return content;
}
|
||||
|
||||
/**
 * Walk every page's XObject resources and collect raster images as PNG
 * byte arrays wrapped in PDFImage records (id, page number, dimensions).
 *
 * Images smaller than 50x50 px are skipped as likely decoration. All
 * per-image and per-page failures are logged and swallowed so one bad
 * object cannot abort the whole extraction.
 */
private List<PDFImage> extractImagesFromPDF(PDDocument document) {
    List<PDFImage> images = new ArrayList<>();
    // Monotonic counter used to build stable "pdf-img-N" ids.
    int imageCounter = 0;

    for (int pageNum = 0; pageNum < document.getNumberOfPages(); pageNum++) {
        try {
            PDPage page = document.getPage(pageNum);

            // Get all images from the page resources
            Iterable<org.apache.pdfbox.cos.COSName> names = page.getResources().getXObjectNames();
            for (org.apache.pdfbox.cos.COSName name : names) {
                try {
                    org.apache.pdfbox.pdmodel.graphics.PDXObject xObject = page.getResources().getXObject(name);

                    if (xObject instanceof PDImageXObject) {
                        PDImageXObject imageObj = (PDImageXObject) xObject;
                        BufferedImage bImage = imageObj.getImage();

                        // Skip very small images (likely decorative or icons)
                        if (bImage.getWidth() < 50 || bImage.getHeight() < 50) {
                            continue;
                        }

                        // Convert BufferedImage to byte array (re-encoded as PNG
                        // regardless of the original embedded format)
                        ByteArrayOutputStream baos = new ByteArrayOutputStream();
                        ImageIO.write(bImage, "png", baos);
                        byte[] imageBytes = baos.toByteArray();

                        PDFImage pdfImage = new PDFImage();
                        pdfImage.setImageId("pdf-img-" + imageCounter);
                        pdfImage.setPageNumber(pageNum);
                        pdfImage.setImageData(imageBytes);
                        pdfImage.setWidth(bImage.getWidth());
                        pdfImage.setHeight(bImage.getHeight());

                        images.add(pdfImage);
                        imageCounter++;
                    }
                } catch (Exception e) {
                    // One broken XObject must not abort the page.
                    log.warn("Failed to extract image '{}' from page {}: {}", name, pageNum, e.getMessage());
                }
            }
        } catch (Exception e) {
            // One broken page must not abort the document.
            log.warn("Failed to process images on page {}: {}", pageNum, e.getMessage());
        }
    }

    return images;
}
|
||||
|
||||
private String processAndSaveImages(PDFContent content, UUID storyId) throws IOException {
|
||||
String htmlContent = content.getHtmlContent();
|
||||
|
||||
// Get current library ID for constructing image URLs
|
||||
String currentLibraryId = libraryService.getCurrentLibraryId();
|
||||
if (currentLibraryId == null || currentLibraryId.trim().isEmpty()) {
|
||||
log.warn("Current library ID is null or empty when processing PDF images for story: {}", storyId);
|
||||
currentLibraryId = "default";
|
||||
}
|
||||
|
||||
for (PDFImage image : content.getImages()) {
|
||||
try {
|
||||
// Create a MultipartFile from the image bytes
|
||||
MultipartFile imageFile = new PDFImageMultipartFile(
|
||||
image.getImageData(),
|
||||
"pdf-image-" + image.getImageId() + ".png",
|
||||
"image/png"
|
||||
);
|
||||
|
||||
// Save the image using ImageService (ImageType.CONTENT saves to content directory)
|
||||
String imagePath = imageService.uploadImage(imageFile, ImageService.ImageType.CONTENT);
|
||||
|
||||
// Construct the full URL with library ID
|
||||
// imagePath will be like "content/uuid.png"
|
||||
String imageUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
|
||||
|
||||
// Replace placeholder with actual image URL
|
||||
String placeholder = "data-pdf-image-id=\"" + image.getImageId() + "\"";
|
||||
String replacement = "src=\"" + imageUrl + "\"";
|
||||
htmlContent = htmlContent.replace(placeholder, replacement);
|
||||
|
||||
log.debug("Saved PDF image {} to path: {} (URL: {})", image.getImageId(), imagePath, imageUrl);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to save PDF image {}: {}", image.getImageId(), e.getMessage());
|
||||
// Remove the placeholder if we failed to save the image
|
||||
htmlContent = htmlContent.replaceAll(
|
||||
"<img data-pdf-image-id=\"" + image.getImageId() + "\"[^>]*>",
|
||||
""
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return htmlContent;
|
||||
}
|
||||
|
||||
private String filterHeadersFooters(String text, int pageNumber) {
|
||||
if (text == null) return "";
|
||||
|
||||
String[] lines = text.split("\\n");
|
||||
if (lines.length <= 2) return text; // Too short to have headers/footers
|
||||
|
||||
StringBuilder filtered = new StringBuilder();
|
||||
|
||||
// Skip first line if it looks like a header
|
||||
int startIdx = 0;
|
||||
if (lines.length > 1 && isLikelyHeaderFooter(lines[0])) {
|
||||
startIdx = 1;
|
||||
}
|
||||
|
||||
// Skip last line if it looks like a footer or page number
|
||||
int endIdx = lines.length;
|
||||
if (lines.length > 1 && isLikelyHeaderFooter(lines[lines.length - 1])) {
|
||||
endIdx = lines.length - 1;
|
||||
}
|
||||
|
||||
for (int i = startIdx; i < endIdx; i++) {
|
||||
filtered.append(lines[i]).append("\n");
|
||||
}
|
||||
|
||||
return filtered.toString();
|
||||
}
|
||||
|
||||
private boolean isLikelyHeaderFooter(String line) {
|
||||
String trimmed = line.trim();
|
||||
|
||||
// Check if it's just a page number
|
||||
if (PAGE_NUMBER_PATTERN.matcher(trimmed).matches()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check if it's very short (likely header/footer)
|
||||
if (trimmed.length() < 3) {
|
||||
return true;
|
||||
}
|
||||
|
||||
// Check for common header/footer patterns
|
||||
String lower = trimmed.toLowerCase();
|
||||
if (lower.matches(".*page \\d+.*") ||
|
||||
lower.matches(".*\\d+ of \\d+.*") ||
|
||||
lower.matches("chapter \\d+") ||
|
||||
lower.matches("\\d+")) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
private String determineAuthorName(PDFImportRequest request, PDFMetadata metadata) {
|
||||
// Priority: request.authorName > request.authorId > metadata.author
|
||||
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty()) {
|
||||
return request.getAuthorName().trim();
|
||||
}
|
||||
|
||||
if (request.getAuthorId() != null) {
|
||||
try {
|
||||
Author author = authorService.findById(request.getAuthorId());
|
||||
return author.getName();
|
||||
} catch (ResourceNotFoundException e) {
|
||||
log.warn("Author ID {} not found", request.getAuthorId());
|
||||
}
|
||||
}
|
||||
|
||||
if (metadata.getAuthor() != null && !metadata.getAuthor().trim().isEmpty()) {
|
||||
return metadata.getAuthor().trim();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
 * Build a Story entity from the extracted PDF data: title/description from
 * metadata, sanitized HTML content, then author, series, and tag wiring.
 *
 * Author/series/tag failures other than ResourceNotFoundException are
 * logged and rethrown; individual tag-creation failures are logged and
 * skipped.
 *
 * NOTE(review): getCreateMissingAuthor()/getCreateMissingSeries() look like
 * Boolean getters — if either can return null, the unboxing in the
 * conditions below would NPE. Confirm their defaults.
 */
private Story createStoryFromPDF(PDFMetadata metadata, PDFContent content,
                                 PDFImportRequest request, String authorName) {
    Story story = new Story();
    story.setTitle(metadata.getTitle() != null ? metadata.getTitle() : "Untitled PDF");
    story.setDescription(metadata.getSubject());
    // Sanitize before persisting — the HTML was assembled from PDF text/images.
    story.setContentHtml(sanitizationService.sanitize(content.getHtmlContent()));

    // Handle author assignment
    try {
        if (request.getAuthorId() != null) {
            try {
                Author author = authorService.findById(request.getAuthorId());
                story.setAuthor(author);
            } catch (ResourceNotFoundException e) {
                // Unknown id: optionally create the author from the resolved name.
                if (request.getCreateMissingAuthor()) {
                    Author newAuthor = createAuthor(authorName);
                    story.setAuthor(newAuthor);
                }
            }
        } else if (authorName != null && request.getCreateMissingAuthor()) {
            Author author = findOrCreateAuthor(authorName);
            story.setAuthor(author);
        }
    } catch (Exception e) {
        log.error("Error handling author assignment: {}", e.getMessage(), e);
        throw e;
    }

    // Handle series assignment (only when both id and volume are provided)
    try {
        if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
            try {
                Series series = seriesService.findById(request.getSeriesId());
                story.setSeries(series);
                story.setVolume(request.getSeriesVolume());
            } catch (ResourceNotFoundException e) {
                if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
                    Series newSeries = createSeries(request.getSeriesName());
                    story.setSeries(newSeries);
                    story.setVolume(request.getSeriesVolume());
                }
            }
        }
    } catch (Exception e) {
        log.error("Error handling series assignment: {}", e.getMessage(), e);
        throw e;
    }

    // Handle tags: merge request tags with PDF keyword metadata
    try {
        List<String> allTags = new ArrayList<>();
        if (request.getTags() != null && !request.getTags().isEmpty()) {
            allTags.addAll(request.getTags());
        }

        // Extract keywords from PDF metadata (comma- or semicolon-separated)
        if (metadata.getKeywords() != null && !metadata.getKeywords().trim().isEmpty()) {
            String[] keywords = metadata.getKeywords().split("[,;]");
            for (String keyword : keywords) {
                String trimmed = keyword.trim();
                if (!trimmed.isEmpty()) {
                    allTags.add(trimmed);
                }
            }
        }

        // Create tags; a single failed tag is logged and skipped.
        allTags.stream()
                .distinct()
                .forEach(tagName -> {
                    try {
                        Tag tag = tagService.findOrCreate(tagName.trim());
                        story.addTag(tag);
                    } catch (Exception e) {
                        log.error("Error creating tag '{}': {}", tagName, e.getMessage(), e);
                    }
                });
    } catch (Exception e) {
        log.error("Error handling tags: {}", e.getMessage(), e);
        throw e;
    }

    return story;
}
|
||||
|
||||
private Author findOrCreateAuthor(String authorName) {
|
||||
Optional<Author> existingAuthor = authorService.findByNameOptional(authorName);
|
||||
if (existingAuthor.isPresent()) {
|
||||
return existingAuthor.get();
|
||||
}
|
||||
return createAuthor(authorName);
|
||||
}
|
||||
|
||||
private Author createAuthor(String authorName) {
|
||||
Author author = new Author();
|
||||
author.setName(authorName);
|
||||
return authorService.create(author);
|
||||
}
|
||||
|
||||
private Series createSeries(String seriesName) {
|
||||
Series series = new Series();
|
||||
series.setName(seriesName);
|
||||
return seriesService.create(series);
|
||||
}
|
||||
|
||||
private String escapeHtml(String text) {
|
||||
return text.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\"", """)
|
||||
.replace("'", "'")
|
||||
.replace("\n", "<br/>");
|
||||
}
|
||||
|
||||
public List<String> validatePDFFile(MultipartFile file) {
|
||||
List<String> errors = new ArrayList<>();
|
||||
|
||||
if (file == null || file.isEmpty()) {
|
||||
errors.add("PDF file is required");
|
||||
return errors;
|
||||
}
|
||||
|
||||
if (!isValidPDFFile(file)) {
|
||||
errors.add("Invalid PDF file format. Only .pdf files are supported");
|
||||
}
|
||||
|
||||
if (file.getSize() > MAX_FILE_SIZE) {
|
||||
errors.add("PDF file size exceeds " + (MAX_FILE_SIZE / 1024 / 1024) + "MB limit");
|
||||
}
|
||||
|
||||
try {
|
||||
PDDocument document = parsePDFFile(file);
|
||||
try {
|
||||
if (document.getNumberOfPages() == 0) {
|
||||
errors.add("PDF file contains no pages");
|
||||
}
|
||||
} finally {
|
||||
document.close();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
errors.add("Failed to parse PDF file: " + e.getMessage());
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
// Inner classes for data structures
|
||||
|
||||
private static class PDFMetadata {
|
||||
private String title;
|
||||
private String author;
|
||||
private String subject;
|
||||
private String keywords;
|
||||
private String creator;
|
||||
private int pageCount;
|
||||
|
||||
public String getTitle() { return title; }
|
||||
public void setTitle(String title) { this.title = title; }
|
||||
public String getAuthor() { return author; }
|
||||
public void setAuthor(String author) { this.author = author; }
|
||||
public String getSubject() { return subject; }
|
||||
public void setSubject(String subject) { this.subject = subject; }
|
||||
public String getKeywords() { return keywords; }
|
||||
public void setKeywords(String keywords) { this.keywords = keywords; }
|
||||
public String getCreator() { return creator; }
|
||||
public void setCreator(String creator) { this.creator = creator; }
|
||||
public int getPageCount() { return pageCount; }
|
||||
public void setPageCount(int pageCount) { this.pageCount = pageCount; }
|
||||
}
|
||||
|
||||
private static class PDFContent {
|
||||
private String htmlContent;
|
||||
private List<PDFImage> images = new ArrayList<>();
|
||||
|
||||
public String getHtmlContent() { return htmlContent; }
|
||||
public void setHtmlContent(String htmlContent) { this.htmlContent = htmlContent; }
|
||||
public List<PDFImage> getImages() { return images; }
|
||||
public void setImages(List<PDFImage> images) { this.images = images; }
|
||||
}
|
||||
|
||||
private static class PDFImage {
|
||||
private String imageId;
|
||||
private int pageNumber;
|
||||
private byte[] imageData;
|
||||
private int width;
|
||||
private int height;
|
||||
|
||||
public String getImageId() { return imageId; }
|
||||
public void setImageId(String imageId) { this.imageId = imageId; }
|
||||
public int getPageNumber() { return pageNumber; }
|
||||
public void setPageNumber(int pageNumber) { this.pageNumber = pageNumber; }
|
||||
public byte[] getImageData() { return imageData; }
|
||||
public void setImageData(byte[] imageData) { this.imageData = imageData; }
|
||||
public int getWidth() { return width; }
|
||||
public void setWidth(int width) { this.width = width; }
|
||||
public int getHeight() { return height; }
|
||||
public void setHeight(int height) { this.height = height; }
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom PDF text stripper to filter headers/footers
|
||||
*/
|
||||
private static class CustomPDFTextStripper extends PDFTextStripper {
|
||||
public CustomPDFTextStripper() throws IOException {
|
||||
super();
|
||||
}
|
||||
|
||||
@Override
|
||||
protected void writeString(String text, List<TextPosition> textPositions) throws IOException {
|
||||
super.writeString(text, textPositions);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom MultipartFile implementation for PDF images
|
||||
*/
|
||||
private static class PDFImageMultipartFile implements MultipartFile {
|
||||
private final byte[] data;
|
||||
private final String filename;
|
||||
private final String contentType;
|
||||
|
||||
public PDFImageMultipartFile(byte[] data, String filename, String contentType) {
|
||||
this.data = data;
|
||||
this.filename = filename;
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "image";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOriginalFilename() {
|
||||
return filename;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return data == null || data.length == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() {
|
||||
return data != null ? data.length : 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] getBytes() {
|
||||
return data;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
return new ByteArrayInputStream(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferTo(java.io.File dest) throws IOException {
|
||||
try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
|
||||
fos.write(data);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferTo(java.nio.file.Path dest) throws IOException {
|
||||
java.nio.file.Files.write(dest, data);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -347,6 +347,7 @@ public class SolrService {
|
||||
doc.addField("volume", story.getVolume());
|
||||
doc.addField("isRead", story.getIsRead());
|
||||
doc.addField("readingPosition", story.getReadingPosition());
|
||||
doc.addField("readingProgressPercentage", calculateReadingProgressPercentage(story));
|
||||
|
||||
if (story.getLastReadAt() != null) {
|
||||
doc.addField("lastReadAt", formatDateTime(story.getLastReadAt()));
|
||||
@@ -544,6 +545,26 @@ public class SolrService {
|
||||
return dateTime.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + "Z";
|
||||
}
|
||||
|
||||
private Integer calculateReadingProgressPercentage(Story story) {
|
||||
if (story.getReadingPosition() == null || story.getReadingPosition() == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// ALWAYS use contentHtml for consistency (frontend uses contentHtml for position tracking)
|
||||
int totalLength = 0;
|
||||
if (story.getContentHtml() != null && !story.getContentHtml().isEmpty()) {
|
||||
totalLength = story.getContentHtml().length();
|
||||
}
|
||||
|
||||
if (totalLength == 0) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Calculate percentage and round to nearest integer
|
||||
int percentage = Math.round((float) story.getReadingPosition() * 100 / totalLength);
|
||||
return Math.min(100, percentage);
|
||||
}
|
||||
|
||||
// ===============================
|
||||
// UTILITY METHODS
|
||||
// ===============================
|
||||
@@ -1039,6 +1060,7 @@ public class SolrService {
|
||||
story.setVolume((Integer) doc.getFieldValue("volume"));
|
||||
story.setIsRead((Boolean) doc.getFieldValue("isRead"));
|
||||
story.setReadingPosition((Integer) doc.getFieldValue("readingPosition"));
|
||||
story.setReadingProgressPercentage((Integer) doc.getFieldValue("readingProgressPercentage"));
|
||||
|
||||
// Handle dates
|
||||
story.setLastReadAt(parseDateTimeFromSolr(doc.getFieldValue("lastReadAt")));
|
||||
|
||||
@@ -33,6 +33,7 @@ public class TagService {
|
||||
|
||||
private final TagRepository tagRepository;
|
||||
private final TagAliasRepository tagAliasRepository;
|
||||
private SolrService solrService;
|
||||
|
||||
@Autowired
|
||||
public TagService(TagRepository tagRepository, TagAliasRepository tagAliasRepository) {
|
||||
@@ -40,6 +41,11 @@ public class TagService {
|
||||
this.tagAliasRepository = tagAliasRepository;
|
||||
}
|
||||
|
||||
@Autowired(required = false)
|
||||
public void setSolrService(SolrService solrService) {
|
||||
this.solrService = solrService;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Tag> findAll() {
|
||||
return tagRepository.findAll();
|
||||
@@ -143,12 +149,38 @@ public class TagService {
|
||||
public void delete(UUID id) {
|
||||
Tag tag = findById(id);
|
||||
|
||||
// Check if tag is used by any stories
|
||||
// Remove tag from all stories before deletion and track for reindexing
|
||||
List<Story> storiesToReindex = new ArrayList<>();
|
||||
if (!tag.getStories().isEmpty()) {
|
||||
throw new IllegalStateException("Cannot delete tag that is used by stories. Remove tag from all stories first.");
|
||||
// Create a copy to avoid ConcurrentModificationException
|
||||
List<Story> storiesToUpdate = new ArrayList<>(tag.getStories());
|
||||
storiesToUpdate.forEach(story -> {
|
||||
story.removeTag(tag);
|
||||
storiesToReindex.add(story);
|
||||
});
|
||||
logger.info("Removed tag '{}' from {} stories before deletion", tag.getName(), storiesToUpdate.size());
|
||||
}
|
||||
|
||||
// Remove tag from all collections before deletion
|
||||
if (tag.getCollections() != null && !tag.getCollections().isEmpty()) {
|
||||
tag.getCollections().forEach(collection -> collection.getTags().remove(tag));
|
||||
logger.info("Removed tag '{}' from {} collections before deletion", tag.getName(), tag.getCollections().size());
|
||||
}
|
||||
|
||||
tagRepository.delete(tag);
|
||||
logger.info("Deleted tag '{}'", tag.getName());
|
||||
|
||||
// Reindex affected stories in Solr
|
||||
if (solrService != null && !storiesToReindex.isEmpty()) {
|
||||
try {
|
||||
for (Story story : storiesToReindex) {
|
||||
solrService.indexStory(story);
|
||||
}
|
||||
logger.info("Reindexed {} stories after tag deletion", storiesToReindex.size());
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to reindex stories after tag deletion", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public List<Tag> deleteUnusedTags() {
|
||||
|
||||
@@ -0,0 +1,521 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.*;
|
||||
import com.storycove.service.exception.InvalidFileException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
import java.util.zip.ZipEntry;
|
||||
import java.util.zip.ZipInputStream;
|
||||
|
||||
@Service
|
||||
public class ZIPImportService {
|
||||
private static final Logger log = LoggerFactory.getLogger(ZIPImportService.class);
|
||||
|
||||
private static final long MAX_ZIP_SIZE = 1024L * 1024 * 1024; // 1GB
|
||||
private static final int MAX_FILES_IN_ZIP = 30;
|
||||
private static final long ZIP_SESSION_TIMEOUT_MS = 30 * 60 * 1000; // 30 minutes
|
||||
|
||||
// Temporary storage for extracted ZIP files (sessionId -> session data)
|
||||
private final Map<String, ZIPSession> activeSessions = new ConcurrentHashMap<>();
|
||||
|
||||
private final EPUBImportService epubImportService;
|
||||
private final PDFImportService pdfImportService;
|
||||
|
||||
@Autowired
|
||||
public ZIPImportService(EPUBImportService epubImportService,
|
||||
PDFImportService pdfImportService) {
|
||||
this.epubImportService = epubImportService;
|
||||
this.pdfImportService = pdfImportService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Analyze a ZIP file and return information about its contents
|
||||
*/
|
||||
public ZIPAnalysisResponse analyzeZIPFile(MultipartFile zipFile) {
|
||||
try {
|
||||
// Validate ZIP file
|
||||
if (zipFile == null || zipFile.isEmpty()) {
|
||||
return ZIPAnalysisResponse.error("ZIP file is required");
|
||||
}
|
||||
|
||||
if (!isValidZIPFile(zipFile)) {
|
||||
return ZIPAnalysisResponse.error("Invalid ZIP file format");
|
||||
}
|
||||
|
||||
if (zipFile.getSize() > MAX_ZIP_SIZE) {
|
||||
return ZIPAnalysisResponse.error("ZIP file size exceeds " + (MAX_ZIP_SIZE / 1024 / 1024) + "MB limit");
|
||||
}
|
||||
|
||||
log.info("Analyzing ZIP file: {} (size: {} bytes)", zipFile.getOriginalFilename(), zipFile.getSize());
|
||||
|
||||
// Create temporary directory for extraction
|
||||
String sessionId = UUID.randomUUID().toString();
|
||||
Path tempDir = Files.createTempDirectory("storycove-zip-" + sessionId);
|
||||
|
||||
// Extract ZIP contents
|
||||
List<FileInfoDto> files = extractAndAnalyzeZIP(zipFile, tempDir, sessionId);
|
||||
|
||||
if (files.isEmpty()) {
|
||||
cleanupSession(sessionId);
|
||||
return ZIPAnalysisResponse.error("No valid EPUB or PDF files found in ZIP");
|
||||
}
|
||||
|
||||
if (files.size() > MAX_FILES_IN_ZIP) {
|
||||
cleanupSession(sessionId);
|
||||
return ZIPAnalysisResponse.error("ZIP contains too many files (max " + MAX_FILES_IN_ZIP + ")");
|
||||
}
|
||||
|
||||
// Store session data
|
||||
ZIPSession session = new ZIPSession(sessionId, tempDir, files);
|
||||
activeSessions.put(sessionId, session);
|
||||
|
||||
// Schedule cleanup
|
||||
scheduleSessionCleanup(sessionId);
|
||||
|
||||
ZIPAnalysisResponse response = ZIPAnalysisResponse.success(zipFile.getOriginalFilename(), files);
|
||||
response.addWarning("Session ID: " + sessionId + " (valid for 30 minutes)");
|
||||
|
||||
log.info("ZIP analysis completed. Session ID: {}, Files found: {}", sessionId, files.size());
|
||||
return response;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to analyze ZIP file: {}", e.getMessage(), e);
|
||||
return ZIPAnalysisResponse.error("Failed to analyze ZIP file: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Import selected files from a previously analyzed ZIP
|
||||
*/
|
||||
public ZIPImportResponse importFromZIP(ZIPImportRequest request) {
|
||||
try {
|
||||
// Validate session
|
||||
ZIPSession session = activeSessions.get(request.getZipSessionId());
|
||||
if (session == null) {
|
||||
return createErrorResponse("Invalid or expired session ID");
|
||||
}
|
||||
|
||||
if (session.isExpired()) {
|
||||
cleanupSession(request.getZipSessionId());
|
||||
return createErrorResponse("Session has expired. Please re-upload the ZIP file");
|
||||
}
|
||||
|
||||
List<String> selectedFiles = request.getSelectedFiles();
|
||||
if (selectedFiles == null || selectedFiles.isEmpty()) {
|
||||
return createErrorResponse("No files selected for import");
|
||||
}
|
||||
|
||||
log.info("Importing {} files from ZIP session: {}", selectedFiles.size(), request.getZipSessionId());
|
||||
|
||||
List<FileImportResponse> results = new ArrayList<>();
|
||||
|
||||
// Import each selected file
|
||||
for (String fileName : selectedFiles) {
|
||||
try {
|
||||
FileInfoDto fileInfo = session.getFileInfo(fileName);
|
||||
if (fileInfo == null) {
|
||||
FileImportResponse errorResult = FileImportResponse.error("File not found in session: " + fileName, fileName);
|
||||
results.add(errorResult);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (fileInfo.getError() != null) {
|
||||
FileImportResponse errorResult = FileImportResponse.error("File has errors: " + fileInfo.getError(), fileName);
|
||||
results.add(errorResult);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Get file-specific or default metadata
|
||||
ZIPImportRequest.FileImportMetadata metadata = getFileMetadata(request, fileName);
|
||||
|
||||
// Import based on file type
|
||||
FileImportResponse result;
|
||||
if ("EPUB".equals(fileInfo.getFileType())) {
|
||||
result = importEPUBFromSession(session, fileName, metadata, request);
|
||||
} else if ("PDF".equals(fileInfo.getFileType())) {
|
||||
result = importPDFFromSession(session, fileName, metadata, request);
|
||||
} else {
|
||||
result = FileImportResponse.error("Unsupported file type: " + fileInfo.getFileType(), fileName);
|
||||
}
|
||||
|
||||
results.add(result);
|
||||
|
||||
if (result.isSuccess()) {
|
||||
log.info("Successfully imported file: {} (Story ID: {})", fileName, result.getStoryId());
|
||||
} else {
|
||||
log.warn("Failed to import file: {} - {}", fileName, result.getMessage());
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to import file {}: {}", fileName, e.getMessage(), e);
|
||||
FileImportResponse errorResult = FileImportResponse.error("Import failed: " + e.getMessage(), fileName);
|
||||
results.add(errorResult);
|
||||
}
|
||||
}
|
||||
|
||||
// Cleanup session after import
|
||||
cleanupSession(request.getZipSessionId());
|
||||
|
||||
log.info("ZIP import completed. Total: {}, Success: {}, Failed: {}",
|
||||
results.size(),
|
||||
results.stream().filter(FileImportResponse::isSuccess).count(),
|
||||
results.stream().filter(r -> !r.isSuccess()).count());
|
||||
|
||||
return ZIPImportResponse.create(results);
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("ZIP import failed: {}", e.getMessage(), e);
|
||||
return createErrorResponse("Import failed: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isValidZIPFile(MultipartFile file) {
|
||||
String filename = file.getOriginalFilename();
|
||||
if (filename == null || !filename.toLowerCase().endsWith(".zip")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String contentType = file.getContentType();
|
||||
return "application/zip".equals(contentType) ||
|
||||
"application/x-zip-compressed".equals(contentType) ||
|
||||
contentType == null;
|
||||
}
|
||||
|
||||
private List<FileInfoDto> extractAndAnalyzeZIP(MultipartFile zipFile, Path tempDir, String sessionId) throws IOException {
|
||||
List<FileInfoDto> files = new ArrayList<>();
|
||||
int fileCount = 0;
|
||||
|
||||
try (ZipInputStream zis = new ZipInputStream(zipFile.getInputStream())) {
|
||||
ZipEntry entry;
|
||||
|
||||
while ((entry = zis.getNextEntry()) != null) {
|
||||
// Skip directories
|
||||
if (entry.isDirectory()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Only process root-level files
|
||||
String entryName = entry.getName();
|
||||
if (entryName.contains("/") || entryName.contains("\\")) {
|
||||
log.debug("Skipping nested file: {}", entryName);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if it's an EPUB or PDF
|
||||
String lowerName = entryName.toLowerCase();
|
||||
if (!lowerName.endsWith(".epub") && !lowerName.endsWith(".pdf")) {
|
||||
log.debug("Skipping non-EPUB/PDF file: {}", entryName);
|
||||
continue;
|
||||
}
|
||||
|
||||
fileCount++;
|
||||
if (fileCount > MAX_FILES_IN_ZIP) {
|
||||
log.warn("ZIP contains more than {} files, stopping extraction", MAX_FILES_IN_ZIP);
|
||||
break;
|
||||
}
|
||||
|
||||
// Extract file to temp directory
|
||||
Path extractedFile = tempDir.resolve(entryName);
|
||||
Files.copy(zis, extractedFile);
|
||||
|
||||
// Analyze the extracted file
|
||||
FileInfoDto fileInfo = analyzeExtractedFile(extractedFile, entryName);
|
||||
files.add(fileInfo);
|
||||
|
||||
zis.closeEntry();
|
||||
}
|
||||
}
|
||||
|
||||
return files;
|
||||
}
|
||||
|
||||
private FileInfoDto analyzeExtractedFile(Path filePath, String fileName) {
|
||||
try {
|
||||
long fileSize = Files.size(filePath);
|
||||
String fileType;
|
||||
String extractedTitle = null;
|
||||
String extractedAuthor = null;
|
||||
boolean hasMetadata = false;
|
||||
|
||||
if (fileName.toLowerCase().endsWith(".epub")) {
|
||||
fileType = "EPUB";
|
||||
// Try to extract EPUB metadata
|
||||
try {
|
||||
// Create a temporary MultipartFile for validation
|
||||
byte[] fileBytes = Files.readAllBytes(filePath);
|
||||
MultipartFile tempFile = new TempMultipartFile(fileBytes, fileName, "application/epub+zip");
|
||||
|
||||
// Use EPUBImportService to extract metadata
|
||||
// For now, we'll just validate the file
|
||||
List<String> errors = epubImportService.validateEPUBFile(tempFile);
|
||||
if (!errors.isEmpty()) {
|
||||
FileInfoDto errorInfo = new FileInfoDto(fileName, fileType, fileSize);
|
||||
errorInfo.setError(String.join(", ", errors));
|
||||
return errorInfo;
|
||||
}
|
||||
|
||||
hasMetadata = true;
|
||||
// We could extract more metadata here if needed
|
||||
} catch (Exception e) {
|
||||
log.warn("Failed to extract EPUB metadata for {}: {}", fileName, e.getMessage());
|
||||
}
|
||||
} else if (fileName.toLowerCase().endsWith(".pdf")) {
|
||||
fileType = "PDF";
|
||||
// Try to extract PDF metadata
|
||||
try {
|
||||
byte[] fileBytes = Files.readAllBytes(filePath);
|
||||
MultipartFile tempFile = new TempMultipartFile(fileBytes, fileName, "application/pdf");
|
||||
|
||||
// Use PDFImportService to validate
|
||||
List<String> errors = pdfImportService.validatePDFFile(tempFile);
|
||||
if (!errors.isEmpty()) {
|
||||
FileInfoDto errorInfo = new FileInfoDto(fileName, fileType, fileSize);
|
||||
errorInfo.setError(String.join(", ", errors));
|
||||
return errorInfo;
|
||||
}
|
||||
|
||||
hasMetadata = true;
|
||||
// We could extract more metadata here if needed
|
||||
} catch (Exception e) {
|
||||
log.warn("Failed to extract PDF metadata for {}: {}", fileName, e.getMessage());
|
||||
}
|
||||
} else {
|
||||
FileInfoDto errorInfo = new FileInfoDto(fileName, "UNKNOWN", fileSize);
|
||||
errorInfo.setError("Unsupported file type");
|
||||
return errorInfo;
|
||||
}
|
||||
|
||||
FileInfoDto fileInfo = new FileInfoDto(fileName, fileType, fileSize);
|
||||
fileInfo.setExtractedTitle(extractedTitle);
|
||||
fileInfo.setExtractedAuthor(extractedAuthor);
|
||||
fileInfo.setHasMetadata(hasMetadata);
|
||||
|
||||
return fileInfo;
|
||||
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to analyze file {}: {}", fileName, e.getMessage(), e);
|
||||
FileInfoDto errorInfo = new FileInfoDto(fileName, "UNKNOWN", 0L);
|
||||
errorInfo.setError("Failed to analyze file: " + e.getMessage());
|
||||
return errorInfo;
|
||||
}
|
||||
}
|
||||
|
||||
private ZIPImportRequest.FileImportMetadata getFileMetadata(ZIPImportRequest request, String fileName) {
|
||||
// Check for file-specific metadata first
|
||||
if (request.getFileMetadata() != null && request.getFileMetadata().containsKey(fileName)) {
|
||||
return request.getFileMetadata().get(fileName);
|
||||
}
|
||||
|
||||
// Return default metadata
|
||||
ZIPImportRequest.FileImportMetadata metadata = new ZIPImportRequest.FileImportMetadata();
|
||||
metadata.setAuthorId(request.getDefaultAuthorId());
|
||||
metadata.setAuthorName(request.getDefaultAuthorName());
|
||||
metadata.setSeriesId(request.getDefaultSeriesId());
|
||||
metadata.setSeriesName(request.getDefaultSeriesName());
|
||||
metadata.setTags(request.getDefaultTags());
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
private FileImportResponse importEPUBFromSession(ZIPSession session, String fileName,
|
||||
ZIPImportRequest.FileImportMetadata metadata,
|
||||
ZIPImportRequest request) throws IOException {
|
||||
Path filePath = session.getTempDir().resolve(fileName);
|
||||
byte[] fileBytes = Files.readAllBytes(filePath);
|
||||
|
||||
MultipartFile epubFile = new TempMultipartFile(fileBytes, fileName, "application/epub+zip");
|
||||
|
||||
EPUBImportRequest epubRequest = new EPUBImportRequest();
|
||||
epubRequest.setEpubFile(epubFile);
|
||||
epubRequest.setAuthorId(metadata.getAuthorId());
|
||||
epubRequest.setAuthorName(metadata.getAuthorName());
|
||||
epubRequest.setSeriesId(metadata.getSeriesId());
|
||||
epubRequest.setSeriesName(metadata.getSeriesName());
|
||||
epubRequest.setSeriesVolume(metadata.getSeriesVolume());
|
||||
epubRequest.setTags(metadata.getTags());
|
||||
epubRequest.setCreateMissingAuthor(request.getCreateMissingAuthor());
|
||||
epubRequest.setCreateMissingSeries(request.getCreateMissingSeries());
|
||||
epubRequest.setExtractCover(true);
|
||||
|
||||
EPUBImportResponse epubResponse = epubImportService.importEPUB(epubRequest);
|
||||
|
||||
// Convert EPUBImportResponse to FileImportResponse
|
||||
if (epubResponse.isSuccess()) {
|
||||
FileImportResponse response = FileImportResponse.success(epubResponse.getStoryId(), epubResponse.getStoryTitle(), "EPUB");
|
||||
response.setFileName(fileName);
|
||||
response.setWordCount(epubResponse.getWordCount());
|
||||
return response;
|
||||
} else {
|
||||
return FileImportResponse.error(epubResponse.getMessage(), fileName);
|
||||
}
|
||||
}
|
||||
|
||||
private FileImportResponse importPDFFromSession(ZIPSession session, String fileName,
|
||||
ZIPImportRequest.FileImportMetadata metadata,
|
||||
ZIPImportRequest request) throws IOException {
|
||||
Path filePath = session.getTempDir().resolve(fileName);
|
||||
byte[] fileBytes = Files.readAllBytes(filePath);
|
||||
|
||||
MultipartFile pdfFile = new TempMultipartFile(fileBytes, fileName, "application/pdf");
|
||||
|
||||
PDFImportRequest pdfRequest = new PDFImportRequest();
|
||||
pdfRequest.setPdfFile(pdfFile);
|
||||
pdfRequest.setAuthorId(metadata.getAuthorId());
|
||||
pdfRequest.setAuthorName(metadata.getAuthorName());
|
||||
pdfRequest.setSeriesId(metadata.getSeriesId());
|
||||
pdfRequest.setSeriesName(metadata.getSeriesName());
|
||||
pdfRequest.setSeriesVolume(metadata.getSeriesVolume());
|
||||
pdfRequest.setTags(metadata.getTags());
|
||||
pdfRequest.setCreateMissingAuthor(request.getCreateMissingAuthor());
|
||||
pdfRequest.setCreateMissingSeries(request.getCreateMissingSeries());
|
||||
pdfRequest.setExtractImages(request.getExtractImages());
|
||||
|
||||
return pdfImportService.importPDF(pdfRequest);
|
||||
}
|
||||
|
||||
private void scheduleSessionCleanup(String sessionId) {
|
||||
Timer timer = new Timer(true);
|
||||
timer.schedule(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
cleanupSession(sessionId);
|
||||
}
|
||||
}, ZIP_SESSION_TIMEOUT_MS);
|
||||
}
|
||||
|
||||
private void cleanupSession(String sessionId) {
|
||||
ZIPSession session = activeSessions.remove(sessionId);
|
||||
if (session != null) {
|
||||
try {
|
||||
deleteDirectory(session.getTempDir());
|
||||
log.info("Cleaned up ZIP session: {}", sessionId);
|
||||
} catch (Exception e) {
|
||||
log.error("Failed to cleanup ZIP session {}: {}", sessionId, e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void deleteDirectory(Path directory) throws IOException {
|
||||
if (Files.exists(directory)) {
|
||||
Files.walk(directory)
|
||||
.sorted((a, b) -> -a.compareTo(b)) // Delete files before directories
|
||||
.forEach(path -> {
|
||||
try {
|
||||
Files.delete(path);
|
||||
} catch (IOException e) {
|
||||
log.warn("Failed to delete file {}: {}", path, e.getMessage());
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private ZIPImportResponse createErrorResponse(String message) {
|
||||
ZIPImportResponse response = new ZIPImportResponse();
|
||||
response.setSuccess(false);
|
||||
response.setMessage(message);
|
||||
return response;
|
||||
}
|
||||
|
||||
// Inner classes
|
||||
|
||||
private static class ZIPSession {
|
||||
private final String sessionId;
|
||||
private final Path tempDir;
|
||||
private final Map<String, FileInfoDto> files;
|
||||
private final long createdAt;
|
||||
|
||||
public ZIPSession(String sessionId, Path tempDir, List<FileInfoDto> fileList) {
|
||||
this.sessionId = sessionId;
|
||||
this.tempDir = tempDir;
|
||||
this.files = new HashMap<>();
|
||||
for (FileInfoDto file : fileList) {
|
||||
this.files.put(file.getFileName(), file);
|
||||
}
|
||||
this.createdAt = System.currentTimeMillis();
|
||||
}
|
||||
|
||||
public Path getTempDir() {
|
||||
return tempDir;
|
||||
}
|
||||
|
||||
public FileInfoDto getFileInfo(String fileName) {
|
||||
return files.get(fileName);
|
||||
}
|
||||
|
||||
public boolean isExpired() {
|
||||
return System.currentTimeMillis() - createdAt > ZIP_SESSION_TIMEOUT_MS;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Temporary MultipartFile implementation for extracted files
|
||||
*/
|
||||
private static class TempMultipartFile implements MultipartFile {
|
||||
private final byte[] data;
|
||||
private final String filename;
|
||||
private final String contentType;
|
||||
|
||||
public TempMultipartFile(byte[] data, String filename, String contentType) {
|
||||
this.data = data;
|
||||
this.filename = filename;
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "file";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOriginalFilename() {
|
||||
return filename;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return data == null || data.length == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() {
|
||||
return data != null ? data.length : 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] getBytes() {
|
||||
return data;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
return new ByteArrayInputStream(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferTo(java.io.File dest) throws IOException {
|
||||
try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
|
||||
fos.write(data);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferTo(java.nio.file.Path dest) throws IOException {
|
||||
Files.write(dest, data);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -21,8 +21,8 @@ spring:

   servlet:
     multipart:
-      max-file-size: 600MB # Increased for large backup restore (425MB+)
-      max-request-size: 610MB # Slightly higher to account for form data
+      max-file-size: 4096MB # 4GB for large backup restore
+      max-request-size: 4150MB # Slightly higher to account for form data

   jackson:
     serialization:
@@ -33,7 +33,7 @@ spring:
 server:
   port: 8080
   tomcat:
-    max-http-request-size: 650MB # Tomcat HTTP request size limit (separate from multipart)
+    max-http-request-size: 4200MB # Tomcat HTTP request size limit (4GB + overhead)

 storycove:
   app:
|
||||
@@ -0,0 +1,296 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.FileImportResponse;
|
||||
import com.storycove.dto.PDFImportRequest;
|
||||
import com.storycove.entity.*;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.mock.web.MockMultipartFile;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
/**
 * Tests for PDFImportService.
 * Note: These tests mock the PDF parsing since Apache PDFBox is complex to test.
 * Integration tests should be added separately to test actual PDF file parsing.
 *
 * Coverage here is limited to input validation (null/empty/wrong-extension/oversized
 * files), the author-required rule, and acceptance of optional request flags.
 */
@ExtendWith(MockitoExtension.class)
class PDFImportServiceTest {

    // Collaborators of PDFImportService, mocked so no real parsing or persistence happens.
    @Mock
    private StoryService storyService;

    @Mock
    private AuthorService authorService;

    @Mock
    private SeriesService seriesService;

    @Mock
    private TagService tagService;

    @Mock
    private HtmlSanitizationService sanitizationService;

    @Mock
    private ImageService imageService;

    @Mock
    private LibraryService libraryService;

    // Service under test; Mockito injects the mocks above.
    @InjectMocks
    private PDFImportService pdfImportService;

    // Fresh request rebuilt before every test.
    private PDFImportRequest testRequest;
    // NOTE(review): testStory/testAuthor/testSeries are built in setUp() but no test
    // in this class references them — candidates for removal or future use.
    private Story testStory;
    private Author testAuthor;
    private Series testSeries;
    private UUID storyId;

    @BeforeEach
    void setUp() {
        storyId = UUID.randomUUID();

        testStory = new Story();
        testStory.setId(storyId);
        testStory.setTitle("Test Story");
        testStory.setWordCount(1000);

        testAuthor = new Author();
        testAuthor.setId(UUID.randomUUID());
        testAuthor.setName("Test Author");

        testSeries = new Series();
        testSeries.setId(UUID.randomUUID());
        testSeries.setName("Test Series");

        testRequest = new PDFImportRequest();
    }

    // ========================================
    // File Validation Tests
    // ========================================

    @Test
    @DisplayName("Should reject null PDF file")
    void testNullPDFFile() {
        // Arrange
        testRequest.setPdfFile(null);

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert
        assertFalse(response.isSuccess());
        assertEquals("PDF file is required", response.getMessage());
        verify(storyService, never()).create(any(Story.class));
    }

    @Test
    @DisplayName("Should reject empty PDF file")
    void testEmptyPDFFile() {
        // Arrange
        MockMultipartFile emptyFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf", new byte[0]
        );
        testRequest.setPdfFile(emptyFile);

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert
        assertFalse(response.isSuccess());
        assertEquals("PDF file is required", response.getMessage());
        verify(storyService, never()).create(any(Story.class));
    }

    @Test
    @DisplayName("Should reject non-PDF file by extension")
    void testInvalidFileExtension() {
        // Arrange
        MockMultipartFile invalidFile = new MockMultipartFile(
            "file", "test.txt", "text/plain", "test content".getBytes()
        );
        testRequest.setPdfFile(invalidFile);

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("Invalid PDF file format"));
        verify(storyService, never()).create(any(Story.class));
    }

    @Test
    @DisplayName("Should reject file exceeding 300MB size limit")
    void testFileSizeExceedsLimit() {
        // Arrange
        long fileSize = 301L * 1024 * 1024; // 301 MB
        // Override getSize() so we can simulate a 301MB upload without
        // actually allocating that much memory.
        MockMultipartFile largeFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf", new byte[(int)Math.min(fileSize, 1000)]
        ) {
            @Override
            public long getSize() {
                return fileSize;
            }
        };
        testRequest.setPdfFile(largeFile);

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert
        // NOTE(review): the import path reports size violations under the generic
        // "Invalid PDF file format" message, while validatePDFFile() reports "300MB"
        // (see testValidatePDFFile_Oversized) — confirm this asymmetry is intended.
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("Invalid PDF file format"));
        verify(storyService, never()).create(any(Story.class));
    }

    // ========================================
    // Author Handling Tests
    // ========================================

    @Test
    @DisplayName("Should require author name when not in metadata")
    void testRequiresAuthorName() {
        // Arrange - Create a minimal valid PDF (will fail parsing but test validation)
        MockMultipartFile pdfFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf",
            "%PDF-1.4\n%%EOF".getBytes()
        );
        testRequest.setPdfFile(pdfFile);
        testRequest.setAuthorName(null);
        testRequest.setAuthorId(null);

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert
        assertFalse(response.isSuccess());
        // Should fail during import because author is required
        verify(storyService, never()).create(any(Story.class));
    }

    // ========================================
    // Validation Method Tests
    // ========================================

    @Test
    @DisplayName("Should validate PDF file successfully")
    void testValidatePDFFile_Valid() {
        // Arrange
        MockMultipartFile pdfFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf",
            new byte[100]
        );

        // Act
        List<String> errors = pdfImportService.validatePDFFile(pdfFile);

        // Assert - Will have errors because it's not a real PDF, but tests the method exists
        // NOTE(review): this only asserts non-null; the DisplayName overstates what is checked.
        assertNotNull(errors);
    }

    @Test
    @DisplayName("Should return errors for null file in validation")
    void testValidatePDFFile_Null() {
        // Act
        List<String> errors = pdfImportService.validatePDFFile(null);

        // Assert
        assertNotNull(errors);
        assertFalse(errors.isEmpty());
        assertTrue(errors.get(0).contains("required"));
    }

    @Test
    @DisplayName("Should return errors for empty file in validation")
    void testValidatePDFFile_Empty() {
        // Arrange
        MockMultipartFile emptyFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf", new byte[0]
        );

        // Act
        List<String> errors = pdfImportService.validatePDFFile(emptyFile);

        // Assert
        assertNotNull(errors);
        assertFalse(errors.isEmpty());
        assertTrue(errors.get(0).contains("required"));
    }

    @Test
    @DisplayName("Should return errors for oversized file in validation")
    void testValidatePDFFile_Oversized() {
        // Arrange
        long fileSize = 301L * 1024 * 1024; // 301 MB
        // Same getSize() override trick as testFileSizeExceedsLimit.
        MockMultipartFile largeFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf", new byte[1000]
        ) {
            @Override
            public long getSize() {
                return fileSize;
            }
        };

        // Act
        List<String> errors = pdfImportService.validatePDFFile(largeFile);

        // Assert
        assertNotNull(errors);
        assertFalse(errors.isEmpty());
        assertTrue(errors.stream().anyMatch(e -> e.contains("300MB")));
    }

    // ========================================
    // Integration Tests (Mocked)
    // ========================================

    @Test
    @DisplayName("Should handle extraction images flag")
    void testExtractImagesFlag() {
        // Arrange
        MockMultipartFile pdfFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf",
            "%PDF-1.4\n%%EOF".getBytes()
        );
        testRequest.setPdfFile(pdfFile);
        testRequest.setAuthorName("Test Author");
        testRequest.setExtractImages(false);

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert - Will fail parsing but tests that the flag is accepted
        assertNotNull(response);
    }

    @Test
    @DisplayName("Should accept tags in request")
    void testAcceptTags() {
        // Arrange
        MockMultipartFile pdfFile = new MockMultipartFile(
            "file", "test.pdf", "application/pdf",
            "%PDF-1.4\n%%EOF".getBytes()
        );
        testRequest.setPdfFile(pdfFile);
        testRequest.setAuthorName("Test Author");
        testRequest.setTags(Arrays.asList("tag1", "tag2"));

        // Act
        FileImportResponse response = pdfImportService.importPDF(testRequest);

        // Assert - Will fail parsing but tests that tags are accepted
        assertNotNull(response);
    }
}
|
||||
@@ -85,7 +85,8 @@ class StoryServiceTest {
         Story result = storyService.updateReadingProgress(testId, position);

         assertEquals(0, result.getReadingPosition());
-        assertNotNull(result.getLastReadAt());
+        // When position is 0, lastReadAt should be reset to null so the story doesn't appear in "last read" sorting
+        assertNull(result.getLastReadAt());
         verify(storyRepository).save(testStory);
     }

@@ -111,7 +112,8 @@ class StoryServiceTest {
         Story result = storyService.updateReadingProgress(testId, position);

         assertNull(result.getReadingPosition());
-        assertNotNull(result.getLastReadAt());
+        // When position is null, lastReadAt should be reset to null so the story doesn't appear in "last read" sorting
+        assertNull(result.getLastReadAt());
         verify(storyRepository).save(testStory);
     }
|
||||
@@ -0,0 +1,310 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.*;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.mock.web.MockMultipartFile;
|
||||
|
||||
import java.util.*;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.*;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
/**
 * Tests for ZIPImportService.
 *
 * Covers input validation for ZIP analysis (null/empty/wrong-type/oversized/corrupted
 * uploads), import-request validation (session ID and selected-file checks), and the
 * aggregate statistics computed by {@code ZIPImportResponse.create}.
 */
@ExtendWith(MockitoExtension.class)
class ZIPImportServiceTest {

    // NOTE(review): these delegate importers are mocked but never stubbed or verified
    // in this class — they exist only so @InjectMocks can construct the service.
    @Mock
    private EPUBImportService epubImportService;

    @Mock
    private PDFImportService pdfImportService;

    // Service under test; Mockito injects the mocks above.
    @InjectMocks
    private ZIPImportService zipImportService;

    // Fresh request rebuilt before every test.
    private ZIPImportRequest testImportRequest;

    @BeforeEach
    void setUp() {
        testImportRequest = new ZIPImportRequest();
    }

    // ========================================
    // File Validation Tests
    // ========================================

    @Test
    @DisplayName("Should reject null ZIP file")
    void testNullZIPFile() {
        // Act
        ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(null);

        // Assert
        assertFalse(response.isSuccess());
        assertEquals("ZIP file is required", response.getMessage());
    }

    @Test
    @DisplayName("Should reject empty ZIP file")
    void testEmptyZIPFile() {
        // Arrange
        MockMultipartFile emptyFile = new MockMultipartFile(
            "file", "test.zip", "application/zip", new byte[0]
        );

        // Act
        ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(emptyFile);

        // Assert
        assertFalse(response.isSuccess());
        assertEquals("ZIP file is required", response.getMessage());
    }

    @Test
    @DisplayName("Should reject non-ZIP file")
    void testInvalidFileType() {
        // Arrange
        MockMultipartFile invalidFile = new MockMultipartFile(
            "file", "test.txt", "text/plain", "test content".getBytes()
        );

        // Act
        ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(invalidFile);

        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("Invalid ZIP file format"));
    }

    @Test
    @DisplayName("Should reject oversized ZIP file")
    void testOversizedZIPFile() {
        // Arrange
        long fileSize = 1025L * 1024 * 1024; // 1025 MB (> 1GB limit)
        // Override getSize() to simulate an oversized upload without
        // allocating the payload for real.
        MockMultipartFile largeFile = new MockMultipartFile(
            "file", "test.zip", "application/zip", new byte[1000]
        ) {
            @Override
            public long getSize() {
                return fileSize;
            }
        };

        // Act
        ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(largeFile);

        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("exceeds"));
        assertTrue(response.getMessage().contains("1024MB") || response.getMessage().contains("1GB"));
    }

    // ========================================
    // Import Request Validation Tests
    // ========================================

    @Test
    @DisplayName("Should reject import with invalid session ID")
    void testInvalidSessionId() {
        // Arrange
        testImportRequest.setZipSessionId("invalid-session-id");
        testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));

        // Act
        ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);

        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("Invalid") || response.getMessage().contains("expired"));
    }

    @Test
    @DisplayName("Should reject import with no selected files")
    void testNoSelectedFiles() {
        // Arrange
        testImportRequest.setZipSessionId("some-session-id");
        testImportRequest.setSelectedFiles(Collections.emptyList());

        // Act
        ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);

        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("No files selected") || response.getMessage().contains("Invalid"));
    }

    @Test
    @DisplayName("Should reject import with null selected files")
    void testNullSelectedFiles() {
        // Arrange
        testImportRequest.setZipSessionId("some-session-id");
        testImportRequest.setSelectedFiles(null);

        // Act
        ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);

        // Assert
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("No files selected") || response.getMessage().contains("Invalid"));
    }

    // ========================================
    // ZIP Analysis Tests
    // ========================================

    @Test
    @DisplayName("Should handle corrupted ZIP file gracefully")
    void testCorruptedZIPFile() {
        // Arrange
        // "PK\3\4" is the ZIP local-file-header magic (octal escapes),
        // followed by garbage — passes a magic-number sniff but won't parse.
        MockMultipartFile corruptedFile = new MockMultipartFile(
            "file", "test.zip", "application/zip",
            "PK\3\4corrupted data".getBytes()
        );

        // Act
        ZIPAnalysisResponse response = zipImportService.analyzeZIPFile(corruptedFile);

        // Assert
        assertFalse(response.isSuccess());
        assertNotNull(response.getMessage());
    }

    // ========================================
    // Helper Method Tests
    // ========================================

    @Test
    @DisplayName("Should accept default metadata in import request")
    void testDefaultMetadata() {
        // Arrange
        testImportRequest.setZipSessionId("test-session");
        testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
        testImportRequest.setDefaultAuthorName("Default Author");
        testImportRequest.setDefaultTags(Arrays.asList("tag1", "tag2"));

        // Act - will fail due to invalid session, but tests that metadata is accepted
        ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);

        // Assert
        assertNotNull(response);
        assertFalse(response.isSuccess()); // Expected to fail due to invalid session
    }

    @Test
    @DisplayName("Should accept per-file metadata in import request")
    void testPerFileMetadata() {
        // Arrange
        Map<String, ZIPImportRequest.FileImportMetadata> fileMetadata = new HashMap<>();
        ZIPImportRequest.FileImportMetadata metadata = new ZIPImportRequest.FileImportMetadata();
        metadata.setAuthorName("Specific Author");
        metadata.setTags(Arrays.asList("tag1"));
        fileMetadata.put("file1.epub", metadata);

        testImportRequest.setZipSessionId("test-session");
        testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
        testImportRequest.setFileMetadata(fileMetadata);

        // Act - will fail due to invalid session, but tests that metadata is accepted
        ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);

        // Assert
        assertNotNull(response);
        assertFalse(response.isSuccess()); // Expected to fail due to invalid session
    }

    @Test
    @DisplayName("Should accept createMissing flags")
    void testCreateMissingFlags() {
        // Arrange
        testImportRequest.setZipSessionId("test-session");
        testImportRequest.setSelectedFiles(Arrays.asList("file1.epub"));
        testImportRequest.setCreateMissingAuthor(false);
        testImportRequest.setCreateMissingSeries(false);
        testImportRequest.setExtractImages(false);

        // Act - will fail due to invalid session, but tests that flags are accepted
        ZIPImportResponse response = zipImportService.importFromZIP(testImportRequest);

        // Assert
        assertNotNull(response);
    }

    // ========================================
    // Response Object Tests
    // ========================================

    @Test
    @DisplayName("ZIPImportResponse should calculate statistics correctly")
    void testZIPImportResponseStatistics() {
        // Arrange - two successes plus one failure = partial success
        List<FileImportResponse> results = new ArrayList<>();

        FileImportResponse success1 = FileImportResponse.success(UUID.randomUUID(), "Story 1", "EPUB");
        FileImportResponse success2 = FileImportResponse.success(UUID.randomUUID(), "Story 2", "PDF");
        FileImportResponse failure = FileImportResponse.error("Import failed", "story3.epub");

        results.add(success1);
        results.add(success2);
        results.add(failure);

        // Act
        ZIPImportResponse response = ZIPImportResponse.create(results);

        // Assert
        assertNotNull(response);
        assertEquals(3, response.getTotalFiles());
        assertEquals(2, response.getSuccessfulImports());
        assertEquals(1, response.getFailedImports());
        assertTrue(response.isSuccess()); // Partial success
        assertTrue(response.getMessage().contains("2 imported"));
    }

    @Test
    @DisplayName("ZIPImportResponse should handle all failures")
    void testZIPImportResponseAllFailures() {
        // Arrange
        List<FileImportResponse> results = new ArrayList<>();
        results.add(FileImportResponse.error("Error 1", "file1.epub"));
        results.add(FileImportResponse.error("Error 2", "file2.pdf"));

        // Act
        ZIPImportResponse response = ZIPImportResponse.create(results);

        // Assert
        assertNotNull(response);
        assertEquals(2, response.getTotalFiles());
        assertEquals(0, response.getSuccessfulImports());
        assertEquals(2, response.getFailedImports());
        assertFalse(response.isSuccess());
        assertTrue(response.getMessage().contains("failed"));
    }

    @Test
    @DisplayName("ZIPImportResponse should handle all successes")
    void testZIPImportResponseAllSuccesses() {
        // Arrange
        List<FileImportResponse> results = new ArrayList<>();
        results.add(FileImportResponse.success(UUID.randomUUID(), "Story 1", "EPUB"));
        results.add(FileImportResponse.success(UUID.randomUUID(), "Story 2", "PDF"));

        // Act
        ZIPImportResponse response = ZIPImportResponse.create(results);

        // Assert
        assertNotNull(response);
        assertEquals(2, response.getTotalFiles());
        assertEquals(2, response.getSuccessfulImports());
        assertEquals(0, response.getFailedImports());
        assertTrue(response.isSuccess());
        assertTrue(response.getMessage().contains("All files imported successfully"));
    }
}
|
||||
@@ -124,7 +124,7 @@ configs:
|
||||
}
|
||||
server {
|
||||
listen 80;
|
||||
client_max_body_size 600M;
|
||||
client_max_body_size 2048M;
|
||||
location / {
|
||||
proxy_pass http://frontend;
|
||||
proxy_http_version 1.1;
|
||||
@@ -145,8 +145,8 @@ configs:
|
||||
proxy_connect_timeout 900s;
|
||||
proxy_send_timeout 900s;
|
||||
proxy_read_timeout 900s;
|
||||
# Large upload settings
|
||||
client_max_body_size 600M;
|
||||
# Large upload settings (2GB for backups)
|
||||
client_max_body_size 2048M;
|
||||
proxy_request_buffering off;
|
||||
proxy_max_temp_file_size 0;
|
||||
}
|
||||
|
||||
frontend/src/app/import/file/page.tsx (new file, 829 lines)
@@ -0,0 +1,829 @@
|
||||
'use client';
|
||||
|
||||
import { useState } from 'react';
|
||||
import { useRouter } from 'next/navigation';
|
||||
import { DocumentArrowUpIcon, CheckCircleIcon, XCircleIcon } from '@heroicons/react/24/outline';
|
||||
import Button from '@/components/ui/Button';
|
||||
import { Input } from '@/components/ui/Input';
|
||||
import ImportLayout from '@/components/layout/ImportLayout';
|
||||
import AuthorSelector from '@/components/stories/AuthorSelector';
|
||||
import SeriesSelector from '@/components/stories/SeriesSelector';
|
||||
|
||||
// Supported upload kinds; null means "unrecognized extension".
type FileType = 'epub' | 'pdf' | 'zip' | null;

// Result of a single EPUB/PDF import (mirrors the backend FileImportResponse DTO).
interface ImportResponse {
  success: boolean;
  message: string;
  storyId?: string;
  storyTitle?: string;
  fileName?: string;
  fileType?: string;
  wordCount?: number;
  extractedImages?: number;
  warnings?: string[];
  errors?: string[];
}

// Result of analyzing an uploaded ZIP before import (mirrors ZIPAnalysisResponse).
// NOTE(review): the session ID is transported inside `warnings` (see analyzeZipFile),
// not as a dedicated field — confirm whether the backend can expose it explicitly.
interface ZIPAnalysisResponse {
  success: boolean;
  message: string;
  zipFileName?: string;
  totalFiles?: number;
  validFiles?: number;
  files?: FileInfo[];
  warnings?: string[];
}

// Per-entry metadata extracted from a ZIP archive.
interface FileInfo {
  fileName: string;
  fileType: string;
  fileSize: number;       // bytes
  extractedTitle?: string;
  extractedAuthor?: string;
  hasMetadata: boolean;
  error?: string;         // set when this entry could not be analyzed
}

// Aggregate result of a batch ZIP import (mirrors ZIPImportResponse).
interface ZIPImportResponse {
  success: boolean;
  message: string;
  totalFiles: number;
  successfulImports: number;
  failedImports: number;
  results: ImportResponse[];
  warnings?: string[];
}
|
||||
|
||||
export default function FileImportPage() {
|
||||
const router = useRouter();
|
||||
const [selectedFile, setSelectedFile] = useState<File | null>(null);
|
||||
const [fileType, setFileType] = useState<FileType>(null);
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [isValidating, setIsValidating] = useState(false);
|
||||
const [validationResult, setValidationResult] = useState<any>(null);
|
||||
const [importResult, setImportResult] = useState<ImportResponse | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
// ZIP-specific state
|
||||
const [zipAnalysis, setZipAnalysis] = useState<ZIPAnalysisResponse | null>(null);
|
||||
const [zipSessionId, setZipSessionId] = useState<string | null>(null);
|
||||
const [selectedZipFiles, setSelectedZipFiles] = useState<Set<string>>(new Set());
|
||||
const [fileMetadata, setFileMetadata] = useState<Map<string, any>>(new Map());
|
||||
const [zipImportResult, setZipImportResult] = useState<ZIPImportResponse | null>(null);
|
||||
|
||||
// Import options
|
||||
const [authorName, setAuthorName] = useState<string>('');
|
||||
const [authorId, setAuthorId] = useState<string | undefined>(undefined);
|
||||
const [seriesName, setSeriesName] = useState<string>('');
|
||||
const [seriesId, setSeriesId] = useState<string | undefined>(undefined);
|
||||
const [seriesVolume, setSeriesVolume] = useState<string>('');
|
||||
const [tags, setTags] = useState<string>('');
|
||||
const [createMissingAuthor, setCreateMissingAuthor] = useState(true);
|
||||
const [createMissingSeries, setCreateMissingSeries] = useState(true);
|
||||
const [extractImages, setExtractImages] = useState(true);
|
||||
const [preserveReadingPosition, setPreserveReadingPosition] = useState(true);
|
||||
|
||||
const detectFileType = (file: File): FileType => {
|
||||
const filename = file.name.toLowerCase();
|
||||
if (filename.endsWith('.epub')) return 'epub';
|
||||
if (filename.endsWith('.pdf')) return 'pdf';
|
||||
if (filename.endsWith('.zip')) return 'zip';
|
||||
return null;
|
||||
};
|
||||
|
||||
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
|
||||
const file = e.target.files?.[0];
|
||||
if (file) {
|
||||
setSelectedFile(file);
|
||||
setValidationResult(null);
|
||||
setImportResult(null);
|
||||
setZipAnalysis(null);
|
||||
setZipSessionId(null);
|
||||
setSelectedZipFiles(new Set());
|
||||
setZipImportResult(null);
|
||||
setError(null);
|
||||
|
||||
const detectedType = detectFileType(file);
|
||||
setFileType(detectedType);
|
||||
|
||||
if (!detectedType) {
|
||||
setError('Unsupported file type. Please select an EPUB, PDF, or ZIP file.');
|
||||
return;
|
||||
}
|
||||
|
||||
if (detectedType === 'zip') {
|
||||
await analyzeZipFile(file);
|
||||
} else {
|
||||
await validateFile(file, detectedType);
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
  /**
   * POSTs an EPUB/PDF to its validation endpoint and stores the result.
   * Sets `error` on validation failure, auth failure, or network error.
   * No-op for ZIPs, which use analyzeZipFile instead.
   */
  const validateFile = async (file: File, type: FileType) => {
    if (type === 'zip') return; // ZIP has its own analysis flow

    setIsValidating(true);
    try {
      const token = localStorage.getItem('auth-token');
      const formData = new FormData();
      formData.append('file', file);

      const endpoint = type === 'epub' ? '/api/stories/epub/validate' : '/api/stories/pdf/validate';
      const response = await fetch(endpoint, {
        method: 'POST',
        headers: {
          // Content-Type is deliberately omitted so the browser sets the
          // multipart boundary itself.
          'Authorization': token ? `Bearer ${token}` : '',
        },
        body: formData,
      });

      if (response.ok) {
        const result = await response.json();
        setValidationResult(result);
        if (!result.valid) {
          // NOTE(review): assumes the endpoint always returns an `errors` array
          // when `valid` is false — a missing array would throw here; confirm.
          setError(`${type?.toUpperCase() || 'File'} validation failed: ` + result.errors.join(', '));
        }
      } else if (response.status === 401 || response.status === 403) {
        setError('Authentication required. Please log in.');
      } else {
        setError(`Failed to validate ${type?.toUpperCase() || 'file'}`);
      }
    } catch (err) {
      setError(`Error validating ${type?.toUpperCase() || 'file'}: ` + (err as Error).message);
    } finally {
      setIsValidating(false);
    }
  };
|
||||
|
||||
  /**
   * Uploads a ZIP for server-side analysis, stores the analysis result, and
   * extracts the import session ID that the backend smuggles inside the
   * `warnings` array.
   * Sets `error` on analysis failure, empty archives, auth failure, or
   * network errors.
   */
  const analyzeZipFile = async (file: File) => {
    setIsLoading(true);
    try {
      const token = localStorage.getItem('auth-token');
      const formData = new FormData();
      formData.append('file', file);

      const response = await fetch('/api/stories/zip/analyze', {
        method: 'POST',
        headers: {
          'Authorization': token ? `Bearer ${token}` : '',
        },
        body: formData,
      });

      if (response.ok) {
        const result: ZIPAnalysisResponse = await response.json();
        setZipAnalysis(result);

        if (result.success && result.warnings && result.warnings.length > 0) {
          // Extract session ID from warnings
          // NOTE(review): fragile protocol — the session ID is parsed out of a
          // human-readable warning string ("Session ID: <uuid>"); a dedicated
          // response field would be more robust. Confirm with the backend team.
          const sessionWarning = result.warnings.find(w => w.includes('Session ID:'));
          if (sessionWarning) {
            const match = sessionWarning.match(/Session ID: ([a-f0-9-]+)/);
            if (match) {
              setZipSessionId(match[1]);
            }
          }
        }

        if (!result.success) {
          setError(result.message);
        } else if (result.files && result.files.length === 0) {
          setError('No valid EPUB or PDF files found in ZIP');
        }
      } else if (response.status === 401 || response.status === 403) {
        setError('Authentication required. Please log in.');
      } else {
        setError('Failed to analyze ZIP file');
      }
    } catch (err) {
      setError('Error analyzing ZIP file: ' + (err as Error).message);
    } finally {
      setIsLoading(false);
    }
  };
|
||||
|
||||
  /**
   * Form submit handler. Routes ZIPs to handleZipImport; for EPUB/PDF it
   * builds a multipart request from the current form state and POSTs it to
   * the matching import endpoint. On success stores the import result;
   * otherwise surfaces an error message.
   */
  const handleSubmit = async (e: React.FormEvent) => {
    e.preventDefault();

    if (!selectedFile) {
      setError('Please select a file');
      return;
    }

    // ZIPs follow a completely separate batch-import path.
    if (fileType === 'zip') {
      await handleZipImport();
      return;
    }

    // Block submission when an earlier validation explicitly failed.
    if (validationResult && !validationResult.valid) {
      setError(`Cannot import invalid ${fileType?.toUpperCase()} file`);
      return;
    }

    // Check PDF requires author
    // NOTE(review): this rejects before upload even when the PDF carries
    // author metadata, despite the message suggesting metadata would suffice
    // — confirm intended behavior.
    if (fileType === 'pdf' && !authorName.trim()) {
      setError('PDF import requires an author name. Please provide an author name or ensure the PDF has author metadata.');
      return;
    }

    setIsLoading(true);
    setError(null);

    try {
      const token = localStorage.getItem('auth-token');
      const formData = new FormData();
      formData.append('file', selectedFile);

      // An explicit ID always wins over a free-text name.
      if (authorId) {
        formData.append('authorId', authorId);
      } else if (authorName) {
        formData.append('authorName', authorName);
      }
      if (seriesId) {
        formData.append('seriesId', seriesId);
      } else if (seriesName) {
        formData.append('seriesName', seriesName);
      }
      if (seriesVolume) formData.append('seriesVolume', seriesVolume);
      if (tags) {
        // Tags are entered comma-separated; send each non-empty tag as its own field.
        const tagList = tags.split(',').map(t => t.trim()).filter(t => t.length > 0);
        tagList.forEach(tag => formData.append('tags', tag));
      }

      formData.append('createMissingAuthor', createMissingAuthor.toString());
      formData.append('createMissingSeries', createMissingSeries.toString());

      // Format-specific options.
      if (fileType === 'epub') {
        formData.append('preserveReadingPosition', preserveReadingPosition.toString());
      } else if (fileType === 'pdf') {
        formData.append('extractImages', extractImages.toString());
      }

      const endpoint = fileType === 'epub' ? '/api/stories/epub/import' : '/api/stories/pdf/import';
      const response = await fetch(endpoint, {
        method: 'POST',
        headers: {
          'Authorization': token ? `Bearer ${token}` : '',
        },
        body: formData,
      });

      const result = await response.json();

      if (response.ok && result.success) {
        setImportResult(result);
      } else if (response.status === 401 || response.status === 403) {
        setError('Authentication required. Please log in.');
      } else {
        setError(result.message || `Failed to import ${fileType?.toUpperCase()}`);
      }
    } catch (err) {
      setError(`Error importing ${fileType?.toUpperCase()}: ` + (err as Error).message);
    } finally {
      setIsLoading(false);
    }
  };
|
||||
|
||||
const handleZipImport = async () => {
|
||||
if (!zipSessionId) {
|
||||
setError('ZIP session expired. Please re-upload the ZIP file.');
|
||||
return;
|
||||
}
|
||||
|
||||
if (selectedZipFiles.size === 0) {
|
||||
setError('Please select at least one file to import');
|
||||
return;
|
||||
}
|
||||
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
const token = localStorage.getItem('auth-token');
|
||||
|
||||
const requestBody: any = {
|
||||
zipSessionId: zipSessionId,
|
||||
selectedFiles: Array.from(selectedZipFiles),
|
||||
defaultAuthorId: authorId || undefined,
|
||||
defaultAuthorName: authorName || undefined,
|
||||
defaultSeriesId: seriesId || undefined,
|
||||
defaultSeriesName: seriesName || undefined,
|
||||
defaultTags: tags ? tags.split(',').map(t => t.trim()).filter(t => t.length > 0) : undefined,
|
||||
createMissingAuthor,
|
||||
createMissingSeries,
|
||||
extractImages,
|
||||
};
|
||||
|
||||
// Add per-file metadata if any
|
||||
if (fileMetadata.size > 0) {
|
||||
const metadata: any = {};
|
||||
fileMetadata.forEach((value, key) => {
|
||||
metadata[key] = value;
|
||||
});
|
||||
requestBody.fileMetadata = metadata;
|
||||
}
|
||||
|
||||
const response = await fetch('/api/stories/zip/import', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': token ? `Bearer ${token}` : '',
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(requestBody),
|
||||
});
|
||||
|
||||
const result: ZIPImportResponse = await response.json();
|
||||
|
||||
if (response.ok) {
|
||||
setZipImportResult(result);
|
||||
} else if (response.status === 401 || response.status === 403) {
|
||||
setError('Authentication required. Please log in.');
|
||||
} else {
|
||||
setError(result.message || 'Failed to import files from ZIP');
|
||||
}
|
||||
} catch (err) {
|
||||
setError('Error importing from ZIP: ' + (err as Error).message);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const toggleZipFileSelection = (fileName: string) => {
|
||||
const newSelection = new Set(selectedZipFiles);
|
||||
if (newSelection.has(fileName)) {
|
||||
newSelection.delete(fileName);
|
||||
} else {
|
||||
newSelection.add(fileName);
|
||||
}
|
||||
setSelectedZipFiles(newSelection);
|
||||
};
|
||||
|
||||
const selectAllZipFiles = () => {
|
||||
if (zipAnalysis?.files) {
|
||||
const validFiles = zipAnalysis.files.filter(f => !f.error);
|
||||
setSelectedZipFiles(new Set(validFiles.map(f => f.fileName)));
|
||||
}
|
||||
};
|
||||
|
||||
const deselectAllZipFiles = () => {
|
||||
setSelectedZipFiles(new Set());
|
||||
};
|
||||
|
||||
const resetForm = () => {
|
||||
setSelectedFile(null);
|
||||
setFileType(null);
|
||||
setValidationResult(null);
|
||||
setImportResult(null);
|
||||
setZipAnalysis(null);
|
||||
setZipSessionId(null);
|
||||
setSelectedZipFiles(new Set());
|
||||
setFileMetadata(new Map());
|
||||
setZipImportResult(null);
|
||||
setError(null);
|
||||
setAuthorName('');
|
||||
setAuthorId(undefined);
|
||||
setSeriesName('');
|
||||
setSeriesId(undefined);
|
||||
setSeriesVolume('');
|
||||
setTags('');
|
||||
};
|
||||
|
||||
const handleAuthorChange = (name: string, id?: string) => {
|
||||
setAuthorName(name);
|
||||
setAuthorId(id);
|
||||
};
|
||||
|
||||
const handleSeriesChange = (name: string, id?: string) => {
|
||||
setSeriesName(name);
|
||||
setSeriesId(id);
|
||||
};
|
||||
|
||||
// Show success screen for single file import
|
||||
if (importResult?.success) {
|
||||
return (
|
||||
<ImportLayout
|
||||
title="Import Successful"
|
||||
description="Your file has been successfully imported into StoryCove"
|
||||
>
|
||||
<div className="space-y-6">
|
||||
<div className="bg-green-50 dark:bg-green-900/20 border border-green-200 dark:border-green-800 rounded-lg p-6">
|
||||
<h2 className="text-xl font-semibold text-green-600 dark:text-green-400 mb-2">Import Completed</h2>
|
||||
<p className="theme-text">
|
||||
Your {importResult.fileType || fileType?.toUpperCase()} file has been successfully imported.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="theme-card theme-shadow rounded-lg p-6">
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
<span className="font-semibold theme-header">Story Title:</span>
|
||||
<p className="theme-text">{importResult.storyTitle}</p>
|
||||
</div>
|
||||
|
||||
{importResult.wordCount && (
|
||||
<div>
|
||||
<span className="font-semibold theme-header">Word Count:</span>
|
||||
<p className="theme-text">{importResult.wordCount.toLocaleString()} words</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{importResult.extractedImages !== undefined && importResult.extractedImages > 0 && (
|
||||
<div>
|
||||
<span className="font-semibold theme-header">Extracted Images:</span>
|
||||
<p className="theme-text">{importResult.extractedImages}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{importResult.warnings && importResult.warnings.length > 0 && (
|
||||
<div className="bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg p-4">
|
||||
<strong className="text-yellow-800 dark:text-yellow-200">Warnings:</strong>
|
||||
<ul className="list-disc list-inside mt-2 text-yellow-700 dark:text-yellow-300">
|
||||
{importResult.warnings.map((warning, index) => (
|
||||
<li key={index}>{warning}</li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex gap-4 mt-6">
|
||||
<Button
|
||||
onClick={() => router.push(`/stories/${importResult.storyId}`)}
|
||||
>
|
||||
View Story
|
||||
</Button>
|
||||
<Button
|
||||
onClick={resetForm}
|
||||
variant="secondary"
|
||||
>
|
||||
Import Another File
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</ImportLayout>
|
||||
);
|
||||
}
|
||||
|
||||
// Show success screen for ZIP import
|
||||
if (zipImportResult) {
|
||||
return (
|
||||
<ImportLayout
|
||||
title="ZIP Import Complete"
|
||||
description="Import results from your ZIP file"
|
||||
>
|
||||
<div className="space-y-6">
|
||||
<div className={`border rounded-lg p-6 ${
|
||||
zipImportResult.failedImports === 0
|
||||
? 'bg-green-50 dark:bg-green-900/20 border-green-200 dark:border-green-800'
|
||||
: 'bg-yellow-50 dark:bg-yellow-900/20 border-yellow-200 dark:border-yellow-800'
|
||||
}`}>
|
||||
<h2 className={`text-xl font-semibold mb-2 ${
|
||||
zipImportResult.failedImports === 0
|
||||
? 'text-green-600 dark:text-green-400'
|
||||
: 'text-yellow-600 dark:text-yellow-400'
|
||||
}`}>
|
||||
{zipImportResult.message}
|
||||
</h2>
|
||||
<p className="theme-text">
|
||||
{zipImportResult.successfulImports} of {zipImportResult.totalFiles} files imported successfully
|
||||
</p>
|
||||
</div>
|
||||
|
||||
<div className="theme-card theme-shadow rounded-lg p-6">
|
||||
<h3 className="text-lg font-semibold theme-header mb-4">Import Results</h3>
|
||||
<div className="space-y-3">
|
||||
{zipImportResult.results.map((result, index) => (
|
||||
<div key={index} className={`p-4 rounded-lg border ${
|
||||
result.success
|
||||
? 'bg-green-50 dark:bg-green-900/10 border-green-200 dark:border-green-800'
|
||||
: 'bg-red-50 dark:bg-red-900/10 border-red-200 dark:border-red-800'
|
||||
}`}>
|
||||
<div className="flex items-start gap-3">
|
||||
{result.success ? (
|
||||
<CheckCircleIcon className="h-5 w-5 text-green-600 dark:text-green-400 flex-shrink-0 mt-0.5" />
|
||||
) : (
|
||||
<XCircleIcon className="h-5 w-5 text-red-600 dark:text-red-400 flex-shrink-0 mt-0.5" />
|
||||
)}
|
||||
<div className="flex-1">
|
||||
<p className="font-medium theme-header">
|
||||
{result.fileName || result.storyTitle || 'Unknown file'}
|
||||
</p>
|
||||
{result.success && result.storyTitle && (
|
||||
<p className="text-sm theme-text">
|
||||
Imported as: {result.storyTitle}
|
||||
{result.storyId && (
|
||||
<button
|
||||
onClick={() => router.push(`/stories/${result.storyId}`)}
|
||||
className="ml-2 text-xs text-blue-600 dark:text-blue-400 hover:underline"
|
||||
>
|
||||
View
|
||||
</button>
|
||||
)}
|
||||
</p>
|
||||
)}
|
||||
{!result.success && (
|
||||
<p className="text-sm text-red-600 dark:text-red-400">{result.message}</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<div className="flex gap-4 mt-6">
|
||||
<Button
|
||||
onClick={() => router.push('/library')}
|
||||
>
|
||||
Go to Library
|
||||
</Button>
|
||||
<Button
|
||||
onClick={resetForm}
|
||||
variant="secondary"
|
||||
>
|
||||
Import Another File
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</ImportLayout>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<ImportLayout
|
||||
title="Import from File"
|
||||
description="Upload an EPUB, PDF, or ZIP file to import stories into your library"
|
||||
>
|
||||
{error && (
|
||||
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-4 mb-6">
|
||||
<p className="text-red-800 dark:text-red-200">{error}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<form onSubmit={handleSubmit} className="space-y-6">
|
||||
{/* File Upload */}
|
||||
<div className="theme-card theme-shadow rounded-lg p-6">
|
||||
<div className="mb-4">
|
||||
<h3 className="text-lg font-semibold theme-header mb-2">Select File</h3>
|
||||
<p className="theme-text">
|
||||
Choose an EPUB, PDF, or ZIP file from your device to import.
|
||||
</p>
|
||||
</div>
|
||||
<div className="space-y-4">
|
||||
<div>
|
||||
<label htmlFor="import-file" className="block text-sm font-medium theme-header mb-1">
|
||||
File (EPUB, PDF, or ZIP)
|
||||
</label>
|
||||
<Input
|
||||
id="import-file"
|
||||
type="file"
|
||||
accept=".epub,.pdf,.zip,application/epub+zip,application/pdf,application/zip"
|
||||
onChange={handleFileChange}
|
||||
disabled={isLoading || isValidating}
|
||||
/>
|
||||
</div>
|
||||
|
||||
{selectedFile && (
|
||||
<div className="flex items-center gap-2">
|
||||
<DocumentArrowUpIcon className="h-5 w-5 theme-text" />
|
||||
<span className="text-sm theme-text">
|
||||
{selectedFile.name} ({(selectedFile.size / 1024 / 1024).toFixed(2)} MB)
|
||||
{fileType && <span className="ml-2 inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-blue-100 dark:bg-blue-900/20 text-blue-800 dark:text-blue-200">
|
||||
{fileType.toUpperCase()}
|
||||
</span>}
|
||||
</span>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{isValidating && (
|
||||
<div className="text-sm theme-accent">
|
||||
Validating file...
|
||||
</div>
|
||||
)}
|
||||
|
||||
{validationResult && fileType !== 'zip' && (
|
||||
<div className="text-sm">
|
||||
{validationResult.valid ? (
|
||||
<span className="inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-green-100 dark:bg-green-900/20 text-green-800 dark:text-green-200">
|
||||
Valid {fileType?.toUpperCase()}
|
||||
</span>
|
||||
) : (
|
||||
<span className="inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-red-100 dark:bg-red-900/20 text-red-800 dark:text-red-200">
|
||||
Invalid {fileType?.toUpperCase()}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* ZIP File Selection */}
|
||||
{fileType === 'zip' && zipAnalysis?.success && zipAnalysis.files && (
|
||||
<div className="theme-card theme-shadow rounded-lg p-6">
|
||||
<div className="mb-4 flex items-center justify-between">
|
||||
<div>
|
||||
<h3 className="text-lg font-semibold theme-header mb-2">Select Files to Import</h3>
|
||||
<p className="theme-text">
|
||||
{zipAnalysis.validFiles} valid files found in ZIP ({zipAnalysis.totalFiles} total)
|
||||
</p>
|
||||
</div>
|
||||
<div className="flex gap-2">
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
size="sm"
|
||||
onClick={selectAllZipFiles}
|
||||
>
|
||||
Select All
|
||||
</Button>
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
size="sm"
|
||||
onClick={deselectAllZipFiles}
|
||||
>
|
||||
Deselect All
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
<div className="space-y-2 max-h-96 overflow-y-auto">
|
||||
{zipAnalysis.files.map((file, index) => (
|
||||
<div
|
||||
key={index}
|
||||
className={`p-3 rounded-lg border ${
|
||||
file.error
|
||||
? 'bg-red-50 dark:bg-red-900/10 border-red-200 dark:border-red-800 opacity-50'
|
||||
: selectedZipFiles.has(file.fileName)
|
||||
? 'bg-blue-50 dark:bg-blue-900/10 border-blue-300 dark:border-blue-700'
|
||||
: 'theme-card border-gray-200 dark:border-gray-700'
|
||||
}`}
|
||||
>
|
||||
<div className="flex items-start gap-3">
|
||||
{!file.error && (
|
||||
<input
|
||||
type="checkbox"
|
||||
checked={selectedZipFiles.has(file.fileName)}
|
||||
onChange={() => toggleZipFileSelection(file.fileName)}
|
||||
className="mt-1"
|
||||
/>
|
||||
)}
|
||||
<div className="flex-1">
|
||||
<p className="font-medium theme-header">{file.fileName}</p>
|
||||
<p className="text-xs theme-text mt-1">
|
||||
{file.fileType} • {(file.fileSize / 1024).toFixed(2)} KB
|
||||
{file.extractedTitle && ` • ${file.extractedTitle}`}
|
||||
</p>
|
||||
{file.error && (
|
||||
<p className="text-xs text-red-600 dark:text-red-400 mt-1">{file.error}</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Import Options - Show for all file types */}
|
||||
{fileType && (!zipAnalysis || (zipAnalysis && selectedZipFiles.size > 0)) && (
|
||||
<div className="theme-card theme-shadow rounded-lg p-6">
|
||||
<div className="mb-4">
|
||||
<h3 className="text-lg font-semibold theme-header mb-2">Import Options</h3>
|
||||
<p className="theme-text">
|
||||
Configure how the {fileType === 'zip' ? 'files' : 'file'} should be imported.
|
||||
{fileType === 'zip' && ' These settings apply to all selected files.'}
|
||||
</p>
|
||||
</div>
|
||||
<div className="space-y-4">
|
||||
<AuthorSelector
|
||||
value={authorName}
|
||||
onChange={handleAuthorChange}
|
||||
placeholder={fileType === 'epub' ? 'Leave empty to use file metadata' : 'Required for PDF import'}
|
||||
required={fileType === 'pdf'}
|
||||
label={`Author${fileType === 'pdf' ? ' *' : ''}${fileType === 'zip' ? ' (Default)' : ''}`}
|
||||
error={fileType === 'pdf' && !authorName ? 'PDF import requires an author name. Select an existing author or enter a new one.' : undefined}
|
||||
/>
|
||||
|
||||
<SeriesSelector
|
||||
value={seriesName}
|
||||
onChange={handleSeriesChange}
|
||||
placeholder="Optional: Add to a series"
|
||||
label={`Series${fileType === 'zip' ? ' (Default)' : ''}`}
|
||||
authorId={authorId}
|
||||
/>
|
||||
|
||||
{seriesName && (
|
||||
<div>
|
||||
<label htmlFor="series-volume" className="block text-sm font-medium theme-header mb-1">Series Volume</label>
|
||||
<Input
|
||||
id="series-volume"
|
||||
type="number"
|
||||
value={seriesVolume}
|
||||
onChange={(e) => setSeriesVolume(e.target.value)}
|
||||
placeholder="Volume number in series"
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div>
|
||||
<label htmlFor="tags" className="block text-sm font-medium theme-header mb-1">
|
||||
Tags {fileType === 'zip' && '(Default)'}
|
||||
</label>
|
||||
<Input
|
||||
id="tags"
|
||||
value={tags}
|
||||
onChange={(e) => setTags(e.target.value)}
|
||||
placeholder="Comma-separated tags (e.g., fantasy, adventure, romance)"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
{fileType === 'epub' && (
|
||||
<div className="flex items-center">
|
||||
<input
|
||||
type="checkbox"
|
||||
id="preserve-reading-position"
|
||||
checked={preserveReadingPosition}
|
||||
onChange={(e) => setPreserveReadingPosition(e.target.checked)}
|
||||
className="mr-2"
|
||||
/>
|
||||
<label htmlFor="preserve-reading-position" className="text-sm theme-text">
|
||||
Preserve reading position from EPUB metadata
|
||||
</label>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{(fileType === 'pdf' || fileType === 'zip') && (
|
||||
<div className="flex items-center">
|
||||
<input
|
||||
type="checkbox"
|
||||
id="extract-images"
|
||||
checked={extractImages}
|
||||
onChange={(e) => setExtractImages(e.target.checked)}
|
||||
className="mr-2"
|
||||
/>
|
||||
<label htmlFor="extract-images" className="text-sm theme-text">
|
||||
Extract and store embedded images from PDFs
|
||||
</label>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex items-center">
|
||||
<input
|
||||
type="checkbox"
|
||||
id="create-missing-author"
|
||||
checked={createMissingAuthor}
|
||||
onChange={(e) => setCreateMissingAuthor(e.target.checked)}
|
||||
className="mr-2"
|
||||
/>
|
||||
<label htmlFor="create-missing-author" className="text-sm theme-text">
|
||||
Create author if not found
|
||||
</label>
|
||||
</div>
|
||||
|
||||
<div className="flex items-center">
|
||||
<input
|
||||
type="checkbox"
|
||||
id="create-missing-series"
|
||||
checked={createMissingSeries}
|
||||
onChange={(e) => setCreateMissingSeries(e.target.checked)}
|
||||
className="mr-2"
|
||||
/>
|
||||
<label htmlFor="create-missing-series" className="text-sm theme-text">
|
||||
Create series if not found
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Submit Button */}
|
||||
{fileType && fileType !== 'zip' && (
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={!selectedFile || isLoading || isValidating || (validationResult && !validationResult.valid)}
|
||||
loading={isLoading}
|
||||
>
|
||||
{isLoading ? 'Importing...' : `Import ${fileType.toUpperCase()}`}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{fileType === 'zip' && zipAnalysis?.success && (
|
||||
<div className="flex justify-end">
|
||||
<Button
|
||||
type="submit"
|
||||
disabled={selectedZipFiles.size === 0 || isLoading}
|
||||
loading={isLoading}
|
||||
>
|
||||
{isLoading ? 'Importing...' : `Import ${selectedZipFiles.size} File${selectedZipFiles.size !== 1 ? 's' : ''}`}
|
||||
</Button>
|
||||
</div>
|
||||
)}
|
||||
</form>
|
||||
</ImportLayout>
|
||||
);
|
||||
}
|
||||
@@ -13,6 +13,7 @@ import SidebarLayout from '../../components/library/SidebarLayout';
|
||||
import ToolbarLayout from '../../components/library/ToolbarLayout';
|
||||
import MinimalLayout from '../../components/library/MinimalLayout';
|
||||
import { useLibraryLayout } from '../../hooks/useLibraryLayout';
|
||||
import { useLibraryFilters, clearLibraryFilters } from '../../hooks/useLibraryFilters';
|
||||
|
||||
type ViewMode = 'grid' | 'list';
|
||||
type SortOption = 'createdAt' | 'title' | 'authorName' | 'rating' | 'wordCount' | 'lastReadAt';
|
||||
@@ -26,17 +27,21 @@ export default function LibraryContent() {
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [searchLoading, setSearchLoading] = useState(false);
|
||||
const [randomLoading, setRandomLoading] = useState(false);
|
||||
const [searchQuery, setSearchQuery] = useState('');
|
||||
const [selectedTags, setSelectedTags] = useState<string[]>([]);
|
||||
const [viewMode, setViewMode] = useState<ViewMode>('list');
|
||||
const [sortOption, setSortOption] = useState<SortOption>('lastReadAt');
|
||||
const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>('desc');
|
||||
|
||||
// Persisted filter state (survives navigation within session)
|
||||
const [searchQuery, setSearchQuery] = useLibraryFilters<string>('searchQuery', '');
|
||||
const [selectedTags, setSelectedTags] = useLibraryFilters<string[]>('selectedTags', []);
|
||||
const [viewMode, setViewMode] = useLibraryFilters<ViewMode>('viewMode', 'list');
|
||||
const [sortOption, setSortOption] = useLibraryFilters<SortOption>('sortOption', 'lastReadAt');
|
||||
const [sortDirection, setSortDirection] = useLibraryFilters<'asc' | 'desc'>('sortDirection', 'desc');
|
||||
const [advancedFilters, setAdvancedFilters] = useLibraryFilters<AdvancedFilters>('advancedFilters', {});
|
||||
|
||||
// Non-persisted state (resets on navigation)
|
||||
const [page, setPage] = useState(0);
|
||||
const [totalPages, setTotalPages] = useState(1);
|
||||
const [totalElements, setTotalElements] = useState(0);
|
||||
const [refreshTrigger, setRefreshTrigger] = useState(0);
|
||||
const [urlParamsProcessed, setUrlParamsProcessed] = useState(false);
|
||||
const [advancedFilters, setAdvancedFilters] = useState<AdvancedFilters>({});
|
||||
|
||||
// Initialize filters from URL parameters
|
||||
useEffect(() => {
|
||||
@@ -209,11 +214,15 @@ export default function LibraryContent() {
|
||||
}
|
||||
};
|
||||
|
||||
const clearFilters = () => {
|
||||
const handleClearFilters = () => {
|
||||
// Clear state
|
||||
setSearchQuery('');
|
||||
setSelectedTags([]);
|
||||
setAdvancedFilters({});
|
||||
setPage(0);
|
||||
// Clear sessionStorage
|
||||
clearLibraryFilters();
|
||||
// Trigger refresh
|
||||
setRefreshTrigger(prev => prev + 1);
|
||||
};
|
||||
|
||||
@@ -266,7 +275,7 @@ export default function LibraryContent() {
|
||||
onSortDirectionToggle: handleSortDirectionToggle,
|
||||
onAdvancedFiltersChange: handleAdvancedFiltersChange,
|
||||
onRandomStory: handleRandomStory,
|
||||
onClearFilters: clearFilters,
|
||||
onClearFilters: handleClearFilters,
|
||||
};
|
||||
|
||||
const renderContent = () => {
|
||||
@@ -280,7 +289,7 @@ export default function LibraryContent() {
|
||||
}
|
||||
</p>
|
||||
{searchQuery || selectedTags.length > 0 || Object.values(advancedFilters).some(v => v !== undefined && v !== '' && v !== 'all' && v !== false) ? (
|
||||
<Button variant="ghost" onClick={clearFilters}>
|
||||
<Button variant="ghost" onClick={handleClearFilters}>
|
||||
Clear Filters
|
||||
</Button>
|
||||
) : (
|
||||
|
||||
@@ -137,9 +137,10 @@ export default function TagMaintenancePage() {
|
||||
// Reload tags and reset selection
|
||||
await loadTags();
|
||||
setSelectedTagIds(new Set());
|
||||
} catch (error) {
|
||||
} catch (error: any) {
|
||||
console.error('Failed to delete tags:', error);
|
||||
alert('Failed to delete some tags. Please try again.');
|
||||
const errorMessage = error.response?.data?.error || error.message || 'Failed to delete some tags. Please try again.';
|
||||
alert(errorMessage);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
491
frontend/src/app/statistics/page.tsx
Normal file
491
frontend/src/app/statistics/page.tsx
Normal file
@@ -0,0 +1,491 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import { useRouter } from 'next/navigation';
|
||||
import AppLayout from '@/components/layout/AppLayout';
|
||||
import { statisticsApi, getCurrentLibraryId } from '@/lib/api';
|
||||
import {
|
||||
LibraryOverviewStats,
|
||||
TopTagsStats,
|
||||
TopAuthorsStats,
|
||||
RatingStats,
|
||||
SourceDomainStats,
|
||||
ReadingProgressStats,
|
||||
ReadingActivityStats
|
||||
} from '@/types/api';
|
||||
|
||||
function StatisticsContent() {
|
||||
const router = useRouter();
|
||||
const [loading, setLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
// Statistics state
|
||||
const [overviewStats, setOverviewStats] = useState<LibraryOverviewStats | null>(null);
|
||||
const [topTags, setTopTags] = useState<TopTagsStats | null>(null);
|
||||
const [topAuthors, setTopAuthors] = useState<TopAuthorsStats | null>(null);
|
||||
const [ratingStats, setRatingStats] = useState<RatingStats | null>(null);
|
||||
const [sourceDomains, setSourceDomains] = useState<SourceDomainStats | null>(null);
|
||||
const [readingProgress, setReadingProgress] = useState<ReadingProgressStats | null>(null);
|
||||
const [readingActivity, setReadingActivity] = useState<ReadingActivityStats | null>(null);
|
||||
|
||||
useEffect(() => {
|
||||
loadStatistics();
|
||||
}, []);
|
||||
|
||||
const loadStatistics = async () => {
|
||||
try {
|
||||
setLoading(true);
|
||||
setError(null);
|
||||
|
||||
const libraryId = getCurrentLibraryId();
|
||||
if (!libraryId) {
|
||||
router.push('/library');
|
||||
return;
|
||||
}
|
||||
|
||||
// Load all statistics in parallel
|
||||
const [overview, tags, authors, ratings, domains, progress, activity] = await Promise.all([
|
||||
statisticsApi.getOverviewStatistics(libraryId),
|
||||
statisticsApi.getTopTags(libraryId, 20),
|
||||
statisticsApi.getTopAuthors(libraryId, 10),
|
||||
statisticsApi.getRatingStats(libraryId),
|
||||
statisticsApi.getSourceDomainStats(libraryId, 10),
|
||||
statisticsApi.getReadingProgress(libraryId),
|
||||
statisticsApi.getReadingActivity(libraryId),
|
||||
]);
|
||||
|
||||
setOverviewStats(overview);
|
||||
setTopTags(tags);
|
||||
setTopAuthors(authors);
|
||||
setRatingStats(ratings);
|
||||
setSourceDomains(domains);
|
||||
setReadingProgress(progress);
|
||||
setReadingActivity(activity);
|
||||
} catch (err) {
|
||||
console.error('Failed to load statistics:', err);
|
||||
setError('Failed to load statistics. Please try again.');
|
||||
} finally {
|
||||
setLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const formatNumber = (num: number): string => {
|
||||
return num.toLocaleString();
|
||||
};
|
||||
|
||||
const formatTime = (minutes: number): string => {
|
||||
const hours = Math.floor(minutes / 60);
|
||||
const mins = Math.round(minutes % 60);
|
||||
|
||||
if (hours > 24) {
|
||||
const days = Math.floor(hours / 24);
|
||||
const remainingHours = hours % 24;
|
||||
return `${days}d ${remainingHours}h`;
|
||||
}
|
||||
|
||||
if (hours > 0) {
|
||||
return `${hours}h ${mins}m`;
|
||||
}
|
||||
|
||||
return `${mins}m`;
|
||||
};
|
||||
|
||||
if (loading) {
|
||||
return (
|
||||
<div className="container mx-auto px-4 py-8">
|
||||
<div className="flex items-center justify-center min-h-[400px]">
|
||||
<div className="text-center">
|
||||
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto mb-4"></div>
|
||||
<p className="text-gray-600 dark:text-gray-400">Loading statistics...</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
if (error) {
|
||||
return (
|
||||
<div className="container mx-auto px-4 py-8">
|
||||
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-6">
|
||||
<h3 className="text-lg font-semibold text-red-800 dark:text-red-200 mb-2">Error</h3>
|
||||
<p className="text-red-600 dark:text-red-400">{error}</p>
|
||||
<button
|
||||
onClick={loadStatistics}
|
||||
className="mt-4 px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition-colors"
|
||||
>
|
||||
Try Again
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="container mx-auto px-4 py-8">
|
||||
<div className="mb-8">
|
||||
<h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">Library Statistics</h1>
|
||||
<p className="text-gray-600 dark:text-gray-400">
|
||||
Insights and analytics for your story collection
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Collection Overview */}
|
||||
{overviewStats && (
|
||||
<section className="mb-8">
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Collection Overview</h2>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
|
||||
<StatCard title="Total Stories" value={formatNumber(overviewStats.totalStories)} />
|
||||
<StatCard title="Total Authors" value={formatNumber(overviewStats.totalAuthors)} />
|
||||
<StatCard title="Total Series" value={formatNumber(overviewStats.totalSeries)} />
|
||||
<StatCard title="Total Tags" value={formatNumber(overviewStats.totalTags)} />
|
||||
<StatCard title="Total Collections" value={formatNumber(overviewStats.totalCollections)} />
|
||||
<StatCard title="Source Domains" value={formatNumber(overviewStats.uniqueSourceDomains)} />
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Content Metrics */}
|
||||
{overviewStats && (
|
||||
<section className="mb-8">
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Content Metrics</h2>
|
||||
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
|
||||
<StatCard
|
||||
title="Total Words"
|
||||
value={formatNumber(overviewStats.totalWordCount)}
|
||||
subtitle={`${formatTime(overviewStats.totalReadingTimeMinutes)} reading time`}
|
||||
/>
|
||||
<StatCard
|
||||
title="Average Words per Story"
|
||||
value={formatNumber(Math.round(overviewStats.averageWordsPerStory))}
|
||||
subtitle={`${formatTime(overviewStats.averageReadingTimeMinutes)} avg reading time`}
|
||||
/>
|
||||
{overviewStats.longestStory && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">Longest Story</h3>
|
||||
<p className="text-2xl font-bold text-gray-900 dark:text-white mb-1">
|
||||
{formatNumber(overviewStats.longestStory.wordCount)} words
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-400 truncate" title={overviewStats.longestStory.title}>
|
||||
{overviewStats.longestStory.title}
|
||||
</p>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-500">
|
||||
by {overviewStats.longestStory.authorName}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
{overviewStats.shortestStory && (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">Shortest Story</h3>
|
||||
<p className="text-2xl font-bold text-gray-900 dark:text-white mb-1">
|
||||
{formatNumber(overviewStats.shortestStory.wordCount)} words
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-400 truncate" title={overviewStats.shortestStory.title}>
|
||||
{overviewStats.shortestStory.title}
|
||||
</p>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-500">
|
||||
by {overviewStats.shortestStory.authorName}
|
||||
</p>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Reading Progress & Activity - Side by side */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
|
||||
{/* Reading Progress */}
|
||||
{readingProgress && (
|
||||
<section>
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Reading Progress</h2>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div className="mb-6">
|
||||
<div className="flex justify-between items-center mb-2">
|
||||
<span className="text-sm font-medium text-gray-600 dark:text-gray-400">
|
||||
{formatNumber(readingProgress.readStories)} of {formatNumber(readingProgress.totalStories)} stories read
|
||||
</span>
|
||||
<span className="text-sm font-semibold text-blue-600 dark:text-blue-400">
|
||||
{readingProgress.percentageRead.toFixed(1)}%
|
||||
</span>
|
||||
</div>
|
||||
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-3">
|
||||
<div
|
||||
className="bg-blue-600 h-3 rounded-full transition-all duration-500"
|
||||
style={{ width: `${readingProgress.percentageRead}%` }}
|
||||
></div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="grid grid-cols-2 gap-4">
|
||||
<div>
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">Words Read</p>
|
||||
<p className="text-xl font-semibold text-green-600 dark:text-green-400">
|
||||
{formatNumber(readingProgress.totalWordsRead)}
|
||||
</p>
|
||||
</div>
|
||||
<div>
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">Words Remaining</p>
|
||||
<p className="text-xl font-semibold text-orange-600 dark:text-orange-400">
|
||||
{formatNumber(readingProgress.totalWordsUnread)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Reading Activity - Last Week */}
|
||||
{readingActivity && (
|
||||
<section>
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Last Week Activity</h2>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div className="grid grid-cols-3 gap-4 mb-6">
|
||||
<div className="text-center">
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">Stories</p>
|
||||
<p className="text-2xl font-bold text-gray-900 dark:text-white">
|
||||
{formatNumber(readingActivity.storiesReadLastWeek)}
|
||||
</p>
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">Words</p>
|
||||
<p className="text-2xl font-bold text-gray-900 dark:text-white">
|
||||
{formatNumber(readingActivity.wordsReadLastWeek)}
|
||||
</p>
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">Time</p>
|
||||
<p className="text-2xl font-bold text-gray-900 dark:text-white">
|
||||
{formatTime(readingActivity.readingTimeMinutesLastWeek)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Daily Activity Chart */}
|
||||
<div className="space-y-2">
|
||||
<p className="text-sm font-medium text-gray-600 dark:text-gray-400 mb-3">Daily Breakdown</p>
|
||||
{readingActivity.dailyActivity.map((day) => {
|
||||
const maxWords = Math.max(...readingActivity.dailyActivity.map(d => d.wordsRead), 1);
|
||||
const percentage = (day.wordsRead / maxWords) * 100;
|
||||
|
||||
return (
|
||||
<div key={day.date} className="flex items-center gap-3">
|
||||
<span className="text-xs text-gray-500 dark:text-gray-400 w-20">
|
||||
{new Date(day.date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' })}
|
||||
</span>
|
||||
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-6 relative">
|
||||
<div
|
||||
className="bg-blue-500 h-6 rounded-full transition-all duration-300"
|
||||
style={{ width: `${percentage}%` }}
|
||||
></div>
|
||||
{day.storiesRead > 0 && (
|
||||
<span className="absolute inset-0 flex items-center justify-center text-xs font-medium text-gray-700 dark:text-gray-300">
|
||||
{day.storiesRead} {day.storiesRead === 1 ? 'story' : 'stories'}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Ratings & Source Domains - Side by side */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
|
||||
{/* Rating Statistics */}
|
||||
{ratingStats && (
|
||||
<section>
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Rating Statistics</h2>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div className="text-center mb-6">
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400 mb-1">Average Rating</p>
|
||||
<p className="text-4xl font-bold text-yellow-500">
|
||||
{ratingStats.averageRating.toFixed(1)} ⭐
|
||||
</p>
|
||||
<p className="text-sm text-gray-600 dark:text-gray-400 mt-2">
|
||||
{formatNumber(ratingStats.totalRatedStories)} rated • {formatNumber(ratingStats.totalUnratedStories)} unrated
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Rating Distribution */}
|
||||
<div className="space-y-2">
|
||||
{[5, 4, 3, 2, 1].map(rating => {
|
||||
const count = ratingStats.ratingDistribution[rating] || 0;
|
||||
const percentage = ratingStats.totalRatedStories > 0
|
||||
? (count / ratingStats.totalRatedStories) * 100
|
||||
: 0;
|
||||
|
||||
return (
|
||||
<div key={rating} className="flex items-center gap-2">
|
||||
<span className="text-sm font-medium text-gray-600 dark:text-gray-400 w-12">
|
||||
{rating} ⭐
|
||||
</span>
|
||||
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-4">
|
||||
<div
|
||||
className="bg-yellow-500 h-4 rounded-full transition-all duration-300"
|
||||
style={{ width: `${percentage}%` }}
|
||||
></div>
|
||||
</div>
|
||||
<span className="text-sm text-gray-600 dark:text-gray-400 w-16 text-right">
|
||||
{formatNumber(count)}
|
||||
</span>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Source Domains */}
|
||||
{sourceDomains && (
|
||||
<section>
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Source Domains</h2>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div className="grid grid-cols-2 gap-4 mb-6">
|
||||
<div className="text-center">
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">With Source</p>
|
||||
<p className="text-2xl font-bold text-green-600 dark:text-green-400">
|
||||
{formatNumber(sourceDomains.storiesWithSource)}
|
||||
</p>
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<p className="text-sm text-gray-500 dark:text-gray-400">No Source</p>
|
||||
<p className="text-2xl font-bold text-gray-500 dark:text-gray-400">
|
||||
{formatNumber(sourceDomains.storiesWithoutSource)}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
<p className="text-sm font-medium text-gray-600 dark:text-gray-400">Top Domains</p>
|
||||
{sourceDomains.topDomains.slice(0, 5).map((domain, index) => (
|
||||
<div key={domain.domain} className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2 flex-1 min-w-0">
|
||||
<span className="text-sm font-medium text-gray-500 dark:text-gray-400 w-5">
|
||||
{index + 1}.
|
||||
</span>
|
||||
<span className="text-sm text-gray-700 dark:text-gray-300 truncate" title={domain.domain}>
|
||||
{domain.domain}
|
||||
</span>
|
||||
</div>
|
||||
<span className="text-sm font-semibold text-blue-600 dark:text-blue-400 ml-2">
|
||||
{formatNumber(domain.storyCount)}
|
||||
</span>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{/* Top Tags & Top Authors - Side by side */}
|
||||
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
|
||||
{/* Top Tags */}
|
||||
{topTags && (
|
||||
<section>
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Most Used Tags</h2>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<div className="space-y-3">
|
||||
{topTags.topTags.slice(0, 10).map((tag, index) => {
|
||||
const maxCount = topTags.topTags[0]?.storyCount || 1;
|
||||
const percentage = (tag.storyCount / maxCount) * 100;
|
||||
|
||||
return (
|
||||
<div key={tag.tagName} className="flex items-center gap-3">
|
||||
<span className="text-sm font-medium text-gray-500 dark:text-gray-400 w-6">
|
||||
{index + 1}
|
||||
</span>
|
||||
<div className="flex-1">
|
||||
<div className="flex items-center justify-between mb-1">
|
||||
<span className="text-sm font-medium text-gray-700 dark:text-gray-300">
|
||||
{tag.tagName}
|
||||
</span>
|
||||
<span className="text-sm text-gray-600 dark:text-gray-400">
|
||||
{formatNumber(tag.storyCount)}
|
||||
</span>
|
||||
</div>
|
||||
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-2">
|
||||
<div
|
||||
className="bg-purple-500 h-2 rounded-full transition-all duration-300"
|
||||
style={{ width: `${percentage}%` }}
|
||||
></div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
})}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
|
||||
{/* Top Authors */}
|
||||
{topAuthors && (
|
||||
<section>
|
||||
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Top Authors</h2>
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
{/* Tab switcher */}
|
||||
<div className="flex gap-2 mb-4">
|
||||
<button
|
||||
onClick={() => {/* Could add tab switching if needed */}}
|
||||
className="flex-1 px-4 py-2 text-sm font-medium bg-blue-100 dark:bg-blue-900/30 text-blue-700 dark:text-blue-300 rounded-lg"
|
||||
>
|
||||
By Stories
|
||||
</button>
|
||||
<button
|
||||
onClick={() => {/* Could add tab switching if needed */}}
|
||||
className="flex-1 px-4 py-2 text-sm font-medium text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg"
|
||||
>
|
||||
By Words
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<div className="space-y-3">
|
||||
{topAuthors.topAuthorsByStories.slice(0, 5).map((author, index) => (
|
||||
<div key={author.authorId} className="flex items-center justify-between p-3 bg-gray-50 dark:bg-gray-700/50 rounded-lg">
|
||||
<div className="flex items-center gap-3 flex-1 min-w-0">
|
||||
<span className="text-lg font-bold text-gray-400 dark:text-gray-500 w-6">
|
||||
{index + 1}
|
||||
</span>
|
||||
<div className="flex-1 min-w-0">
|
||||
<p className="text-sm font-medium text-gray-900 dark:text-white truncate" title={author.authorName}>
|
||||
{author.authorName}
|
||||
</p>
|
||||
<p className="text-xs text-gray-500 dark:text-gray-400">
|
||||
{formatNumber(author.storyCount)} stories • {formatNumber(author.totalWords)} words
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
|
||||
export default function StatisticsPage() {
|
||||
return (
|
||||
<AppLayout>
|
||||
<StatisticsContent />
|
||||
</AppLayout>
|
||||
);
|
||||
}
|
||||
|
||||
// Reusable stat card component
|
||||
function StatCard({ title, value, subtitle }: { title: string; value: string; subtitle?: string }) {
|
||||
return (
|
||||
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
|
||||
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">{title}</h3>
|
||||
<p className="text-2xl font-bold text-gray-900 dark:text-white">{value}</p>
|
||||
{subtitle && (
|
||||
<p className="text-sm text-gray-600 dark:text-gray-400 mt-1">{subtitle}</p>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -192,7 +192,7 @@ export default function EditStoryPage() {
|
||||
await storyApi.uploadCover(storyId, coverImage);
|
||||
}
|
||||
|
||||
router.push(`/stories/${storyId}`);
|
||||
router.push(`/stories/${storyId}/detail`);
|
||||
} catch (error: any) {
|
||||
console.error('Failed to update story:', error);
|
||||
const errorMessage = error.response?.data?.message || 'Failed to update story';
|
||||
|
||||
@@ -107,8 +107,8 @@ export default function StoryReadingPage() {
|
||||
(scrolled - contentTop + windowHeight * 0.3) / contentHeight
|
||||
));
|
||||
|
||||
// Convert to character position in the plain text content
|
||||
const textLength = story.contentPlain?.length || story.contentHtml?.length || 0;
|
||||
// Convert to character position in the HTML content (ALWAYS use contentHtml for consistency)
|
||||
const textLength = story.contentHtml?.length || 0;
|
||||
return Math.floor(scrollRatio * textLength);
|
||||
}, [story]);
|
||||
|
||||
@@ -116,7 +116,8 @@ export default function StoryReadingPage() {
|
||||
const calculateReadingPercentage = useCallback((currentPosition: number): number => {
|
||||
if (!story) return 0;
|
||||
|
||||
const totalLength = story.contentPlain?.length || story.contentHtml?.length || 0;
|
||||
// ALWAYS use contentHtml for consistency with position calculation
|
||||
const totalLength = story.contentHtml?.length || 0;
|
||||
if (totalLength === 0) return 0;
|
||||
|
||||
return Math.round((currentPosition / totalLength) * 100);
|
||||
@@ -126,7 +127,8 @@ export default function StoryReadingPage() {
|
||||
const scrollToCharacterPosition = useCallback((position: number) => {
|
||||
if (!contentRef.current || !story || hasScrolledToPosition) return;
|
||||
|
||||
const textLength = story.contentPlain?.length || story.contentHtml?.length || 0;
|
||||
// ALWAYS use contentHtml for consistency with position calculation
|
||||
const textLength = story.contentHtml?.length || 0;
|
||||
if (textLength === 0 || position === 0) return;
|
||||
|
||||
const ratio = position / textLength;
|
||||
|
||||
@@ -27,9 +27,9 @@ export default function Header() {
|
||||
description: 'Import a single story from a website'
|
||||
},
|
||||
{
|
||||
href: '/import/epub',
|
||||
label: 'Import EPUB',
|
||||
description: 'Import a story from an EPUB file'
|
||||
href: '/import/file',
|
||||
label: 'Import from File',
|
||||
description: 'Import from EPUB, PDF, or ZIP file'
|
||||
},
|
||||
{
|
||||
href: '/import/bulk',
|
||||
@@ -81,6 +81,12 @@ export default function Header() {
|
||||
>
|
||||
Authors
|
||||
</Link>
|
||||
<Link
|
||||
href="/statistics"
|
||||
className="theme-text hover:theme-accent transition-colors font-medium"
|
||||
>
|
||||
Statistics
|
||||
</Link>
|
||||
<Dropdown
|
||||
trigger="Add Story"
|
||||
items={addStoryItems}
|
||||
@@ -153,6 +159,13 @@ export default function Header() {
|
||||
>
|
||||
Authors
|
||||
</Link>
|
||||
<Link
|
||||
href="/statistics"
|
||||
className="theme-text hover:theme-accent transition-colors font-medium px-2 py-1"
|
||||
onClick={() => setIsMenuOpen(false)}
|
||||
>
|
||||
Statistics
|
||||
</Link>
|
||||
<div className="px-2 py-1">
|
||||
<div className="font-medium theme-text mb-1">Add Story</div>
|
||||
<div className="pl-4 space-y-1">
|
||||
|
||||
@@ -31,10 +31,10 @@ const importTabs: ImportTab[] = [
|
||||
description: 'Import a single story from a website'
|
||||
},
|
||||
{
|
||||
id: 'epub',
|
||||
label: 'Import EPUB',
|
||||
href: '/import/epub',
|
||||
description: 'Import a story from an EPUB file'
|
||||
id: 'file',
|
||||
label: 'Import from File',
|
||||
href: '/import/file',
|
||||
description: 'Import from EPUB, PDF, or ZIP file'
|
||||
},
|
||||
{
|
||||
id: 'bulk',
|
||||
|
||||
@@ -72,16 +72,8 @@ export default function StoryCard({
|
||||
return new Date(dateString).toLocaleDateString();
|
||||
};
|
||||
|
||||
const calculateReadingPercentage = (story: Story): number => {
|
||||
if (!story.readingPosition) return 0;
|
||||
|
||||
const totalLength = story.contentPlain?.length || story.contentHtml?.length || 0;
|
||||
if (totalLength === 0) return 0;
|
||||
|
||||
return Math.round((story.readingPosition / totalLength) * 100);
|
||||
};
|
||||
|
||||
const readingPercentage = calculateReadingPercentage(story);
|
||||
// Use the pre-calculated percentage from the backend
|
||||
const readingPercentage = story.readingProgressPercentage || 0;
|
||||
|
||||
if (viewMode === 'list') {
|
||||
return (
|
||||
|
||||
@@ -129,7 +129,8 @@ export default function TagEditModal({ tag, isOpen, onClose, onSave, onDelete }:
|
||||
onDelete(tag);
|
||||
onClose();
|
||||
} catch (error: any) {
|
||||
setErrors({ submit: error.message });
|
||||
const errorMessage = error.response?.data?.error || error.message || 'Failed to delete tag';
|
||||
setErrors({ submit: errorMessage });
|
||||
} finally {
|
||||
setSaving(false);
|
||||
}
|
||||
|
||||
68
frontend/src/hooks/useLibraryFilters.ts
Normal file
68
frontend/src/hooks/useLibraryFilters.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import { useState, useEffect, Dispatch, SetStateAction } from 'react';
|
||||
|
||||
/**
|
||||
* Custom hook for persisting library filter state in sessionStorage.
|
||||
* Filters are preserved during the browser session but cleared when the tab is closed.
|
||||
*
|
||||
* @param key - Unique identifier for the filter value in sessionStorage
|
||||
* @param defaultValue - Default value if no stored value exists
|
||||
* @returns Tuple of [value, setValue] similar to useState
|
||||
*/
|
||||
export function useLibraryFilters<T>(
|
||||
key: string,
|
||||
defaultValue: T
|
||||
): [T, Dispatch<SetStateAction<T>>] {
|
||||
// Initialize state from sessionStorage or use default value
|
||||
const [value, setValue] = useState<T>(() => {
|
||||
// SSR safety: sessionStorage only available in browser
|
||||
if (typeof window === 'undefined') {
|
||||
return defaultValue;
|
||||
}
|
||||
|
||||
try {
|
||||
const stored = sessionStorage.getItem(`library_filter_${key}`);
|
||||
if (stored === null) {
|
||||
return defaultValue;
|
||||
}
|
||||
return JSON.parse(stored) as T;
|
||||
} catch (error) {
|
||||
console.warn(`Failed to parse sessionStorage value for library_filter_${key}:`, error);
|
||||
return defaultValue;
|
||||
}
|
||||
});
|
||||
|
||||
// Persist to sessionStorage whenever value changes
|
||||
useEffect(() => {
|
||||
if (typeof window === 'undefined') return;
|
||||
|
||||
try {
|
||||
sessionStorage.setItem(`library_filter_${key}`, JSON.stringify(value));
|
||||
} catch (error) {
|
||||
console.warn(`Failed to save to sessionStorage for library_filter_${key}:`, error);
|
||||
}
|
||||
}, [key, value]);
|
||||
|
||||
return [value, setValue];
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all library filters from sessionStorage.
|
||||
* Useful for "Clear Filters" button or when switching libraries.
|
||||
*/
|
||||
export function clearLibraryFilters(): void {
|
||||
if (typeof window === 'undefined') return;
|
||||
|
||||
try {
|
||||
// Get all sessionStorage keys
|
||||
const keys = Object.keys(sessionStorage);
|
||||
|
||||
// Remove only library filter keys
|
||||
keys.forEach(key => {
|
||||
if (key.startsWith('library_filter_')) {
|
||||
sessionStorage.removeItem(key);
|
||||
}
|
||||
});
|
||||
} catch (error) {
|
||||
console.warn('Failed to clear library filters from sessionStorage:', error);
|
||||
}
|
||||
}
|
||||
@@ -1096,6 +1096,42 @@ export const statisticsApi = {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/overview`);
|
||||
return response.data;
|
||||
},
|
||||
|
||||
getTopTags: async (libraryId: string, limit: number = 20): Promise<import('../types/api').TopTagsStats> => {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/top-tags`, {
|
||||
params: { limit }
|
||||
});
|
||||
return response.data;
|
||||
},
|
||||
|
||||
getTopAuthors: async (libraryId: string, limit: number = 10): Promise<import('../types/api').TopAuthorsStats> => {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/top-authors`, {
|
||||
params: { limit }
|
||||
});
|
||||
return response.data;
|
||||
},
|
||||
|
||||
getRatingStats: async (libraryId: string): Promise<import('../types/api').RatingStats> => {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/ratings`);
|
||||
return response.data;
|
||||
},
|
||||
|
||||
getSourceDomainStats: async (libraryId: string, limit: number = 10): Promise<import('../types/api').SourceDomainStats> => {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/source-domains`, {
|
||||
params: { limit }
|
||||
});
|
||||
return response.data;
|
||||
},
|
||||
|
||||
getReadingProgress: async (libraryId: string): Promise<import('../types/api').ReadingProgressStats> => {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/reading-progress`);
|
||||
return response.data;
|
||||
},
|
||||
|
||||
getReadingActivity: async (libraryId: string): Promise<import('../types/api').ReadingActivityStats> => {
|
||||
const response = await api.get(`/libraries/${libraryId}/statistics/reading-activity`);
|
||||
return response.data;
|
||||
},
|
||||
};
|
||||
|
||||
// Image utility - now library-aware
|
||||
|
||||
@@ -16,6 +16,7 @@ export interface Story {
|
||||
tags: Tag[];
|
||||
tagNames?: string[] | null; // Used in search results
|
||||
readingPosition?: number;
|
||||
readingProgressPercentage?: number; // Pre-calculated percentage (0-100) from backend
|
||||
lastReadAt?: string;
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
@@ -234,3 +235,70 @@ export interface StoryWordCount {
|
||||
wordCount: number;
|
||||
readingTimeMinutes: number;
|
||||
}
|
||||
|
||||
// Top Tags Statistics
|
||||
export interface TopTagsStats {
|
||||
topTags: TagStats[];
|
||||
}
|
||||
|
||||
export interface TagStats {
|
||||
tagName: string;
|
||||
storyCount: number;
|
||||
}
|
||||
|
||||
// Top Authors Statistics
|
||||
export interface TopAuthorsStats {
|
||||
topAuthorsByStories: AuthorStats[];
|
||||
topAuthorsByWords: AuthorStats[];
|
||||
}
|
||||
|
||||
export interface AuthorStats {
|
||||
authorId: string;
|
||||
authorName: string;
|
||||
storyCount: number;
|
||||
totalWords: number;
|
||||
}
|
||||
|
||||
// Rating Statistics
|
||||
export interface RatingStats {
|
||||
averageRating: number;
|
||||
totalRatedStories: number;
|
||||
totalUnratedStories: number;
|
||||
ratingDistribution: Record<number, number>; // rating -> count
|
||||
}
|
||||
|
||||
// Source Domain Statistics
|
||||
export interface SourceDomainStats {
|
||||
topDomains: DomainStats[];
|
||||
storiesWithSource: number;
|
||||
storiesWithoutSource: number;
|
||||
}
|
||||
|
||||
export interface DomainStats {
|
||||
domain: string;
|
||||
storyCount: number;
|
||||
}
|
||||
|
||||
// Reading Progress Statistics
|
||||
export interface ReadingProgressStats {
|
||||
totalStories: number;
|
||||
readStories: number;
|
||||
unreadStories: number;
|
||||
percentageRead: number;
|
||||
totalWordsRead: number;
|
||||
totalWordsUnread: number;
|
||||
}
|
||||
|
||||
// Reading Activity Statistics
|
||||
export interface ReadingActivityStats {
|
||||
storiesReadLastWeek: number;
|
||||
wordsReadLastWeek: number;
|
||||
readingTimeMinutesLastWeek: number;
|
||||
dailyActivity: DailyActivity[];
|
||||
}
|
||||
|
||||
export interface DailyActivity {
|
||||
date: string; // YYYY-MM-DD
|
||||
storiesRead: number;
|
||||
wordsRead: number;
|
||||
}
|
||||
File diff suppressed because one or more lines are too long
@@ -13,7 +13,7 @@ http {
|
||||
|
||||
server {
|
||||
listen 80;
|
||||
client_max_body_size 600M;
|
||||
client_max_body_size 4096M; # 4GB for large backup uploads
|
||||
|
||||
# Frontend routes
|
||||
location / {
|
||||
@@ -55,8 +55,8 @@ http {
|
||||
proxy_connect_timeout 900s;
|
||||
proxy_send_timeout 900s;
|
||||
proxy_read_timeout 900s;
|
||||
# Large upload settings
|
||||
client_max_body_size 600M;
|
||||
# Large upload settings (4GB for backups)
|
||||
client_max_body_size 4096M;
|
||||
proxy_request_buffering off;
|
||||
proxy_max_temp_file_size 0;
|
||||
}
|
||||
|
||||
@@ -86,6 +86,7 @@
|
||||
<!-- Reading Status Fields -->
|
||||
<field name="isRead" type="boolean" indexed="true" stored="true"/>
|
||||
<field name="readingPosition" type="pint" indexed="true" stored="true"/>
|
||||
<field name="readingProgressPercentage" type="pint" indexed="true" stored="true"/>
|
||||
<field name="lastReadAt" type="pdate" indexed="true" stored="true"/>
|
||||
<field name="lastRead" type="pdate" indexed="true" stored="true"/>
|
||||
|
||||
@@ -112,6 +113,13 @@
|
||||
<field name="searchScore" type="pdouble" indexed="false" stored="true"/>
|
||||
<field name="highlights" type="strings" indexed="false" stored="true"/>
|
||||
|
||||
<!-- Statistics-specific Fields -->
|
||||
<field name="hasDescription" type="boolean" indexed="true" stored="true"/>
|
||||
<field name="hasCoverImage" type="boolean" indexed="true" stored="true"/>
|
||||
<field name="hasRating" type="boolean" indexed="true" stored="true"/>
|
||||
<field name="sourceDomain" type="string" indexed="true" stored="true"/>
|
||||
<field name="tagCount" type="pint" indexed="true" stored="true"/>
|
||||
|
||||
<!-- Combined search field for general queries -->
|
||||
<field name="text" type="text_general" indexed="true" stored="false" multiValued="true"/>
|
||||
|
||||
|
||||
Reference in New Issue
Block a user