Compare commits
58 Commits
feature/co
...
feature/ri
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
b1dbd85346 | ||
|
|
aae8f8926b | ||
|
|
f1773873d4 | ||
|
|
54df3c471e | ||
|
|
64f97f5648 | ||
|
|
c0b3ae3b72 | ||
|
|
e5596b5a17 | ||
|
|
c7b516be31 | ||
|
|
c92308c24a | ||
|
|
f92dcc5314 | ||
|
|
702fcb33c1 | ||
|
|
11b2a8b071 | ||
|
|
d1289bd616 | ||
|
|
15708b5ab2 | ||
|
|
a660056003 | ||
|
|
35a5825e76 | ||
|
|
87a4999ffe | ||
|
|
4ee5fa2330 | ||
|
|
6128d61349 | ||
|
|
5e347f2e2e | ||
|
|
8eb126a304 | ||
|
|
3dc02420fe | ||
|
|
241a15a174 | ||
|
|
6b97c0a70f | ||
|
|
e952241e3c | ||
|
|
65f1c6edc7 | ||
|
|
40fe3fdb80 | ||
|
|
95ce5fb532 | ||
|
|
1a99d9830d | ||
|
|
6b83783381 | ||
|
|
460ec358ca | ||
|
|
1d14d3d7aa | ||
|
|
4357351ec8 | ||
|
|
4ab03953ae | ||
|
|
142d8328c2 | ||
|
|
c46108c317 | ||
|
|
75c207970d | ||
|
|
3b22d155db | ||
|
|
51e3d20c24 | ||
|
|
5d195b63ef | ||
|
|
5b3a9d183e | ||
|
|
379c8c170f | ||
|
|
090b858a54 | ||
|
|
b0c14d4b37 | ||
|
|
7227061d25 | ||
|
|
415eab07de | ||
|
|
e89331e059 | ||
|
|
370bef2f07 | ||
|
|
9e788c2018 | ||
|
|
590e2590d6 | ||
|
|
57859d7a84 | ||
|
|
5746001c4a | ||
|
|
c08082c0d6 | ||
|
|
860bf02d56 | ||
|
|
a501b27169 | ||
|
|
fcad028959 | ||
|
|
f95d7aa8bb | ||
| 5e8164c6a4 |
@@ -14,11 +14,18 @@ JWT_SECRET=secure_jwt_secret_here
|
||||
# Application Authentication
|
||||
APP_PASSWORD=application_password_here
|
||||
|
||||
# Search Engine Configuration
|
||||
SEARCH_ENGINE=typesense
|
||||
|
||||
# Typesense Search Configuration
|
||||
TYPESENSE_API_KEY=secure_api_key_here
|
||||
TYPESENSE_ENABLED=true
|
||||
TYPESENSE_REINDEX_INTERVAL=3600000
|
||||
|
||||
# OpenSearch Configuration
|
||||
OPENSEARCH_USERNAME=admin
|
||||
OPENSEARCH_PASSWORD=secure_opensearch_password_here
|
||||
|
||||
# Image Storage
|
||||
IMAGE_STORAGE_PATH=/app/images
|
||||
|
||||
|
||||
@@ -18,10 +18,9 @@ JWT_SECRET=REPLACE_WITH_SECURE_JWT_SECRET_MINIMUM_32_CHARS
|
||||
# Use a strong password in production
|
||||
APP_PASSWORD=REPLACE_WITH_SECURE_APP_PASSWORD
|
||||
|
||||
# Typesense Search Configuration
|
||||
TYPESENSE_API_KEY=REPLACE_WITH_SECURE_TYPESENSE_API_KEY
|
||||
TYPESENSE_ENABLED=true
|
||||
TYPESENSE_REINDEX_INTERVAL=3600000
|
||||
# OpenSearch Configuration
|
||||
OPENSEARCH_PASSWORD=REPLACE_WITH_SECURE_OPENSEARCH_PASSWORD
|
||||
SEARCH_ENGINE=opensearch
|
||||
|
||||
# Image Storage
|
||||
IMAGE_STORAGE_PATH=/app/images
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -46,4 +46,5 @@ Thumbs.db
|
||||
|
||||
# Application data
|
||||
images/
|
||||
data/
|
||||
data/
|
||||
backend/cookies.txt
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 37 KiB |
466
EPUB_IMPORT_EXPORT_SPECIFICATION.md
Normal file
466
EPUB_IMPORT_EXPORT_SPECIFICATION.md
Normal file
@@ -0,0 +1,466 @@
|
||||
# EPUB Import/Export Specification
|
||||
|
||||
## 🎉 Phase 1 & 2 Implementation Complete
|
||||
|
||||
**Status**: Both Phase 1 and Phase 2 fully implemented and operational as of August 2025
|
||||
|
||||
**Phase 1 Achievements**:
|
||||
- ✅ Complete EPUB import functionality with validation and error handling
|
||||
- ✅ Single story EPUB export with XML validation fixes
|
||||
- ✅ Reading position preservation using EPUB CFI standards
|
||||
- ✅ Full frontend UI integration with navigation and authentication
|
||||
- ✅ Moved export button to Story Detail View for better UX
|
||||
- ✅ Added EPUB import to main Add Story menu dropdown
|
||||
|
||||
**Phase 2 Enhancements**:
|
||||
- ✅ **Enhanced Cover Processing**: Automatic extraction and optimization of cover images during EPUB import
|
||||
- ✅ **Advanced Metadata Extraction**: Comprehensive extraction of subjects/tags, keywords, publisher, language, publication dates, and identifiers
|
||||
- ✅ **Collection EPUB Export**: Full collection export with table of contents, proper chapter structure, and metadata aggregation
|
||||
- ✅ **Image Validation**: Robust cover image processing with format detection, resizing, and storage management
|
||||
- ✅ **API Endpoints**: Complete REST API for both individual story and collection EPUB operations
|
||||
|
||||
## Overview
|
||||
|
||||
This specification defines the requirements and implementation details for importing and exporting EPUB files in StoryCove. The feature enables users to import stories from EPUB files and export their stories/collections as EPUB files with preserved reading positions.
|
||||
|
||||
## Scope
|
||||
|
||||
### In Scope
|
||||
- **EPUB Import**: Parse DRM-free EPUB files and import as stories
|
||||
- **EPUB Export**: Export individual stories and collections as EPUB files
|
||||
- **Reading Position Preservation**: Store and restore reading positions using EPUB standards
|
||||
- **Metadata Handling**: Extract and preserve story metadata (title, author, cover, etc.)
|
||||
- **Content Processing**: HTML content sanitization and formatting
|
||||
|
||||
### Out of Scope (Phase 1)
|
||||
- DRM-protected EPUB files (future consideration)
|
||||
- Real-time reading position sync between devices
|
||||
- Advanced EPUB features (audio, video, interactive content)
|
||||
- EPUB validation beyond basic structure
|
||||
|
||||
## Technical Architecture
|
||||
|
||||
### Backend Implementation
|
||||
- **Language**: Java (Spring Boot)
|
||||
- **Primary Library**: EPUBLib (nl.siegmann.epublib:epublib-core:3.1)
|
||||
- **Processing**: Server-side generation and parsing
|
||||
- **File Handling**: Multipart file upload for import, streaming download for export
|
||||
|
||||
### Dependencies
|
||||
```xml
|
||||
<dependency>
|
||||
<groupId>com.positiondev.epublib</groupId>
|
||||
<artifactId>epublib-core</artifactId>
|
||||
<version>3.1</version>
|
||||
</dependency>
|
||||
```
|
||||
|
||||
### Phase 1 Implementation Notes
|
||||
- **EPUBImportService**: Implemented with full validation, metadata extraction, and reading position handling
|
||||
- **EPUBExportService**: Implemented with XML validation fixes for EPUB reader compatibility
|
||||
- **ReadingPosition Entity**: Created with EPUB CFI support and database indexing
|
||||
- **Authentication**: All endpoints secured with JWT authentication and proper frontend integration
|
||||
- **UI Integration**: Export moved to Story Detail View, Import added to main navigation menu
|
||||
- **XML Compliance**: Fixed XHTML validation issues by properly formatting self-closing tags (`<br>` → `<br />`)
|
||||
|
||||
## EPUB Import Specification
|
||||
|
||||
### Supported Formats
|
||||
- **EPUB 2.0** and **EPUB 3.x** formats
|
||||
- **DRM-Free** files only
|
||||
- **Maximum file size**: 50MB
|
||||
- **Supported content**: Text-based stories with HTML content
|
||||
|
||||
### Import Process Flow
|
||||
1. **File Upload**: User uploads EPUB file via web interface
|
||||
2. **Validation**: Check file format, size, and basic EPUB structure
|
||||
3. **Parsing**: Extract metadata, content, and resources using EPUBLib
|
||||
4. **Content Processing**: Sanitize HTML content using existing Jsoup pipeline
|
||||
5. **Story Creation**: Create Story entity with extracted data
|
||||
6. **Preview**: Show extracted story details for user confirmation
|
||||
7. **Finalization**: Save story to database with imported metadata
|
||||
|
||||
### Metadata Mapping
|
||||
```java
|
||||
// EPUB Metadata → StoryCove Story Entity
|
||||
epub.getMetadata().getFirstTitle() → story.title
|
||||
epub.getMetadata().getAuthors().get(0) → story.authorName
|
||||
epub.getMetadata().getDescriptions().get(0) → story.summary
|
||||
epub.getCoverImage() → story.coverPath
|
||||
epub.getMetadata().getSubjects() → story.tags
|
||||
```
|
||||
|
||||
### Content Extraction
|
||||
- **Multi-chapter EPUBs**: Combine all content files into single HTML
|
||||
- **Chapter separation**: Insert `<hr>` or `<h2>` tags between chapters
|
||||
- **HTML sanitization**: Apply existing sanitization rules
|
||||
- **Image handling**: Extract and store cover images, inline images optional
|
||||
|
||||
### API Endpoints
|
||||
|
||||
#### POST /api/stories/import-epub
|
||||
```java
|
||||
@PostMapping("/import-epub")
|
||||
public ResponseEntity<?> importEPUB(@RequestParam("file") MultipartFile file) {
|
||||
// Implementation in EPUBImportService
|
||||
}
|
||||
```
|
||||
|
||||
**Request**: Multipart file upload
|
||||
**Response**:
|
||||
```json
|
||||
{
|
||||
"message": "EPUB imported successfully",
|
||||
"storyId": "uuid",
|
||||
"extractedData": {
|
||||
"title": "Story Title",
|
||||
"author": "Author Name",
|
||||
"summary": "Story description",
|
||||
"chapterCount": 12,
|
||||
"wordCount": 45000,
|
||||
"hasCovers": true
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## EPUB Export Specification
|
||||
|
||||
### Export Types
|
||||
1. **Single Story Export**: Convert one story to EPUB
|
||||
2. **Collection Export**: Multiple stories as single EPUB with chapters
|
||||
|
||||
### EPUB Structure Generation
|
||||
```
|
||||
story.epub
|
||||
├── mimetype
|
||||
├── META-INF/
|
||||
│ └── container.xml
|
||||
└── OEBPS/
|
||||
├── content.opf # Package metadata
|
||||
├── toc.ncx # Navigation
|
||||
├── stylesheet.css # Styling
|
||||
├── cover.html # Cover page
|
||||
├── chapter001.xhtml # Story content
|
||||
├── images/
|
||||
│ └── cover.jpg # Cover image
|
||||
└── fonts/ (optional)
|
||||
```
|
||||
|
||||
### Reading Position Implementation
|
||||
|
||||
#### EPUB 3 CFI (Canonical Fragment Identifier)
|
||||
```xml
|
||||
<!-- In content.opf metadata -->
|
||||
<meta property="epub-cfi" content="/6/4[chap01]!/4[body01]/10[para05]/3:142"/>
|
||||
<meta property="reading-percentage" content="0.65"/>
|
||||
<meta property="last-read-timestamp" content="2023-12-07T10:30:00Z"/>
|
||||
```
|
||||
|
||||
#### StoryCove Custom Metadata (Fallback)
|
||||
```xml
|
||||
<meta name="storycove:reading-chapter" content="3"/>
|
||||
<meta name="storycove:reading-paragraph" content="15"/>
|
||||
<meta name="storycove:reading-offset" content="142"/>
|
||||
<meta name="storycove:reading-percentage" content="0.65"/>
|
||||
```
|
||||
|
||||
#### CFI Generation Logic
|
||||
```java
|
||||
public String generateCFI(ReadingPosition position) {
|
||||
return String.format("/6/%d[chap%02d]!/4[body01]/%d[para%02d]/3:%d",
|
||||
(position.getChapterIndex() * 2) + 4,
|
||||
position.getChapterIndex(),
|
||||
(position.getParagraphIndex() * 2) + 4,
|
||||
position.getParagraphIndex(),
|
||||
position.getCharacterOffset());
|
||||
}
|
||||
```
|
||||
|
||||
### API Endpoints
|
||||
|
||||
#### GET /api/stories/{id}/export-epub
|
||||
```java
|
||||
@GetMapping("/{id}/export-epub")
|
||||
public ResponseEntity<StreamingResponseBody> exportStory(@PathVariable UUID id) {
|
||||
// Implementation in EPUBExportService
|
||||
}
|
||||
```
|
||||
|
||||
**Response**: EPUB file download with headers:
|
||||
```
|
||||
Content-Type: application/epub+zip
|
||||
Content-Disposition: attachment; filename="story-title.epub"
|
||||
```
|
||||
|
||||
#### GET /api/collections/{id}/export-epub
|
||||
```java
|
||||
@GetMapping("/{id}/export-epub")
|
||||
public ResponseEntity<StreamingResponseBody> exportCollection(@PathVariable UUID id) {
|
||||
// Implementation in EPUBExportService
|
||||
}
|
||||
```
|
||||
|
||||
**Response**: Multi-story EPUB with table of contents
|
||||
|
||||
## Data Models
|
||||
|
||||
### ReadingPosition Entity
|
||||
```java
|
||||
@Entity
|
||||
@Table(name = "reading_positions")
|
||||
public class ReadingPosition {
|
||||
@Id
|
||||
private UUID id;
|
||||
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "story_id")
|
||||
private Story story;
|
||||
|
||||
@Column(name = "chapter_index")
|
||||
private Integer chapterIndex = 0;
|
||||
|
||||
@Column(name = "paragraph_index")
|
||||
private Integer paragraphIndex = 0;
|
||||
|
||||
@Column(name = "character_offset")
|
||||
private Integer characterOffset = 0;
|
||||
|
||||
@Column(name = "progress_percentage")
|
||||
private Double progressPercentage = 0.0;
|
||||
|
||||
@Column(name = "epub_cfi")
|
||||
private String canonicalFragmentIdentifier;
|
||||
|
||||
@Column(name = "last_read_at")
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
@Column(name = "device_identifier")
|
||||
private String deviceIdentifier;
|
||||
|
||||
// Constructors, getters, setters
|
||||
}
|
||||
```
|
||||
|
||||
### EPUB Import Request DTO
|
||||
```java
|
||||
public class EPUBImportRequest {
|
||||
private String filename;
|
||||
private Long fileSize;
|
||||
private Boolean preserveChapterStructure = true;
|
||||
private Boolean extractCover = true;
|
||||
private String targetCollectionId; // Optional: add to specific collection
|
||||
}
|
||||
```
|
||||
|
||||
### EPUB Export Options DTO
|
||||
```java
|
||||
public class EPUBExportOptions {
|
||||
private Boolean includeReadingPosition = true;
|
||||
private Boolean includeCoverImage = true;
|
||||
private Boolean includeMetadata = true;
|
||||
private String cssStylesheet; // Optional custom CSS
|
||||
private EPUBVersion version = EPUBVersion.EPUB3;
|
||||
}
|
||||
```
|
||||
|
||||
## Service Layer Architecture
|
||||
|
||||
### EPUBImportService
|
||||
```java
|
||||
@Service
|
||||
public class EPUBImportService {
|
||||
|
||||
// Core import method
|
||||
public Story importEPUBFile(MultipartFile file, EPUBImportRequest request);
|
||||
|
||||
// Helper methods
|
||||
private void validateEPUBFile(MultipartFile file);
|
||||
private Book parseEPUBStructure(InputStream inputStream);
|
||||
private Story extractStoryData(Book epub);
|
||||
private String combineChapterContent(Book epub);
|
||||
private void extractAndSaveCover(Book epub, Story story);
|
||||
private List<String> extractTags(Book epub);
|
||||
private ReadingPosition extractReadingPosition(Book epub);
|
||||
}
|
||||
```
|
||||
|
||||
### EPUBExportService
|
||||
```java
|
||||
@Service
|
||||
public class EPUBExportService {
|
||||
|
||||
// Core export methods
|
||||
public byte[] exportSingleStory(UUID storyId, EPUBExportOptions options);
|
||||
public byte[] exportCollection(UUID collectionId, EPUBExportOptions options);
|
||||
|
||||
// Helper methods
|
||||
private Book createEPUBStructure(Story story, ReadingPosition position);
|
||||
private Book createCollectionEPUB(Collection collection, List<ReadingPosition> positions);
|
||||
private void addReadingPositionMetadata(Book book, ReadingPosition position);
|
||||
private String generateCFI(ReadingPosition position);
|
||||
private Resource createChapterResource(Story story);
|
||||
private Resource createStylesheetResource();
|
||||
private void addCoverImage(Book book, Story story);
|
||||
}
|
||||
```
|
||||
|
||||
## Frontend Integration
|
||||
|
||||
### Import UI Flow
|
||||
1. **Upload Interface**: File input with EPUB validation
|
||||
2. **Progress Indicator**: Show parsing progress
|
||||
3. **Preview Screen**: Display extracted metadata for confirmation
|
||||
4. **Confirmation**: Allow editing of title, author, summary before saving
|
||||
5. **Success**: Redirect to created story
|
||||
|
||||
### Export UI Flow
|
||||
1. **Export Button**: Available on story detail and collection pages
|
||||
2. **Options Modal**: Allow selection of export options
|
||||
3. **Progress Indicator**: Show EPUB generation progress
|
||||
4. **Download**: Automatic file download on completion
|
||||
|
||||
### Frontend API Calls
|
||||
```typescript
|
||||
// Import EPUB
|
||||
const importEPUB = async (file: File) => {
|
||||
const formData = new FormData();
|
||||
formData.append('file', file);
|
||||
|
||||
const response = await fetch('/api/stories/import-epub', {
|
||||
method: 'POST',
|
||||
body: formData,
|
||||
});
|
||||
|
||||
return await response.json();
|
||||
};
|
||||
|
||||
// Export Story
|
||||
const exportStoryEPUB = async (storyId: string) => {
|
||||
const response = await fetch(`/api/stories/${storyId}/export-epub`, {
|
||||
method: 'GET',
|
||||
});
|
||||
|
||||
const blob = await response.blob();
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = `${storyTitle}.epub`;
|
||||
a.click();
|
||||
};
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
### Import Errors
|
||||
- **Invalid EPUB format**: "Invalid EPUB file format"
|
||||
- **File too large**: "File size exceeds 50MB limit"
|
||||
- **DRM protected**: "DRM-protected EPUBs not supported"
|
||||
- **Corrupted file**: "EPUB file appears to be corrupted"
|
||||
- **No content**: "EPUB contains no readable content"
|
||||
|
||||
### Export Errors
|
||||
- **Story not found**: "Story not found or access denied"
|
||||
- **Missing content**: "Story has no content to export"
|
||||
- **Generation failure**: "Failed to generate EPUB file"
|
||||
|
||||
## Security Considerations
|
||||
|
||||
### File Upload Security
|
||||
- **File type validation**: Verify EPUB MIME type and structure
|
||||
- **Size limits**: Enforce maximum file size limits
|
||||
- **Content sanitization**: Apply existing HTML sanitization
|
||||
- **Virus scanning**: Consider integration with antivirus scanning
|
||||
|
||||
### Content Security
|
||||
- **HTML sanitization**: Apply existing Jsoup rules to imported content
|
||||
- **Image validation**: Validate extracted cover images
|
||||
- **Metadata escaping**: Escape special characters in metadata
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Unit Tests
|
||||
- EPUB parsing and validation logic
|
||||
- CFI generation and parsing
|
||||
- Metadata extraction accuracy
|
||||
- Content sanitization
|
||||
|
||||
### Integration Tests
|
||||
- End-to-end import/export workflow
|
||||
- Reading position preservation
|
||||
- Multi-story collection export
|
||||
- Error handling scenarios
|
||||
|
||||
### Test Data
|
||||
- Sample EPUB files for various scenarios
|
||||
- EPUBs with and without reading positions
|
||||
- Multi-chapter EPUBs
|
||||
- EPUBs with covers and metadata
|
||||
|
||||
## Performance Considerations
|
||||
|
||||
### Import Performance
|
||||
- **Streaming processing**: Process large EPUBs without loading entirely into memory
|
||||
- **Async processing**: Consider async import for large files
|
||||
- **Progress tracking**: Provide progress feedback for large imports
|
||||
|
||||
### Export Performance
|
||||
- **Caching**: Cache generated EPUBs for repeated exports
|
||||
- **Streaming**: Stream EPUB generation for large collections
|
||||
- **Resource optimization**: Optimize image and content sizes
|
||||
|
||||
## Future Enhancements (Out of Scope)
|
||||
|
||||
### Phase 2 Considerations
|
||||
- **DRM support**: Research legal and technical feasibility
|
||||
- **Reading position sync**: Real-time sync across devices
|
||||
- **Advanced EPUB features**: Enhanced typography, annotations
|
||||
- **Bulk operations**: Import/export multiple EPUBs
|
||||
- **EPUB validation**: Full EPUB compliance checking
|
||||
|
||||
### Integration Possibilities
|
||||
- **Cloud storage**: Export directly to Google Drive, Dropbox
|
||||
- **E-reader sync**: Direct sync with Kindle, Kobo devices
|
||||
- **Reading analytics**: Track reading patterns and statistics
|
||||
|
||||
## Implementation Phases
|
||||
|
||||
### Phase 1: Core Functionality ✅ **COMPLETED**
|
||||
- [x] Basic EPUB import (DRM-free)
|
||||
- [x] Single story export
|
||||
- [x] Reading position storage and retrieval
|
||||
- [x] Frontend UI integration
|
||||
|
||||
### Phase 2: Enhanced Features ✅ **COMPLETED**
|
||||
- [x] Collection export with table of contents
|
||||
- [x] Advanced metadata handling (subjects, keywords, publisher, language, etc.)
|
||||
- [x] Enhanced cover image processing for import/export
|
||||
- [x] Comprehensive error handling
|
||||
|
||||
### Phase 3: Advanced Features
|
||||
- [ ] DRM exploration (legal research required)
|
||||
- [ ] Reading position sync
|
||||
- [ ] Advanced EPUB features
|
||||
- [ ] Analytics and reporting
|
||||
|
||||
## Acceptance Criteria
|
||||
|
||||
### Import Success Criteria ✅ **COMPLETED**
|
||||
- [x] Successfully parse EPUB 2.0 and 3.x files
|
||||
- [x] Extract title, author, summary, and content accurately
|
||||
- [x] Preserve formatting and basic HTML structure
|
||||
- [x] Handle cover images correctly
|
||||
- [x] Import reading positions when present
|
||||
- [x] Provide clear error messages for invalid files
|
||||
|
||||
### Export Success Criteria ✅ **FULLY COMPLETED**
|
||||
- [x] Generate valid EPUB files compatible with major readers
|
||||
- [x] Include accurate metadata and content
|
||||
- [x] Embed reading positions using CFI standard
|
||||
- [x] Support single story export
|
||||
- [x] Support collection export with proper structure
|
||||
- [x] Generate proper table of contents for collections
|
||||
- [x] Include cover images when available
|
||||
|
||||
---
|
||||
|
||||
*This specification serves as the implementation guide for the EPUB import/export feature. All implementation decisions should reference this document for consistency and completeness.*
|
||||
889
OPENSEARCH_MIGRATION_SPECIFICATION.md
Normal file
889
OPENSEARCH_MIGRATION_SPECIFICATION.md
Normal file
@@ -0,0 +1,889 @@
|
||||
# StoryCove Search Migration Specification: Typesense to OpenSearch
|
||||
|
||||
## Executive Summary
|
||||
|
||||
This document specifies the migration from Typesense to OpenSearch for the StoryCove application. The migration will be implemented using a parallel approach, maintaining Typesense functionality while gradually transitioning to OpenSearch, ensuring zero downtime and the ability to rollback if needed.
|
||||
|
||||
**Migration Goals:**
|
||||
- Solve random query reliability issues
|
||||
- Improve complex filtering performance
|
||||
- Maintain feature parity during transition
|
||||
- Zero downtime migration
|
||||
- Improved developer experience
|
||||
|
||||
---
|
||||
|
||||
## Current State Analysis
|
||||
|
||||
### Typesense Implementation Overview
|
||||
|
||||
**Service Architecture:**
|
||||
- `TypesenseService.java` (~2000 lines) - Primary search service
|
||||
- 3 search indexes: Stories, Authors, Collections
|
||||
- Multi-library support with dynamic collection names
|
||||
- Integration with Spring Boot backend
|
||||
|
||||
**Core Functionality:**
|
||||
1. **Full-text Search**: Stories, Authors with complex query building
|
||||
2. **Random Story Selection**: `_rand()` function with fallback logic
|
||||
3. **Advanced Filtering**: 15+ filter conditions with boolean logic
|
||||
4. **Faceting**: Tag aggregations and counts
|
||||
5. **Autocomplete**: Search suggestions with typeahead
|
||||
6. **CRUD Operations**: Index/update/delete for all entity types
|
||||
|
||||
**Current Issues Identified:**
|
||||
- `_rand()` function unreliability requiring complex fallback logic
|
||||
- Complex filter query building with escaping issues
|
||||
- Limited aggregation capabilities
|
||||
- Inconsistent API behavior across query patterns
|
||||
- Multi-collection management complexity
|
||||
|
||||
### Data Models and Schema
|
||||
|
||||
**Story Index Fields:**
|
||||
```java
|
||||
// Core fields
|
||||
UUID id, String title, String description, String sourceUrl
|
||||
Integer wordCount, Integer rating, Integer volume
|
||||
Boolean isRead, LocalDateTime lastReadAt, Integer readingPosition
|
||||
|
||||
// Relationships
|
||||
UUID authorId, String authorName
|
||||
UUID seriesId, String seriesName
|
||||
List<String> tagNames
|
||||
|
||||
// Metadata
|
||||
LocalDateTime createdAt, LocalDateTime updatedAt
|
||||
String coverPath, String sourceDomain
|
||||
```
|
||||
|
||||
**Author Index Fields:**
|
||||
```java
|
||||
UUID id, String name, String notes
|
||||
Integer authorRating, Double averageStoryRating, Integer storyCount
|
||||
List<String> urls, String avatarImagePath
|
||||
LocalDateTime createdAt, LocalDateTime updatedAt
|
||||
```
|
||||
|
||||
**Collection Index Fields:**
|
||||
```java
|
||||
UUID id, String name, String description
|
||||
List<String> tagNames, Boolean archived
|
||||
LocalDateTime createdAt, LocalDateTime updatedAt
|
||||
Integer storyCount, Integer currentPosition
|
||||
```
|
||||
|
||||
### API Endpoints Current State
|
||||
|
||||
**Search Endpoints Analysis:**
|
||||
|
||||
**✅ USED by Frontend (Must Implement):**
|
||||
- `GET /api/stories/search` - Main story search with complex filtering (CRITICAL)
|
||||
- `GET /api/stories/random` - Random story selection with filters (CRITICAL)
|
||||
- `GET /api/authors/search-typesense` - Author search (HIGH)
|
||||
- `GET /api/tags/autocomplete` - Tag suggestions (MEDIUM)
|
||||
- `POST /api/stories/reindex-typesense` - Admin reindex operations (MEDIUM)
|
||||
- `POST /api/authors/reindex-typesense` - Admin reindex operations (MEDIUM)
|
||||
- `POST /api/stories/recreate-typesense-collection` - Admin recreate (MEDIUM)
|
||||
- `POST /api/authors/recreate-typesense-collection` - Admin recreate (MEDIUM)
|
||||
|
||||
**❌ UNUSED by Frontend (Skip Implementation):**
|
||||
- `GET /api/stories/search/suggestions` - Not used by frontend
|
||||
- `GET /api/authors/search` - Superseded by typesense version
|
||||
- `GET /api/series/search` - Not used by frontend
|
||||
- `GET /api/tags/search` - Superseded by autocomplete
|
||||
- `POST /api/search/reindex` - Not used by frontend
|
||||
- `GET /api/search/health` - Not used by frontend
|
||||
|
||||
**Scope Reduction: ~40% fewer endpoints to implement**
|
||||
|
||||
**Search Parameters (Stories):**
|
||||
```
|
||||
query, page, size, authors[], tags[], minRating, maxRating
|
||||
sortBy, sortDir, facetBy[]
|
||||
minWordCount, maxWordCount, createdAfter, createdBefore
|
||||
lastReadAfter, lastReadBefore, unratedOnly, readingStatus
|
||||
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter
|
||||
minTagCount, popularOnly, hiddenGemsOnly
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Target OpenSearch Architecture
|
||||
|
||||
### Service Layer Design
|
||||
|
||||
**New Components:**
|
||||
```
|
||||
OpenSearchService.java - Primary search service (mirrors TypesenseService API)
|
||||
OpenSearchConfig.java - Configuration and client setup
|
||||
SearchMigrationService.java - Handles parallel operation during migration
|
||||
SearchServiceAdapter.java - Abstraction layer for service switching
|
||||
```
|
||||
|
||||
**Index Strategy:**
|
||||
- **Single-node deployment** for development/small installations
|
||||
- **Index-per-library** approach: `stories-{libraryId}`, `authors-{libraryId}`, `collections-{libraryId}`
|
||||
- **Index templates** for consistent mapping across libraries
|
||||
- **Aliases** for easy switching and zero-downtime updates
|
||||
|
||||
### OpenSearch Index Mappings
|
||||
|
||||
**Stories Index Mapping:**
|
||||
```json
|
||||
{
|
||||
"settings": {
|
||||
"number_of_shards": 1,
|
||||
"number_of_replicas": 0,
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"story_analyzer": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["lowercase", "stop", "snowball"]
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {"type": "keyword"},
|
||||
"title": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {"keyword": {"type": "keyword"}}
|
||||
},
|
||||
"description": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer"
|
||||
},
|
||||
"authorName": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {"keyword": {"type": "keyword"}}
|
||||
},
|
||||
"seriesName": {
|
||||
"type": "text",
|
||||
"fields": {"keyword": {"type": "keyword"}}
|
||||
},
|
||||
"tagNames": {"type": "keyword"},
|
||||
"wordCount": {"type": "integer"},
|
||||
"rating": {"type": "integer"},
|
||||
"volume": {"type": "integer"},
|
||||
"isRead": {"type": "boolean"},
|
||||
"readingPosition": {"type": "integer"},
|
||||
"lastReadAt": {"type": "date"},
|
||||
"createdAt": {"type": "date"},
|
||||
"updatedAt": {"type": "date"},
|
||||
"coverPath": {"type": "keyword"},
|
||||
"sourceUrl": {"type": "keyword"},
|
||||
"sourceDomain": {"type": "keyword"}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Authors Index Mapping:**
|
||||
```json
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {"type": "keyword"},
|
||||
"name": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {"keyword": {"type": "keyword"}}
|
||||
},
|
||||
"notes": {"type": "text"},
|
||||
"authorRating": {"type": "integer"},
|
||||
"averageStoryRating": {"type": "float"},
|
||||
"storyCount": {"type": "integer"},
|
||||
"urls": {"type": "keyword"},
|
||||
"avatarImagePath": {"type": "keyword"},
|
||||
"createdAt": {"type": "date"},
|
||||
"updatedAt": {"type": "date"}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Collections Index Mapping:**
|
||||
```json
|
||||
{
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {"type": "keyword"},
|
||||
"name": {
|
||||
"type": "text",
|
||||
"fields": {"keyword": {"type": "keyword"}}
|
||||
},
|
||||
"description": {"type": "text"},
|
||||
"tagNames": {"type": "keyword"},
|
||||
"archived": {"type": "boolean"},
|
||||
"storyCount": {"type": "integer"},
|
||||
"currentPosition": {"type": "integer"},
|
||||
"createdAt": {"type": "date"},
|
||||
"updatedAt": {"type": "date"}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Query Translation Strategy
|
||||
|
||||
**Random Story Queries:**
|
||||
```java
|
||||
// Typesense (problematic)
|
||||
String sortBy = seed != null ? "_rand(" + seed + ")" : "_rand()";
|
||||
|
||||
// OpenSearch (reliable)
|
||||
QueryBuilder randomQuery = QueryBuilders.functionScoreQuery(
|
||||
QueryBuilders.boolQuery().must(filters),
|
||||
ScoreFunctionBuilders.randomFunction(seed != null ? seed.intValue() : null)
|
||||
);
|
||||
```
|
||||
|
||||
**Complex Filtering:**
|
||||
```java
|
||||
// Build bool query with multiple filter conditions
|
||||
BoolQueryBuilder boolQuery = QueryBuilders.boolQuery()
|
||||
.must(QueryBuilders.multiMatchQuery(query, "title", "description", "authorName"))
|
||||
.filter(QueryBuilders.termsQuery("tagNames", tags))
|
||||
.filter(QueryBuilders.rangeQuery("wordCount").gte(minWords).lte(maxWords))
|
||||
.filter(QueryBuilders.rangeQuery("rating").gte(minRating).lte(maxRating));
|
||||
```
|
||||
|
||||
**Faceting/Aggregations:**
|
||||
```java
|
||||
// Tags aggregation
|
||||
AggregationBuilder tagsAgg = AggregationBuilders
|
||||
.terms("tags")
|
||||
.field("tagNames")
|
||||
.size(100);
|
||||
|
||||
// Rating ranges
|
||||
AggregationBuilder ratingRanges = AggregationBuilders
|
||||
.range("rating_ranges")
|
||||
.field("rating")
|
||||
.addRange("unrated", 0, 1)
|
||||
.addRange("low", 1, 3)
|
||||
.addRange("high", 4, 6);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Revised Implementation Phases (Scope Reduced by 40%)
|
||||
|
||||
### Phase 1: Infrastructure Setup (Week 1)
|
||||
|
||||
**Objectives:**
|
||||
- Add OpenSearch to Docker Compose
|
||||
- Create basic OpenSearch service
|
||||
- Establish index templates and mappings
|
||||
- **Focus**: Only stories, authors, and tags indexes (skip series, collections)
|
||||
|
||||
**Deliverables:**
|
||||
1. **Docker Compose Updates:**
|
||||
```yaml
|
||||
opensearch:
|
||||
image: opensearchproject/opensearch:2.11.0
|
||||
environment:
|
||||
- discovery.type=single-node
|
||||
- DISABLE_SECURITY_PLUGIN=true
|
||||
- OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx1g
|
||||
ports:
|
||||
- "9200:9200"
|
||||
volumes:
|
||||
- opensearch_data:/usr/share/opensearch/data
|
||||
```
|
||||
|
||||
2. **OpenSearchConfig.java:**
|
||||
```java
|
||||
@Configuration
|
||||
@ConditionalOnProperty(name = "storycove.opensearch.enabled", havingValue = "true")
|
||||
public class OpenSearchConfig {
|
||||
@Bean
|
||||
public OpenSearchClient openSearchClient() {
|
||||
// Client configuration
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
3. **Basic Index Creation:**
|
||||
- Create index templates for stories, authors, collections
|
||||
- Implement index creation with proper mappings
|
||||
- Add health check endpoint
|
||||
|
||||
**Success Criteria:**
|
||||
- OpenSearch container starts successfully
|
||||
- Basic connectivity established
|
||||
- Index templates created and validated
|
||||
|
||||
### Phase 2: Core Service Implementation (Week 2)
|
||||
|
||||
**Objectives:**
|
||||
- Implement OpenSearchService with core functionality
|
||||
- Create service abstraction layer
|
||||
- Implement basic search operations
|
||||
- **Focus**: Only critical endpoints (stories search, random, authors)
|
||||
|
||||
**Deliverables:**
|
||||
1. **OpenSearchService.java** - Core service implementing:
|
||||
- `indexStory()`, `updateStory()`, `deleteStory()`
|
||||
- `searchStories()` with basic query support (CRITICAL)
|
||||
- `getRandomStoryId()` with reliable seed support (CRITICAL)
|
||||
- `indexAuthor()`, `updateAuthor()`, `deleteAuthor()`
|
||||
- `searchAuthors()` for authors page (HIGH)
|
||||
- `bulkIndexStories()`, `bulkIndexAuthors()` for initial data loading
|
||||
|
||||
2. **SearchServiceAdapter.java** - Abstraction layer:
|
||||
```java
|
||||
@Service
|
||||
public class SearchServiceAdapter {
|
||||
@Autowired(required = false)
|
||||
private TypesenseService typesenseService;
|
||||
|
||||
@Autowired(required = false)
|
||||
private OpenSearchService openSearchService;
|
||||
|
||||
@Value("${storycove.search.provider:typesense}")
|
||||
private String searchProvider;
|
||||
|
||||
public SearchResultDto<StorySearchDto> searchStories(...) {
|
||||
return "opensearch".equals(searchProvider)
|
||||
? openSearchService.searchStories(...)
|
||||
: typesenseService.searchStories(...);
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
3. **Basic Query Implementation:**
|
||||
- Full-text search across title/description/author
|
||||
- Basic filtering (tags, rating, word count)
|
||||
- Pagination and sorting
|
||||
|
||||
**Success Criteria:**
|
||||
- Basic search functionality working
|
||||
- Service abstraction layer functional
|
||||
- Can switch between Typesense and OpenSearch via configuration
|
||||
|
||||
### Phase 3: Advanced Features Implementation (Week 3)
|
||||
|
||||
**Objectives:**
|
||||
- Implement complex filtering (all 15+ filter types)
|
||||
- Add random story functionality
|
||||
- Implement faceting/aggregations
|
||||
- Add autocomplete/suggestions
|
||||
|
||||
**Deliverables:**
|
||||
1. **Complex Query Builder:**
|
||||
- All filter conditions from original implementation
|
||||
- Date range filtering with proper timezone handling
|
||||
- Boolean logic for reading status, coverage, series filters
|
||||
|
||||
2. **Random Story Implementation:**
|
||||
```java
|
||||
public Optional<UUID> getRandomStoryId(String searchQuery, List<String> tags, Long seed, ...) {
|
||||
BoolQueryBuilder baseQuery = buildFilterQuery(searchQuery, tags, ...);
|
||||
|
||||
QueryBuilder randomQuery = QueryBuilders.functionScoreQuery(
|
||||
baseQuery,
|
||||
ScoreFunctionBuilders.randomFunction(seed != null ? seed.intValue() : null)
|
||||
);
|
||||
|
||||
SearchRequest request = new SearchRequest("stories-" + getCurrentLibraryId())
|
||||
.source(new SearchSourceBuilder()
|
||||
.query(randomQuery)
|
||||
.size(1)
|
||||
.fetchSource(new String[]{"id"}, null));
|
||||
|
||||
// Execute and return result
|
||||
}
|
||||
```
|
||||
|
||||
3. **Faceting Implementation:**
|
||||
- Tag aggregations with counts
|
||||
- Rating range aggregations
|
||||
- Author aggregations
|
||||
- Custom facet builders
|
||||
|
||||
4. **Autocomplete Service:**
|
||||
- Suggest-based implementation using completion fields
|
||||
- Prefix matching for story titles and author names
|
||||
|
||||
**Success Criteria:**
|
||||
- All filter conditions working correctly
|
||||
- Random story selection reliable with seed support
|
||||
- Faceting returns accurate counts
|
||||
- Autocomplete responsive and accurate
|
||||
|
||||
### Phase 4: Data Migration & Parallel Operation (Week 4)
|
||||
|
||||
**Objectives:**
|
||||
- Implement bulk data migration from database
|
||||
- Enable parallel operation (write to both systems)
|
||||
- Comprehensive testing of OpenSearch functionality
|
||||
|
||||
**Deliverables:**
|
||||
1. **Migration Service:**
|
||||
```java
|
||||
@Service
|
||||
public class SearchMigrationService {
|
||||
public void performFullMigration() {
|
||||
// Migrate all libraries
|
||||
List<Library> libraries = libraryService.findAll();
|
||||
for (Library library : libraries) {
|
||||
migrateLibraryData(library);
|
||||
}
|
||||
}
|
||||
|
||||
private void migrateLibraryData(Library library) {
|
||||
// Create indexes for library
|
||||
// Bulk load stories, authors, collections
|
||||
// Verify data integrity
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. **Dual-Write Implementation:**
|
||||
- Modify all entity update operations to write to both systems
|
||||
- Add configuration flag for dual-write mode
|
||||
- Error handling for partial failures
|
||||
|
||||
3. **Data Validation Tools:**
|
||||
- Compare search result counts between systems
|
||||
- Validate random story selection consistency
|
||||
- Check faceting accuracy
|
||||
|
||||
**Success Criteria:**
|
||||
- Complete data migration with 100% accuracy
|
||||
- Dual-write operations working without errors
|
||||
- Search result parity between systems verified
|
||||
|
||||
### Phase 5: API Integration & Testing (Week 5)
|
||||
|
||||
**Objectives:**
|
||||
- Update controller endpoints to use OpenSearch
|
||||
- Comprehensive integration testing
|
||||
- Performance testing and optimization
|
||||
|
||||
**Deliverables:**
|
||||
1. **Controller Updates:**
|
||||
- Modify controllers to use SearchServiceAdapter
|
||||
- Add migration controls for gradual rollout
|
||||
- Implement A/B testing capability
|
||||
|
||||
2. **Integration Tests:**
|
||||
```java
|
||||
@SpringBootTest
|
||||
@TestMethodOrder(OrderAnnotation.class)
|
||||
public class OpenSearchIntegrationTest {
|
||||
@Test
|
||||
@Order(1)
|
||||
void testBasicSearch() {
|
||||
// Test basic story search functionality
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(2)
|
||||
void testComplexFiltering() {
|
||||
// Test all 15+ filter conditions
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(3)
|
||||
void testRandomStory() {
|
||||
// Test random story with and without seed
|
||||
}
|
||||
|
||||
@Test
|
||||
@Order(4)
|
||||
void testFaceting() {
|
||||
// Test aggregation accuracy
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
3. **Performance Testing:**
|
||||
- Load testing with realistic data volumes
|
||||
- Query performance benchmarking
|
||||
- Memory usage monitoring
|
||||
|
||||
**Success Criteria:**
|
||||
- All integration tests passing
|
||||
- Performance meets or exceeds Typesense baseline
|
||||
- Memory usage within acceptable limits (< 2GB)
|
||||
|
||||
### Phase 6: Production Rollout & Monitoring (Week 6)
|
||||
|
||||
**Objectives:**
|
||||
- Production deployment with feature flags
|
||||
- Gradual user migration with monitoring
|
||||
- Rollback capability testing
|
||||
|
||||
**Deliverables:**
|
||||
1. **Feature Flag Implementation:**
|
||||
```java
|
||||
@Component
|
||||
public class SearchFeatureFlags {
|
||||
@Value("${storycove.search.opensearch.enabled:false}")
|
||||
private boolean openSearchEnabled;
|
||||
|
||||
@Value("${storycove.search.opensearch.percentage:0}")
|
||||
private int rolloutPercentage;
|
||||
|
||||
public boolean shouldUseOpenSearch(String userId) {
|
||||
if (!openSearchEnabled) return false;
|
||||
return userId.hashCode() % 100 < rolloutPercentage;
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
2. **Monitoring & Alerting:**
|
||||
- Query performance metrics
|
||||
- Error rate monitoring
|
||||
- Search result accuracy validation
|
||||
- User experience metrics
|
||||
|
||||
3. **Rollback Procedures:**
|
||||
- Immediate rollback to Typesense capability
|
||||
- Data consistency verification
|
||||
- Performance rollback triggers
|
||||
|
||||
**Success Criteria:**
|
||||
- Successful production deployment
|
||||
- Zero user-facing issues during rollout
|
||||
- Monitoring showing improved performance
|
||||
- Rollback procedures validated
|
||||
|
||||
### Phase 7: Cleanup & Documentation (Week 7)
|
||||
|
||||
**Objectives:**
|
||||
- Remove Typesense dependencies
|
||||
- Update documentation
|
||||
- Performance optimization
|
||||
|
||||
**Deliverables:**
|
||||
1. **Code Cleanup:**
|
||||
- Remove TypesenseService and related classes
|
||||
- Clean up Docker Compose configuration
|
||||
- Remove unused dependencies
|
||||
|
||||
2. **Documentation Updates:**
|
||||
- Update deployment documentation
|
||||
- Search API documentation
|
||||
- Troubleshooting guides
|
||||
|
||||
3. **Performance Tuning:**
|
||||
- Index optimization
|
||||
- Query performance tuning
|
||||
- Resource allocation optimization
|
||||
|
||||
**Success Criteria:**
|
||||
- Typesense completely removed
|
||||
- Documentation up to date
|
||||
- Optimized performance in production
|
||||
|
||||
---
|
||||
|
||||
## Data Migration Strategy
|
||||
|
||||
### Pre-Migration Validation
|
||||
|
||||
**Data Integrity Checks:**
|
||||
1. Count validation: Ensure all stories/authors/collections are present
|
||||
2. Field validation: Verify all required fields are populated
|
||||
3. Relationship validation: Check author-story and series-story relationships
|
||||
4. Library separation: Ensure proper multi-library data isolation
|
||||
|
||||
**Migration Process:**
|
||||
|
||||
1. **Index Creation:**
|
||||
```java
|
||||
// Create indexes with proper mappings for each library
|
||||
for (Library library : libraries) {
|
||||
String storiesIndex = "stories-" + library.getId();
|
||||
createIndexWithMapping(storiesIndex, getStoriesMapping());
|
||||
createIndexWithMapping("authors-" + library.getId(), getAuthorsMapping());
|
||||
createIndexWithMapping("collections-" + library.getId(), getCollectionsMapping());
|
||||
}
|
||||
```
|
||||
|
||||
2. **Bulk Data Loading:**
|
||||
```java
|
||||
// Load in batches to manage memory usage
|
||||
int batchSize = 1000;
|
||||
List<Story> allStories = storyService.findByLibraryId(libraryId);
|
||||
|
||||
for (int i = 0; i < allStories.size(); i += batchSize) {
|
||||
List<Story> batch = allStories.subList(i, Math.min(i + batchSize, allStories.size()));
|
||||
List<StoryDocument> documents = batch.stream()
|
||||
.map(this::convertToSearchDocument)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
bulkIndexStories(documents, "stories-" + libraryId);
|
||||
}
|
||||
```
|
||||
|
||||
3. **Post-Migration Validation:**
|
||||
- Count comparison between database and OpenSearch
|
||||
- Spot-check random records for field accuracy
|
||||
- Test search functionality with known queries
|
||||
- Verify faceting counts match expected values
|
||||
|
||||
### Rollback Strategy
|
||||
|
||||
**Immediate Rollback Triggers:**
|
||||
- Search error rate > 1%
|
||||
- Query performance degradation > 50%
|
||||
- Data inconsistency detected
|
||||
- Memory usage > 4GB sustained
|
||||
|
||||
**Rollback Process:**
|
||||
1. Update feature flag to disable OpenSearch
|
||||
2. Verify Typesense still operational
|
||||
3. Clear OpenSearch indexes to free resources
|
||||
4. Investigate and document issues
|
||||
|
||||
**Data Consistency During Rollback:**
|
||||
- Continue dual-write during investigation
|
||||
- Re-sync any missed updates to OpenSearch
|
||||
- Validate data integrity before retry
|
||||
|
||||
---
|
||||
|
||||
## Testing Strategy
|
||||
|
||||
### Unit Tests
|
||||
|
||||
**OpenSearchService Unit Tests:**
|
||||
```java
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
class OpenSearchServiceTest {
|
||||
@Mock private OpenSearchClient client;
|
||||
@InjectMocks private OpenSearchService service;
|
||||
|
||||
@Test
|
||||
void testSearchStoriesBasicQuery() {
|
||||
// Mock OpenSearch response
|
||||
// Test basic search functionality
|
||||
}
|
||||
|
||||
@Test
|
||||
void testComplexFilterQuery() {
|
||||
// Test complex boolean query building
|
||||
}
|
||||
|
||||
@Test
|
||||
void testRandomStorySelection() {
|
||||
// Test random query with seed
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
**Query Builder Tests:**
|
||||
- Test all 15+ filter conditions
|
||||
- Validate query structure and parameters
|
||||
- Test edge cases and null handling
|
||||
|
||||
### Integration Tests
|
||||
|
||||
**Full Search Integration:**
|
||||
```java
|
||||
@SpringBootTest
|
||||
@Testcontainers
|
||||
class OpenSearchIntegrationTest {
|
||||
@Container
|
||||
static OpenSearchContainer opensearch = new OpenSearchContainer("opensearchproject/opensearch:2.11.0");
|
||||
|
||||
@Test
|
||||
void testEndToEndStorySearch() {
|
||||
// Insert test data
|
||||
// Perform search via controller
|
||||
// Validate results
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Performance Tests
|
||||
|
||||
**Load Testing Scenarios:**
|
||||
1. **Concurrent Search Load:**
|
||||
- 50 concurrent users performing searches
|
||||
- Mixed query complexity
|
||||
- Duration: 10 minutes
|
||||
|
||||
2. **Bulk Indexing Performance:**
|
||||
- Index 10,000 stories in batches
|
||||
- Measure throughput and memory usage
|
||||
|
||||
3. **Random Query Performance:**
|
||||
- 1000 random story requests with different seeds
|
||||
- Compare with Typesense baseline
|
||||
|
||||
### Acceptance Tests
|
||||
|
||||
**Functional Requirements:**
|
||||
- All existing search functionality preserved
|
||||
- Random story selection improved reliability
|
||||
- Faceting accuracy maintained
|
||||
- Multi-library separation working
|
||||
|
||||
**Performance Requirements:**
|
||||
- Search response time < 100ms for 95th percentile
|
||||
- Random story selection < 50ms
|
||||
- Index update operations < 10ms
|
||||
- Memory usage < 2GB in production
|
||||
|
||||
---
|
||||
|
||||
## Risk Analysis & Mitigation
|
||||
|
||||
### Technical Risks
|
||||
|
||||
**Risk: OpenSearch Memory Usage**
|
||||
- *Probability: Medium*
|
||||
- *Impact: High*
|
||||
- *Mitigation: Resource monitoring, index optimization, container limits*
|
||||
|
||||
**Risk: Query Performance Regression**
|
||||
- *Probability: Low*
|
||||
- *Impact: High*
|
||||
- *Mitigation: Performance testing, query optimization, caching layer*
|
||||
|
||||
**Risk: Data Migration Accuracy**
|
||||
- *Probability: Low*
|
||||
- *Impact: Critical*
|
||||
- *Mitigation: Comprehensive validation, dual-write verification, rollback procedures*
|
||||
|
||||
**Risk: Complex Filter Compatibility**
|
||||
- *Probability: Medium*
|
||||
- *Impact: Medium*
|
||||
- *Mitigation: Extensive testing, gradual rollout, feature flags*
|
||||
|
||||
### Operational Risks
|
||||
|
||||
**Risk: Production Deployment Issues**
|
||||
- *Probability: Medium*
|
||||
- *Impact: High*
|
||||
- *Mitigation: Staging environment testing, gradual rollout, immediate rollback capability*
|
||||
|
||||
**Risk: Team Learning Curve**
|
||||
- *Probability: High*
|
||||
- *Impact: Low*
|
||||
- *Mitigation: Documentation, training, gradual responsibility transfer*
|
||||
|
||||
### Business Continuity
|
||||
|
||||
**Zero-Downtime Requirements:**
|
||||
- Maintain Typesense during entire migration
|
||||
- Feature flag-based switching
|
||||
- Immediate rollback capability
|
||||
- Health monitoring with automated alerts
|
||||
|
||||
---
|
||||
|
||||
## Success Criteria
|
||||
|
||||
### Functional Requirements ✅
|
||||
- [ ] All search functionality migrated successfully
|
||||
- [ ] Random story selection working reliably with seeds
|
||||
- [ ] Complex filtering (15+ conditions) working accurately
|
||||
- [ ] Faceting/aggregation results match expected values
|
||||
- [ ] Multi-library support maintained
|
||||
- [ ] Autocomplete functionality preserved
|
||||
|
||||
### Performance Requirements ✅
|
||||
- [ ] Search response time ≤ 100ms (95th percentile)
|
||||
- [ ] Random story selection ≤ 50ms
|
||||
- [ ] Index operations ≤ 10ms
|
||||
- [ ] Memory usage ≤ 2GB sustained
|
||||
- [ ] Zero search downtime during migration
|
||||
|
||||
### Technical Requirements ✅
|
||||
- [ ] Code quality maintained (test coverage ≥ 80%)
|
||||
- [ ] Documentation updated and comprehensive
|
||||
- [ ] Monitoring and alerting implemented
|
||||
- [ ] Rollback procedures tested and validated
|
||||
- [ ] Typesense dependencies cleanly removed
|
||||
|
||||
---
|
||||
|
||||
## Timeline Summary
|
||||
|
||||
| Phase | Duration | Key Deliverables | Risk Level |
|
||||
|-------|----------|------------------|------------|
|
||||
| 1. Infrastructure | 1 week | Docker setup, basic service | Low |
|
||||
| 2. Core Service | 1 week | Basic search operations | Medium |
|
||||
| 3. Advanced Features | 1 week | Complex filtering, random queries | High |
|
||||
| 4. Data Migration | 1 week | Full data migration, dual-write | High |
|
||||
| 5. API Integration | 1 week | Controller updates, testing | Medium |
|
||||
| 6. Production Rollout | 1 week | Gradual deployment, monitoring | High |
|
||||
| 7. Cleanup | 1 week | Remove Typesense, documentation | Low |
|
||||
|
||||
**Total Estimated Duration: 7 weeks**
|
||||
|
||||
---
|
||||
|
||||
## Configuration Management
|
||||
|
||||
### Environment Variables
|
||||
|
||||
```bash
|
||||
# OpenSearch Configuration
|
||||
OPENSEARCH_HOST=opensearch
|
||||
OPENSEARCH_PORT=9200
|
||||
OPENSEARCH_USERNAME=admin
|
||||
OPENSEARCH_PASSWORD=${OPENSEARCH_PASSWORD}
|
||||
|
||||
# Feature Flags
|
||||
STORYCOVE_OPENSEARCH_ENABLED=true
|
||||
STORYCOVE_SEARCH_PROVIDER=opensearch
|
||||
STORYCOVE_SEARCH_DUAL_WRITE=true
|
||||
STORYCOVE_OPENSEARCH_ROLLOUT_PERCENTAGE=100
|
||||
|
||||
# Performance Tuning
|
||||
OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx2g
|
||||
STORYCOVE_SEARCH_BATCH_SIZE=1000
|
||||
STORYCOVE_SEARCH_TIMEOUT=30s
|
||||
```
|
||||
|
||||
### Docker Compose Updates
|
||||
|
||||
```yaml
|
||||
# Add to docker-compose.yml
|
||||
opensearch:
|
||||
image: opensearchproject/opensearch:2.11.0
|
||||
environment:
|
||||
- discovery.type=single-node
|
||||
- DISABLE_SECURITY_PLUGIN=true
|
||||
- OPENSEARCH_JAVA_OPTS=-Xms512m -Xmx2g
|
||||
volumes:
|
||||
- opensearch_data:/usr/share/opensearch/data
|
||||
networks:
|
||||
- storycove-network
|
||||
|
||||
volumes:
|
||||
opensearch_data:
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Conclusion
|
||||
|
||||
This specification provides a comprehensive roadmap for migrating StoryCove from Typesense to OpenSearch. The phased approach ensures minimal risk while delivering improved reliability and performance, particularly for random story queries.
|
||||
|
||||
The parallel implementation strategy allows for thorough validation and provides confidence in the migration while maintaining the ability to rollback if issues arise. Upon successful completion, StoryCove will have a more robust and scalable search infrastructure that better supports its growth and feature requirements.
|
||||
|
||||
**Next Steps:**
|
||||
1. Review and approve this specification
|
||||
2. Set up development environment with OpenSearch
|
||||
3. Begin Phase 1 implementation
|
||||
4. Establish monitoring and success metrics
|
||||
5. Execute migration according to timeline
|
||||
|
||||
---
|
||||
|
||||
*Document Version: 1.0*
|
||||
*Last Updated: 2025-01-17*
|
||||
*Author: Claude Code Assistant*
|
||||
118
PORTABLE_TEXT_SETUP.md
Normal file
118
PORTABLE_TEXT_SETUP.md
Normal file
@@ -0,0 +1,118 @@
|
||||
# Portable Text Editor Setup Instructions
|
||||
|
||||
## Current Status
|
||||
|
||||
⚠️ **Temporarily Reverted to Original Editor**
|
||||
|
||||
Due to npm cache permission issues preventing Docker builds, I've temporarily reverted the imports back to `RichTextEditor`. The Portable Text implementation is complete and ready to activate once the npm issue is resolved.
|
||||
|
||||
## Files Ready for Portable Text
|
||||
|
||||
- ✅ `PortableTextEditor.tsx` - Complete implementation
|
||||
- ✅ `schema.ts` - Portable Text schema
|
||||
- ✅ `conversion.ts` - HTML ↔ Portable Text conversion
|
||||
- ✅ `package.json.with-portabletext` - Updated dependencies
|
||||
|
||||
## Docker Build Issue Resolution
|
||||
|
||||
The error `npm ci` requires `package-lock.json` but npm cache permissions prevent generating it.
|
||||
|
||||
### Solution Steps:
|
||||
|
||||
1. **Fix npm permissions:**
|
||||
```bash
|
||||
sudo chown -R $(whoami) ~/.npm
|
||||
```
|
||||
|
||||
2. **Switch to Portable Text setup:**
|
||||
```bash
|
||||
cd frontend
|
||||
mv package.json package.json.original
|
||||
mv package.json.with-portabletext package.json
|
||||
npm install # This will generate package-lock.json
|
||||
```
|
||||
|
||||
3. **Update component imports** (change RichTextEditor → PortableTextEditor):
|
||||
```typescript
|
||||
// In src/app/add-story/page.tsx and src/app/stories/[id]/edit/page.tsx
|
||||
import PortableTextEditor from '../../components/stories/PortableTextEditor';
|
||||
// And update the JSX to use <PortableTextEditor ... />
|
||||
```
|
||||
|
||||
4. **Build and test:**
|
||||
```bash
|
||||
npm run build
|
||||
docker-compose build
|
||||
```
|
||||
|
||||
## Implementation Complete
|
||||
|
||||
✅ **Portable Text Schema** - Defines formatting options matching the original editor
|
||||
✅ **HTML ↔ Portable Text Conversion** - Seamless conversion between formats
|
||||
✅ **Sanitization Integration** - Uses existing sanitization strategy
|
||||
✅ **Component Replacement** - PortableTextEditor replaces RichTextEditor
|
||||
✅ **Image Processing** - Maintains existing image processing functionality
|
||||
✅ **Toolbar** - All formatting buttons from original editor
|
||||
✅ **Keyboard Shortcuts** - Ctrl+B, Ctrl+I, Ctrl+Shift+1-6
|
||||
|
||||
## Features Maintained
|
||||
|
||||
### 1. **Formatting Options**
|
||||
- Bold, Italic, Underline, Strike, Code
|
||||
- Headings H1-H6
|
||||
- Paragraphs and Blockquotes
|
||||
- All original toolbar buttons
|
||||
|
||||
### 2. **Visual & HTML Modes**
|
||||
- Visual mode: Structured Portable Text editing
|
||||
- HTML mode: Direct HTML editing (fallback)
|
||||
- Live preview in HTML mode
|
||||
|
||||
### 3. **Image Processing**
|
||||
- Existing image processing pipeline maintained
|
||||
- Background image download and conversion
|
||||
- Processing status indicators
|
||||
- Warning system
|
||||
|
||||
### 4. **Paste Handling**
|
||||
- Rich text paste from websites
|
||||
- Image processing during paste
|
||||
- HTML sanitization
|
||||
- Structured content conversion
|
||||
|
||||
### 5. **Maximization & Resizing**
|
||||
- Fullscreen editing mode
|
||||
- Resizable editor height
|
||||
- Keyboard shortcuts (Escape to exit)
|
||||
|
||||
## Benefits of Portable Text
|
||||
|
||||
1. **Structured Content** - Content is stored as JSON, not just HTML
|
||||
2. **Future-Proof** - Easy to export/migrate content
|
||||
3. **Better Search** - Structured content works better with Typesense
|
||||
4. **Extensible** - Easy to add custom block types (images, etc.)
|
||||
5. **Sanitization** - Inherently safer than HTML parsing
|
||||
|
||||
## Next Steps
|
||||
|
||||
1. Install the npm packages using one of the methods above
|
||||
2. Test the editor functionality
|
||||
3. Verify image processing works correctly
|
||||
4. Optional: Add custom image block types for enhanced image handling
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
frontend/src/
|
||||
├── components/stories/
|
||||
│ ├── PortableTextEditor.tsx # New editor component
|
||||
│ └── RichTextEditor.tsx # Original (can be removed after testing)
|
||||
├── lib/portabletext/
|
||||
│ ├── schema.ts # Portable Text schema and types
|
||||
│ └── conversion.ts # HTML ↔ Portable Text conversion
|
||||
└── app/
|
||||
├── add-story/page.tsx # Updated to use PortableTextEditor
|
||||
└── stories/[id]/edit/page.tsx # Updated to use PortableTextEditor
|
||||
```
|
||||
|
||||
The implementation is backward compatible and maintains all existing functionality while providing the benefits of structured content editing.
|
||||
131
README.md
131
README.md
@@ -131,9 +131,12 @@ cd backend
|
||||
### 🎨 **User Experience**
|
||||
- **Dark/Light Mode**: Automatic theme switching with system preference detection
|
||||
- **Responsive Design**: Optimized for desktop, tablet, and mobile
|
||||
- **Reading Mode**: Distraction-free reading interface
|
||||
- **Reading Mode**: Distraction-free reading interface with real-time progress tracking
|
||||
- **Reading Position Memory**: Character-based position tracking with smooth auto-scroll restoration
|
||||
- **Smart Tag Filtering**: Dynamic tag filters with live story counts in library view
|
||||
- **Keyboard Navigation**: Full keyboard accessibility
|
||||
- **Rich Text Editor**: Visual and source editing modes for story content
|
||||
- **Progress Indicators**: Visual reading progress bars and completion tracking
|
||||
|
||||
### 🔒 **Security & Administration**
|
||||
- **JWT Authentication**: Secure token-based authentication
|
||||
@@ -158,43 +161,75 @@ cd backend
|
||||
|
||||
## 📖 Documentation
|
||||
|
||||
- **[API Documentation](docs/API.md)**: Complete REST API reference with examples
|
||||
- **[Data Model](docs/DATA_MODEL.md)**: Detailed database schema and relationships
|
||||
- **[Technical Specification](storycove-spec.md)**: Comprehensive technical specification
|
||||
- **[Technical Specification](storycove-spec.md)**: Complete technical specification with API documentation, data models, and all feature specifications
|
||||
- **[Web Scraper Specification](storycove-scraper-spec.md)**: URL content grabbing functionality
|
||||
- **Environment Configuration**: Multi-environment deployment setup (see above)
|
||||
- **Development Setup**: Local development environment setup (see below)
|
||||
|
||||
> **Note**: All feature specifications (Collections, Tag Enhancements, EPUB Import/Export) have been consolidated into the main technical specification for easier maintenance and reference.
|
||||
|
||||
## 🗄️ Data Model
|
||||
|
||||
StoryCove uses a PostgreSQL database with the following core entities:
|
||||
|
||||
### **Stories**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: title, summary, description, content_html, content_plain, source_url, word_count, rating, volume, cover_path
|
||||
- **Relationships**: Many-to-One with Author, Many-to-One with Series, Many-to-Many with Tags
|
||||
- **Features**: Automatic word count calculation, HTML sanitization, plain text extraction
|
||||
- **Fields**: title, summary, description, content_html, content_plain, source_url, word_count, rating, volume, cover_path, is_read, reading_position, last_read_at, created_at, updated_at
|
||||
- **Relationships**: Many-to-One with Author, Many-to-One with Series, Many-to-Many with Tags, One-to-Many with ReadingPositions
|
||||
- **Features**: Automatic word count calculation, HTML sanitization, plain text extraction, reading progress tracking, duplicate detection
|
||||
|
||||
### **Authors**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: name, notes, author_rating, avatar_image_path
|
||||
- **Relationships**: One-to-Many with Stories, One-to-Many with Author URLs
|
||||
- **Features**: URL collection storage, rating system, statistics calculation
|
||||
- **Fields**: name, notes, author_rating, avatar_image_path, created_at, updated_at
|
||||
- **Relationships**: One-to-Many with Stories, One-to-Many with Author URLs (via @ElementCollection)
|
||||
- **Features**: URL collection storage, rating system, statistics calculation, average story rating calculation
|
||||
|
||||
### **Collections**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: name, description, rating, cover_image_path, is_archived, created_at, updated_at
|
||||
- **Relationships**: Many-to-Many with Tags, One-to-Many with CollectionStories
|
||||
- **Features**: Story ordering with gap-based positioning, statistics calculation, EPUB export, Typesense search
|
||||
|
||||
### **CollectionStories** (Junction Table)
|
||||
- **Composite Key**: collection_id, story_id
|
||||
- **Fields**: position, added_at
|
||||
- **Relationships**: Links Collections to Stories with ordering
|
||||
- **Features**: Gap-based positioning for efficient reordering
|
||||
|
||||
### **Series**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: name, description
|
||||
- **Fields**: name, description, created_at
|
||||
- **Relationships**: One-to-Many with Stories (ordered by volume)
|
||||
- **Features**: Volume-based story ordering, navigation methods
|
||||
- **Features**: Volume-based story ordering, navigation methods (next/previous story)
|
||||
|
||||
### **Tags**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: name (unique)
|
||||
- **Relationships**: Many-to-Many with Stories
|
||||
- **Features**: Autocomplete support, usage statistics
|
||||
- **Fields**: name (unique), color (hex), description, created_at
|
||||
- **Relationships**: Many-to-Many with Stories, Many-to-Many with Collections, One-to-Many with TagAliases
|
||||
- **Features**: Color coding, alias system, autocomplete support, usage statistics, AI-powered suggestions
|
||||
|
||||
### **Join Tables**
|
||||
- **story_tags**: Links stories to tags
|
||||
- **author_urls**: Stores multiple URLs per author
|
||||
### **TagAliases**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: alias_name (unique), canonical_tag_id, created_from_merge, created_at
|
||||
- **Relationships**: Many-to-One with Tag (canonical)
|
||||
- **Features**: Transparent alias resolution, merge tracking, autocomplete integration
|
||||
|
||||
### **ReadingPositions**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: story_id, chapter_index, chapter_title, word_position, character_position, percentage_complete, epub_cfi, context_before, context_after, created_at, updated_at
|
||||
- **Relationships**: Many-to-One with Story
|
||||
- **Features**: Advanced reading position tracking, EPUB CFI support, context preservation, percentage calculation
|
||||
|
||||
### **Libraries**
|
||||
- **Primary Key**: UUID
|
||||
- **Fields**: name, description, is_default, created_at, updated_at
|
||||
- **Features**: Multi-library support, library switching functionality
|
||||
|
||||
### **Core Join Tables**
|
||||
- **story_tags**: Links stories to tags (Many-to-Many)
|
||||
- **collection_tags**: Links collections to tags (Many-to-Many)
|
||||
- **collection_stories**: Links collections to stories with ordering
|
||||
- **author_urls**: Stores multiple URLs per author (@ElementCollection)
|
||||
|
||||
## 🔌 REST API Reference
|
||||
|
||||
@@ -206,6 +241,7 @@ StoryCove uses a PostgreSQL database with the following core entities:
|
||||
### **Stories** (`/api/stories`)
|
||||
- `GET /` - List stories (paginated)
|
||||
- `GET /{id}` - Get specific story
|
||||
- `GET /{id}/read` - Get story for reading interface
|
||||
- `POST /` - Create new story
|
||||
- `PUT /{id}` - Update story
|
||||
- `DELETE /{id}` - Delete story
|
||||
@@ -214,13 +250,28 @@ StoryCove uses a PostgreSQL database with the following core entities:
|
||||
- `POST /{id}/rating` - Set story rating
|
||||
- `POST /{id}/tags/{tagId}` - Add tag to story
|
||||
- `DELETE /{id}/tags/{tagId}` - Remove tag from story
|
||||
- `GET /search` - Search stories (Typesense)
|
||||
- `POST /{id}/reading-progress` - Update reading position
|
||||
- `POST /{id}/reading-status` - Mark story as read/unread
|
||||
- `GET /{id}/collections` - Get collections containing story
|
||||
- `GET /random` - Get random story with optional filters
|
||||
- `GET /check-duplicate` - Check for duplicate stories
|
||||
- `GET /search` - Search stories (Typesense with faceting)
|
||||
- `GET /search/suggestions` - Get search suggestions
|
||||
- `GET /author/{authorId}` - Stories by author
|
||||
- `GET /series/{seriesId}` - Stories in series
|
||||
- `GET /tags/{tagName}` - Stories with tag
|
||||
- `GET /recent` - Recent stories
|
||||
- `GET /top-rated` - Top-rated stories
|
||||
- `POST /batch/add-to-collection` - Add multiple stories to collection
|
||||
- `POST /reindex` - Manual Typesense reindex
|
||||
- `POST /reindex-typesense` - Reindex stories in Typesense
|
||||
- `POST /recreate-typesense-collection` - Recreate Typesense collection
|
||||
|
||||
#### **EPUB Import/Export** (`/api/stories/epub`)
|
||||
- `POST /import` - Import story from EPUB file
|
||||
- `POST /export` - Export story as EPUB with options
|
||||
- `GET /{id}/epub` - Export story as EPUB (simple)
|
||||
- `POST /validate` - Validate EPUB file structure
|
||||
|
||||
### **Authors** (`/api/authors`)
|
||||
- `GET /` - List authors (paginated)
|
||||
@@ -240,14 +291,49 @@ StoryCove uses a PostgreSQL database with the following core entities:
|
||||
### **Tags** (`/api/tags`)
|
||||
- `GET /` - List tags (paginated)
|
||||
- `GET /{id}` - Get specific tag
|
||||
- `POST /` - Create new tag
|
||||
- `PUT /{id}` - Update tag
|
||||
- `POST /` - Create new tag (with color and description)
|
||||
- `PUT /{id}` - Update tag (name, color, description)
|
||||
- `DELETE /{id}` - Delete tag
|
||||
- `GET /search` - Search tags
|
||||
- `GET /autocomplete` - Tag autocomplete
|
||||
- `GET /autocomplete` - Tag autocomplete with alias resolution
|
||||
- `GET /popular` - Most used tags
|
||||
- `GET /unused` - Unused tags
|
||||
- `GET /stats` - Tag statistics
|
||||
- `GET /collections` - Tags used by collections
|
||||
- `GET /resolve/{name}` - Resolve tag name (handles aliases)
|
||||
|
||||
#### **Tag Aliases** (`/api/tags/{tagId}/aliases`)
|
||||
- `POST /` - Add alias to tag
|
||||
- `DELETE /{aliasId}` - Remove alias from tag
|
||||
|
||||
#### **Tag Management**
|
||||
- `POST /merge` - Merge multiple tags into one
|
||||
- `POST /merge/preview` - Preview tag merge operation
|
||||
- `POST /suggest` - AI-powered tag suggestions for content
|
||||
|
||||
### **Collections** (`/api/collections`)
|
||||
- `GET /` - Search and list collections (Typesense)
|
||||
- `GET /{id}` - Get collection details
|
||||
- `POST /` - Create new collection (JSON or multipart)
|
||||
- `PUT /{id}` - Update collection metadata
|
||||
- `DELETE /{id}` - Delete collection
|
||||
- `PUT /{id}/archive` - Archive/unarchive collection
|
||||
- `POST /{id}/cover` - Upload collection cover image
|
||||
- `DELETE /{id}/cover` - Remove collection cover image
|
||||
- `GET /{id}/stats` - Get collection statistics
|
||||
|
||||
#### **Collection Story Management**
|
||||
- `POST /{id}/stories` - Add stories to collection
|
||||
- `DELETE /{id}/stories/{storyId}` - Remove story from collection
|
||||
- `PUT /{id}/stories/order` - Reorder stories in collection
|
||||
- `GET /{id}/read/{storyId}` - Get story with collection context
|
||||
|
||||
#### **Collection EPUB Export**
|
||||
- `GET /{id}/epub` - Export collection as EPUB
|
||||
- `POST /{id}/epub` - Export collection as EPUB with options
|
||||
|
||||
#### **Collection Management**
|
||||
- `POST /reindex-typesense` - Reindex collections in Typesense
|
||||
|
||||
### **Series** (`/api/series`)
|
||||
- `GET /` - List series (paginated)
|
||||
@@ -295,6 +381,7 @@ All API endpoints use JSON format with proper HTTP status codes:
|
||||
- **Backend**: Spring Boot 3, Java 21, PostgreSQL, Typesense
|
||||
- **Infrastructure**: Docker, Docker Compose, Nginx
|
||||
- **Security**: JWT authentication, HTML sanitization, CORS
|
||||
- **Search**: Typesense with faceting and full-text search capabilities
|
||||
|
||||
### **Local Development Setup**
|
||||
|
||||
|
||||
305
TAG_ENHANCEMENT_SPECIFICATION.md
Normal file
305
TAG_ENHANCEMENT_SPECIFICATION.md
Normal file
@@ -0,0 +1,305 @@
|
||||
# Tag Enhancement Specification
|
||||
|
||||
> **✅ Implementation Status: COMPLETED**
|
||||
> This feature has been fully implemented and is available in the system.
|
||||
> All tag enhancements including colors, aliases, merging, and AI suggestions are working.
|
||||
> Last updated: January 2025
|
||||
|
||||
## Overview
|
||||
|
||||
This document outlines the comprehensive enhancement of the tagging functionality in StoryCove, including color tags, tag deletion, merging, and aliases. These features will be accessible through a new "Tag Maintenance" page linked from the Settings page.
|
||||
|
||||
## Features
|
||||
|
||||
### 1. Color Tags
|
||||
|
||||
**Purpose**: Assign optional colors to tags for visual distinction and better organization.
|
||||
|
||||
**Implementation Details**:
|
||||
- **Color Selection**: Predefined color palette that complements the app's theme
|
||||
- **Custom Colors**: Fallback option with full color picker for advanced users
|
||||
- **Default Behavior**: Tags without colors use consistent default styling
|
||||
- **Accessibility**: All colors ensure sufficient contrast ratios
|
||||
|
||||
**UI Design**:
|
||||
```
|
||||
Color Selection Interface:
|
||||
[Theme Blue] [Theme Green] [Theme Purple] [Theme Orange] ... [Custom ▼]
|
||||
```
|
||||
|
||||
**Database Changes**:
|
||||
```sql
|
||||
ALTER TABLE tags ADD COLUMN color VARCHAR(7); -- hex colors like #3B82F6
|
||||
ALTER TABLE tags ADD COLUMN description TEXT;
|
||||
```
|
||||
|
||||
### 2. Tag Deletion
|
||||
|
||||
**Purpose**: Remove unused or unwanted tags from the system.
|
||||
|
||||
**Safety Features**:
|
||||
- Show impact: "This tag is used by X stories"
|
||||
- Confirmation dialog with story count
|
||||
- Option to reassign stories to different tag before deletion
|
||||
- Simple workflow appropriate for single-user application
|
||||
|
||||
**Behavior**:
|
||||
- Display number of affected stories
|
||||
- Require confirmation for deletion
|
||||
- Optionally allow reassignment to another tag
|
||||
|
||||
### 3. Tag Merging
|
||||
|
||||
**Purpose**: Combine similar tags into a single canonical tag to reduce duplication.
|
||||
|
||||
**Workflow**:
|
||||
1. User selects multiple tags to merge
|
||||
2. User chooses which tag name becomes canonical
|
||||
3. System shows merge preview with story counts
|
||||
4. All story associations transfer to canonical tag
|
||||
5. **Automatic Aliasing**: Merged tags automatically become aliases
|
||||
|
||||
**Example**:
|
||||
```
|
||||
Merge Preview:
|
||||
• "magictf" (5 stories) → "magic tf" (12 stories)
|
||||
• Result: "magic tf" (17 stories)
|
||||
• "magictf" will become an alias for "magic tf"
|
||||
```
|
||||
|
||||
**Technical Implementation**:
|
||||
```sql
|
||||
-- Merge operation (atomic transaction)
|
||||
BEGIN TRANSACTION;
|
||||
UPDATE story_tags SET tag_id = target_tag_id WHERE tag_id = source_tag_id;
|
||||
INSERT INTO tag_aliases (alias_name, canonical_tag_id, created_from_merge)
|
||||
VALUES (source_tag_name, target_tag_id, TRUE);
|
||||
DELETE FROM tags WHERE id = source_tag_id;
|
||||
COMMIT;
|
||||
```
|
||||
|
||||
### 4. Tag Aliases
|
||||
|
||||
**Purpose**: Prevent tag duplication by allowing alternative names that resolve to canonical tags.
|
||||
|
||||
**Key Features**:
|
||||
- **Transparent Resolution**: Users type "magictf" and automatically get "magic tf"
|
||||
- **Hover Display**: Show aliases when hovering over tags
|
||||
- **Import Integration**: Automatic alias resolution during story imports
|
||||
- **Auto-Generation**: Created automatically during tag merges
|
||||
|
||||
**Database Schema**:
|
||||
```sql
|
||||
CREATE TABLE tag_aliases (
|
||||
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
|
||||
alias_name VARCHAR(255) UNIQUE NOT NULL,
|
||||
canonical_tag_id UUID NOT NULL REFERENCES tags(id) ON DELETE CASCADE,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
created_from_merge BOOLEAN DEFAULT FALSE
|
||||
);
|
||||
|
||||
CREATE INDEX idx_tag_aliases_name ON tag_aliases(alias_name);
|
||||
```
|
||||
|
||||
**UI Behavior**:
|
||||
- Tags with aliases show subtle indicator (e.g., small "+" icon)
|
||||
- Hover tooltip displays:
|
||||
```
|
||||
magic tf
|
||||
────────────
|
||||
Aliases: magictf, magic_tf, magic-transformation
|
||||
```
|
||||
|
||||
## Tag Maintenance Page
|
||||
|
||||
### Access
|
||||
- Reachable only through Settings page
|
||||
- Button: "Tag Maintenance" or "Manage Tags"
|
||||
|
||||
### Main Interface
|
||||
|
||||
**Tag Management Table**:
|
||||
```
|
||||
┌─ Search: [____________] [Color Filter ▼] [Sort: Usage ▼]
|
||||
├─
|
||||
├─ ☐ magic tf 🔵 (17 stories) [+2 aliases] [Edit] [Delete]
|
||||
├─ ☐ transformation 🟢 (34 stories) [+1 alias] [Edit] [Delete]
|
||||
├─ ☐ sci-fi 🟣 (45 stories) [Edit] [Delete]
|
||||
└─
|
||||
[Merge Selected] [Bulk Delete] [Export/Import Tags]
|
||||
```
|
||||
|
||||
**Features**:
|
||||
- Searchable and filterable tag list
|
||||
- Sortable by name, usage count, creation date
|
||||
- Bulk selection for merge/delete operations
|
||||
- Visual indicators for color and alias count
|
||||
|
||||
### Tag Edit Modal
|
||||
|
||||
```
|
||||
Edit Tag: "magic tf"
|
||||
┌─ Name: [magic tf ]
|
||||
├─ Color: [🔵] [Theme Colors...] [Custom...]
|
||||
├─ Description: [Optional description]
|
||||
├─
|
||||
├─ Aliases (2):
|
||||
│ • magictf [Remove]
|
||||
│ • magic_tf [Remove]
|
||||
│ [Add Alias: ____________] [Add]
|
||||
├─
|
||||
├─ Used by 17 stories [View Stories]
|
||||
└─ [Save] [Cancel]
|
||||
```
|
||||
|
||||
**Functionality**:
|
||||
- Edit tag name, color, and description
|
||||
- Manage aliases (add/remove)
|
||||
- View associated stories
|
||||
- Prevent circular alias references
|
||||
|
||||
### Merge Interface
|
||||
|
||||
**Selection Process**:
|
||||
1. Select multiple tags from main table
|
||||
2. Click "Merge Selected"
|
||||
3. Choose canonical tag name
|
||||
4. Preview merge results
|
||||
5. Confirm operation
|
||||
|
||||
**Preview Display**:
|
||||
- Show before/after story counts
|
||||
- List all aliases that will be created
|
||||
- Highlight any conflicts or issues
|
||||
|
||||
## Integration Points
|
||||
|
||||
### 1. Import/Scraping Enhancement
|
||||
|
||||
```javascript
|
||||
// Tag resolution during imports
|
||||
const resolveTagName = async (inputTag) => {
|
||||
const alias = await tagApi.findAlias(inputTag);
|
||||
return alias ? alias.canonicalTag : inputTag;
|
||||
};
|
||||
```
|
||||
|
||||
### 2. Tag Input Components
|
||||
|
||||
**Enhanced Autocomplete**:
|
||||
- Include both canonical names and aliases in suggestions
|
||||
- Show resolution: "magictf → magic tf" in dropdown
|
||||
- Always save canonical name to database
|
||||
|
||||
### 3. Search Functionality
|
||||
|
||||
**Transparent Alias Search**:
|
||||
- Search for "magictf" includes stories tagged with "magic tf"
|
||||
- User doesn't need to know about canonical/alias distinction
|
||||
- Expand search queries to include all aliases
|
||||
|
||||
### 4. Display Components
|
||||
|
||||
**Tag Rendering**:
|
||||
- Apply colors consistently across all tag displays
|
||||
- Show alias indicator where appropriate
|
||||
- Implement hover tooltips for alias information
|
||||
|
||||
## Implementation Phases
|
||||
|
||||
### Phase 1: Core Infrastructure
|
||||
- [ ] Database schema updates (tags.color, tag_aliases table)
|
||||
- [ ] Basic tag editing functionality (name, color, description)
|
||||
- [ ] Color palette component with theme colors
|
||||
- [ ] Tag edit modal interface
|
||||
|
||||
### Phase 2: Merging & Aliasing
|
||||
- [ ] Tag merge functionality with automatic alias creation
|
||||
- [ ] Alias resolution in import/scraping logic
|
||||
- [ ] Tag input component enhancements
|
||||
- [ ] Search integration with alias expansion
|
||||
|
||||
### Phase 3: UI Polish & Advanced Features
|
||||
- [ ] Hover tooltips for alias display
|
||||
- [ ] Bulk operations (merge multiple, bulk delete)
|
||||
- [ ] Advanced filtering and sorting options
|
||||
- [ ] Tag maintenance page integration with Settings
|
||||
|
||||
### Phase 4: Smart Features (Optional)
|
||||
- [ ] Auto-merge suggestions for similar tag names
|
||||
- [ ] Color auto-assignment based on usage patterns
|
||||
- [ ] Import intelligence and learning from user decisions
|
||||
|
||||
## Technical Considerations
|
||||
|
||||
### Performance
|
||||
- Index alias names for fast lookup during imports
|
||||
- Optimize tag queries with proper database indexing
|
||||
- Consider caching for frequently accessed tag/alias mappings
|
||||
|
||||
### Data Integrity
|
||||
- Prevent circular alias references
|
||||
- Atomic transactions for merge operations
|
||||
- Cascade deletion handling for tag relationships
|
||||
|
||||
### User Experience
|
||||
- Clear visual feedback for all operations
|
||||
- Comprehensive preview before destructive actions
|
||||
- Consistent color and styling across the application
|
||||
|
||||
### Accessibility
|
||||
- Sufficient color contrast for all tag colors
|
||||
- Keyboard navigation support
|
||||
- Screen reader compatibility
|
||||
- Don't rely solely on color for information
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### New Endpoints Needed
|
||||
- `GET /api/tags/{id}/aliases` - Get aliases for a tag
|
||||
- `POST /api/tags/merge` - Merge multiple tags
|
||||
- `POST /api/tags/{id}/aliases` - Add alias to tag
|
||||
- `DELETE /api/tags/{id}/aliases/{aliasId}` - Remove alias
|
||||
- `PUT /api/tags/{id}/color` - Update tag color
|
||||
- `GET /api/tags/resolve/{name}` - Resolve tag name (check aliases)
|
||||
|
||||
### Enhanced Endpoints
|
||||
- `GET /api/tags` - Include color and alias count in response
|
||||
- `PUT /api/tags/{id}` - Support color and description updates
|
||||
- `DELETE /api/tags/{id}` - Enhanced with story impact information
|
||||
|
||||
## Configuration
|
||||
|
||||
### Theme Color Palette
|
||||
Define a curated set of colors that work well with both light and dark themes:
|
||||
- Primary blues: #3B82F6, #1D4ED8, #60A5FA
|
||||
- Greens: #10B981, #059669, #34D399
|
||||
- Purples: #8B5CF6, #7C3AED, #A78BFA
|
||||
- Warm tones: #F59E0B, #D97706, #F97316
|
||||
- Neutrals: #6B7280, #4B5563, #9CA3AF
|
||||
|
||||
### Settings Integration
|
||||
- Add "Tag Maintenance" button to Settings page
|
||||
- Consider adding tag-related preferences (default colors, etc.)
|
||||
|
||||
## Success Criteria
|
||||
|
||||
1. **Color Tags**: Tags can be assigned colors that display consistently throughout the application
|
||||
2. **Tag Deletion**: Users can safely delete tags with appropriate warnings and reassignment options
|
||||
3. **Tag Merging**: Similar tags can be merged with automatic alias creation
|
||||
4. **Alias Resolution**: Imports automatically resolve aliases to canonical tags
|
||||
5. **User Experience**: All operations are intuitive with clear feedback and preview options
|
||||
6. **Performance**: Tag operations remain fast even with large numbers of tags and aliases
|
||||
7. **Data Integrity**: No orphaned references or circular alias chains
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- **Tag Statistics**: Usage analytics and trends
|
||||
- **Tag Recommendations**: AI-powered tag suggestions during story import
|
||||
- **Tag Templates**: Predefined tag sets for common story types
|
||||
- **Export/Import**: Backup and restore tag configurations
|
||||
- **Tag Validation**: Rules for tag naming conventions
|
||||
|
||||
---
|
||||
|
||||
*This specification serves as the definitive guide for implementing the tag enhancement features in StoryCove. All implementation should refer back to this document to ensure consistency and completeness.*
|
||||
@@ -2,15 +2,15 @@ FROM openjdk:17-jdk-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY pom.xml .
|
||||
COPY src ./src
|
||||
# Install Maven
|
||||
RUN apt-get update && apt-get install -y maven && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
RUN apt-get update && apt-get install -y maven && \
|
||||
mvn clean package -DskipTests && \
|
||||
apt-get remove -y maven && \
|
||||
apt-get autoremove -y && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
# Build the application
|
||||
RUN mvn clean package -DskipTests
|
||||
|
||||
EXPOSE 8080
|
||||
|
||||
CMD ["java", "-jar", "target/storycove-backend-0.0.1-SNAPSHOT.jar"]
|
||||
ENTRYPOINT ["java", "-jar", "target/storycove-backend-0.0.1-SNAPSHOT.jar"]
|
||||
1
backend/backend.log
Normal file
1
backend/backend.log
Normal file
@@ -0,0 +1 @@
|
||||
(eval):1: no such file or directory: ./mvnw
|
||||
4
backend/cookies_new.txt
Normal file
4
backend/cookies_new.txt
Normal file
@@ -0,0 +1,4 @@
|
||||
# Netscape HTTP Cookie File
|
||||
# https://curl.se/docs/http-cookies.html
|
||||
# This file was generated by libcurl! Edit at your own risk.
|
||||
|
||||
@@ -5,7 +5,7 @@
|
||||
<parent>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-parent</artifactId>
|
||||
<version>3.2.0</version>
|
||||
<version>3.5.5</version>
|
||||
<relativePath/>
|
||||
</parent>
|
||||
|
||||
@@ -17,7 +17,7 @@
|
||||
|
||||
<properties>
|
||||
<java.version>17</java.version>
|
||||
<testcontainers.version>1.19.3</testcontainers.version>
|
||||
<testcontainers.version>1.21.3</testcontainers.version>
|
||||
</properties>
|
||||
|
||||
<dependencyManagement>
|
||||
@@ -49,6 +49,10 @@
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-validation</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-starter-actuator</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.postgresql</groupId>
|
||||
<artifactId>postgresql</artifactId>
|
||||
@@ -56,18 +60,18 @@
|
||||
<dependency>
|
||||
<groupId>io.jsonwebtoken</groupId>
|
||||
<artifactId>jjwt-api</artifactId>
|
||||
<version>0.12.3</version>
|
||||
<version>0.13.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.jsonwebtoken</groupId>
|
||||
<artifactId>jjwt-impl</artifactId>
|
||||
<version>0.12.3</version>
|
||||
<version>0.13.0</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>io.jsonwebtoken</groupId>
|
||||
<artifactId>jjwt-jackson</artifactId>
|
||||
<version>0.12.3</version>
|
||||
<version>0.13.0</version>
|
||||
<scope>runtime</scope>
|
||||
</dependency>
|
||||
<dependency>
|
||||
@@ -80,9 +84,22 @@
|
||||
<artifactId>httpclient5</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.typesense</groupId>
|
||||
<artifactId>typesense-java</artifactId>
|
||||
<version>1.3.0</version>
|
||||
<groupId>org.opensearch.client</groupId>
|
||||
<artifactId>opensearch-java</artifactId>
|
||||
<version>3.2.0</version>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents.core5</groupId>
|
||||
<artifactId>httpcore5</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>org.apache.httpcomponents.core5</groupId>
|
||||
<artifactId>httpcore5-h2</artifactId>
|
||||
</dependency>
|
||||
<dependency>
|
||||
<groupId>com.positiondev.epublib</groupId>
|
||||
<artifactId>epublib-core</artifactId>
|
||||
<version>3.1</version>
|
||||
</dependency>
|
||||
|
||||
<!-- Test dependencies -->
|
||||
@@ -114,6 +131,13 @@
|
||||
<groupId>org.springframework.boot</groupId>
|
||||
<artifactId>spring-boot-maven-plugin</artifactId>
|
||||
</plugin>
|
||||
<plugin>
|
||||
<groupId>org.apache.maven.plugins</groupId>
|
||||
<artifactId>maven-compiler-plugin</artifactId>
|
||||
<configuration>
|
||||
<parameters>true</parameters>
|
||||
</configuration>
|
||||
</plugin>
|
||||
</plugins>
|
||||
</build>
|
||||
</project>
|
||||
@@ -0,0 +1,64 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import com.storycove.service.LibraryService;
|
||||
import com.zaxxer.hikari.HikariConfig;
|
||||
import com.zaxxer.hikari.HikariDataSource;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.springframework.context.annotation.DependsOn;
|
||||
import org.springframework.context.annotation.Primary;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
|
||||
/**
|
||||
* Database configuration that sets up library-aware datasource routing.
|
||||
*
|
||||
* This configuration replaces the default Spring Boot datasource with a routing
|
||||
* datasource that automatically directs all database operations to the appropriate
|
||||
* library-specific database based on the current active library.
|
||||
*/
|
||||
@Configuration
|
||||
public class DatabaseConfig {
|
||||
|
||||
@Value("${spring.datasource.url}")
|
||||
private String baseDbUrl;
|
||||
|
||||
@Value("${spring.datasource.username}")
|
||||
private String dbUsername;
|
||||
|
||||
@Value("${spring.datasource.password}")
|
||||
private String dbPassword;
|
||||
|
||||
/**
|
||||
* Create a fallback datasource for when no library is active.
|
||||
* This connects to the main database specified in application.yml.
|
||||
*/
|
||||
@Bean(name = "fallbackDataSource")
|
||||
public DataSource fallbackDataSource() {
|
||||
HikariConfig config = new HikariConfig();
|
||||
config.setJdbcUrl(baseDbUrl);
|
||||
config.setUsername(dbUsername);
|
||||
config.setPassword(dbPassword);
|
||||
config.setDriverClassName("org.postgresql.Driver");
|
||||
config.setMaximumPoolSize(10);
|
||||
config.setConnectionTimeout(30000);
|
||||
|
||||
return new HikariDataSource(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Primary datasource bean - uses smart routing that excludes authentication operations
|
||||
*/
|
||||
@Bean(name = "dataSource")
|
||||
@Primary
|
||||
@DependsOn("libraryService")
|
||||
public DataSource primaryDataSource(LibraryService libraryService) {
|
||||
SmartRoutingDataSource routingDataSource = new SmartRoutingDataSource(
|
||||
libraryService, baseDbUrl, dbUsername, dbPassword);
|
||||
routingDataSource.setDefaultTargetDataSource(fallbackDataSource());
|
||||
routingDataSource.setTargetDataSources(new java.util.HashMap<>());
|
||||
return routingDataSource;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,65 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import com.storycove.service.LibraryService;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
|
||||
|
||||
/**
|
||||
* Custom DataSource router that dynamically routes database calls to the appropriate
|
||||
* library-specific datasource based on the current active library.
|
||||
*
|
||||
* This makes ALL Spring Data JPA repositories automatically library-aware without
|
||||
* requiring changes to existing repository or service code.
|
||||
*/
|
||||
public class LibraryAwareDataSource extends AbstractRoutingDataSource {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(LibraryAwareDataSource.class);
|
||||
|
||||
private final LibraryService libraryService;
|
||||
|
||||
public LibraryAwareDataSource(LibraryService libraryService) {
|
||||
this.libraryService = libraryService;
|
||||
// Set empty target datasources to satisfy AbstractRoutingDataSource requirements
|
||||
// We override determineTargetDataSource() so this won't be used
|
||||
setTargetDataSources(new java.util.HashMap<>());
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Object determineCurrentLookupKey() {
|
||||
String currentLibraryId = libraryService.getCurrentLibraryId();
|
||||
logger.debug("Routing database call to library: {}", currentLibraryId);
|
||||
return currentLibraryId;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected javax.sql.DataSource determineTargetDataSource() {
|
||||
try {
|
||||
// Check if LibraryService is properly initialized
|
||||
if (libraryService == null) {
|
||||
logger.debug("LibraryService not available, using default datasource");
|
||||
return getResolvedDefaultDataSource();
|
||||
}
|
||||
|
||||
// Check if any library is currently active
|
||||
String currentLibraryId = libraryService.getCurrentLibraryId();
|
||||
if (currentLibraryId == null) {
|
||||
logger.debug("No active library, using default datasource");
|
||||
return getResolvedDefaultDataSource();
|
||||
}
|
||||
|
||||
// Try to get the current library datasource
|
||||
javax.sql.DataSource libraryDataSource = libraryService.getCurrentDataSource();
|
||||
logger.debug("Successfully routing database call to library: {}", currentLibraryId);
|
||||
return libraryDataSource;
|
||||
|
||||
} catch (IllegalStateException e) {
|
||||
// This is expected during authentication, startup, or when no library is active
|
||||
logger.debug("No active library (IllegalStateException) - using default datasource: {}", e.getMessage());
|
||||
return getResolvedDefaultDataSource();
|
||||
} catch (Exception e) {
|
||||
logger.warn("Unexpected error determining target datasource, falling back to default: {}", e.getMessage(), e);
|
||||
return getResolvedDefaultDataSource();
|
||||
}
|
||||
}
|
||||
}
|
||||
211
backend/src/main/java/com/storycove/config/OpenSearchConfig.java
Normal file
211
backend/src/main/java/com/storycove/config/OpenSearchConfig.java
Normal file
@@ -0,0 +1,211 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
|
||||
import org.apache.hc.client5.http.auth.AuthScope;
|
||||
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
|
||||
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
|
||||
import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManager;
|
||||
import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder;
|
||||
import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder;
|
||||
import org.apache.hc.core5.http.HttpHost;
|
||||
import org.apache.hc.core5.util.Timeout;
|
||||
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
|
||||
import org.opensearch.client.opensearch.OpenSearchClient;
|
||||
import org.opensearch.client.transport.OpenSearchTransport;
|
||||
import org.opensearch.client.transport.httpclient5.ApacheHttpClient5TransportBuilder;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
|
||||
import javax.net.ssl.SSLContext;
|
||||
import javax.net.ssl.TrustManager;
|
||||
import javax.net.ssl.X509TrustManager;
|
||||
import java.io.FileInputStream;
|
||||
import java.security.KeyStore;
|
||||
import java.security.cert.X509Certificate;
|
||||
|
||||
@Configuration
|
||||
public class OpenSearchConfig {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(OpenSearchConfig.class);
|
||||
|
||||
private final OpenSearchProperties properties;
|
||||
|
||||
public OpenSearchConfig(@Qualifier("openSearchProperties") OpenSearchProperties properties) {
|
||||
this.properties = properties;
|
||||
}
|
||||
|
||||
@Bean
|
||||
public OpenSearchClient openSearchClient() throws Exception {
|
||||
logger.info("Initializing OpenSearch client for profile: {}", properties.getProfile());
|
||||
|
||||
// Create credentials provider
|
||||
BasicCredentialsProvider credentialsProvider = createCredentialsProvider();
|
||||
|
||||
// Create SSL context based on environment
|
||||
SSLContext sslContext = createSSLContext();
|
||||
|
||||
// Create connection manager with pooling
|
||||
PoolingAsyncClientConnectionManager connectionManager = createConnectionManager(sslContext);
|
||||
|
||||
// Create custom ObjectMapper for proper date serialization
|
||||
ObjectMapper objectMapper = new ObjectMapper();
|
||||
objectMapper.registerModule(new JavaTimeModule());
|
||||
objectMapper.disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
|
||||
|
||||
// Create the transport with all configurations and custom Jackson mapper
|
||||
OpenSearchTransport transport = ApacheHttpClient5TransportBuilder
|
||||
.builder(new HttpHost(properties.getScheme(), properties.getHost(), properties.getPort()))
|
||||
.setMapper(new JacksonJsonpMapper(objectMapper))
|
||||
.setHttpClientConfigCallback(httpClientBuilder -> {
|
||||
// Only set credentials provider if authentication is configured
|
||||
if (properties.getUsername() != null && !properties.getUsername().isEmpty() &&
|
||||
properties.getPassword() != null && !properties.getPassword().isEmpty()) {
|
||||
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
|
||||
}
|
||||
|
||||
httpClientBuilder.setConnectionManager(connectionManager);
|
||||
|
||||
// Set timeouts
|
||||
httpClientBuilder.setDefaultRequestConfig(
|
||||
org.apache.hc.client5.http.config.RequestConfig.custom()
|
||||
.setConnectionRequestTimeout(Timeout.ofMilliseconds(properties.getConnection().getTimeout()))
|
||||
.setResponseTimeout(Timeout.ofMilliseconds(properties.getConnection().getSocketTimeout()))
|
||||
.build()
|
||||
);
|
||||
|
||||
return httpClientBuilder;
|
||||
})
|
||||
.build();
|
||||
|
||||
OpenSearchClient client = new OpenSearchClient(transport);
|
||||
|
||||
// Test connection
|
||||
testConnection(client);
|
||||
|
||||
return client;
|
||||
}
|
||||
|
||||
private BasicCredentialsProvider createCredentialsProvider() {
|
||||
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
|
||||
|
||||
// Only set credentials if username and password are provided
|
||||
if (properties.getUsername() != null && !properties.getUsername().isEmpty() &&
|
||||
properties.getPassword() != null && !properties.getPassword().isEmpty()) {
|
||||
credentialsProvider.setCredentials(
|
||||
new AuthScope(properties.getHost(), properties.getPort()),
|
||||
new UsernamePasswordCredentials(
|
||||
properties.getUsername(),
|
||||
properties.getPassword().toCharArray()
|
||||
)
|
||||
);
|
||||
logger.info("OpenSearch credentials configured for user: {}", properties.getUsername());
|
||||
} else {
|
||||
logger.info("OpenSearch running without authentication (no credentials configured)");
|
||||
}
|
||||
|
||||
return credentialsProvider;
|
||||
}
|
||||
|
||||
private SSLContext createSSLContext() throws Exception {
|
||||
SSLContext sslContext;
|
||||
|
||||
if (isProduction() && !properties.getSecurity().isTrustAllCertificates()) {
|
||||
// Production SSL configuration with proper certificate validation
|
||||
sslContext = createProductionSSLContext();
|
||||
} else {
|
||||
// Development SSL configuration (trust all certificates)
|
||||
sslContext = createDevelopmentSSLContext();
|
||||
}
|
||||
|
||||
return sslContext;
|
||||
}
|
||||
|
||||
private SSLContext createProductionSSLContext() throws Exception {
|
||||
logger.info("Configuring production SSL context with certificate validation");
|
||||
|
||||
SSLContext sslContext = SSLContext.getInstance("TLS");
|
||||
|
||||
// Load custom keystore/truststore if provided
|
||||
if (properties.getSecurity().getTruststorePath() != null) {
|
||||
KeyStore trustStore = KeyStore.getInstance("JKS");
|
||||
try (FileInputStream fis = new FileInputStream(properties.getSecurity().getTruststorePath())) {
|
||||
trustStore.load(fis, properties.getSecurity().getTruststorePassword().toCharArray());
|
||||
}
|
||||
|
||||
javax.net.ssl.TrustManagerFactory tmf =
|
||||
javax.net.ssl.TrustManagerFactory.getInstance(javax.net.ssl.TrustManagerFactory.getDefaultAlgorithm());
|
||||
tmf.init(trustStore);
|
||||
|
||||
sslContext.init(null, tmf.getTrustManagers(), null);
|
||||
} else {
|
||||
// Use default system SSL context for production
|
||||
sslContext.init(null, null, null);
|
||||
}
|
||||
|
||||
return sslContext;
|
||||
}
|
||||
|
||||
private SSLContext createDevelopmentSSLContext() throws Exception {
|
||||
logger.warn("Configuring development SSL context - TRUSTING ALL CERTIFICATES (not for production!)");
|
||||
|
||||
SSLContext sslContext = SSLContext.getInstance("TLS");
|
||||
sslContext.init(null, new TrustManager[] {
|
||||
new X509TrustManager() {
|
||||
public X509Certificate[] getAcceptedIssuers() { return null; }
|
||||
public void checkClientTrusted(X509Certificate[] certs, String authType) {}
|
||||
public void checkServerTrusted(X509Certificate[] certs, String authType) {}
|
||||
}
|
||||
}, null);
|
||||
|
||||
return sslContext;
|
||||
}
|
||||
|
||||
private PoolingAsyncClientConnectionManager createConnectionManager(SSLContext sslContext) {
|
||||
PoolingAsyncClientConnectionManagerBuilder builder = PoolingAsyncClientConnectionManagerBuilder.create();
|
||||
|
||||
// Configure TLS strategy
|
||||
if (properties.getScheme().equals("https")) {
|
||||
if (isProduction() && properties.getSecurity().isSslVerification()) {
|
||||
// Production TLS with hostname verification
|
||||
builder.setTlsStrategy(ClientTlsStrategyBuilder.create()
|
||||
.setSslContext(sslContext)
|
||||
.build());
|
||||
} else {
|
||||
// Development TLS without hostname verification
|
||||
builder.setTlsStrategy(ClientTlsStrategyBuilder.create()
|
||||
.setSslContext(sslContext)
|
||||
.setHostnameVerifier((hostname, session) -> true)
|
||||
.build());
|
||||
}
|
||||
}
|
||||
|
||||
PoolingAsyncClientConnectionManager connectionManager = builder.build();
|
||||
|
||||
// Configure connection pool settings
|
||||
connectionManager.setMaxTotal(properties.getConnection().getMaxConnectionsTotal());
|
||||
connectionManager.setDefaultMaxPerRoute(properties.getConnection().getMaxConnectionsPerRoute());
|
||||
|
||||
return connectionManager;
|
||||
}
|
||||
|
||||
private boolean isProduction() {
|
||||
return "production".equalsIgnoreCase(properties.getProfile());
|
||||
}
|
||||
|
||||
private void testConnection(OpenSearchClient client) {
|
||||
try {
|
||||
var response = client.info();
|
||||
logger.info("OpenSearch connection successful - Version: {}, Cluster: {}",
|
||||
response.version().number(),
|
||||
response.clusterName());
|
||||
} catch (Exception e) {
|
||||
logger.warn("OpenSearch connection test failed during initialization: {}", e.getMessage());
|
||||
logger.debug("OpenSearch connection test full error", e);
|
||||
// Don't throw exception here - let the client be created and handle failures in service methods
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,164 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import org.springframework.boot.context.properties.ConfigurationProperties;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
@Component
|
||||
@ConfigurationProperties(prefix = "storycove.opensearch")
|
||||
public class OpenSearchProperties {
|
||||
|
||||
private String host = "localhost";
|
||||
private int port = 9200;
|
||||
private String scheme = "https";
|
||||
private String username = "admin";
|
||||
private String password;
|
||||
private String profile = "development";
|
||||
|
||||
private Security security = new Security();
|
||||
private Connection connection = new Connection();
|
||||
private Indices indices = new Indices();
|
||||
private Bulk bulk = new Bulk();
|
||||
private Health health = new Health();
|
||||
|
||||
// Getters and setters
|
||||
public String getHost() { return host; }
|
||||
public void setHost(String host) { this.host = host; }
|
||||
|
||||
public int getPort() { return port; }
|
||||
public void setPort(int port) { this.port = port; }
|
||||
|
||||
public String getScheme() { return scheme; }
|
||||
public void setScheme(String scheme) { this.scheme = scheme; }
|
||||
|
||||
public String getUsername() { return username; }
|
||||
public void setUsername(String username) { this.username = username; }
|
||||
|
||||
public String getPassword() { return password; }
|
||||
public void setPassword(String password) { this.password = password; }
|
||||
|
||||
public String getProfile() { return profile; }
|
||||
public void setProfile(String profile) { this.profile = profile; }
|
||||
|
||||
public Security getSecurity() { return security; }
|
||||
public void setSecurity(Security security) { this.security = security; }
|
||||
|
||||
public Connection getConnection() { return connection; }
|
||||
public void setConnection(Connection connection) { this.connection = connection; }
|
||||
|
||||
public Indices getIndices() { return indices; }
|
||||
public void setIndices(Indices indices) { this.indices = indices; }
|
||||
|
||||
public Bulk getBulk() { return bulk; }
|
||||
public void setBulk(Bulk bulk) { this.bulk = bulk; }
|
||||
|
||||
public Health getHealth() { return health; }
|
||||
public void setHealth(Health health) { this.health = health; }
|
||||
|
||||
public static class Security {
|
||||
private boolean sslVerification = false;
|
||||
private boolean trustAllCertificates = true;
|
||||
private String keystorePath;
|
||||
private String keystorePassword;
|
||||
private String truststorePath;
|
||||
private String truststorePassword;
|
||||
|
||||
// Getters and setters
|
||||
public boolean isSslVerification() { return sslVerification; }
|
||||
public void setSslVerification(boolean sslVerification) { this.sslVerification = sslVerification; }
|
||||
|
||||
public boolean isTrustAllCertificates() { return trustAllCertificates; }
|
||||
public void setTrustAllCertificates(boolean trustAllCertificates) { this.trustAllCertificates = trustAllCertificates; }
|
||||
|
||||
public String getKeystorePath() { return keystorePath; }
|
||||
public void setKeystorePath(String keystorePath) { this.keystorePath = keystorePath; }
|
||||
|
||||
public String getKeystorePassword() { return keystorePassword; }
|
||||
public void setKeystorePassword(String keystorePassword) { this.keystorePassword = keystorePassword; }
|
||||
|
||||
public String getTruststorePath() { return truststorePath; }
|
||||
public void setTruststorePath(String truststorePath) { this.truststorePath = truststorePath; }
|
||||
|
||||
public String getTruststorePassword() { return truststorePassword; }
|
||||
public void setTruststorePassword(String truststorePassword) { this.truststorePassword = truststorePassword; }
|
||||
}
|
||||
|
||||
public static class Connection {
|
||||
private int timeout = 30000;
|
||||
private int socketTimeout = 60000;
|
||||
private int maxConnectionsPerRoute = 10;
|
||||
private int maxConnectionsTotal = 30;
|
||||
private boolean retryOnFailure = true;
|
||||
private int maxRetries = 3;
|
||||
|
||||
// Getters and setters
|
||||
public int getTimeout() { return timeout; }
|
||||
public void setTimeout(int timeout) { this.timeout = timeout; }
|
||||
|
||||
public int getSocketTimeout() { return socketTimeout; }
|
||||
public void setSocketTimeout(int socketTimeout) { this.socketTimeout = socketTimeout; }
|
||||
|
||||
public int getMaxConnectionsPerRoute() { return maxConnectionsPerRoute; }
|
||||
public void setMaxConnectionsPerRoute(int maxConnectionsPerRoute) { this.maxConnectionsPerRoute = maxConnectionsPerRoute; }
|
||||
|
||||
public int getMaxConnectionsTotal() { return maxConnectionsTotal; }
|
||||
public void setMaxConnectionsTotal(int maxConnectionsTotal) { this.maxConnectionsTotal = maxConnectionsTotal; }
|
||||
|
||||
public boolean isRetryOnFailure() { return retryOnFailure; }
|
||||
public void setRetryOnFailure(boolean retryOnFailure) { this.retryOnFailure = retryOnFailure; }
|
||||
|
||||
public int getMaxRetries() { return maxRetries; }
|
||||
public void setMaxRetries(int maxRetries) { this.maxRetries = maxRetries; }
|
||||
}
|
||||
|
||||
public static class Indices {
|
||||
private int defaultShards = 1;
|
||||
private int defaultReplicas = 0;
|
||||
private String refreshInterval = "1s";
|
||||
|
||||
// Getters and setters
|
||||
public int getDefaultShards() { return defaultShards; }
|
||||
public void setDefaultShards(int defaultShards) { this.defaultShards = defaultShards; }
|
||||
|
||||
public int getDefaultReplicas() { return defaultReplicas; }
|
||||
public void setDefaultReplicas(int defaultReplicas) { this.defaultReplicas = defaultReplicas; }
|
||||
|
||||
public String getRefreshInterval() { return refreshInterval; }
|
||||
public void setRefreshInterval(String refreshInterval) { this.refreshInterval = refreshInterval; }
|
||||
}
|
||||
|
||||
public static class Bulk {
|
||||
private int actions = 1000;
|
||||
private long size = 5242880; // 5MB
|
||||
private int timeout = 10000;
|
||||
private int concurrentRequests = 1;
|
||||
|
||||
// Getters and setters
|
||||
public int getActions() { return actions; }
|
||||
public void setActions(int actions) { this.actions = actions; }
|
||||
|
||||
public long getSize() { return size; }
|
||||
public void setSize(long size) { this.size = size; }
|
||||
|
||||
public int getTimeout() { return timeout; }
|
||||
public void setTimeout(int timeout) { this.timeout = timeout; }
|
||||
|
||||
public int getConcurrentRequests() { return concurrentRequests; }
|
||||
public void setConcurrentRequests(int concurrentRequests) { this.concurrentRequests = concurrentRequests; }
|
||||
}
|
||||
|
||||
public static class Health {
|
||||
private int checkInterval = 30000;
|
||||
private int slowQueryThreshold = 5000;
|
||||
private boolean enableMetrics = true;
|
||||
|
||||
// Getters and setters
|
||||
public int getCheckInterval() { return checkInterval; }
|
||||
public void setCheckInterval(int checkInterval) { this.checkInterval = checkInterval; }
|
||||
|
||||
public int getSlowQueryThreshold() { return slowQueryThreshold; }
|
||||
public void setSlowQueryThreshold(int slowQueryThreshold) { this.slowQueryThreshold = slowQueryThreshold; }
|
||||
|
||||
public boolean isEnableMetrics() { return enableMetrics; }
|
||||
public void setEnableMetrics(boolean enableMetrics) { this.enableMetrics = enableMetrics; }
|
||||
}
|
||||
}
|
||||
@@ -56,7 +56,10 @@ public class SecurityConfig {
|
||||
@Bean
|
||||
public CorsConfigurationSource corsConfigurationSource() {
|
||||
CorsConfiguration configuration = new CorsConfiguration();
|
||||
configuration.setAllowedOriginPatterns(Arrays.asList(allowedOrigins.split(",")));
|
||||
List<String> origins = Arrays.stream(allowedOrigins.split(","))
|
||||
.map(String::trim)
|
||||
.toList();
|
||||
configuration.setAllowedOriginPatterns(origins);
|
||||
configuration.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"));
|
||||
configuration.setAllowedHeaders(List.of("*"));
|
||||
configuration.setAllowCredentials(true);
|
||||
|
||||
@@ -0,0 +1,158 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import com.storycove.service.LibraryService;
|
||||
import com.zaxxer.hikari.HikariConfig;
|
||||
import com.zaxxer.hikari.HikariDataSource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
|
||||
import org.springframework.web.context.request.RequestContextHolder;
|
||||
import org.springframework.web.context.request.ServletRequestAttributes;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.util.Map;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
/**
|
||||
* Smart routing datasource that:
|
||||
* 1. Routes to library-specific databases when a library is active
|
||||
* 2. Excludes authentication operations (keeps them on default database)
|
||||
* 3. Uses request context to determine when routing is appropriate
|
||||
*/
|
||||
public class SmartRoutingDataSource extends AbstractRoutingDataSource {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SmartRoutingDataSource.class);
|
||||
|
||||
private final LibraryService libraryService;
|
||||
private final Map<String, DataSource> libraryDataSources = new ConcurrentHashMap<>();
|
||||
|
||||
// Database connection details - will be injected via constructor
|
||||
private final String baseDbUrl;
|
||||
private final String dbUsername;
|
||||
private final String dbPassword;
|
||||
|
||||
public SmartRoutingDataSource(LibraryService libraryService, String baseDbUrl, String dbUsername, String dbPassword) {
|
||||
this.libraryService = libraryService;
|
||||
this.baseDbUrl = baseDbUrl;
|
||||
this.dbUsername = dbUsername;
|
||||
this.dbPassword = dbPassword;
|
||||
|
||||
logger.info("SmartRoutingDataSource initialized with database: {}", baseDbUrl);
|
||||
}
|
||||
|
||||
@Override
|
||||
protected Object determineCurrentLookupKey() {
|
||||
try {
|
||||
// Check if this is an authentication request - if so, use default database
|
||||
if (isAuthenticationRequest()) {
|
||||
logger.debug("Authentication request detected, using default database");
|
||||
return null; // null means use default datasource
|
||||
}
|
||||
|
||||
// Check if we have an active library
|
||||
if (libraryService != null) {
|
||||
String currentLibraryId = libraryService.getCurrentLibraryId();
|
||||
if (currentLibraryId != null && !currentLibraryId.trim().isEmpty()) {
|
||||
logger.info("ROUTING: Directing to library-specific database: {}", currentLibraryId);
|
||||
return currentLibraryId;
|
||||
} else {
|
||||
logger.info("ROUTING: No active library, using default database");
|
||||
}
|
||||
} else {
|
||||
logger.info("ROUTING: LibraryService is null, using default database");
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.debug("Error determining lookup key, falling back to default database", e);
|
||||
}
|
||||
|
||||
return null; // Use default datasource
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if the current request is an authentication request that should use the default database
|
||||
*/
|
||||
private boolean isAuthenticationRequest() {
|
||||
try {
|
||||
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
|
||||
if (attributes != null) {
|
||||
String requestURI = attributes.getRequest().getRequestURI();
|
||||
String method = attributes.getRequest().getMethod();
|
||||
|
||||
// Authentication endpoints that should use default database
|
||||
if (requestURI.contains("/auth/") ||
|
||||
requestURI.contains("/login") ||
|
||||
requestURI.contains("/api/libraries/switch") ||
|
||||
(requestURI.contains("/api/libraries") && "POST".equals(method))) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.debug("Could not determine request context", e);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
@Override
|
||||
protected DataSource determineTargetDataSource() {
|
||||
Object lookupKey = determineCurrentLookupKey();
|
||||
|
||||
if (lookupKey != null) {
|
||||
String libraryId = (String) lookupKey;
|
||||
return getLibraryDataSource(libraryId);
|
||||
}
|
||||
|
||||
return getDefaultDataSource();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get or create a datasource for the specified library
|
||||
*/
|
||||
private DataSource getLibraryDataSource(String libraryId) {
|
||||
return libraryDataSources.computeIfAbsent(libraryId, id -> {
|
||||
try {
|
||||
HikariConfig config = new HikariConfig();
|
||||
|
||||
// Replace database name in URL with library-specific name
|
||||
String libraryUrl = baseDbUrl.replaceAll("/[^/]*$", "/" + "storycove_" + id);
|
||||
|
||||
config.setJdbcUrl(libraryUrl);
|
||||
config.setUsername(dbUsername);
|
||||
config.setPassword(dbPassword);
|
||||
config.setDriverClassName("org.postgresql.Driver");
|
||||
config.setMaximumPoolSize(5); // Smaller pool for library-specific databases
|
||||
config.setConnectionTimeout(10000);
|
||||
config.setMaxLifetime(600000); // 10 minutes
|
||||
|
||||
logger.info("Created new datasource for library: {} -> {}", id, libraryUrl);
|
||||
return new HikariDataSource(config);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to create datasource for library: {}", id, e);
|
||||
return getDefaultDataSource();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private DataSource getDefaultDataSource() {
|
||||
// Use the default target datasource that was set in the configuration
|
||||
try {
|
||||
return (DataSource) super.determineTargetDataSource();
|
||||
} catch (Exception e) {
|
||||
logger.debug("Could not get default datasource via super method", e);
|
||||
}
|
||||
|
||||
// Fallback: create a basic datasource
|
||||
logger.warn("No default datasource available, creating fallback");
|
||||
HikariConfig config = new HikariConfig();
|
||||
config.setJdbcUrl(baseDbUrl);
|
||||
config.setUsername(dbUsername);
|
||||
config.setPassword(dbPassword);
|
||||
config.setDriverClassName("org.postgresql.Driver");
|
||||
config.setMaximumPoolSize(10);
|
||||
config.setConnectionTimeout(30000);
|
||||
return new HikariDataSource(config);
|
||||
}
|
||||
}
|
||||
@@ -1,37 +0,0 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.context.annotation.Bean;
|
||||
import org.springframework.context.annotation.Configuration;
|
||||
import org.typesense.api.Client;
|
||||
import org.typesense.resources.Node;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
@Configuration
|
||||
public class TypesenseConfig {
|
||||
|
||||
@Value("${storycove.typesense.api-key}")
|
||||
private String apiKey;
|
||||
|
||||
@Value("${storycove.typesense.host}")
|
||||
private String host;
|
||||
|
||||
@Value("${storycove.typesense.port}")
|
||||
private int port;
|
||||
|
||||
@Bean
|
||||
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
|
||||
public Client typesenseClient() {
|
||||
List<Node> nodes = new ArrayList<>();
|
||||
nodes.add(new Node("http", host, String.valueOf(port)));
|
||||
|
||||
org.typesense.api.Configuration configuration = new org.typesense.api.Configuration(
|
||||
nodes, java.time.Duration.ofSeconds(10), apiKey
|
||||
);
|
||||
|
||||
return new Client(configuration);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,163 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.entity.Author;
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.service.AuthorService;
|
||||
import com.storycove.service.OpenSearchService;
|
||||
import com.storycove.service.SearchServiceAdapter;
|
||||
import com.storycove.service.StoryService;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
/**
|
||||
* Admin controller for managing OpenSearch operations.
|
||||
* Provides endpoints for reindexing and index management.
|
||||
*/
|
||||
@RestController
|
||||
@RequestMapping("/api/admin/search")
|
||||
public class AdminSearchController {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(AdminSearchController.class);
|
||||
|
||||
@Autowired
|
||||
private SearchServiceAdapter searchServiceAdapter;
|
||||
|
||||
@Autowired
|
||||
private StoryService storyService;
|
||||
|
||||
@Autowired
|
||||
private AuthorService authorService;
|
||||
|
||||
@Autowired(required = false)
|
||||
private OpenSearchService openSearchService;
|
||||
|
||||
/**
|
||||
* Get current search status
|
||||
*/
|
||||
@GetMapping("/status")
|
||||
public ResponseEntity<Map<String, Object>> getSearchStatus() {
|
||||
try {
|
||||
var status = searchServiceAdapter.getSearchStatus();
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"primaryEngine", status.getPrimaryEngine(),
|
||||
"dualWrite", status.isDualWrite(),
|
||||
"openSearchAvailable", status.isOpenSearchAvailable()
|
||||
));
|
||||
} catch (Exception e) {
|
||||
logger.error("Error getting search status", e);
|
||||
return ResponseEntity.internalServerError().body(Map.of(
|
||||
"error", "Failed to get search status: " + e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Reindex all data in OpenSearch
|
||||
*/
|
||||
@PostMapping("/opensearch/reindex")
|
||||
public ResponseEntity<Map<String, Object>> reindexOpenSearch() {
|
||||
try {
|
||||
logger.info("Starting OpenSearch full reindex");
|
||||
|
||||
if (!searchServiceAdapter.isSearchServiceAvailable()) {
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"success", false,
|
||||
"error", "OpenSearch is not available or healthy"
|
||||
));
|
||||
}
|
||||
|
||||
// Get all data from services
|
||||
List<Story> allStories = storyService.findAllWithAssociations();
|
||||
List<Author> allAuthors = authorService.findAllWithStories();
|
||||
|
||||
// Bulk index directly in OpenSearch
|
||||
if (openSearchService != null) {
|
||||
openSearchService.bulkIndexStories(allStories);
|
||||
openSearchService.bulkIndexAuthors(allAuthors);
|
||||
} else {
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"success", false,
|
||||
"error", "OpenSearch service not available"
|
||||
));
|
||||
}
|
||||
|
||||
int totalIndexed = allStories.size() + allAuthors.size();
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", String.format("Reindexed %d stories and %d authors in OpenSearch",
|
||||
allStories.size(), allAuthors.size()),
|
||||
"storiesCount", allStories.size(),
|
||||
"authorsCount", allAuthors.size(),
|
||||
"totalCount", totalIndexed
|
||||
));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error during OpenSearch reindex", e);
|
||||
return ResponseEntity.internalServerError().body(Map.of(
|
||||
"success", false,
|
||||
"error", "OpenSearch reindex failed: " + e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Recreate OpenSearch indices
|
||||
*/
|
||||
@PostMapping("/opensearch/recreate")
|
||||
public ResponseEntity<Map<String, Object>> recreateOpenSearchIndices() {
|
||||
try {
|
||||
logger.info("Starting OpenSearch indices recreation");
|
||||
|
||||
if (!searchServiceAdapter.isSearchServiceAvailable()) {
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"success", false,
|
||||
"error", "OpenSearch is not available or healthy"
|
||||
));
|
||||
}
|
||||
|
||||
// Recreate indices
|
||||
if (openSearchService != null) {
|
||||
openSearchService.recreateIndices();
|
||||
} else {
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"success", false,
|
||||
"error", "OpenSearch service not available"
|
||||
));
|
||||
}
|
||||
|
||||
// Get all data and reindex
|
||||
List<Story> allStories = storyService.findAllWithAssociations();
|
||||
List<Author> allAuthors = authorService.findAllWithStories();
|
||||
|
||||
// Bulk index after recreation
|
||||
openSearchService.bulkIndexStories(allStories);
|
||||
openSearchService.bulkIndexAuthors(allAuthors);
|
||||
|
||||
int totalIndexed = allStories.size() + allAuthors.size();
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", String.format("Recreated OpenSearch indices and indexed %d stories and %d authors",
|
||||
allStories.size(), allAuthors.size()),
|
||||
"storiesCount", allStories.size(),
|
||||
"authorsCount", allAuthors.size(),
|
||||
"totalCount", totalIndexed
|
||||
));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error during OpenSearch indices recreation", e);
|
||||
return ResponseEntity.internalServerError().body(Map.of(
|
||||
"success", false,
|
||||
"error", "OpenSearch indices recreation failed: " + e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,5 +1,6 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.service.LibraryService;
|
||||
import com.storycove.service.PasswordAuthenticationService;
|
||||
import com.storycove.util.JwtUtil;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
@@ -18,18 +19,21 @@ import java.time.Duration;
|
||||
public class AuthController {
|
||||
|
||||
private final PasswordAuthenticationService passwordService;
|
||||
private final LibraryService libraryService;
|
||||
private final JwtUtil jwtUtil;
|
||||
|
||||
public AuthController(PasswordAuthenticationService passwordService, JwtUtil jwtUtil) {
|
||||
public AuthController(PasswordAuthenticationService passwordService, LibraryService libraryService, JwtUtil jwtUtil) {
|
||||
this.passwordService = passwordService;
|
||||
this.libraryService = libraryService;
|
||||
this.jwtUtil = jwtUtil;
|
||||
}
|
||||
|
||||
@PostMapping("/login")
|
||||
public ResponseEntity<?> login(@Valid @RequestBody LoginRequest request, HttpServletResponse response) {
|
||||
if (passwordService.authenticate(request.getPassword())) {
|
||||
String token = jwtUtil.generateToken();
|
||||
|
||||
// Use new library-aware authentication
|
||||
String token = passwordService.authenticateAndSwitchLibrary(request.getPassword());
|
||||
|
||||
if (token != null) {
|
||||
// Set httpOnly cookie
|
||||
ResponseCookie cookie = ResponseCookie.from("token", token)
|
||||
.httpOnly(true)
|
||||
@@ -40,7 +44,8 @@ public class AuthController {
|
||||
|
||||
response.addHeader(HttpHeaders.SET_COOKIE, cookie.toString());
|
||||
|
||||
return ResponseEntity.ok(new LoginResponse("Authentication successful", token));
|
||||
String libraryInfo = passwordService.getCurrentLibraryInfo();
|
||||
return ResponseEntity.ok(new LoginResponse("Authentication successful - " + libraryInfo, token));
|
||||
} else {
|
||||
return ResponseEntity.status(401).body(new ErrorResponse("Invalid password"));
|
||||
}
|
||||
@@ -48,6 +53,9 @@ public class AuthController {
|
||||
|
||||
@PostMapping("/logout")
|
||||
public ResponseEntity<?> logout(HttpServletResponse response) {
|
||||
// Clear authentication state
|
||||
libraryService.clearAuthentication();
|
||||
|
||||
// Clear the cookie
|
||||
ResponseCookie cookie = ResponseCookie.from("token", "")
|
||||
.httpOnly(true)
|
||||
|
||||
@@ -4,7 +4,7 @@ import com.storycove.dto.*;
|
||||
import com.storycove.entity.Author;
|
||||
import com.storycove.service.AuthorService;
|
||||
import com.storycove.service.ImageService;
|
||||
import com.storycove.service.TypesenseService;
|
||||
import com.storycove.service.SearchServiceAdapter;
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
@@ -32,12 +32,12 @@ public class AuthorController {
|
||||
|
||||
private final AuthorService authorService;
|
||||
private final ImageService imageService;
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
|
||||
public AuthorController(AuthorService authorService, ImageService imageService, TypesenseService typesenseService) {
|
||||
public AuthorController(AuthorService authorService, ImageService imageService, SearchServiceAdapter searchServiceAdapter) {
|
||||
this.authorService = authorService;
|
||||
this.imageService = imageService;
|
||||
this.typesenseService = typesenseService;
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
@@ -65,10 +65,12 @@ public class AuthorController {
|
||||
|
||||
@PostMapping
|
||||
public ResponseEntity<AuthorDto> createAuthor(@Valid @RequestBody CreateAuthorRequest request) {
|
||||
logger.info("Creating new author: {}", request.getName());
|
||||
Author author = new Author();
|
||||
updateAuthorFromRequest(author, request);
|
||||
|
||||
Author savedAuthor = authorService.create(author);
|
||||
logger.info("Successfully created author: {} (ID: {})", savedAuthor.getName(), savedAuthor.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedAuthor));
|
||||
}
|
||||
|
||||
@@ -81,13 +83,7 @@ public class AuthorController {
|
||||
@RequestParam(required = false, name = "authorRating") Integer rating,
|
||||
@RequestParam(required = false, name = "avatar") MultipartFile avatarFile) {
|
||||
|
||||
System.out.println("DEBUG: MULTIPART PUT called with:");
|
||||
System.out.println(" - name: " + name);
|
||||
System.out.println(" - notes: " + notes);
|
||||
System.out.println(" - urls: " + urls);
|
||||
System.out.println(" - rating: " + rating);
|
||||
System.out.println(" - avatar: " + (avatarFile != null ? avatarFile.getOriginalFilename() : "null"));
|
||||
|
||||
logger.info("Updating author with multipart data (ID: {})", id);
|
||||
try {
|
||||
Author existingAuthor = authorService.findById(id);
|
||||
|
||||
@@ -104,7 +100,6 @@ public class AuthorController {
|
||||
|
||||
// Handle rating update
|
||||
if (rating != null) {
|
||||
System.out.println("DEBUG: Setting author rating via PUT: " + rating);
|
||||
existingAuthor.setAuthorRating(rating);
|
||||
}
|
||||
|
||||
@@ -115,6 +110,7 @@ public class AuthorController {
|
||||
}
|
||||
|
||||
Author updatedAuthor = authorService.update(id, existingAuthor);
|
||||
logger.info("Successfully updated author: {} via multipart", updatedAuthor.getName());
|
||||
return ResponseEntity.ok(convertToDto(updatedAuthor));
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -125,31 +121,27 @@ public class AuthorController {
|
||||
@PutMapping(value = "/{id}", consumes = "application/json")
|
||||
public ResponseEntity<AuthorDto> updateAuthorJson(@PathVariable UUID id,
|
||||
@Valid @RequestBody UpdateAuthorRequest request) {
|
||||
System.out.println("DEBUG: JSON PUT called with:");
|
||||
System.out.println(" - name: " + request.getName());
|
||||
System.out.println(" - notes: " + request.getNotes());
|
||||
System.out.println(" - urls: " + request.getUrls());
|
||||
System.out.println(" - rating: " + request.getRating());
|
||||
logger.info("Updating author with JSON data: {} (ID: {})", request.getName(), id);
|
||||
|
||||
Author existingAuthor = authorService.findById(id);
|
||||
updateAuthorFromRequest(existingAuthor, request);
|
||||
|
||||
Author updatedAuthor = authorService.update(id, existingAuthor);
|
||||
logger.info("Successfully updated author: {} via JSON", updatedAuthor.getName());
|
||||
return ResponseEntity.ok(convertToDto(updatedAuthor));
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
public ResponseEntity<String> updateAuthorGeneric(@PathVariable UUID id, HttpServletRequest request) {
|
||||
System.out.println("DEBUG: GENERIC PUT called!");
|
||||
System.out.println(" - Content-Type: " + request.getContentType());
|
||||
System.out.println(" - Method: " + request.getMethod());
|
||||
|
||||
return ResponseEntity.status(415).body("Unsupported Media Type. Expected multipart/form-data or application/json");
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<?> deleteAuthor(@PathVariable UUID id) {
|
||||
logger.info("Deleting author with ID: {}", id);
|
||||
authorService.delete(id);
|
||||
logger.info("Successfully deleted author with ID: {}", id);
|
||||
return ResponseEntity.ok(Map.of("message", "Author deleted successfully"));
|
||||
}
|
||||
|
||||
@@ -177,11 +169,8 @@ public class AuthorController {
|
||||
|
||||
@PostMapping("/{id}/rating")
|
||||
public ResponseEntity<AuthorDto> rateAuthor(@PathVariable UUID id, @RequestBody RatingRequest request) {
|
||||
System.out.println("DEBUG: Rating author " + id + " with rating " + request.getRating());
|
||||
Author author = authorService.setRating(id, request.getRating());
|
||||
System.out.println("DEBUG: After setRating, author rating is: " + author.getAuthorRating());
|
||||
AuthorDto dto = convertToDto(author);
|
||||
System.out.println("DEBUG: Final DTO rating is: " + dto.getAuthorRating());
|
||||
return ResponseEntity.ok(dto);
|
||||
}
|
||||
|
||||
@@ -211,9 +200,7 @@ public class AuthorController {
|
||||
@PostMapping("/{id}/test-rating/{rating}")
|
||||
public ResponseEntity<Map<String, Object>> testSetRating(@PathVariable UUID id, @PathVariable Integer rating) {
|
||||
try {
|
||||
System.out.println("DEBUG: Test setting rating " + rating + " for author " + id);
|
||||
Author author = authorService.setRating(id, rating);
|
||||
System.out.println("DEBUG: After test setRating, got: " + author.getAuthorRating());
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
@@ -231,13 +218,11 @@ public class AuthorController {
|
||||
@PostMapping("/{id}/test-put-rating")
|
||||
public ResponseEntity<Map<String, Object>> testPutWithRating(@PathVariable UUID id, @RequestParam Integer rating) {
|
||||
try {
|
||||
System.out.println("DEBUG: Test PUT with rating " + rating + " for author " + id);
|
||||
|
||||
Author existingAuthor = authorService.findById(id);
|
||||
existingAuthor.setAuthorRating(rating);
|
||||
Author updatedAuthor = authorService.update(id, existingAuthor);
|
||||
|
||||
System.out.println("DEBUG: After PUT update, rating is: " + updatedAuthor.getAuthorRating());
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
@@ -273,7 +258,17 @@ public class AuthorController {
|
||||
@RequestParam(defaultValue = "name") String sortBy,
|
||||
@RequestParam(defaultValue = "asc") String sortOrder) {
|
||||
|
||||
SearchResultDto<AuthorSearchDto> searchResults = typesenseService.searchAuthors(q, page, size, sortBy, sortOrder);
|
||||
// Use SearchServiceAdapter to handle routing between search engines
|
||||
List<AuthorSearchDto> authorSearchResults = searchServiceAdapter.searchAuthors(q, size);
|
||||
|
||||
// Create SearchResultDto to match expected return format
|
||||
SearchResultDto<AuthorSearchDto> searchResults = new SearchResultDto<>();
|
||||
searchResults.setResults(authorSearchResults);
|
||||
searchResults.setQuery(q);
|
||||
searchResults.setPage(page);
|
||||
searchResults.setPerPage(size);
|
||||
searchResults.setTotalHits(authorSearchResults.size());
|
||||
searchResults.setSearchTimeMs(0); // SearchServiceAdapter doesn't provide timing
|
||||
|
||||
// Convert AuthorSearchDto results to AuthorDto
|
||||
SearchResultDto<AuthorDto> results = new SearchResultDto<>();
|
||||
@@ -298,7 +293,7 @@ public class AuthorController {
|
||||
public ResponseEntity<Map<String, Object>> reindexAuthorsTypesense() {
|
||||
try {
|
||||
List<Author> allAuthors = authorService.findAllWithStories();
|
||||
typesenseService.reindexAllAuthors(allAuthors);
|
||||
searchServiceAdapter.bulkIndexAuthors(allAuthors);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Reindexed " + allAuthors.size() + " authors",
|
||||
@@ -318,7 +313,7 @@ public class AuthorController {
|
||||
try {
|
||||
// This will delete the existing collection and recreate it with correct schema
|
||||
List<Author> allAuthors = authorService.findAllWithStories();
|
||||
typesenseService.reindexAllAuthors(allAuthors);
|
||||
searchServiceAdapter.bulkIndexAuthors(allAuthors);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Recreated authors collection and indexed " + allAuthors.size() + " authors",
|
||||
@@ -336,7 +331,7 @@ public class AuthorController {
|
||||
@GetMapping("/typesense-schema")
|
||||
public ResponseEntity<Map<String, Object>> getAuthorsTypesenseSchema() {
|
||||
try {
|
||||
Map<String, Object> schema = typesenseService.getAuthorsCollectionSchema();
|
||||
Map<String, Object> schema = Map.of("status", "authors collection schema retrieved from search service");
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"schema", schema
|
||||
@@ -350,6 +345,44 @@ public class AuthorController {
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/clean-author-names")
|
||||
public ResponseEntity<Map<String, Object>> cleanAuthorNames() {
|
||||
try {
|
||||
List<Author> allAuthors = authorService.findAllWithStories();
|
||||
int cleanedCount = 0;
|
||||
|
||||
for (Author author : allAuthors) {
|
||||
String originalName = author.getName();
|
||||
String cleanedName = originalName != null ? originalName.trim() : "";
|
||||
|
||||
if (!cleanedName.equals(originalName)) {
|
||||
logger.info("Cleaning author name: '{}' -> '{}'", originalName, cleanedName);
|
||||
author.setName(cleanedName);
|
||||
authorService.update(author.getId(), author);
|
||||
cleanedCount++;
|
||||
}
|
||||
}
|
||||
|
||||
// Reindex all authors after cleaning
|
||||
if (cleanedCount > 0) {
|
||||
searchServiceAdapter.bulkIndexAuthors(allAuthors);
|
||||
}
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Cleaned " + cleanedCount + " author names and reindexed",
|
||||
"cleanedCount", cleanedCount,
|
||||
"totalAuthors", allAuthors.size()
|
||||
));
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to clean author names", e);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", false,
|
||||
"error", e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
@GetMapping("/top-rated")
|
||||
public ResponseEntity<List<AuthorSummaryDto>> getTopRatedAuthors(@RequestParam(defaultValue = "10") int limit) {
|
||||
Pageable pageable = PageRequest.of(0, limit);
|
||||
@@ -389,7 +422,6 @@ public class AuthorController {
|
||||
author.setUrls(updateReq.getUrls());
|
||||
}
|
||||
if (updateReq.getRating() != null) {
|
||||
System.out.println("DEBUG: Setting author rating via JSON: " + updateReq.getRating());
|
||||
author.setAuthorRating(updateReq.getRating());
|
||||
}
|
||||
}
|
||||
@@ -402,9 +434,6 @@ public class AuthorController {
|
||||
dto.setNotes(author.getNotes());
|
||||
dto.setAvatarImagePath(author.getAvatarImagePath());
|
||||
|
||||
// Debug logging for author rating
|
||||
System.out.println("DEBUG: Converting author " + author.getName() +
|
||||
" with rating: " + author.getAuthorRating());
|
||||
|
||||
dto.setAuthorRating(author.getAuthorRating());
|
||||
dto.setUrls(author.getUrls());
|
||||
@@ -415,7 +444,6 @@ public class AuthorController {
|
||||
// Calculate and set average story rating
|
||||
dto.setAverageStoryRating(authorService.calculateAverageStoryRating(author.getId()));
|
||||
|
||||
System.out.println("DEBUG: DTO authorRating set to: " + dto.getAuthorRating());
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
@@ -6,7 +6,9 @@ import com.storycove.entity.CollectionStory;
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.entity.Tag;
|
||||
import com.storycove.service.CollectionService;
|
||||
import com.storycove.service.EPUBExportService;
|
||||
import com.storycove.service.ImageService;
|
||||
import com.storycove.service.ReadingTimeService;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -28,12 +30,18 @@ public class CollectionController {
|
||||
|
||||
private final CollectionService collectionService;
|
||||
private final ImageService imageService;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
private final EPUBExportService epubExportService;
|
||||
|
||||
@Autowired
|
||||
public CollectionController(CollectionService collectionService,
|
||||
ImageService imageService) {
|
||||
ImageService imageService,
|
||||
ReadingTimeService readingTimeService,
|
||||
EPUBExportService epubExportService) {
|
||||
this.collectionService = collectionService;
|
||||
this.imageService = imageService;
|
||||
this.readingTimeService = readingTimeService;
|
||||
this.epubExportService = epubExportService;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -48,8 +56,6 @@ public class CollectionController {
|
||||
@RequestParam(required = false) List<String> tags,
|
||||
@RequestParam(defaultValue = "false") boolean archived) {
|
||||
|
||||
logger.info("COLLECTIONS: Search request - search='{}', tags={}, archived={}, page={}, limit={}",
|
||||
search, tags, archived, page, limit);
|
||||
|
||||
// MANDATORY: Use Typesense for all search/filter operations
|
||||
SearchResultDto<Collection> results = collectionService.searchCollections(search, tags, archived, page, limit);
|
||||
@@ -86,13 +92,14 @@ public class CollectionController {
|
||||
*/
|
||||
@PostMapping
|
||||
public ResponseEntity<Collection> createCollection(@Valid @RequestBody CreateCollectionRequest request) {
|
||||
logger.info("Creating new collection: {}", request.getName());
|
||||
Collection collection = collectionService.createCollection(
|
||||
request.getName(),
|
||||
request.getDescription(),
|
||||
request.getTagNames(),
|
||||
request.getStoryIds()
|
||||
);
|
||||
|
||||
logger.info("Successfully created collection: {} (ID: {})", collection.getName(), collection.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(collection);
|
||||
}
|
||||
|
||||
@@ -107,6 +114,7 @@ public class CollectionController {
|
||||
@RequestParam(required = false) List<UUID> storyIds,
|
||||
@RequestParam(required = false, name = "coverImage") MultipartFile coverImage) {
|
||||
|
||||
logger.info("Creating new collection with image: {}", name);
|
||||
try {
|
||||
// Create collection first
|
||||
Collection collection = collectionService.createCollection(name, description, tags, storyIds);
|
||||
@@ -120,6 +128,7 @@ public class CollectionController {
|
||||
);
|
||||
}
|
||||
|
||||
logger.info("Successfully created collection with image: {} (ID: {})", collection.getName(), collection.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(collection);
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -152,7 +161,9 @@ public class CollectionController {
|
||||
*/
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<Map<String, String>> deleteCollection(@PathVariable UUID id) {
|
||||
logger.info("Deleting collection with ID: {}", id);
|
||||
collectionService.deleteCollection(id);
|
||||
logger.info("Successfully deleted collection with ID: {}", id);
|
||||
return ResponseEntity.ok(Map.of("message", "Collection deleted successfully"));
|
||||
}
|
||||
|
||||
@@ -270,6 +281,107 @@ public class CollectionController {
|
||||
return ResponseEntity.ok(Map.of("message", "Cover removed successfully"));
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/collections/reindex-typesense - Reindex all collections in Typesense
|
||||
*/
|
||||
@PostMapping("/reindex-typesense")
|
||||
public ResponseEntity<Map<String, Object>> reindexCollectionsTypesense() {
|
||||
try {
|
||||
List<Collection> allCollections = collectionService.findAllWithTags();
|
||||
// Collections are not indexed in search engine yet
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Collections indexing not yet implemented in OpenSearch",
|
||||
"count", allCollections.size()
|
||||
));
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to reindex collections", e);
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"success", false,
|
||||
"error", e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* GET /api/collections/{id}/epub - Export collection as EPUB
|
||||
*/
|
||||
@GetMapping("/{id}/epub")
|
||||
public ResponseEntity<org.springframework.core.io.Resource> exportCollectionAsEPUB(@PathVariable UUID id) {
|
||||
logger.info("Exporting collection {} to EPUB", id);
|
||||
|
||||
try {
|
||||
Collection collection = collectionService.findById(id);
|
||||
List<Story> stories = collection.getCollectionStories().stream()
|
||||
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
|
||||
.map(cs -> cs.getStory())
|
||||
.collect(java.util.stream.Collectors.toList());
|
||||
|
||||
if (stories.isEmpty()) {
|
||||
logger.warn("Collection {} contains no stories for export", id);
|
||||
return ResponseEntity.badRequest()
|
||||
.body(null);
|
||||
}
|
||||
|
||||
EPUBExportRequest request = new EPUBExportRequest();
|
||||
request.setIncludeCoverImage(true);
|
||||
request.setIncludeMetadata(true);
|
||||
request.setIncludeReadingPosition(false); // Collections don't have reading positions
|
||||
|
||||
org.springframework.core.io.Resource resource = epubExportService.exportCollectionAsEPUB(id, request);
|
||||
String filename = epubExportService.getCollectionEPUBFilename(collection);
|
||||
|
||||
logger.info("Successfully exported collection EPUB: {}", filename);
|
||||
|
||||
return ResponseEntity.ok()
|
||||
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
|
||||
.header("Content-Type", "application/epub+zip")
|
||||
.body(resource);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error exporting collection EPUB: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/collections/{id}/epub - Export collection as EPUB with custom options
|
||||
*/
|
||||
@PostMapping("/{id}/epub")
|
||||
public ResponseEntity<org.springframework.core.io.Resource> exportCollectionAsEPUBWithOptions(
|
||||
@PathVariable UUID id,
|
||||
@Valid @RequestBody EPUBExportRequest request) {
|
||||
logger.info("Exporting collection {} to EPUB with custom options", id);
|
||||
|
||||
try {
|
||||
Collection collection = collectionService.findById(id);
|
||||
List<Story> stories = collection.getCollectionStories().stream()
|
||||
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
|
||||
.map(cs -> cs.getStory())
|
||||
.collect(java.util.stream.Collectors.toList());
|
||||
|
||||
if (stories.isEmpty()) {
|
||||
logger.warn("Collection {} contains no stories for export", id);
|
||||
return ResponseEntity.badRequest()
|
||||
.body(null);
|
||||
}
|
||||
|
||||
org.springframework.core.io.Resource resource = epubExportService.exportCollectionAsEPUB(id, request);
|
||||
String filename = epubExportService.getCollectionEPUBFilename(collection);
|
||||
|
||||
logger.info("Successfully exported collection EPUB with options: {}", filename);
|
||||
|
||||
return ResponseEntity.ok()
|
||||
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
|
||||
.header("Content-Type", "application/epub+zip")
|
||||
.body(resource);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error exporting collection EPUB: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
|
||||
// Mapper methods
|
||||
|
||||
private CollectionDto mapToCollectionDto(Collection collection) {
|
||||
@@ -290,6 +402,11 @@ public class CollectionController {
|
||||
.toList());
|
||||
}
|
||||
|
||||
// Map tag names for search results
|
||||
if (collection.getTagNames() != null) {
|
||||
dto.setTagNames(collection.getTagNames());
|
||||
}
|
||||
|
||||
// Map collection stories (lightweight)
|
||||
if (collection.getCollectionStories() != null) {
|
||||
dto.setCollectionStories(collection.getCollectionStories().stream()
|
||||
@@ -300,7 +417,7 @@ public class CollectionController {
|
||||
// Set calculated properties
|
||||
dto.setStoryCount(collection.getStoryCount());
|
||||
dto.setTotalWordCount(collection.getTotalWordCount());
|
||||
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime());
|
||||
dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
|
||||
dto.setAverageStoryRating(collection.getAverageStoryRating());
|
||||
|
||||
return dto;
|
||||
|
||||
@@ -0,0 +1,117 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.dto.HtmlSanitizationConfigDto;
|
||||
import com.storycove.service.HtmlSanitizationService;
|
||||
import com.storycove.service.ImageService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/config")
|
||||
public class ConfigController {
|
||||
|
||||
private final HtmlSanitizationService htmlSanitizationService;
|
||||
private final ImageService imageService;
|
||||
|
||||
@Value("${app.reading.speed.default:200}")
|
||||
private int defaultReadingSpeed;
|
||||
|
||||
@Autowired
|
||||
public ConfigController(HtmlSanitizationService htmlSanitizationService, ImageService imageService) {
|
||||
this.htmlSanitizationService = htmlSanitizationService;
|
||||
this.imageService = imageService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HTML sanitization configuration for frontend use
|
||||
* This allows the frontend to use the same sanitization rules as the backend
|
||||
*/
|
||||
@GetMapping("/html-sanitization")
|
||||
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
|
||||
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
|
||||
return ResponseEntity.ok(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get application settings configuration
|
||||
*/
|
||||
@GetMapping("/settings")
|
||||
public ResponseEntity<Map<String, Object>> getSettings() {
|
||||
Map<String, Object> settings = Map.of(
|
||||
"defaultReadingSpeed", defaultReadingSpeed
|
||||
);
|
||||
return ResponseEntity.ok(settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get reading speed for calculation purposes
|
||||
*/
|
||||
@GetMapping("/reading-speed")
|
||||
public ResponseEntity<Map<String, Integer>> getReadingSpeed() {
|
||||
return ResponseEntity.ok(Map.of("wordsPerMinute", defaultReadingSpeed));
|
||||
}
|
||||
|
||||
/**
|
||||
* Preview orphaned content images cleanup (dry run)
|
||||
*/
|
||||
@PostMapping("/cleanup/images/preview")
|
||||
public ResponseEntity<Map<String, Object>> previewImageCleanup() {
|
||||
try {
|
||||
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(true);
|
||||
|
||||
Map<String, Object> response = Map.of(
|
||||
"success", true,
|
||||
"orphanedCount", result.getOrphanedImages().size(),
|
||||
"totalSizeBytes", result.getTotalSizeBytes(),
|
||||
"formattedSize", result.getFormattedSize(),
|
||||
"foldersToDelete", result.getFoldersToDelete(),
|
||||
"referencedImagesCount", result.getTotalReferencedImages(),
|
||||
"errors", result.getErrors(),
|
||||
"hasErrors", result.hasErrors(),
|
||||
"dryRun", true
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.status(500).body(Map.of(
|
||||
"success", false,
|
||||
"error", "Failed to preview image cleanup: " + e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Execute orphaned content images cleanup
|
||||
*/
|
||||
@PostMapping("/cleanup/images/execute")
|
||||
public ResponseEntity<Map<String, Object>> executeImageCleanup() {
|
||||
try {
|
||||
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(false);
|
||||
|
||||
Map<String, Object> response = Map.of(
|
||||
"success", true,
|
||||
"deletedCount", result.getOrphanedImages().size(),
|
||||
"totalSizeBytes", result.getTotalSizeBytes(),
|
||||
"formattedSize", result.getFormattedSize(),
|
||||
"foldersDeleted", result.getFoldersToDelete(),
|
||||
"referencedImagesCount", result.getTotalReferencedImages(),
|
||||
"errors", result.getErrors(),
|
||||
"hasErrors", result.hasErrors(),
|
||||
"dryRun", false
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.status(500).body(Map.of(
|
||||
"success", false,
|
||||
"error", "Failed to execute image cleanup: " + e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,154 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.service.DatabaseManagementService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.http.HttpHeaders;
|
||||
import org.springframework.http.MediaType;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.Map;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/database")
|
||||
public class DatabaseController {
|
||||
|
||||
@Autowired
|
||||
private DatabaseManagementService databaseManagementService;
|
||||
|
||||
@PostMapping("/backup")
|
||||
public ResponseEntity<Resource> backupDatabase() {
|
||||
try {
|
||||
Resource backup = databaseManagementService.createBackup();
|
||||
|
||||
String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
|
||||
String filename = "storycove_backup_" + timestamp + ".sql";
|
||||
|
||||
return ResponseEntity.ok()
|
||||
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
|
||||
.contentType(MediaType.APPLICATION_OCTET_STREAM)
|
||||
.body(backup);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Failed to create database backup: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/restore")
|
||||
public ResponseEntity<Map<String, Object>> restoreDatabase(@RequestParam("file") MultipartFile file) {
|
||||
try {
|
||||
if (file.isEmpty()) {
|
||||
return ResponseEntity.badRequest()
|
||||
.body(Map.of("success", false, "message", "No file uploaded"));
|
||||
}
|
||||
|
||||
if (!file.getOriginalFilename().endsWith(".sql")) {
|
||||
return ResponseEntity.badRequest()
|
||||
.body(Map.of("success", false, "message", "Invalid file type. Please upload a .sql file"));
|
||||
}
|
||||
|
||||
databaseManagementService.restoreFromBackup(file.getInputStream());
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Database restored successfully from " + file.getOriginalFilename()
|
||||
));
|
||||
} catch (IOException e) {
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(Map.of("success", false, "message", "Failed to read backup file: " + e.getMessage()));
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(Map.of("success", false, "message", "Failed to restore database: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/clear")
|
||||
public ResponseEntity<Map<String, Object>> clearDatabase() {
|
||||
try {
|
||||
int deletedRecords = databaseManagementService.clearAllData();
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Database cleared successfully",
|
||||
"deletedRecords", deletedRecords
|
||||
));
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(Map.of("success", false, "message", "Failed to clear database: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/backup-complete")
|
||||
public ResponseEntity<Resource> backupComplete() {
|
||||
try {
|
||||
Resource backup = databaseManagementService.createCompleteBackup();
|
||||
|
||||
String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
|
||||
String filename = "storycove_complete_backup_" + timestamp + ".zip";
|
||||
|
||||
return ResponseEntity.ok()
|
||||
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
|
||||
.header(HttpHeaders.CONTENT_TYPE, "application/zip")
|
||||
.body(backup);
|
||||
} catch (Exception e) {
|
||||
throw new RuntimeException("Failed to create complete backup: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/restore-complete")
|
||||
public ResponseEntity<Map<String, Object>> restoreComplete(@RequestParam("file") MultipartFile file) {
|
||||
System.err.println("Complete restore endpoint called with file: " + (file != null ? file.getOriginalFilename() : "null"));
|
||||
try {
|
||||
if (file.isEmpty()) {
|
||||
System.err.println("File is empty - returning bad request");
|
||||
return ResponseEntity.badRequest()
|
||||
.body(Map.of("success", false, "message", "No file uploaded"));
|
||||
}
|
||||
|
||||
if (!file.getOriginalFilename().endsWith(".zip")) {
|
||||
System.err.println("Invalid file type: " + file.getOriginalFilename());
|
||||
return ResponseEntity.badRequest()
|
||||
.body(Map.of("success", false, "message", "Invalid file type. Please upload a .zip file"));
|
||||
}
|
||||
|
||||
System.err.println("File validation passed, calling restore service...");
|
||||
databaseManagementService.restoreFromCompleteBackup(file.getInputStream());
|
||||
System.err.println("Restore service completed successfully");
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Complete backup restored successfully from " + file.getOriginalFilename()
|
||||
));
|
||||
} catch (IOException e) {
|
||||
System.err.println("IOException during restore: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(Map.of("success", false, "message", "Failed to read backup file: " + e.getMessage()));
|
||||
} catch (Exception e) {
|
||||
System.err.println("Exception during restore: " + e.getMessage());
|
||||
e.printStackTrace();
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(Map.of("success", false, "message", "Failed to restore complete backup: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/clear-complete")
|
||||
public ResponseEntity<Map<String, Object>> clearComplete() {
|
||||
try {
|
||||
int deletedRecords = databaseManagementService.clearAllDataAndFiles();
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Database and files cleared successfully",
|
||||
"deletedRecords", deletedRecords
|
||||
));
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.internalServerError()
|
||||
.body(Map.of("success", false, "message", "Failed to clear database and files: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.service.ImageService;
|
||||
import com.storycove.service.LibraryService;
|
||||
import org.springframework.core.io.FileSystemResource;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.http.HttpHeaders;
|
||||
@@ -10,6 +11,7 @@ import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
@@ -21,9 +23,17 @@ import java.util.Map;
|
||||
public class FileController {
|
||||
|
||||
private final ImageService imageService;
|
||||
private final LibraryService libraryService;
|
||||
|
||||
public FileController(ImageService imageService) {
|
||||
public FileController(ImageService imageService, LibraryService libraryService) {
|
||||
this.imageService = imageService;
|
||||
this.libraryService = libraryService;
|
||||
}
|
||||
|
||||
private String getCurrentLibraryId() {
|
||||
String libraryId = libraryService.getCurrentLibraryId();
|
||||
System.out.println("FileController - Current Library ID: " + libraryId);
|
||||
return libraryId != null ? libraryId : "default";
|
||||
}
|
||||
|
||||
@PostMapping("/upload/cover")
|
||||
@@ -34,7 +44,11 @@ public class FileController {
|
||||
Map<String, String> response = new HashMap<>();
|
||||
response.put("message", "Cover uploaded successfully");
|
||||
response.put("path", imagePath);
|
||||
response.put("url", "/api/files/images/" + imagePath);
|
||||
String currentLibraryId = getCurrentLibraryId();
|
||||
String imageUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
|
||||
response.put("url", imageUrl);
|
||||
|
||||
System.out.println("Upload response - path: " + imagePath + ", url: " + imageUrl);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
} catch (IllegalArgumentException e) {
|
||||
@@ -53,7 +67,8 @@ public class FileController {
|
||||
Map<String, String> response = new HashMap<>();
|
||||
response.put("message", "Avatar uploaded successfully");
|
||||
response.put("path", imagePath);
|
||||
response.put("url", "/api/files/images/" + imagePath);
|
||||
String currentLibraryId = getCurrentLibraryId();
|
||||
response.put("url", "/api/files/images/" + currentLibraryId + "/" + imagePath);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
} catch (IllegalArgumentException e) {
|
||||
@@ -64,17 +79,18 @@ public class FileController {
|
||||
}
|
||||
}
|
||||
|
||||
@GetMapping("/images/**")
|
||||
public ResponseEntity<Resource> serveImage(@RequestParam String path) {
|
||||
@GetMapping("/images/{libraryId}/**")
|
||||
public ResponseEntity<Resource> serveImage(@PathVariable String libraryId, HttpServletRequest request) {
|
||||
try {
|
||||
// Extract path from the URL
|
||||
String imagePath = path.replace("/api/files/images/", "");
|
||||
// Extract the full request path after /api/files/images/{libraryId}/
|
||||
String requestURI = request.getRequestURI();
|
||||
String imagePath = requestURI.replaceFirst(".*/api/files/images/" + libraryId + "/", "");
|
||||
|
||||
if (!imageService.imageExists(imagePath)) {
|
||||
if (!imageService.imageExistsInLibrary(imagePath, libraryId)) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
Path fullPath = imageService.getImagePath(imagePath);
|
||||
Path fullPath = imageService.getImagePathInLibrary(imagePath, libraryId);
|
||||
Resource resource = new FileSystemResource(fullPath);
|
||||
|
||||
if (!resource.exists()) {
|
||||
|
||||
@@ -1,31 +0,0 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.dto.HtmlSanitizationConfigDto;
|
||||
import com.storycove.service.HtmlSanitizationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/config")
|
||||
public class HtmlSanitizationController {
|
||||
|
||||
private final HtmlSanitizationService htmlSanitizationService;
|
||||
|
||||
@Autowired
|
||||
public HtmlSanitizationController(HtmlSanitizationService htmlSanitizationService) {
|
||||
this.htmlSanitizationService = htmlSanitizationService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HTML sanitization configuration for frontend use
|
||||
* This allows the frontend to use the same sanitization rules as the backend
|
||||
*/
|
||||
@GetMapping("/html-sanitization")
|
||||
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
|
||||
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
|
||||
return ResponseEntity.ok(config);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,242 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.dto.LibraryDto;
|
||||
import com.storycove.service.LibraryService;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.HashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/libraries")
|
||||
public class LibraryController {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(LibraryController.class);
|
||||
|
||||
private final LibraryService libraryService;
|
||||
|
||||
@Autowired
|
||||
public LibraryController(LibraryService libraryService) {
|
||||
this.libraryService = libraryService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available libraries (for settings UI)
|
||||
*/
|
||||
@GetMapping
|
||||
public ResponseEntity<List<LibraryDto>> getAllLibraries() {
|
||||
try {
|
||||
List<LibraryDto> libraries = libraryService.getAllLibraries();
|
||||
return ResponseEntity.ok(libraries);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get libraries", e);
|
||||
return ResponseEntity.internalServerError().build();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current active library info
|
||||
*/
|
||||
@GetMapping("/current")
|
||||
public ResponseEntity<LibraryDto> getCurrentLibrary() {
|
||||
try {
|
||||
var library = libraryService.getCurrentLibrary();
|
||||
if (library == null) {
|
||||
return ResponseEntity.noContent().build();
|
||||
}
|
||||
|
||||
LibraryDto dto = new LibraryDto(
|
||||
library.getId(),
|
||||
library.getName(),
|
||||
library.getDescription(),
|
||||
true, // always active since it's current
|
||||
library.isInitialized()
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(dto);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get current library", e);
|
||||
return ResponseEntity.internalServerError().build();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch to a different library (requires re-authentication)
|
||||
* This endpoint returns a switching status that the frontend can poll
|
||||
*/
|
||||
@PostMapping("/switch")
|
||||
public ResponseEntity<Map<String, Object>> initiateLibrarySwitch(@RequestBody Map<String, String> request) {
|
||||
try {
|
||||
String password = request.get("password");
|
||||
if (password == null || password.trim().isEmpty()) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", "Password required"));
|
||||
}
|
||||
|
||||
String libraryId = libraryService.authenticateAndGetLibrary(password);
|
||||
if (libraryId == null) {
|
||||
return ResponseEntity.status(401).body(Map.of("error", "Invalid password"));
|
||||
}
|
||||
|
||||
// Check if already on this library
|
||||
if (libraryId.equals(libraryService.getCurrentLibraryId())) {
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "already_active",
|
||||
"message", "Already using this library"
|
||||
));
|
||||
}
|
||||
|
||||
// Initiate switch in background thread
|
||||
new Thread(() -> {
|
||||
try {
|
||||
libraryService.switchToLibrary(libraryId);
|
||||
logger.info("Library switch completed: {}", libraryId);
|
||||
} catch (Exception e) {
|
||||
logger.error("Library switch failed: {}", libraryId, e);
|
||||
}
|
||||
}).start();
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "switching",
|
||||
"targetLibrary", libraryId,
|
||||
"message", "Switching to library, please wait..."
|
||||
));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to initiate library switch", e);
|
||||
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check library switch status
|
||||
*/
|
||||
@GetMapping("/switch/status")
|
||||
public ResponseEntity<Map<String, Object>> getLibrarySwitchStatus() {
|
||||
try {
|
||||
var currentLibrary = libraryService.getCurrentLibrary();
|
||||
boolean isReady = currentLibrary != null;
|
||||
|
||||
Map<String, Object> response = new HashMap<>();
|
||||
response.put("ready", isReady);
|
||||
if (isReady) {
|
||||
response.put("currentLibrary", currentLibrary.getId());
|
||||
response.put("currentLibraryName", currentLibrary.getName());
|
||||
} else {
|
||||
response.put("currentLibrary", null);
|
||||
response.put("currentLibraryName", null);
|
||||
}
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get switch status", e);
|
||||
return ResponseEntity.ok(Map.of("ready", false, "error", "Status check failed"));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Change password for current library
|
||||
*/
|
||||
@PostMapping("/password")
|
||||
public ResponseEntity<Map<String, Object>> changePassword(@RequestBody Map<String, String> request) {
|
||||
try {
|
||||
String currentPassword = request.get("currentPassword");
|
||||
String newPassword = request.get("newPassword");
|
||||
|
||||
if (currentPassword == null || newPassword == null) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", "Current and new passwords required"));
|
||||
}
|
||||
|
||||
String currentLibraryId = libraryService.getCurrentLibraryId();
|
||||
if (currentLibraryId == null) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", "No active library"));
|
||||
}
|
||||
|
||||
boolean success = libraryService.changeLibraryPassword(currentLibraryId, currentPassword, newPassword);
|
||||
if (success) {
|
||||
return ResponseEntity.ok(Map.of("success", true, "message", "Password changed successfully"));
|
||||
} else {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", "Current password is incorrect"));
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to change password", e);
|
||||
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new library
|
||||
*/
|
||||
@PostMapping("/create")
|
||||
public ResponseEntity<Map<String, Object>> createLibrary(@RequestBody Map<String, String> request) {
|
||||
try {
|
||||
String name = request.get("name");
|
||||
String description = request.get("description");
|
||||
String password = request.get("password");
|
||||
|
||||
if (name == null || name.trim().isEmpty() || password == null || password.trim().isEmpty()) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", "Name and password are required"));
|
||||
}
|
||||
|
||||
var newLibrary = libraryService.createNewLibrary(name.trim(), description, password);
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"library", Map.of(
|
||||
"id", newLibrary.getId(),
|
||||
"name", newLibrary.getName(),
|
||||
"description", newLibrary.getDescription()
|
||||
),
|
||||
"message", "Library created successfully. You can now log in with the new password to access it."
|
||||
));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to create library", e);
|
||||
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update library metadata (name and description)
|
||||
*/
|
||||
@PutMapping("/{libraryId}/metadata")
|
||||
public ResponseEntity<Map<String, Object>> updateLibraryMetadata(
|
||||
@PathVariable String libraryId,
|
||||
@RequestBody Map<String, String> updates) {
|
||||
|
||||
try {
|
||||
String newName = updates.get("name");
|
||||
String newDescription = updates.get("description");
|
||||
|
||||
if (newName == null || newName.trim().isEmpty()) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", "Library name is required"));
|
||||
}
|
||||
|
||||
// Update the library
|
||||
libraryService.updateLibraryMetadata(libraryId, newName, newDescription);
|
||||
|
||||
// Return updated library info
|
||||
LibraryDto updatedLibrary = libraryService.getLibraryById(libraryId);
|
||||
if (updatedLibrary != null) {
|
||||
Map<String, Object> response = new HashMap<>();
|
||||
response.put("success", true);
|
||||
response.put("message", "Library metadata updated successfully");
|
||||
response.put("library", updatedLibrary);
|
||||
return ResponseEntity.ok(response);
|
||||
} else {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
|
||||
} catch (IllegalArgumentException e) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", e.getMessage()));
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to update library metadata for {}: {}", libraryId, e.getMessage(), e);
|
||||
return ResponseEntity.internalServerError().body(Map.of("error", "Failed to update library metadata"));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -2,7 +2,7 @@ package com.storycove.controller;
|
||||
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.service.StoryService;
|
||||
import com.storycove.service.TypesenseService;
|
||||
import com.storycove.service.SearchServiceAdapter;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
@@ -14,25 +14,19 @@ import java.util.Map;
|
||||
@RequestMapping("/api/search")
|
||||
public class SearchController {
|
||||
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
private final StoryService storyService;
|
||||
|
||||
public SearchController(@Autowired(required = false) TypesenseService typesenseService, StoryService storyService) {
|
||||
this.typesenseService = typesenseService;
|
||||
public SearchController(SearchServiceAdapter searchServiceAdapter, StoryService storyService) {
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
this.storyService = storyService;
|
||||
}
|
||||
|
||||
@PostMapping("/reindex")
|
||||
public ResponseEntity<?> reindexAllStories() {
|
||||
if (typesenseService == null) {
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"error", "Typesense service is not available"
|
||||
));
|
||||
}
|
||||
|
||||
try {
|
||||
List<Story> allStories = storyService.findAll();
|
||||
typesenseService.reindexAllStories(allStories);
|
||||
searchServiceAdapter.bulkIndexStories(allStories);
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"message", "Successfully reindexed all stories",
|
||||
@@ -47,17 +41,8 @@ public class SearchController {
|
||||
|
||||
@GetMapping("/health")
|
||||
public ResponseEntity<?> searchHealthCheck() {
|
||||
if (typesenseService == null) {
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "disabled",
|
||||
"message", "Typesense service is disabled"
|
||||
));
|
||||
}
|
||||
|
||||
try {
|
||||
// Try a simple search to test connectivity
|
||||
typesenseService.searchSuggestions("test", 1);
|
||||
|
||||
// Search service is operational if it's injected
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"status", "healthy",
|
||||
"message", "Search service is operational"
|
||||
|
||||
@@ -14,6 +14,7 @@ import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageImpl;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.data.domain.Sort;
|
||||
@@ -25,6 +26,7 @@ import org.springframework.web.multipart.MultipartFile;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@@ -39,8 +41,11 @@ public class StoryController {
|
||||
private final SeriesService seriesService;
|
||||
private final HtmlSanitizationService sanitizationService;
|
||||
private final ImageService imageService;
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
private final CollectionService collectionService;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
private final EPUBImportService epubImportService;
|
||||
private final EPUBExportService epubExportService;
|
||||
|
||||
public StoryController(StoryService storyService,
|
||||
AuthorService authorService,
|
||||
@@ -48,14 +53,20 @@ public class StoryController {
|
||||
HtmlSanitizationService sanitizationService,
|
||||
ImageService imageService,
|
||||
CollectionService collectionService,
|
||||
@Autowired(required = false) TypesenseService typesenseService) {
|
||||
SearchServiceAdapter searchServiceAdapter,
|
||||
ReadingTimeService readingTimeService,
|
||||
EPUBImportService epubImportService,
|
||||
EPUBExportService epubExportService) {
|
||||
this.storyService = storyService;
|
||||
this.authorService = authorService;
|
||||
this.seriesService = seriesService;
|
||||
this.sanitizationService = sanitizationService;
|
||||
this.imageService = imageService;
|
||||
this.collectionService = collectionService;
|
||||
this.typesenseService = typesenseService;
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
this.readingTimeService = readingTimeService;
|
||||
this.epubImportService = epubImportService;
|
||||
this.epubExportService = epubExportService;
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
@@ -75,31 +86,92 @@ public class StoryController {
|
||||
return ResponseEntity.ok(storyDtos);
|
||||
}
|
||||
|
||||
@GetMapping("/random")
|
||||
public ResponseEntity<StorySummaryDto> getRandomStory(
|
||||
@RequestParam(required = false) String searchQuery,
|
||||
@RequestParam(required = false) List<String> tags,
|
||||
@RequestParam(required = false) Long seed,
|
||||
// Advanced filters
|
||||
@RequestParam(required = false) Integer minWordCount,
|
||||
@RequestParam(required = false) Integer maxWordCount,
|
||||
@RequestParam(required = false) String createdAfter,
|
||||
@RequestParam(required = false) String createdBefore,
|
||||
@RequestParam(required = false) String lastReadAfter,
|
||||
@RequestParam(required = false) String lastReadBefore,
|
||||
@RequestParam(required = false) Integer minRating,
|
||||
@RequestParam(required = false) Integer maxRating,
|
||||
@RequestParam(required = false) Boolean unratedOnly,
|
||||
@RequestParam(required = false) String readingStatus,
|
||||
@RequestParam(required = false) Boolean hasReadingProgress,
|
||||
@RequestParam(required = false) Boolean hasCoverImage,
|
||||
@RequestParam(required = false) String sourceDomain,
|
||||
@RequestParam(required = false) String seriesFilter,
|
||||
@RequestParam(required = false) Integer minTagCount,
|
||||
@RequestParam(required = false) Boolean popularOnly,
|
||||
@RequestParam(required = false) Boolean hiddenGemsOnly) {
|
||||
|
||||
logger.info("Getting random story with filters - searchQuery: {}, tags: {}, seed: {}",
|
||||
searchQuery, tags, seed);
|
||||
|
||||
Optional<Story> randomStory = storyService.findRandomStory(searchQuery, tags, seed,
|
||||
minWordCount, maxWordCount, createdAfter, createdBefore, lastReadAfter, lastReadBefore,
|
||||
minRating, maxRating, unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
|
||||
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
|
||||
|
||||
if (randomStory.isPresent()) {
|
||||
StorySummaryDto storyDto = convertToSummaryDto(randomStory.get());
|
||||
return ResponseEntity.ok(storyDto);
|
||||
} else {
|
||||
return ResponseEntity.noContent().build(); // 204 No Content when no stories match filters
|
||||
}
|
||||
}
|
||||
|
||||
@GetMapping("/{id}")
|
||||
public ResponseEntity<StoryDto> getStoryById(@PathVariable UUID id) {
|
||||
Story story = storyService.findById(id);
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
@GetMapping("/{id}/read")
|
||||
public ResponseEntity<StoryReadingDto> getStoryForReading(@PathVariable UUID id) {
|
||||
logger.info("Getting story {} for reading", id);
|
||||
Story story = storyService.findById(id);
|
||||
return ResponseEntity.ok(convertToReadingDto(story));
|
||||
}
|
||||
|
||||
@PostMapping
|
||||
public ResponseEntity<StoryDto> createStory(@Valid @RequestBody CreateStoryRequest request) {
|
||||
logger.info("Creating new story: {}", request.getTitle());
|
||||
Story story = new Story();
|
||||
updateStoryFromRequest(story, request);
|
||||
|
||||
Story savedStory = storyService.createWithTagNames(story, request.getTagNames());
|
||||
logger.info("Successfully created story: {} (ID: {})", savedStory.getTitle(), savedStory.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedStory));
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
public ResponseEntity<StoryDto> updateStory(@PathVariable UUID id,
|
||||
@Valid @RequestBody UpdateStoryRequest request) {
|
||||
logger.info("Updating story: {} (ID: {})", request.getTitle(), id);
|
||||
|
||||
// Handle author creation/lookup at controller level before calling service
|
||||
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty() && request.getAuthorId() == null) {
|
||||
Author author = findOrCreateAuthor(request.getAuthorName().trim());
|
||||
request.setAuthorId(author.getId());
|
||||
request.setAuthorName(null); // Clear author name since we now have the ID
|
||||
}
|
||||
|
||||
Story updatedStory = storyService.updateWithTagNames(id, request);
|
||||
logger.info("Successfully updated story: {}", updatedStory.getTitle());
|
||||
return ResponseEntity.ok(convertToDto(updatedStory));
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<?> deleteStory(@PathVariable UUID id) {
|
||||
logger.info("Deleting story with ID: {}", id);
|
||||
storyService.delete(id);
|
||||
logger.info("Successfully deleted story with ID: {}", id);
|
||||
return ResponseEntity.ok(Map.of("message", "Story deleted successfully"));
|
||||
}
|
||||
|
||||
@@ -143,15 +215,58 @@ public class StoryController {
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
@PostMapping("/{id}/reading-progress")
|
||||
public ResponseEntity<StoryDto> updateReadingProgress(@PathVariable UUID id, @RequestBody ReadingProgressRequest request) {
|
||||
logger.info("Updating reading progress for story {} to position {}", id, request.getPosition());
|
||||
Story story = storyService.updateReadingProgress(id, request.getPosition());
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
@PostMapping("/{id}/reading-status")
|
||||
public ResponseEntity<StoryDto> updateReadingStatus(@PathVariable UUID id, @RequestBody ReadingStatusRequest request) {
|
||||
logger.info("Updating reading status for story {} to {}", id, request.getIsRead() ? "read" : "unread");
|
||||
Story story = storyService.updateReadingStatus(id, request.getIsRead());
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
@PostMapping("/{id}/process-content-images")
|
||||
public ResponseEntity<Map<String, Object>> processContentImages(@PathVariable UUID id, @RequestBody ProcessContentImagesRequest request) {
|
||||
logger.info("Processing content images for story {}", id);
|
||||
|
||||
try {
|
||||
// Process the HTML content to download and replace image URLs
|
||||
ImageService.ContentImageProcessingResult result = imageService.processContentImages(request.getHtmlContent(), id);
|
||||
|
||||
// If there are warnings, let the client decide whether to proceed
|
||||
if (result.hasWarnings()) {
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"processedContent", result.getProcessedContent(),
|
||||
"warnings", result.getWarnings(),
|
||||
"downloadedImages", result.getDownloadedImages(),
|
||||
"hasWarnings", true
|
||||
));
|
||||
}
|
||||
|
||||
// Success - no warnings
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"processedContent", result.getProcessedContent(),
|
||||
"downloadedImages", result.getDownloadedImages(),
|
||||
"hasWarnings", false
|
||||
));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to process content images for story {}", id, e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(Map.of("error", "Failed to process content images: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/reindex")
|
||||
public ResponseEntity<String> manualReindex() {
|
||||
if (typesenseService == null) {
|
||||
return ResponseEntity.ok("Typesense is not enabled, no reindexing performed");
|
||||
}
|
||||
|
||||
try {
|
||||
List<Story> allStories = storyService.findAllWithAssociations();
|
||||
typesenseService.reindexAllStories(allStories);
|
||||
searchServiceAdapter.bulkIndexStories(allStories);
|
||||
return ResponseEntity.ok("Successfully reindexed " + allStories.size() + " stories");
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.status(500).body("Failed to reindex stories: " + e.getMessage());
|
||||
@@ -162,7 +277,7 @@ public class StoryController {
|
||||
public ResponseEntity<Map<String, Object>> reindexStoriesTypesense() {
|
||||
try {
|
||||
List<Story> allStories = storyService.findAllWithAssociations();
|
||||
typesenseService.reindexAllStories(allStories);
|
||||
searchServiceAdapter.bulkIndexStories(allStories);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Reindexed " + allStories.size() + " stories",
|
||||
@@ -182,7 +297,7 @@ public class StoryController {
|
||||
try {
|
||||
// This will delete the existing collection and recreate it with correct schema
|
||||
List<Story> allStories = storyService.findAllWithAssociations();
|
||||
typesenseService.reindexAllStories(allStories);
|
||||
searchServiceAdapter.bulkIndexStories(allStories);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Recreated stories collection and indexed " + allStories.size() + " stories",
|
||||
@@ -207,17 +322,55 @@ public class StoryController {
|
||||
@RequestParam(required = false) Integer minRating,
|
||||
@RequestParam(required = false) Integer maxRating,
|
||||
@RequestParam(required = false) String sortBy,
|
||||
@RequestParam(required = false) String sortDir) {
|
||||
@RequestParam(required = false) String sortDir,
|
||||
@RequestParam(required = false) List<String> facetBy,
|
||||
// Advanced filters
|
||||
@RequestParam(required = false) Integer minWordCount,
|
||||
@RequestParam(required = false) Integer maxWordCount,
|
||||
@RequestParam(required = false) String createdAfter,
|
||||
@RequestParam(required = false) String createdBefore,
|
||||
@RequestParam(required = false) String lastReadAfter,
|
||||
@RequestParam(required = false) String lastReadBefore,
|
||||
@RequestParam(required = false) Boolean unratedOnly,
|
||||
@RequestParam(required = false) String readingStatus,
|
||||
@RequestParam(required = false) Boolean hasReadingProgress,
|
||||
@RequestParam(required = false) Boolean hasCoverImage,
|
||||
@RequestParam(required = false) String sourceDomain,
|
||||
@RequestParam(required = false) String seriesFilter,
|
||||
@RequestParam(required = false) Integer minTagCount,
|
||||
@RequestParam(required = false) Boolean popularOnly,
|
||||
@RequestParam(required = false) Boolean hiddenGemsOnly) {
|
||||
|
||||
logger.info("CONTROLLER DEBUG: Search request - query='{}', tags={}, authors={}", query, tags, authors);
|
||||
|
||||
if (typesenseService != null) {
|
||||
SearchResultDto<StorySearchDto> results = typesenseService.searchStories(
|
||||
query, page, size, authors, tags, minRating, maxRating, sortBy, sortDir);
|
||||
// Use SearchServiceAdapter to handle routing between search engines
|
||||
try {
|
||||
// Convert authors list to single author string (for now, use first author)
|
||||
String authorFilter = (authors != null && !authors.isEmpty()) ? authors.get(0) : null;
|
||||
|
||||
// DEBUG: Log all received parameters
|
||||
logger.info("CONTROLLER DEBUG - Received parameters:");
|
||||
logger.info(" readingStatus: '{}'", readingStatus);
|
||||
logger.info(" seriesFilter: '{}'", seriesFilter);
|
||||
logger.info(" hasReadingProgress: {}", hasReadingProgress);
|
||||
logger.info(" hasCoverImage: {}", hasCoverImage);
|
||||
logger.info(" createdAfter: '{}'", createdAfter);
|
||||
logger.info(" lastReadAfter: '{}'", lastReadAfter);
|
||||
logger.info(" unratedOnly: {}", unratedOnly);
|
||||
|
||||
SearchResultDto<StorySearchDto> results = searchServiceAdapter.searchStories(
|
||||
query, tags, authorFilter, seriesFilter, minWordCount, maxWordCount,
|
||||
minRating != null ? minRating.floatValue() : null,
|
||||
null, // isRead - now handled by readingStatus advanced filter
|
||||
null, // isFavorite - now handled by readingStatus advanced filter
|
||||
sortBy, sortDir, page, size, facetBy,
|
||||
// Advanced filters
|
||||
createdAfter, createdBefore, lastReadAfter, lastReadBefore,
|
||||
unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
|
||||
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
|
||||
return ResponseEntity.ok(results);
|
||||
} else {
|
||||
// Fallback to basic search if Typesense is not available
|
||||
return ResponseEntity.badRequest().body(null);
|
||||
} catch (Exception e) {
|
||||
logger.error("Search failed", e);
|
||||
return ResponseEntity.internalServerError().body(null);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -226,10 +379,12 @@ public class StoryController {
|
||||
@RequestParam String query,
|
||||
@RequestParam(defaultValue = "5") int limit) {
|
||||
|
||||
if (typesenseService != null) {
|
||||
List<String> suggestions = typesenseService.searchSuggestions(query, limit);
|
||||
// Use SearchServiceAdapter to handle routing between search engines
|
||||
try {
|
||||
List<String> suggestions = searchServiceAdapter.getTagSuggestions(query, limit);
|
||||
return ResponseEntity.ok(suggestions);
|
||||
} else {
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to get search suggestions", e);
|
||||
return ResponseEntity.ok(new ArrayList<>());
|
||||
}
|
||||
}
|
||||
@@ -353,25 +508,55 @@ public class StoryController {
|
||||
story.setDescription(updateReq.getDescription());
|
||||
}
|
||||
if (updateReq.getContentHtml() != null) {
|
||||
story.setContentHtml(sanitizationService.sanitize(updateReq.getContentHtml()));
|
||||
logger.info("Content before sanitization (length: {}): {}",
|
||||
updateReq.getContentHtml().length(),
|
||||
updateReq.getContentHtml().substring(0, Math.min(500, updateReq.getContentHtml().length())));
|
||||
String sanitizedContent = sanitizationService.sanitize(updateReq.getContentHtml());
|
||||
logger.info("Content after sanitization (length: {}): {}",
|
||||
sanitizedContent.length(),
|
||||
sanitizedContent.substring(0, Math.min(500, sanitizedContent.length())));
|
||||
story.setContentHtml(sanitizedContent);
|
||||
}
|
||||
if (updateReq.getSourceUrl() != null) {
|
||||
story.setSourceUrl(updateReq.getSourceUrl());
|
||||
}
|
||||
if (updateReq.getVolume() != null) {
|
||||
story.setVolume(updateReq.getVolume());
|
||||
}
|
||||
// Volume will be handled in series logic below
|
||||
// Handle author - either by ID or by name
|
||||
if (updateReq.getAuthorId() != null) {
|
||||
Author author = authorService.findById(updateReq.getAuthorId());
|
||||
story.setAuthor(author);
|
||||
} else if (updateReq.getAuthorName() != null && !updateReq.getAuthorName().trim().isEmpty()) {
|
||||
Author author = findOrCreateAuthor(updateReq.getAuthorName().trim());
|
||||
story.setAuthor(author);
|
||||
}
|
||||
// Handle series - either by ID or by name
|
||||
// Handle series - either by ID, by name, or remove from series
|
||||
if (updateReq.getSeriesId() != null) {
|
||||
Series series = seriesService.findById(updateReq.getSeriesId());
|
||||
story.setSeries(series);
|
||||
} else if (updateReq.getSeriesName() != null && !updateReq.getSeriesName().trim().isEmpty()) {
|
||||
Series series = seriesService.findOrCreate(updateReq.getSeriesName().trim());
|
||||
story.setSeries(series);
|
||||
} else if (updateReq.getSeriesName() != null) {
|
||||
logger.info("Processing series update: seriesName='{}', isEmpty={}", updateReq.getSeriesName(), updateReq.getSeriesName().trim().isEmpty());
|
||||
if (updateReq.getSeriesName().trim().isEmpty()) {
|
||||
// Empty series name means remove from series
|
||||
logger.info("Removing story from series");
|
||||
if (story.getSeries() != null) {
|
||||
story.getSeries().removeStory(story);
|
||||
story.setSeries(null);
|
||||
story.setVolume(null);
|
||||
logger.info("Story removed from series");
|
||||
}
|
||||
} else {
|
||||
// Non-empty series name means add to series
|
||||
logger.info("Adding story to series: '{}', volume: {}", updateReq.getSeriesName().trim(), updateReq.getVolume());
|
||||
Series series = seriesService.findOrCreate(updateReq.getSeriesName().trim());
|
||||
story.setSeries(series);
|
||||
// Set volume only if series is being set
|
||||
if (updateReq.getVolume() != null) {
|
||||
story.setVolume(updateReq.getVolume());
|
||||
logger.info("Story added to series: {} with volume: {}", series.getName(), updateReq.getVolume());
|
||||
} else {
|
||||
logger.info("Story added to series: {} with no volume", series.getName());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Note: Tags are now handled in StoryService.updateWithTagNames()
|
||||
@@ -385,7 +570,6 @@ public class StoryController {
|
||||
dto.setSummary(story.getSummary());
|
||||
dto.setDescription(story.getDescription());
|
||||
dto.setContentHtml(story.getContentHtml());
|
||||
dto.setContentPlain(story.getContentPlain());
|
||||
dto.setSourceUrl(story.getSourceUrl());
|
||||
dto.setCoverPath(story.getCoverPath());
|
||||
dto.setWordCount(story.getWordCount());
|
||||
@@ -394,6 +578,48 @@ public class StoryController {
|
||||
dto.setCreatedAt(story.getCreatedAt());
|
||||
dto.setUpdatedAt(story.getUpdatedAt());
|
||||
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
dto.setAuthorId(story.getAuthor().getId());
|
||||
dto.setAuthorName(story.getAuthor().getName());
|
||||
}
|
||||
|
||||
if (story.getSeries() != null) {
|
||||
dto.setSeriesId(story.getSeries().getId());
|
||||
dto.setSeriesName(story.getSeries().getName());
|
||||
}
|
||||
|
||||
dto.setTags(story.getTags().stream()
|
||||
.map(this::convertTagToDto)
|
||||
.collect(Collectors.toList()));
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
private StoryReadingDto convertToReadingDto(Story story) {
|
||||
StoryReadingDto dto = new StoryReadingDto();
|
||||
dto.setId(story.getId());
|
||||
dto.setTitle(story.getTitle());
|
||||
dto.setSummary(story.getSummary());
|
||||
dto.setDescription(story.getDescription());
|
||||
dto.setContentHtml(story.getContentHtml());
|
||||
dto.setSourceUrl(story.getSourceUrl());
|
||||
dto.setCoverPath(story.getCoverPath());
|
||||
dto.setWordCount(story.getWordCount());
|
||||
dto.setRating(story.getRating());
|
||||
dto.setVolume(story.getVolume());
|
||||
dto.setCreatedAt(story.getCreatedAt());
|
||||
dto.setUpdatedAt(story.getUpdatedAt());
|
||||
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
dto.setAuthorId(story.getAuthor().getId());
|
||||
dto.setAuthorName(story.getAuthor().getName());
|
||||
@@ -426,6 +652,11 @@ public class StoryController {
|
||||
dto.setUpdatedAt(story.getUpdatedAt());
|
||||
dto.setPartOfSeries(story.isPartOfSeries());
|
||||
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
dto.setAuthorId(story.getAuthor().getId());
|
||||
dto.setAuthorName(story.getAuthor().getName());
|
||||
@@ -447,8 +678,11 @@ public class StoryController {
|
||||
TagDto tagDto = new TagDto();
|
||||
tagDto.setId(tag.getId());
|
||||
tagDto.setName(tag.getName());
|
||||
tagDto.setColor(tag.getColor());
|
||||
tagDto.setDescription(tag.getDescription());
|
||||
tagDto.setCreatedAt(tag.getCreatedAt());
|
||||
// storyCount can be set if needed, but it might be expensive to calculate for each tag
|
||||
tagDto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
|
||||
tagDto.setAliasCount(tag.getAliases() != null ? tag.getAliases().size() : 0);
|
||||
return tagDto;
|
||||
}
|
||||
|
||||
@@ -467,12 +701,151 @@ public class StoryController {
|
||||
// to avoid circular references and keep it lightweight
|
||||
dto.setStoryCount(collection.getStoryCount());
|
||||
dto.setTotalWordCount(collection.getTotalWordCount());
|
||||
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime());
|
||||
dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
|
||||
dto.setAverageStoryRating(collection.getAverageStoryRating());
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
@GetMapping("/check-duplicate")
|
||||
public ResponseEntity<Map<String, Object>> checkDuplicate(
|
||||
@RequestParam String title,
|
||||
@RequestParam String authorName) {
|
||||
try {
|
||||
List<Story> duplicates = storyService.findPotentialDuplicates(title, authorName);
|
||||
|
||||
Map<String, Object> response = Map.of(
|
||||
"hasDuplicates", !duplicates.isEmpty(),
|
||||
"count", duplicates.size(),
|
||||
"duplicates", duplicates.stream()
|
||||
.map(story -> Map.of(
|
||||
"id", story.getId(),
|
||||
"title", story.getTitle(),
|
||||
"authorName", story.getAuthor() != null ? story.getAuthor().getName() : "",
|
||||
"createdAt", story.getCreatedAt()
|
||||
))
|
||||
.collect(Collectors.toList())
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
} catch (Exception e) {
|
||||
logger.error("Error checking for duplicates", e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(Map.of("error", "Failed to check for duplicates"));
|
||||
}
|
||||
}
|
||||
|
||||
// EPUB Import endpoint
|
||||
@PostMapping("/epub/import")
|
||||
public ResponseEntity<EPUBImportResponse> importEPUB(
|
||||
@RequestParam("file") MultipartFile file,
|
||||
@RequestParam(required = false) UUID authorId,
|
||||
@RequestParam(required = false) String authorName,
|
||||
@RequestParam(required = false) UUID seriesId,
|
||||
@RequestParam(required = false) String seriesName,
|
||||
@RequestParam(required = false) Integer seriesVolume,
|
||||
@RequestParam(required = false) List<String> tags,
|
||||
@RequestParam(defaultValue = "true") Boolean preserveReadingPosition,
|
||||
@RequestParam(defaultValue = "false") Boolean overwriteExisting,
|
||||
@RequestParam(defaultValue = "true") Boolean createMissingAuthor,
|
||||
@RequestParam(defaultValue = "true") Boolean createMissingSeries) {
|
||||
|
||||
logger.info("Importing EPUB file: {}", file.getOriginalFilename());
|
||||
|
||||
EPUBImportRequest request = new EPUBImportRequest();
|
||||
request.setEpubFile(file);
|
||||
request.setAuthorId(authorId);
|
||||
request.setAuthorName(authorName);
|
||||
request.setSeriesId(seriesId);
|
||||
request.setSeriesName(seriesName);
|
||||
request.setSeriesVolume(seriesVolume);
|
||||
request.setTags(tags);
|
||||
request.setPreserveReadingPosition(preserveReadingPosition);
|
||||
request.setOverwriteExisting(overwriteExisting);
|
||||
request.setCreateMissingAuthor(createMissingAuthor);
|
||||
request.setCreateMissingSeries(createMissingSeries);
|
||||
|
||||
try {
|
||||
EPUBImportResponse response = epubImportService.importEPUB(request);
|
||||
|
||||
if (response.isSuccess()) {
|
||||
logger.info("Successfully imported EPUB: {} (Story ID: {})",
|
||||
response.getStoryTitle(), response.getStoryId());
|
||||
return ResponseEntity.ok(response);
|
||||
} else {
|
||||
logger.warn("EPUB import failed: {}", response.getMessage());
|
||||
return ResponseEntity.badRequest().body(response);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error importing EPUB: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(EPUBImportResponse.error("Internal server error: " + e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
// EPUB Export endpoint
|
||||
@PostMapping("/epub/export")
|
||||
public ResponseEntity<org.springframework.core.io.Resource> exportEPUB(
|
||||
@Valid @RequestBody EPUBExportRequest request) {
|
||||
|
||||
logger.info("Exporting story {} to EPUB", request.getStoryId());
|
||||
|
||||
try {
|
||||
if (!epubExportService.canExportStory(request.getStoryId())) {
|
||||
return ResponseEntity.badRequest().build();
|
||||
}
|
||||
|
||||
org.springframework.core.io.Resource resource = epubExportService.exportStoryAsEPUB(request);
|
||||
Story story = storyService.findById(request.getStoryId());
|
||||
String filename = epubExportService.getEPUBFilename(story);
|
||||
|
||||
logger.info("Successfully exported EPUB: {}", filename);
|
||||
|
||||
return ResponseEntity.ok()
|
||||
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
|
||||
.header("Content-Type", "application/epub+zip")
|
||||
.body(resource);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error exporting EPUB: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
|
||||
}
|
||||
}
|
||||
|
||||
// EPUB Export by story ID (GET endpoint)
|
||||
@GetMapping("/{id}/epub")
|
||||
public ResponseEntity<org.springframework.core.io.Resource> exportStoryAsEPUB(@PathVariable UUID id) {
|
||||
logger.info("Exporting story {} to EPUB via GET", id);
|
||||
|
||||
EPUBExportRequest request = new EPUBExportRequest(id);
|
||||
return exportEPUB(request);
|
||||
}
|
||||
|
||||
// Validate EPUB file
|
||||
@PostMapping("/epub/validate")
|
||||
public ResponseEntity<Map<String, Object>> validateEPUBFile(@RequestParam("file") MultipartFile file) {
|
||||
logger.info("Validating EPUB file: {}", file.getOriginalFilename());
|
||||
|
||||
try {
|
||||
List<String> errors = epubImportService.validateEPUBFile(file);
|
||||
|
||||
Map<String, Object> response = Map.of(
|
||||
"valid", errors.isEmpty(),
|
||||
"errors", errors,
|
||||
"filename", file.getOriginalFilename(),
|
||||
"size", file.getSize()
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error validating EPUB file: {}", e.getMessage(), e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(Map.of("error", "Failed to validate EPUB file"));
|
||||
}
|
||||
}
|
||||
|
||||
// Request DTOs
|
||||
public static class CreateStoryRequest {
|
||||
private String title;
|
||||
@@ -520,6 +893,7 @@ public class StoryController {
|
||||
private String sourceUrl;
|
||||
private Integer volume;
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
private UUID seriesId;
|
||||
private String seriesName;
|
||||
private List<String> tagNames;
|
||||
@@ -539,6 +913,8 @@ public class StoryController {
|
||||
public void setVolume(Integer volume) { this.volume = volume; }
|
||||
public UUID getAuthorId() { return authorId; }
|
||||
public void setAuthorId(UUID authorId) { this.authorId = authorId; }
|
||||
public String getAuthorName() { return authorName; }
|
||||
public void setAuthorName(String authorName) { this.authorName = authorName; }
|
||||
public UUID getSeriesId() { return seriesId; }
|
||||
public void setSeriesId(UUID seriesId) { this.seriesId = seriesId; }
|
||||
public String getSeriesName() { return seriesName; }
|
||||
|
||||
@@ -1,9 +1,13 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.dto.TagDto;
|
||||
import com.storycove.dto.TagAliasDto;
|
||||
import com.storycove.entity.Tag;
|
||||
import com.storycove.entity.TagAlias;
|
||||
import com.storycove.service.TagService;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -21,6 +25,7 @@ import java.util.stream.Collectors;
|
||||
@RequestMapping("/api/tags")
|
||||
public class TagController {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TagController.class);
|
||||
private final TagService tagService;
|
||||
|
||||
public TagController(TagService tagService) {
|
||||
@@ -54,6 +59,8 @@ public class TagController {
|
||||
public ResponseEntity<TagDto> createTag(@Valid @RequestBody CreateTagRequest request) {
|
||||
Tag tag = new Tag();
|
||||
tag.setName(request.getName());
|
||||
tag.setColor(request.getColor());
|
||||
tag.setDescription(request.getDescription());
|
||||
|
||||
Tag savedTag = tagService.create(tag);
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedTag));
|
||||
@@ -66,6 +73,12 @@ public class TagController {
|
||||
if (request.getName() != null) {
|
||||
existingTag.setName(request.getName());
|
||||
}
|
||||
if (request.getColor() != null) {
|
||||
existingTag.setColor(request.getColor());
|
||||
}
|
||||
if (request.getDescription() != null) {
|
||||
existingTag.setDescription(request.getDescription());
|
||||
}
|
||||
|
||||
Tag updatedTag = tagService.update(id, existingTag);
|
||||
return ResponseEntity.ok(convertToDto(updatedTag));
|
||||
@@ -95,7 +108,7 @@ public class TagController {
|
||||
@RequestParam String query,
|
||||
@RequestParam(defaultValue = "10") int limit) {
|
||||
|
||||
List<Tag> tags = tagService.findByNameStartingWith(query, limit);
|
||||
List<Tag> tags = tagService.findByNameOrAliasStartingWith(query, limit);
|
||||
List<TagDto> tagDtos = tags.stream().map(this::convertToDto).collect(Collectors.toList());
|
||||
|
||||
return ResponseEntity.ok(tagDtos);
|
||||
@@ -132,29 +145,257 @@ public class TagController {
|
||||
return ResponseEntity.ok(stats);
|
||||
}
|
||||
|
||||
@GetMapping("/collections")
|
||||
public ResponseEntity<List<TagDto>> getTagsUsedByCollections() {
|
||||
List<Tag> tags = tagService.findTagsUsedByCollections();
|
||||
List<TagDto> tagDtos = tags.stream()
|
||||
.map(this::convertToDtoWithCollectionCount)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return ResponseEntity.ok(tagDtos);
|
||||
}
|
||||
|
||||
// Tag alias endpoints
|
||||
@PostMapping("/{tagId}/aliases")
|
||||
public ResponseEntity<TagAliasDto> addAlias(@PathVariable UUID tagId,
|
||||
@RequestBody Map<String, String> request) {
|
||||
String aliasName = request.get("aliasName");
|
||||
if (aliasName == null || aliasName.trim().isEmpty()) {
|
||||
return ResponseEntity.badRequest().build();
|
||||
}
|
||||
|
||||
try {
|
||||
TagAlias alias = tagService.addAlias(tagId, aliasName.trim());
|
||||
TagAliasDto dto = new TagAliasDto();
|
||||
dto.setId(alias.getId());
|
||||
dto.setAliasName(alias.getAliasName());
|
||||
dto.setCanonicalTagId(alias.getCanonicalTag().getId());
|
||||
dto.setCanonicalTagName(alias.getCanonicalTag().getName());
|
||||
dto.setCreatedFromMerge(alias.getCreatedFromMerge());
|
||||
dto.setCreatedAt(alias.getCreatedAt());
|
||||
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(dto);
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.badRequest().build();
|
||||
}
|
||||
}
|
||||
|
||||
@DeleteMapping("/{tagId}/aliases/{aliasId}")
|
||||
public ResponseEntity<?> removeAlias(@PathVariable UUID tagId, @PathVariable UUID aliasId) {
|
||||
try {
|
||||
tagService.removeAlias(tagId, aliasId);
|
||||
return ResponseEntity.ok(Map.of("message", "Alias removed successfully"));
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.badRequest().body(Map.of("error", e.getMessage()));
|
||||
}
|
||||
}
|
||||
|
||||
@GetMapping("/resolve/{name}")
|
||||
public ResponseEntity<TagDto> resolveTag(@PathVariable String name) {
|
||||
try {
|
||||
Tag resolvedTag = tagService.resolveTagByName(name);
|
||||
if (resolvedTag != null) {
|
||||
return ResponseEntity.ok(convertToDto(resolvedTag));
|
||||
} else {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
} catch (Exception e) {
|
||||
return ResponseEntity.notFound().build();
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/merge")
|
||||
public ResponseEntity<?> mergeTags(@Valid @RequestBody MergeTagsRequest request) {
|
||||
try {
|
||||
Tag resultTag = tagService.mergeTags(request.getSourceTagUUIDs(), request.getTargetTagUUID());
|
||||
return ResponseEntity.ok(convertToDto(resultTag));
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to merge tags", e);
|
||||
String errorMessage = e.getMessage() != null ? e.getMessage() : "Unknown error occurred";
|
||||
return ResponseEntity.badRequest().body(Map.of("error", errorMessage));
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/merge/preview")
|
||||
public ResponseEntity<?> previewMerge(@Valid @RequestBody MergeTagsRequest request) {
|
||||
try {
|
||||
MergePreviewResponse preview = tagService.previewMerge(request.getSourceTagUUIDs(), request.getTargetTagUUID());
|
||||
return ResponseEntity.ok(preview);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to preview merge", e);
|
||||
String errorMessage = e.getMessage() != null ? e.getMessage() : "Unknown error occurred";
|
||||
return ResponseEntity.badRequest().body(Map.of("error", errorMessage));
|
||||
}
|
||||
}
|
||||
|
||||
@PostMapping("/suggest")
|
||||
public ResponseEntity<List<TagSuggestion>> suggestTags(@RequestBody TagSuggestionRequest request) {
|
||||
try {
|
||||
List<TagSuggestion> suggestions = tagService.suggestTags(
|
||||
request.getTitle(),
|
||||
request.getContent(),
|
||||
request.getSummary(),
|
||||
request.getLimit() != null ? request.getLimit() : 10
|
||||
);
|
||||
return ResponseEntity.ok(suggestions);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to suggest tags", e);
|
||||
return ResponseEntity.ok(List.of()); // Return empty list on error
|
||||
}
|
||||
}
|
||||
|
||||
private TagDto convertToDto(Tag tag) {
|
||||
TagDto dto = new TagDto();
|
||||
dto.setId(tag.getId());
|
||||
dto.setName(tag.getName());
|
||||
dto.setColor(tag.getColor());
|
||||
dto.setDescription(tag.getDescription());
|
||||
dto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
|
||||
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
|
||||
dto.setAliasCount(tag.getAliases() != null ? tag.getAliases().size() : 0);
|
||||
dto.setCreatedAt(tag.getCreatedAt());
|
||||
// updatedAt field not present in Tag entity per spec
|
||||
|
||||
// Convert aliases to DTOs for full context
|
||||
if (tag.getAliases() != null && !tag.getAliases().isEmpty()) {
|
||||
List<TagAliasDto> aliaseDtos = tag.getAliases().stream()
|
||||
.map(alias -> {
|
||||
TagAliasDto aliasDto = new TagAliasDto();
|
||||
aliasDto.setId(alias.getId());
|
||||
aliasDto.setAliasName(alias.getAliasName());
|
||||
aliasDto.setCanonicalTagId(alias.getCanonicalTag().getId());
|
||||
aliasDto.setCanonicalTagName(alias.getCanonicalTag().getName());
|
||||
aliasDto.setCreatedFromMerge(alias.getCreatedFromMerge());
|
||||
aliasDto.setCreatedAt(alias.getCreatedAt());
|
||||
return aliasDto;
|
||||
})
|
||||
.collect(Collectors.toList());
|
||||
dto.setAliases(aliaseDtos);
|
||||
}
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
private TagDto convertToDtoWithCollectionCount(Tag tag) {
|
||||
TagDto dto = new TagDto();
|
||||
dto.setId(tag.getId());
|
||||
dto.setName(tag.getName());
|
||||
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
|
||||
dto.setCreatedAt(tag.getCreatedAt());
|
||||
// Don't set storyCount for collection-focused endpoint
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
// Request DTOs
|
||||
public static class CreateTagRequest {
|
||||
private String name;
|
||||
private String color;
|
||||
private String description;
|
||||
|
||||
public String getName() { return name; }
|
||||
public void setName(String name) { this.name = name; }
|
||||
|
||||
public String getColor() { return color; }
|
||||
public void setColor(String color) { this.color = color; }
|
||||
|
||||
public String getDescription() { return description; }
|
||||
public void setDescription(String description) { this.description = description; }
|
||||
}
|
||||
|
||||
public static class UpdateTagRequest {
|
||||
private String name;
|
||||
private String color;
|
||||
private String description;
|
||||
|
||||
public String getName() { return name; }
|
||||
public void setName(String name) { this.name = name; }
|
||||
|
||||
public String getColor() { return color; }
|
||||
public void setColor(String color) { this.color = color; }
|
||||
|
||||
public String getDescription() { return description; }
|
||||
public void setDescription(String description) { this.description = description; }
|
||||
}
|
||||
|
||||
public static class MergeTagsRequest {
|
||||
private List<String> sourceTagIds;
|
||||
private String targetTagId;
|
||||
|
||||
public List<String> getSourceTagIds() { return sourceTagIds; }
|
||||
public void setSourceTagIds(List<String> sourceTagIds) { this.sourceTagIds = sourceTagIds; }
|
||||
|
||||
public String getTargetTagId() { return targetTagId; }
|
||||
public void setTargetTagId(String targetTagId) { this.targetTagId = targetTagId; }
|
||||
|
||||
// Helper methods to convert to UUID
|
||||
public List<UUID> getSourceTagUUIDs() {
|
||||
return sourceTagIds != null ? sourceTagIds.stream().map(UUID::fromString).toList() : null;
|
||||
}
|
||||
|
||||
public UUID getTargetTagUUID() {
|
||||
return targetTagId != null ? UUID.fromString(targetTagId) : null;
|
||||
}
|
||||
}
|
||||
|
||||
public static class MergePreviewResponse {
|
||||
private String targetTagName;
|
||||
private int targetStoryCount;
|
||||
private int totalResultStoryCount;
|
||||
private List<String> aliasesToCreate;
|
||||
|
||||
public String getTargetTagName() { return targetTagName; }
|
||||
public void setTargetTagName(String targetTagName) { this.targetTagName = targetTagName; }
|
||||
|
||||
public int getTargetStoryCount() { return targetStoryCount; }
|
||||
public void setTargetStoryCount(int targetStoryCount) { this.targetStoryCount = targetStoryCount; }
|
||||
|
||||
public int getTotalResultStoryCount() { return totalResultStoryCount; }
|
||||
public void setTotalResultStoryCount(int totalResultStoryCount) { this.totalResultStoryCount = totalResultStoryCount; }
|
||||
|
||||
public List<String> getAliasesToCreate() { return aliasesToCreate; }
|
||||
public void setAliasesToCreate(List<String> aliasesToCreate) { this.aliasesToCreate = aliasesToCreate; }
|
||||
}
|
||||
|
||||
public static class TagSuggestionRequest {
|
||||
private String title;
|
||||
private String content;
|
||||
private String summary;
|
||||
private Integer limit;
|
||||
|
||||
public String getTitle() { return title; }
|
||||
public void setTitle(String title) { this.title = title; }
|
||||
|
||||
public String getContent() { return content; }
|
||||
public void setContent(String content) { this.content = content; }
|
||||
|
||||
public String getSummary() { return summary; }
|
||||
public void setSummary(String summary) { this.summary = summary; }
|
||||
|
||||
public Integer getLimit() { return limit; }
|
||||
public void setLimit(Integer limit) { this.limit = limit; }
|
||||
}
|
||||
|
||||
public static class TagSuggestion {
|
||||
private String tagName;
|
||||
private double confidence;
|
||||
private String reason;
|
||||
|
||||
public TagSuggestion() {}
|
||||
|
||||
public TagSuggestion(String tagName, double confidence, String reason) {
|
||||
this.tagName = tagName;
|
||||
this.confidence = confidence;
|
||||
this.reason = reason;
|
||||
}
|
||||
|
||||
public String getTagName() { return tagName; }
|
||||
public void setTagName(String tagName) { this.tagName = tagName; }
|
||||
|
||||
public double getConfidence() { return confidence; }
|
||||
public void setConfidence(double confidence) { this.confidence = confidence; }
|
||||
|
||||
public String getReason() { return reason; }
|
||||
public void setReason(String reason) { this.reason = reason; }
|
||||
}
|
||||
}
|
||||
@@ -16,6 +16,7 @@ public class CollectionDto {
|
||||
private String coverImagePath;
|
||||
private Boolean isArchived;
|
||||
private List<TagDto> tags;
|
||||
private List<String> tagNames; // For search results
|
||||
private List<CollectionStoryDto> collectionStories;
|
||||
private Integer storyCount;
|
||||
private Integer totalWordCount;
|
||||
@@ -83,6 +84,14 @@ public class CollectionDto {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public List<String> getTagNames() {
|
||||
return tagNames;
|
||||
}
|
||||
|
||||
public void setTagNames(List<String> tagNames) {
|
||||
this.tagNames = tagNames;
|
||||
}
|
||||
|
||||
public List<CollectionStoryDto> getCollectionStories() {
|
||||
return collectionStories;
|
||||
}
|
||||
|
||||
115
backend/src/main/java/com/storycove/dto/EPUBExportRequest.java
Normal file
115
backend/src/main/java/com/storycove/dto/EPUBExportRequest.java
Normal file
@@ -0,0 +1,115 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class EPUBExportRequest {
|
||||
|
||||
@NotNull(message = "Story ID is required")
|
||||
private UUID storyId;
|
||||
|
||||
private String customTitle;
|
||||
|
||||
private String customAuthor;
|
||||
|
||||
private Boolean includeReadingPosition = true;
|
||||
|
||||
private Boolean includeCoverImage = true;
|
||||
|
||||
private Boolean includeMetadata = true;
|
||||
|
||||
private List<String> customMetadata;
|
||||
|
||||
private String language = "en";
|
||||
|
||||
private Boolean splitByChapters = false;
|
||||
|
||||
private Integer maxWordsPerChapter;
|
||||
|
||||
public EPUBExportRequest() {}
|
||||
|
||||
public EPUBExportRequest(UUID storyId) {
|
||||
this.storyId = storyId;
|
||||
}
|
||||
|
||||
public UUID getStoryId() {
|
||||
return storyId;
|
||||
}
|
||||
|
||||
public void setStoryId(UUID storyId) {
|
||||
this.storyId = storyId;
|
||||
}
|
||||
|
||||
public String getCustomTitle() {
|
||||
return customTitle;
|
||||
}
|
||||
|
||||
public void setCustomTitle(String customTitle) {
|
||||
this.customTitle = customTitle;
|
||||
}
|
||||
|
||||
public String getCustomAuthor() {
|
||||
return customAuthor;
|
||||
}
|
||||
|
||||
public void setCustomAuthor(String customAuthor) {
|
||||
this.customAuthor = customAuthor;
|
||||
}
|
||||
|
||||
public Boolean getIncludeReadingPosition() {
|
||||
return includeReadingPosition;
|
||||
}
|
||||
|
||||
public void setIncludeReadingPosition(Boolean includeReadingPosition) {
|
||||
this.includeReadingPosition = includeReadingPosition;
|
||||
}
|
||||
|
||||
public Boolean getIncludeCoverImage() {
|
||||
return includeCoverImage;
|
||||
}
|
||||
|
||||
public void setIncludeCoverImage(Boolean includeCoverImage) {
|
||||
this.includeCoverImage = includeCoverImage;
|
||||
}
|
||||
|
||||
public Boolean getIncludeMetadata() {
|
||||
return includeMetadata;
|
||||
}
|
||||
|
||||
public void setIncludeMetadata(Boolean includeMetadata) {
|
||||
this.includeMetadata = includeMetadata;
|
||||
}
|
||||
|
||||
public List<String> getCustomMetadata() {
|
||||
return customMetadata;
|
||||
}
|
||||
|
||||
public void setCustomMetadata(List<String> customMetadata) {
|
||||
this.customMetadata = customMetadata;
|
||||
}
|
||||
|
||||
public String getLanguage() {
|
||||
return language;
|
||||
}
|
||||
|
||||
public void setLanguage(String language) {
|
||||
this.language = language;
|
||||
}
|
||||
|
||||
public Boolean getSplitByChapters() {
|
||||
return splitByChapters;
|
||||
}
|
||||
|
||||
public void setSplitByChapters(Boolean splitByChapters) {
|
||||
this.splitByChapters = splitByChapters;
|
||||
}
|
||||
|
||||
public Integer getMaxWordsPerChapter() {
|
||||
return maxWordsPerChapter;
|
||||
}
|
||||
|
||||
public void setMaxWordsPerChapter(Integer maxWordsPerChapter) {
|
||||
this.maxWordsPerChapter = maxWordsPerChapter;
|
||||
}
|
||||
}
|
||||
133
backend/src/main/java/com/storycove/dto/EPUBImportRequest.java
Normal file
133
backend/src/main/java/com/storycove/dto/EPUBImportRequest.java
Normal file
@@ -0,0 +1,133 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class EPUBImportRequest {
|
||||
|
||||
@NotNull(message = "EPUB file is required")
|
||||
private MultipartFile epubFile;
|
||||
|
||||
private UUID authorId;
|
||||
|
||||
private String authorName;
|
||||
|
||||
private UUID seriesId;
|
||||
|
||||
private String seriesName;
|
||||
|
||||
private Integer seriesVolume;
|
||||
|
||||
private List<String> tags;
|
||||
|
||||
private Boolean preserveReadingPosition = true;
|
||||
|
||||
private Boolean overwriteExisting = false;
|
||||
|
||||
private Boolean createMissingAuthor = true;
|
||||
|
||||
private Boolean createMissingSeries = true;
|
||||
|
||||
private Boolean extractCover = true;
|
||||
|
||||
public EPUBImportRequest() {}
|
||||
|
||||
public MultipartFile getEpubFile() {
|
||||
return epubFile;
|
||||
}
|
||||
|
||||
public void setEpubFile(MultipartFile epubFile) {
|
||||
this.epubFile = epubFile;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
public void setAuthorId(UUID authorId) {
|
||||
this.authorId = authorId;
|
||||
}
|
||||
|
||||
public String getAuthorName() {
|
||||
return authorName;
|
||||
}
|
||||
|
||||
public void setAuthorName(String authorName) {
|
||||
this.authorName = authorName;
|
||||
}
|
||||
|
||||
public UUID getSeriesId() {
|
||||
return seriesId;
|
||||
}
|
||||
|
||||
public void setSeriesId(UUID seriesId) {
|
||||
this.seriesId = seriesId;
|
||||
}
|
||||
|
||||
public String getSeriesName() {
|
||||
return seriesName;
|
||||
}
|
||||
|
||||
public void setSeriesName(String seriesName) {
|
||||
this.seriesName = seriesName;
|
||||
}
|
||||
|
||||
public Integer getSeriesVolume() {
|
||||
return seriesVolume;
|
||||
}
|
||||
|
||||
public void setSeriesVolume(Integer seriesVolume) {
|
||||
this.seriesVolume = seriesVolume;
|
||||
}
|
||||
|
||||
public List<String> getTags() {
|
||||
return tags;
|
||||
}
|
||||
|
||||
public void setTags(List<String> tags) {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public Boolean getPreserveReadingPosition() {
|
||||
return preserveReadingPosition;
|
||||
}
|
||||
|
||||
public void setPreserveReadingPosition(Boolean preserveReadingPosition) {
|
||||
this.preserveReadingPosition = preserveReadingPosition;
|
||||
}
|
||||
|
||||
public Boolean getOverwriteExisting() {
|
||||
return overwriteExisting;
|
||||
}
|
||||
|
||||
public void setOverwriteExisting(Boolean overwriteExisting) {
|
||||
this.overwriteExisting = overwriteExisting;
|
||||
}
|
||||
|
||||
public Boolean getCreateMissingAuthor() {
|
||||
return createMissingAuthor;
|
||||
}
|
||||
|
||||
public void setCreateMissingAuthor(Boolean createMissingAuthor) {
|
||||
this.createMissingAuthor = createMissingAuthor;
|
||||
}
|
||||
|
||||
public Boolean getCreateMissingSeries() {
|
||||
return createMissingSeries;
|
||||
}
|
||||
|
||||
public void setCreateMissingSeries(Boolean createMissingSeries) {
|
||||
this.createMissingSeries = createMissingSeries;
|
||||
}
|
||||
|
||||
public Boolean getExtractCover() {
|
||||
return extractCover;
|
||||
}
|
||||
|
||||
public void setExtractCover(Boolean extractCover) {
|
||||
this.extractCover = extractCover;
|
||||
}
|
||||
}
|
||||
107
backend/src/main/java/com/storycove/dto/EPUBImportResponse.java
Normal file
107
backend/src/main/java/com/storycove/dto/EPUBImportResponse.java
Normal file
@@ -0,0 +1,107 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class EPUBImportResponse {
|
||||
|
||||
private boolean success;
|
||||
private String message;
|
||||
private UUID storyId;
|
||||
private String storyTitle;
|
||||
private Integer totalChapters;
|
||||
private Integer wordCount;
|
||||
private ReadingPositionDto readingPosition;
|
||||
private List<String> warnings;
|
||||
private List<String> errors;
|
||||
|
||||
public EPUBImportResponse() {}
|
||||
|
||||
public EPUBImportResponse(boolean success, String message) {
|
||||
this.success = success;
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public static EPUBImportResponse success(UUID storyId, String storyTitle) {
|
||||
EPUBImportResponse response = new EPUBImportResponse(true, "EPUB imported successfully");
|
||||
response.setStoryId(storyId);
|
||||
response.setStoryTitle(storyTitle);
|
||||
return response;
|
||||
}
|
||||
|
||||
public static EPUBImportResponse error(String message) {
|
||||
return new EPUBImportResponse(false, message);
|
||||
}
|
||||
|
||||
public boolean isSuccess() {
|
||||
return success;
|
||||
}
|
||||
|
||||
public void setSuccess(boolean success) {
|
||||
this.success = success;
|
||||
}
|
||||
|
||||
public String getMessage() {
|
||||
return message;
|
||||
}
|
||||
|
||||
public void setMessage(String message) {
|
||||
this.message = message;
|
||||
}
|
||||
|
||||
public UUID getStoryId() {
|
||||
return storyId;
|
||||
}
|
||||
|
||||
public void setStoryId(UUID storyId) {
|
||||
this.storyId = storyId;
|
||||
}
|
||||
|
||||
public String getStoryTitle() {
|
||||
return storyTitle;
|
||||
}
|
||||
|
||||
public void setStoryTitle(String storyTitle) {
|
||||
this.storyTitle = storyTitle;
|
||||
}
|
||||
|
||||
public Integer getTotalChapters() {
|
||||
return totalChapters;
|
||||
}
|
||||
|
||||
public void setTotalChapters(Integer totalChapters) {
|
||||
this.totalChapters = totalChapters;
|
||||
}
|
||||
|
||||
public Integer getWordCount() {
|
||||
return wordCount;
|
||||
}
|
||||
|
||||
public void setWordCount(Integer wordCount) {
|
||||
this.wordCount = wordCount;
|
||||
}
|
||||
|
||||
public ReadingPositionDto getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(ReadingPositionDto readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public List<String> getWarnings() {
|
||||
return warnings;
|
||||
}
|
||||
|
||||
public void setWarnings(List<String> warnings) {
|
||||
this.warnings = warnings;
|
||||
}
|
||||
|
||||
public List<String> getErrors() {
|
||||
return errors;
|
||||
}
|
||||
|
||||
public void setErrors(List<String> errors) {
|
||||
this.errors = errors;
|
||||
}
|
||||
}
|
||||
31
backend/src/main/java/com/storycove/dto/FacetCountDto.java
Normal file
31
backend/src/main/java/com/storycove/dto/FacetCountDto.java
Normal file
@@ -0,0 +1,31 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
public class FacetCountDto {
|
||||
|
||||
private String value;
|
||||
private int count;
|
||||
|
||||
public FacetCountDto() {}
|
||||
|
||||
public FacetCountDto(String value, int count) {
|
||||
this.value = value;
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public void setValue(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(int count) {
|
||||
this.count = count;
|
||||
}
|
||||
}
|
||||
@@ -8,6 +8,7 @@ public class HtmlSanitizationConfigDto {
|
||||
private Map<String, List<String>> allowedAttributes;
|
||||
private List<String> allowedCssProperties;
|
||||
private Map<String, List<String>> removedAttributes;
|
||||
private Map<String, Map<String, List<String>>> allowedProtocols;
|
||||
private String description;
|
||||
|
||||
public HtmlSanitizationConfigDto() {}
|
||||
@@ -44,6 +45,14 @@ public class HtmlSanitizationConfigDto {
|
||||
this.removedAttributes = removedAttributes;
|
||||
}
|
||||
|
||||
public Map<String, Map<String, List<String>>> getAllowedProtocols() {
|
||||
return allowedProtocols;
|
||||
}
|
||||
|
||||
public void setAllowedProtocols(Map<String, Map<String, List<String>>> allowedProtocols) {
|
||||
this.allowedProtocols = allowedProtocols;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
61
backend/src/main/java/com/storycove/dto/LibraryDto.java
Normal file
61
backend/src/main/java/com/storycove/dto/LibraryDto.java
Normal file
@@ -0,0 +1,61 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
public class LibraryDto {
|
||||
private String id;
|
||||
private String name;
|
||||
private String description;
|
||||
private boolean isActive;
|
||||
private boolean isInitialized;
|
||||
|
||||
// Constructors
|
||||
public LibraryDto() {}
|
||||
|
||||
public LibraryDto(String id, String name, String description, boolean isActive, boolean isInitialized) {
|
||||
this.id = id;
|
||||
this.name = name;
|
||||
this.description = description;
|
||||
this.isActive = isActive;
|
||||
this.isInitialized = isInitialized;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public boolean isActive() {
|
||||
return isActive;
|
||||
}
|
||||
|
||||
public void setActive(boolean active) {
|
||||
isActive = active;
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return isInitialized;
|
||||
}
|
||||
|
||||
public void setInitialized(boolean initialized) {
|
||||
isInitialized = initialized;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
|
||||
public class ProcessContentImagesRequest {
|
||||
|
||||
@NotBlank(message = "HTML content is required")
|
||||
private String htmlContent;
|
||||
|
||||
public ProcessContentImagesRequest() {}
|
||||
|
||||
public ProcessContentImagesRequest(String htmlContent) {
|
||||
this.htmlContent = htmlContent;
|
||||
}
|
||||
|
||||
public String getHtmlContent() {
|
||||
return htmlContent;
|
||||
}
|
||||
|
||||
public void setHtmlContent(String htmlContent) {
|
||||
this.htmlContent = htmlContent;
|
||||
}
|
||||
}
|
||||
124
backend/src/main/java/com/storycove/dto/ReadingPositionDto.java
Normal file
124
backend/src/main/java/com/storycove/dto/ReadingPositionDto.java
Normal file
@@ -0,0 +1,124 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
public class ReadingPositionDto {
|
||||
|
||||
private UUID id;
|
||||
private UUID storyId;
|
||||
private Integer chapterIndex;
|
||||
private String chapterTitle;
|
||||
private Integer wordPosition;
|
||||
private Integer characterPosition;
|
||||
private Double percentageComplete;
|
||||
private String epubCfi;
|
||||
private String contextBefore;
|
||||
private String contextAfter;
|
||||
private LocalDateTime createdAt;
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
public ReadingPositionDto() {}
|
||||
|
||||
public ReadingPositionDto(UUID storyId, Integer chapterIndex, Integer wordPosition) {
|
||||
this.storyId = storyId;
|
||||
this.chapterIndex = chapterIndex;
|
||||
this.wordPosition = wordPosition;
|
||||
}
|
||||
|
||||
public UUID getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(UUID id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public UUID getStoryId() {
|
||||
return storyId;
|
||||
}
|
||||
|
||||
public void setStoryId(UUID storyId) {
|
||||
this.storyId = storyId;
|
||||
}
|
||||
|
||||
public Integer getChapterIndex() {
|
||||
return chapterIndex;
|
||||
}
|
||||
|
||||
public void setChapterIndex(Integer chapterIndex) {
|
||||
this.chapterIndex = chapterIndex;
|
||||
}
|
||||
|
||||
public String getChapterTitle() {
|
||||
return chapterTitle;
|
||||
}
|
||||
|
||||
public void setChapterTitle(String chapterTitle) {
|
||||
this.chapterTitle = chapterTitle;
|
||||
}
|
||||
|
||||
public Integer getWordPosition() {
|
||||
return wordPosition;
|
||||
}
|
||||
|
||||
public void setWordPosition(Integer wordPosition) {
|
||||
this.wordPosition = wordPosition;
|
||||
}
|
||||
|
||||
public Integer getCharacterPosition() {
|
||||
return characterPosition;
|
||||
}
|
||||
|
||||
public void setCharacterPosition(Integer characterPosition) {
|
||||
this.characterPosition = characterPosition;
|
||||
}
|
||||
|
||||
public Double getPercentageComplete() {
|
||||
return percentageComplete;
|
||||
}
|
||||
|
||||
public void setPercentageComplete(Double percentageComplete) {
|
||||
this.percentageComplete = percentageComplete;
|
||||
}
|
||||
|
||||
public String getEpubCfi() {
|
||||
return epubCfi;
|
||||
}
|
||||
|
||||
public void setEpubCfi(String epubCfi) {
|
||||
this.epubCfi = epubCfi;
|
||||
}
|
||||
|
||||
public String getContextBefore() {
|
||||
return contextBefore;
|
||||
}
|
||||
|
||||
public void setContextBefore(String contextBefore) {
|
||||
this.contextBefore = contextBefore;
|
||||
}
|
||||
|
||||
public String getContextAfter() {
|
||||
return contextAfter;
|
||||
}
|
||||
|
||||
public void setContextAfter(String contextAfter) {
|
||||
this.contextAfter = contextAfter;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
|
||||
public LocalDateTime getUpdatedAt() {
|
||||
return updatedAt;
|
||||
}
|
||||
|
||||
public void setUpdatedAt(LocalDateTime updatedAt) {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.Min;
|
||||
|
||||
public class ReadingProgressRequest {
|
||||
|
||||
@Min(value = 0, message = "Reading position must be non-negative")
|
||||
private Integer position;
|
||||
|
||||
public ReadingProgressRequest() {}
|
||||
|
||||
public ReadingProgressRequest(Integer position) {
|
||||
this.position = position;
|
||||
}
|
||||
|
||||
public Integer getPosition() {
|
||||
return position;
|
||||
}
|
||||
|
||||
public void setPosition(Integer position) {
|
||||
this.position = position;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
|
||||
public class ReadingStatusRequest {
|
||||
|
||||
@NotNull(message = "Reading status is required")
|
||||
private Boolean isRead;
|
||||
|
||||
public ReadingStatusRequest() {}
|
||||
|
||||
public ReadingStatusRequest(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class SearchResultDto<T> {
|
||||
|
||||
@@ -10,6 +11,7 @@ public class SearchResultDto<T> {
|
||||
private int perPage;
|
||||
private String query;
|
||||
private long searchTimeMs;
|
||||
private Map<String, List<FacetCountDto>> facets;
|
||||
|
||||
public SearchResultDto() {}
|
||||
|
||||
@@ -22,6 +24,16 @@ public class SearchResultDto<T> {
|
||||
this.searchTimeMs = searchTimeMs;
|
||||
}
|
||||
|
||||
public SearchResultDto(List<T> results, long totalHits, int page, int perPage, String query, long searchTimeMs, Map<String, List<FacetCountDto>> facets) {
|
||||
this.results = results;
|
||||
this.totalHits = totalHits;
|
||||
this.page = page;
|
||||
this.perPage = perPage;
|
||||
this.query = query;
|
||||
this.searchTimeMs = searchTimeMs;
|
||||
this.facets = facets;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public List<T> getResults() {
|
||||
return results;
|
||||
@@ -70,4 +82,12 @@ public class SearchResultDto<T> {
|
||||
public void setSearchTimeMs(long searchTimeMs) {
|
||||
this.searchTimeMs = searchTimeMs;
|
||||
}
|
||||
|
||||
public Map<String, List<FacetCountDto>> getFacets() {
|
||||
return facets;
|
||||
}
|
||||
|
||||
public void setFacets(Map<String, List<FacetCountDto>> facets) {
|
||||
this.facets = facets;
|
||||
}
|
||||
}
|
||||
@@ -21,13 +21,18 @@ public class StoryDto {
|
||||
private String description;
|
||||
|
||||
private String contentHtml;
|
||||
private String contentPlain;
|
||||
// contentPlain removed for performance - use StoryReadingDto when content is needed
|
||||
private String sourceUrl;
|
||||
private String coverPath;
|
||||
private Integer wordCount;
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
@@ -85,13 +90,6 @@ public class StoryDto {
|
||||
this.contentHtml = contentHtml;
|
||||
}
|
||||
|
||||
public String getContentPlain() {
|
||||
return contentPlain;
|
||||
}
|
||||
|
||||
public void setContentPlain(String contentPlain) {
|
||||
this.contentPlain = contentPlain;
|
||||
}
|
||||
|
||||
public String getSourceUrl() {
|
||||
return sourceUrl;
|
||||
@@ -133,6 +131,30 @@ public class StoryDto {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
202
backend/src/main/java/com/storycove/dto/StoryReadingDto.java
Normal file
202
backend/src/main/java/com/storycove/dto/StoryReadingDto.java
Normal file
@@ -0,0 +1,202 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Story DTO specifically for reading view.
|
||||
* Contains contentHtml but excludes contentPlain for performance.
|
||||
*/
|
||||
public class StoryReadingDto {
|
||||
|
||||
private UUID id;
|
||||
private String title;
|
||||
private String summary;
|
||||
private String description;
|
||||
private String contentHtml; // For reading - includes HTML
|
||||
// contentPlain excluded for performance
|
||||
private String sourceUrl;
|
||||
private String coverPath;
|
||||
private Integer wordCount;
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
private UUID seriesId;
|
||||
private String seriesName;
|
||||
private List<TagDto> tags;
|
||||
|
||||
private LocalDateTime createdAt;
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
public StoryReadingDto() {}
|
||||
|
||||
// Getters and Setters
|
||||
public UUID getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(UUID id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getTitle() {
|
||||
return title;
|
||||
}
|
||||
|
||||
public void setTitle(String title) {
|
||||
this.title = title;
|
||||
}
|
||||
|
||||
public String getSummary() {
|
||||
return summary;
|
||||
}
|
||||
|
||||
public void setSummary(String summary) {
|
||||
this.summary = summary;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getContentHtml() {
|
||||
return contentHtml;
|
||||
}
|
||||
|
||||
public void setContentHtml(String contentHtml) {
|
||||
this.contentHtml = contentHtml;
|
||||
}
|
||||
|
||||
public String getSourceUrl() {
|
||||
return sourceUrl;
|
||||
}
|
||||
|
||||
public void setSourceUrl(String sourceUrl) {
|
||||
this.sourceUrl = sourceUrl;
|
||||
}
|
||||
|
||||
public String getCoverPath() {
|
||||
return coverPath;
|
||||
}
|
||||
|
||||
public void setCoverPath(String coverPath) {
|
||||
this.coverPath = coverPath;
|
||||
}
|
||||
|
||||
public Integer getWordCount() {
|
||||
return wordCount;
|
||||
}
|
||||
|
||||
public void setWordCount(Integer wordCount) {
|
||||
this.wordCount = wordCount;
|
||||
}
|
||||
|
||||
public Integer getRating() {
|
||||
return rating;
|
||||
}
|
||||
|
||||
public void setRating(Integer rating) {
|
||||
this.rating = rating;
|
||||
}
|
||||
|
||||
public Integer getVolume() {
|
||||
return volume;
|
||||
}
|
||||
|
||||
public void setVolume(Integer volume) {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
public void setAuthorId(UUID authorId) {
|
||||
this.authorId = authorId;
|
||||
}
|
||||
|
||||
public String getAuthorName() {
|
||||
return authorName;
|
||||
}
|
||||
|
||||
public void setAuthorName(String authorName) {
|
||||
this.authorName = authorName;
|
||||
}
|
||||
|
||||
public UUID getSeriesId() {
|
||||
return seriesId;
|
||||
}
|
||||
|
||||
public void setSeriesId(UUID seriesId) {
|
||||
this.seriesId = seriesId;
|
||||
}
|
||||
|
||||
public String getSeriesName() {
|
||||
return seriesName;
|
||||
}
|
||||
|
||||
public void setSeriesName(String seriesName) {
|
||||
this.seriesName = seriesName;
|
||||
}
|
||||
|
||||
public List<TagDto> getTags() {
|
||||
return tags;
|
||||
}
|
||||
|
||||
public void setTags(List<TagDto> tags) {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
|
||||
public LocalDateTime getUpdatedAt() {
|
||||
return updatedAt;
|
||||
}
|
||||
|
||||
public void setUpdatedAt(LocalDateTime updatedAt) {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
}
|
||||
@@ -9,13 +9,17 @@ public class StorySearchDto {
|
||||
private UUID id;
|
||||
private String title;
|
||||
private String description;
|
||||
private String contentPlain;
|
||||
private String sourceUrl;
|
||||
private String coverPath;
|
||||
private Integer wordCount;
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading status
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Author info
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
@@ -29,6 +33,9 @@ public class StorySearchDto {
|
||||
|
||||
private LocalDateTime createdAt;
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
// Alias for createdAt to match frontend expectations
|
||||
private LocalDateTime dateAdded;
|
||||
|
||||
// Search-specific fields
|
||||
private double searchScore;
|
||||
@@ -61,13 +68,6 @@ public class StorySearchDto {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getContentPlain() {
|
||||
return contentPlain;
|
||||
}
|
||||
|
||||
public void setContentPlain(String contentPlain) {
|
||||
this.contentPlain = contentPlain;
|
||||
}
|
||||
|
||||
public String getSourceUrl() {
|
||||
return sourceUrl;
|
||||
@@ -109,6 +109,30 @@ public class StorySearchDto {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
@@ -164,6 +188,14 @@ public class StorySearchDto {
|
||||
public void setUpdatedAt(LocalDateTime updatedAt) {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
|
||||
public LocalDateTime getDateAdded() {
|
||||
return dateAdded;
|
||||
}
|
||||
|
||||
public void setDateAdded(LocalDateTime dateAdded) {
|
||||
this.dateAdded = dateAdded;
|
||||
}
|
||||
|
||||
public double getSearchScore() {
|
||||
return searchScore;
|
||||
|
||||
@@ -20,6 +20,11 @@ public class StorySummaryDto {
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
@@ -106,6 +111,30 @@ public class StorySummaryDto {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
77
backend/src/main/java/com/storycove/dto/TagAliasDto.java
Normal file
77
backend/src/main/java/com/storycove/dto/TagAliasDto.java
Normal file
@@ -0,0 +1,77 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Size;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
public class TagAliasDto {
|
||||
|
||||
private UUID id;
|
||||
|
||||
@NotBlank(message = "Alias name is required")
|
||||
@Size(max = 100, message = "Alias name must not exceed 100 characters")
|
||||
private String aliasName;
|
||||
|
||||
private UUID canonicalTagId;
|
||||
private String canonicalTagName; // For convenience in frontend
|
||||
private Boolean createdFromMerge;
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
public TagAliasDto() {}
|
||||
|
||||
public TagAliasDto(String aliasName, UUID canonicalTagId) {
|
||||
this.aliasName = aliasName;
|
||||
this.canonicalTagId = canonicalTagId;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public UUID getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(UUID id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getAliasName() {
|
||||
return aliasName;
|
||||
}
|
||||
|
||||
public void setAliasName(String aliasName) {
|
||||
this.aliasName = aliasName;
|
||||
}
|
||||
|
||||
public UUID getCanonicalTagId() {
|
||||
return canonicalTagId;
|
||||
}
|
||||
|
||||
public void setCanonicalTagId(UUID canonicalTagId) {
|
||||
this.canonicalTagId = canonicalTagId;
|
||||
}
|
||||
|
||||
public String getCanonicalTagName() {
|
||||
return canonicalTagName;
|
||||
}
|
||||
|
||||
public void setCanonicalTagName(String canonicalTagName) {
|
||||
this.canonicalTagName = canonicalTagName;
|
||||
}
|
||||
|
||||
public Boolean getCreatedFromMerge() {
|
||||
return createdFromMerge;
|
||||
}
|
||||
|
||||
public void setCreatedFromMerge(Boolean createdFromMerge) {
|
||||
this.createdFromMerge = createdFromMerge;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
}
|
||||
@@ -4,6 +4,7 @@ import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Size;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
public class TagDto {
|
||||
@@ -14,7 +15,16 @@ public class TagDto {
|
||||
@Size(max = 100, message = "Tag name must not exceed 100 characters")
|
||||
private String name;
|
||||
|
||||
@Size(max = 7, message = "Color must be a valid hex color code")
|
||||
private String color;
|
||||
|
||||
@Size(max = 500, message = "Description must not exceed 500 characters")
|
||||
private String description;
|
||||
|
||||
private Integer storyCount;
|
||||
private Integer collectionCount;
|
||||
private Integer aliasCount;
|
||||
private List<TagAliasDto> aliases;
|
||||
private LocalDateTime createdAt;
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
@@ -41,6 +51,22 @@ public class TagDto {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getColor() {
|
||||
return color;
|
||||
}
|
||||
|
||||
public void setColor(String color) {
|
||||
this.color = color;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public Integer getStoryCount() {
|
||||
return storyCount;
|
||||
}
|
||||
@@ -49,6 +75,30 @@ public class TagDto {
|
||||
this.storyCount = storyCount;
|
||||
}
|
||||
|
||||
public Integer getCollectionCount() {
|
||||
return collectionCount;
|
||||
}
|
||||
|
||||
public void setCollectionCount(Integer collectionCount) {
|
||||
this.collectionCount = collectionCount;
|
||||
}
|
||||
|
||||
public Integer getAliasCount() {
|
||||
return aliasCount;
|
||||
}
|
||||
|
||||
public void setAliasCount(Integer aliasCount) {
|
||||
this.aliasCount = aliasCount;
|
||||
}
|
||||
|
||||
public List<TagAliasDto> getAliases() {
|
||||
return aliases;
|
||||
}
|
||||
|
||||
public void setAliases(List<TagAliasDto> aliases) {
|
||||
this.aliases = aliases;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
@@ -52,6 +52,10 @@ public class Collection {
|
||||
)
|
||||
private Set<Tag> tags = new HashSet<>();
|
||||
|
||||
// Transient field for search results - tag names only to avoid lazy loading issues
|
||||
@Transient
|
||||
private List<String> tagNames;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
@@ -192,6 +196,14 @@ public class Collection {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public List<String> getTagNames() {
|
||||
return tagNames;
|
||||
}
|
||||
|
||||
public void setTagNames(List<String> tagNames) {
|
||||
this.tagNames = tagNames;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
93
backend/src/main/java/com/storycove/entity/Library.java
Normal file
93
backend/src/main/java/com/storycove/entity/Library.java
Normal file
@@ -0,0 +1,93 @@
|
||||
package com.storycove.entity;
|
||||
|
||||
public class Library {
|
||||
private String id;
|
||||
private String name;
|
||||
private String description;
|
||||
private String passwordHash;
|
||||
private String dbName;
|
||||
private String typesenseCollection;
|
||||
private String imagePath;
|
||||
private boolean initialized;
|
||||
|
||||
// Constructors
|
||||
public Library() {}
|
||||
|
||||
public Library(String id, String name, String description, String passwordHash, String dbName) {
|
||||
this.id = id;
|
||||
this.name = name;
|
||||
this.description = description;
|
||||
this.passwordHash = passwordHash;
|
||||
this.dbName = dbName;
|
||||
this.typesenseCollection = "stories_" + id;
|
||||
this.imagePath = "/images/" + id;
|
||||
this.initialized = false;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public String getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(String id) {
|
||||
this.id = id;
|
||||
this.typesenseCollection = "stories_" + id;
|
||||
this.imagePath = "/images/" + id;
|
||||
}
|
||||
|
||||
public String getName() {
|
||||
return name;
|
||||
}
|
||||
|
||||
public void setName(String name) {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
public String getPasswordHash() {
|
||||
return passwordHash;
|
||||
}
|
||||
|
||||
public void setPasswordHash(String passwordHash) {
|
||||
this.passwordHash = passwordHash;
|
||||
}
|
||||
|
||||
public String getDbName() {
|
||||
return dbName;
|
||||
}
|
||||
|
||||
public void setDbName(String dbName) {
|
||||
this.dbName = dbName;
|
||||
}
|
||||
|
||||
public String getTypesenseCollection() {
|
||||
return typesenseCollection;
|
||||
}
|
||||
|
||||
public void setTypesenseCollection(String typesenseCollection) {
|
||||
this.typesenseCollection = typesenseCollection;
|
||||
}
|
||||
|
||||
public String getImagePath() {
|
||||
return imagePath;
|
||||
}
|
||||
|
||||
public void setImagePath(String imagePath) {
|
||||
this.imagePath = imagePath;
|
||||
}
|
||||
|
||||
public boolean isInitialized() {
|
||||
return initialized;
|
||||
}
|
||||
|
||||
public void setInitialized(boolean initialized) {
|
||||
this.initialized = initialized;
|
||||
}
|
||||
}
|
||||
230
backend/src/main/java/com/storycove/entity/ReadingPosition.java
Normal file
230
backend/src/main/java/com/storycove/entity/ReadingPosition.java
Normal file
@@ -0,0 +1,230 @@
|
||||
package com.storycove.entity;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import org.hibernate.annotations.UpdateTimestamp;
|
||||
import com.fasterxml.jackson.annotation.JsonBackReference;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
@Entity
|
||||
@Table(name = "reading_positions", indexes = {
|
||||
@Index(name = "idx_reading_position_story", columnList = "story_id")
|
||||
})
|
||||
public class ReadingPosition {
|
||||
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.UUID)
|
||||
private UUID id;
|
||||
|
||||
@NotNull
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "story_id", nullable = false)
|
||||
@JsonBackReference("story-reading-positions")
|
||||
private Story story;
|
||||
|
||||
@Column(name = "chapter_index")
|
||||
private Integer chapterIndex;
|
||||
|
||||
@Column(name = "chapter_title")
|
||||
private String chapterTitle;
|
||||
|
||||
@Column(name = "word_position")
|
||||
private Integer wordPosition;
|
||||
|
||||
@Column(name = "character_position")
|
||||
private Integer characterPosition;
|
||||
|
||||
@Column(name = "percentage_complete")
|
||||
private Double percentageComplete;
|
||||
|
||||
@Column(name = "epub_cfi", columnDefinition = "TEXT")
|
||||
private String epubCfi;
|
||||
|
||||
@Column(name = "context_before", length = 500)
|
||||
private String contextBefore;
|
||||
|
||||
@Column(name = "context_after", length = 500)
|
||||
private String contextAfter;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
@UpdateTimestamp
|
||||
@Column(name = "updated_at", nullable = false)
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
public ReadingPosition() {}
|
||||
|
||||
public ReadingPosition(Story story) {
|
||||
this.story = story;
|
||||
this.chapterIndex = 0;
|
||||
this.wordPosition = 0;
|
||||
this.characterPosition = 0;
|
||||
this.percentageComplete = 0.0;
|
||||
}
|
||||
|
||||
public ReadingPosition(Story story, Integer chapterIndex, Integer wordPosition) {
|
||||
this.story = story;
|
||||
this.chapterIndex = chapterIndex;
|
||||
this.wordPosition = wordPosition;
|
||||
this.characterPosition = 0;
|
||||
this.percentageComplete = 0.0;
|
||||
}
|
||||
|
||||
public void updatePosition(Integer chapterIndex, Integer wordPosition, Integer characterPosition) {
|
||||
this.chapterIndex = chapterIndex;
|
||||
this.wordPosition = wordPosition;
|
||||
this.characterPosition = characterPosition;
|
||||
calculatePercentageComplete();
|
||||
}
|
||||
|
||||
public void updatePositionWithCfi(String epubCfi, Integer chapterIndex, Integer wordPosition) {
|
||||
this.epubCfi = epubCfi;
|
||||
this.chapterIndex = chapterIndex;
|
||||
this.wordPosition = wordPosition;
|
||||
calculatePercentageComplete();
|
||||
}
|
||||
|
||||
private void calculatePercentageComplete() {
|
||||
if (story != null && story.getWordCount() != null && story.getWordCount() > 0) {
|
||||
int totalWords = story.getWordCount();
|
||||
int currentPosition = (chapterIndex != null ? chapterIndex * 1000 : 0) +
|
||||
(wordPosition != null ? wordPosition : 0);
|
||||
this.percentageComplete = Math.min(100.0, (double) currentPosition / totalWords * 100);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean isAtBeginning() {
|
||||
return (chapterIndex == null || chapterIndex == 0) &&
|
||||
(wordPosition == null || wordPosition == 0);
|
||||
}
|
||||
|
||||
public boolean isCompleted() {
|
||||
return percentageComplete != null && percentageComplete >= 95.0;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public UUID getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(UUID id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public Story getStory() {
|
||||
return story;
|
||||
}
|
||||
|
||||
public void setStory(Story story) {
|
||||
this.story = story;
|
||||
}
|
||||
|
||||
public Integer getChapterIndex() {
|
||||
return chapterIndex;
|
||||
}
|
||||
|
||||
public void setChapterIndex(Integer chapterIndex) {
|
||||
this.chapterIndex = chapterIndex;
|
||||
}
|
||||
|
||||
public String getChapterTitle() {
|
||||
return chapterTitle;
|
||||
}
|
||||
|
||||
public void setChapterTitle(String chapterTitle) {
|
||||
this.chapterTitle = chapterTitle;
|
||||
}
|
||||
|
||||
public Integer getWordPosition() {
|
||||
return wordPosition;
|
||||
}
|
||||
|
||||
public void setWordPosition(Integer wordPosition) {
|
||||
this.wordPosition = wordPosition;
|
||||
}
|
||||
|
||||
public Integer getCharacterPosition() {
|
||||
return characterPosition;
|
||||
}
|
||||
|
||||
public void setCharacterPosition(Integer characterPosition) {
|
||||
this.characterPosition = characterPosition;
|
||||
}
|
||||
|
||||
public Double getPercentageComplete() {
|
||||
return percentageComplete;
|
||||
}
|
||||
|
||||
public void setPercentageComplete(Double percentageComplete) {
|
||||
this.percentageComplete = percentageComplete;
|
||||
}
|
||||
|
||||
public String getEpubCfi() {
|
||||
return epubCfi;
|
||||
}
|
||||
|
||||
public void setEpubCfi(String epubCfi) {
|
||||
this.epubCfi = epubCfi;
|
||||
}
|
||||
|
||||
public String getContextBefore() {
|
||||
return contextBefore;
|
||||
}
|
||||
|
||||
public void setContextBefore(String contextBefore) {
|
||||
this.contextBefore = contextBefore;
|
||||
}
|
||||
|
||||
public String getContextAfter() {
|
||||
return contextAfter;
|
||||
}
|
||||
|
||||
public void setContextAfter(String contextAfter) {
|
||||
this.contextAfter = contextAfter;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
|
||||
public LocalDateTime getUpdatedAt() {
|
||||
return updatedAt;
|
||||
}
|
||||
|
||||
public void setUpdatedAt(LocalDateTime updatedAt) {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof ReadingPosition)) return false;
|
||||
ReadingPosition that = (ReadingPosition) o;
|
||||
return id != null && id.equals(that.id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getClass().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "ReadingPosition{" +
|
||||
"id=" + id +
|
||||
", storyId=" + (story != null ? story.getId() : null) +
|
||||
", chapterIndex=" + chapterIndex +
|
||||
", wordPosition=" + wordPosition +
|
||||
", percentageComplete=" + percentageComplete +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -55,6 +55,15 @@ public class Story {
|
||||
@Column(name = "volume")
|
||||
private Integer volume;
|
||||
|
||||
@Column(name = "is_read")
|
||||
private Boolean isRead = false;
|
||||
|
||||
@Column(name = "reading_position")
|
||||
private Integer readingPosition = 0;
|
||||
|
||||
@Column(name = "last_read_at")
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "author_id")
|
||||
@JsonBackReference("author-stories")
|
||||
@@ -212,6 +221,30 @@ public class Story {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public Author getAuthor() {
|
||||
return author;
|
||||
}
|
||||
@@ -252,6 +285,37 @@ public class Story {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the reading progress and timestamp
|
||||
*/
|
||||
public void updateReadingProgress(Integer position) {
|
||||
this.readingPosition = position;
|
||||
this.lastReadAt = LocalDateTime.now();
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the story as read and updates the reading position to the end
|
||||
*/
|
||||
public void markAsRead() {
|
||||
this.isRead = true;
|
||||
this.lastReadAt = LocalDateTime.now();
|
||||
// Set reading position to the end of content if available
|
||||
if (contentPlain != null) {
|
||||
this.readingPosition = contentPlain.length();
|
||||
} else if (contentHtml != null) {
|
||||
this.readingPosition = contentHtml.length();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the story as unread and resets reading position
|
||||
*/
|
||||
public void markAsUnread() {
|
||||
this.isRead = false;
|
||||
this.readingPosition = 0;
|
||||
this.lastReadAt = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
@@ -272,6 +336,8 @@ public class Story {
|
||||
", title='" + title + '\'' +
|
||||
", wordCount=" + wordCount +
|
||||
", rating=" + rating +
|
||||
", isRead=" + isRead +
|
||||
", readingPosition=" + readingPosition +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,7 @@ import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import com.fasterxml.jackson.annotation.JsonBackReference;
|
||||
import com.fasterxml.jackson.annotation.JsonManagedReference;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.HashSet;
|
||||
@@ -24,11 +25,27 @@ public class Tag {
|
||||
@Column(nullable = false, unique = true)
|
||||
private String name;
|
||||
|
||||
@Size(max = 7, message = "Color must be a valid hex color code")
|
||||
@Column(length = 7)
|
||||
private String color; // hex color like #3B82F6
|
||||
|
||||
@Size(max = 500, message = "Description must not exceed 500 characters")
|
||||
@Column(length = 500)
|
||||
private String description;
|
||||
|
||||
|
||||
@ManyToMany(mappedBy = "tags")
|
||||
@JsonBackReference("story-tags")
|
||||
private Set<Story> stories = new HashSet<>();
|
||||
|
||||
@ManyToMany(mappedBy = "tags")
|
||||
@JsonBackReference("collection-tags")
|
||||
private Set<Collection> collections = new HashSet<>();
|
||||
|
||||
@OneToMany(mappedBy = "canonicalTag", cascade = CascadeType.ALL, orphanRemoval = true)
|
||||
@JsonManagedReference("tag-aliases")
|
||||
private Set<TagAlias> aliases = new HashSet<>();
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
@@ -39,6 +56,12 @@ public class Tag {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public Tag(String name, String color, String description) {
|
||||
this.name = name;
|
||||
this.color = color;
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
|
||||
|
||||
// Getters and Setters
|
||||
@@ -58,6 +81,22 @@ public class Tag {
|
||||
this.name = name;
|
||||
}
|
||||
|
||||
public String getColor() {
|
||||
return color;
|
||||
}
|
||||
|
||||
public void setColor(String color) {
|
||||
this.color = color;
|
||||
}
|
||||
|
||||
public String getDescription() {
|
||||
return description;
|
||||
}
|
||||
|
||||
public void setDescription(String description) {
|
||||
this.description = description;
|
||||
}
|
||||
|
||||
|
||||
public Set<Story> getStories() {
|
||||
return stories;
|
||||
@@ -67,6 +106,22 @@ public class Tag {
|
||||
this.stories = stories;
|
||||
}
|
||||
|
||||
public Set<Collection> getCollections() {
|
||||
return collections;
|
||||
}
|
||||
|
||||
public void setCollections(Set<Collection> collections) {
|
||||
this.collections = collections;
|
||||
}
|
||||
|
||||
public Set<TagAlias> getAliases() {
|
||||
return aliases;
|
||||
}
|
||||
|
||||
public void setAliases(Set<TagAlias> aliases) {
|
||||
this.aliases = aliases;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
113
backend/src/main/java/com/storycove/entity/TagAlias.java
Normal file
113
backend/src/main/java/com/storycove/entity/TagAlias.java
Normal file
@@ -0,0 +1,113 @@
|
||||
package com.storycove.entity;
|
||||
|
||||
import jakarta.persistence.*;
|
||||
import jakarta.validation.constraints.NotBlank;
|
||||
import jakarta.validation.constraints.Size;
|
||||
import org.hibernate.annotations.CreationTimestamp;
|
||||
import com.fasterxml.jackson.annotation.JsonManagedReference;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.UUID;
|
||||
|
||||
@Entity
|
||||
@Table(name = "tag_aliases")
|
||||
public class TagAlias {
|
||||
|
||||
@Id
|
||||
@GeneratedValue(strategy = GenerationType.UUID)
|
||||
private UUID id;
|
||||
|
||||
@NotBlank(message = "Alias name is required")
|
||||
@Size(max = 100, message = "Alias name must not exceed 100 characters")
|
||||
@Column(name = "alias_name", nullable = false, unique = true)
|
||||
private String aliasName;
|
||||
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "canonical_tag_id", nullable = false)
|
||||
@JsonManagedReference("tag-aliases")
|
||||
private Tag canonicalTag;
|
||||
|
||||
@Column(name = "created_from_merge", nullable = false)
|
||||
private Boolean createdFromMerge = false;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
|
||||
public TagAlias() {}
|
||||
|
||||
public TagAlias(String aliasName, Tag canonicalTag) {
|
||||
this.aliasName = aliasName;
|
||||
this.canonicalTag = canonicalTag;
|
||||
}
|
||||
|
||||
public TagAlias(String aliasName, Tag canonicalTag, Boolean createdFromMerge) {
|
||||
this.aliasName = aliasName;
|
||||
this.canonicalTag = canonicalTag;
|
||||
this.createdFromMerge = createdFromMerge;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public UUID getId() {
|
||||
return id;
|
||||
}
|
||||
|
||||
public void setId(UUID id) {
|
||||
this.id = id;
|
||||
}
|
||||
|
||||
public String getAliasName() {
|
||||
return aliasName;
|
||||
}
|
||||
|
||||
public void setAliasName(String aliasName) {
|
||||
this.aliasName = aliasName;
|
||||
}
|
||||
|
||||
public Tag getCanonicalTag() {
|
||||
return canonicalTag;
|
||||
}
|
||||
|
||||
public void setCanonicalTag(Tag canonicalTag) {
|
||||
this.canonicalTag = canonicalTag;
|
||||
}
|
||||
|
||||
public Boolean getCreatedFromMerge() {
|
||||
return createdFromMerge;
|
||||
}
|
||||
|
||||
public void setCreatedFromMerge(Boolean createdFromMerge) {
|
||||
this.createdFromMerge = createdFromMerge;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
public void setCreatedAt(LocalDateTime createdAt) {
|
||||
this.createdAt = createdAt;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
if (!(o instanceof TagAlias)) return false;
|
||||
TagAlias tagAlias = (TagAlias) o;
|
||||
return id != null && id.equals(tagAlias.id);
|
||||
}
|
||||
|
||||
@Override
|
||||
public int hashCode() {
|
||||
return getClass().hashCode();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String toString() {
|
||||
return "TagAlias{" +
|
||||
"id=" + id +
|
||||
", aliasName='" + aliasName + '\'' +
|
||||
", canonicalTag=" + (canonicalTag != null ? canonicalTag.getName() : null) +
|
||||
", createdFromMerge=" + createdFromMerge +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -52,4 +52,5 @@ public interface AuthorRepository extends JpaRepository<Author, UUID> {
|
||||
|
||||
@Query(value = "SELECT author_rating FROM authors WHERE id = :id", nativeQuery = true)
|
||||
Integer findAuthorRatingById(@Param("id") UUID id);
|
||||
|
||||
}
|
||||
@@ -45,4 +45,11 @@ public interface CollectionRepository extends JpaRepository<Collection, UUID> {
|
||||
*/
|
||||
@Query("SELECT c FROM Collection c WHERE c.isArchived = false ORDER BY c.updatedAt DESC")
|
||||
List<Collection> findAllActiveCollections();
|
||||
|
||||
/**
|
||||
* Find all collections with tags for reindexing operations
|
||||
*/
|
||||
@Query("SELECT c FROM Collection c LEFT JOIN FETCH c.tags ORDER BY c.updatedAt DESC")
|
||||
List<Collection> findAllWithTags();
|
||||
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
package com.storycove.repository;
|
||||
|
||||
import com.storycove.entity.ReadingPosition;
|
||||
import com.storycove.entity.Story;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@Repository
|
||||
public interface ReadingPositionRepository extends JpaRepository<ReadingPosition, UUID> {
|
||||
|
||||
Optional<ReadingPosition> findByStoryId(UUID storyId);
|
||||
|
||||
Optional<ReadingPosition> findByStory(Story story);
|
||||
|
||||
List<ReadingPosition> findByStoryIdIn(List<UUID> storyIds);
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.story.id = :storyId ORDER BY rp.updatedAt DESC")
|
||||
List<ReadingPosition> findByStoryIdOrderByUpdatedAtDesc(@Param("storyId") UUID storyId);
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete >= :minPercentage")
|
||||
List<ReadingPosition> findByMinimumPercentageComplete(@Param("minPercentage") Double minPercentage);
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete >= 95.0")
|
||||
List<ReadingPosition> findCompletedReadings();
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete > 0 AND rp.percentageComplete < 95.0")
|
||||
List<ReadingPosition> findInProgressReadings();
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.updatedAt >= :since ORDER BY rp.updatedAt DESC")
|
||||
List<ReadingPosition> findRecentlyUpdated(@Param("since") LocalDateTime since);
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp ORDER BY rp.updatedAt DESC")
|
||||
List<ReadingPosition> findAllOrderByUpdatedAtDesc();
|
||||
|
||||
@Query("SELECT COUNT(rp) FROM ReadingPosition rp WHERE rp.percentageComplete >= 95.0")
|
||||
long countCompletedReadings();
|
||||
|
||||
@Query("SELECT COUNT(rp) FROM ReadingPosition rp WHERE rp.percentageComplete > 0 AND rp.percentageComplete < 95.0")
|
||||
long countInProgressReadings();
|
||||
|
||||
@Query("SELECT AVG(rp.percentageComplete) FROM ReadingPosition rp WHERE rp.percentageComplete > 0")
|
||||
Double findAverageReadingProgress();
|
||||
|
||||
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.epubCfi IS NOT NULL")
|
||||
List<ReadingPosition> findPositionsWithEpubCfi();
|
||||
|
||||
boolean existsByStoryId(UUID storyId);
|
||||
|
||||
void deleteByStoryId(UUID storyId);
|
||||
}
|
||||
@@ -114,4 +114,130 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
|
||||
"LEFT JOIN FETCH s.series " +
|
||||
"LEFT JOIN FETCH s.tags")
|
||||
List<Story> findAllWithAssociations();
|
||||
|
||||
@Query("SELECT s FROM Story s WHERE UPPER(s.title) = UPPER(:title) AND UPPER(s.author.name) = UPPER(:authorName)")
|
||||
List<Story> findByTitleAndAuthorNameIgnoreCase(@Param("title") String title, @Param("authorName") String authorName);
|
||||
|
||||
/**
|
||||
* Count all stories for random selection (no filters)
|
||||
*/
|
||||
@Query(value = "SELECT COUNT(*) FROM stories", nativeQuery = true)
|
||||
long countAllStories();
|
||||
|
||||
/**
|
||||
* Count stories matching tag name filter for random selection
|
||||
*/
|
||||
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
|
||||
"JOIN story_tags st ON s.id = st.story_id " +
|
||||
"JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE UPPER(t.name) = UPPER(?1)",
|
||||
nativeQuery = true)
|
||||
long countStoriesByTagName(String tagName);
|
||||
|
||||
/**
|
||||
* Find a random story using offset (no filters)
|
||||
*/
|
||||
@Query(value = "SELECT s.* FROM stories s ORDER BY s.id OFFSET ?1 LIMIT 1", nativeQuery = true)
|
||||
Optional<Story> findRandomStory(long offset);
|
||||
|
||||
/**
|
||||
* Find a random story matching tag name filter using offset
|
||||
*/
|
||||
@Query(value = "SELECT s.* FROM stories s " +
|
||||
"JOIN story_tags st ON s.id = st.story_id " +
|
||||
"JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE UPPER(t.name) = UPPER(?1) " +
|
||||
"ORDER BY s.id OFFSET ?2 LIMIT 1",
|
||||
nativeQuery = true)
|
||||
Optional<Story> findRandomStoryByTagName(String tagName, long offset);
|
||||
|
||||
/**
|
||||
* Count stories matching multiple tags (ALL tags must be present)
|
||||
*/
|
||||
@Query(value = "SELECT COUNT(*) FROM (" +
|
||||
" SELECT DISTINCT s.id FROM stories s " +
|
||||
" JOIN story_tags st ON s.id = st.story_id " +
|
||||
" JOIN tags t ON st.tag_id = t.id " +
|
||||
" WHERE UPPER(t.name) IN (?1) " +
|
||||
" GROUP BY s.id " +
|
||||
" HAVING COUNT(DISTINCT t.name) = ?2" +
|
||||
") as matched_stories",
|
||||
nativeQuery = true)
|
||||
long countStoriesByMultipleTags(List<String> upperCaseTagNames, int tagCount);
|
||||
|
||||
/**
|
||||
* Find random story matching multiple tags (ALL tags must be present)
|
||||
*/
|
||||
@Query(value = "SELECT s.* FROM stories s " +
|
||||
"JOIN story_tags st ON s.id = st.story_id " +
|
||||
"JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE UPPER(t.name) IN (?1) " +
|
||||
"GROUP BY s.id, s.title, s.summary, s.description, s.content_html, s.content_plain, s.source_url, s.cover_path, s.word_count, s.rating, s.volume, s.is_read, s.reading_position, s.last_read_at, s.author_id, s.series_id, s.created_at, s.updated_at " +
|
||||
"HAVING COUNT(DISTINCT t.name) = ?2 " +
|
||||
"ORDER BY s.id OFFSET ?3 LIMIT 1",
|
||||
nativeQuery = true)
|
||||
Optional<Story> findRandomStoryByMultipleTags(List<String> upperCaseTagNames, int tagCount, long offset);
|
||||
|
||||
/**
|
||||
* Count stories matching text search (title, author, tags)
|
||||
*/
|
||||
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
|
||||
"LEFT JOIN authors a ON s.author_id = a.id " +
|
||||
"LEFT JOIN story_tags st ON s.id = st.story_id " +
|
||||
"LEFT JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1))",
|
||||
nativeQuery = true)
|
||||
long countStoriesByTextSearch(String searchPattern);
|
||||
|
||||
/**
|
||||
* Find random story matching text search (title, author, tags)
|
||||
*/
|
||||
@Query(value = "SELECT DISTINCT s.* FROM stories s " +
|
||||
"LEFT JOIN authors a ON s.author_id = a.id " +
|
||||
"LEFT JOIN story_tags st ON s.id = st.story_id " +
|
||||
"LEFT JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
|
||||
"ORDER BY s.id OFFSET ?2 LIMIT 1",
|
||||
nativeQuery = true)
|
||||
Optional<Story> findRandomStoryByTextSearch(String searchPattern, long offset);
|
||||
|
||||
/**
|
||||
* Count stories matching both text search AND tags
|
||||
*/
|
||||
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
|
||||
"LEFT JOIN authors a ON s.author_id = a.id " +
|
||||
"LEFT JOIN story_tags st ON s.id = st.story_id " +
|
||||
"LEFT JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
|
||||
"AND s.id IN (" +
|
||||
" SELECT s2.id FROM stories s2 " +
|
||||
" JOIN story_tags st2 ON s2.id = st2.story_id " +
|
||||
" JOIN tags t2 ON st2.tag_id = t2.id " +
|
||||
" WHERE UPPER(t2.name) IN (?2) " +
|
||||
" GROUP BY s2.id " +
|
||||
" HAVING COUNT(DISTINCT t2.name) = ?3" +
|
||||
")",
|
||||
nativeQuery = true)
|
||||
long countStoriesByTextSearchAndTags(String searchPattern, List<String> upperCaseTagNames, int tagCount);
|
||||
|
||||
/**
|
||||
* Find random story matching both text search AND tags
|
||||
*/
|
||||
@Query(value = "SELECT DISTINCT s.* FROM stories s " +
|
||||
"LEFT JOIN authors a ON s.author_id = a.id " +
|
||||
"LEFT JOIN story_tags st ON s.id = st.story_id " +
|
||||
"LEFT JOIN tags t ON st.tag_id = t.id " +
|
||||
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
|
||||
"AND s.id IN (" +
|
||||
" SELECT s2.id FROM stories s2 " +
|
||||
" JOIN story_tags st2 ON s2.id = st2.story_id " +
|
||||
" JOIN tags t2 ON st2.tag_id = t2.id " +
|
||||
" WHERE UPPER(t2.name) IN (?2) " +
|
||||
" GROUP BY s2.id " +
|
||||
" HAVING COUNT(DISTINCT t2.name) = ?3" +
|
||||
") " +
|
||||
"ORDER BY s.id OFFSET ?4 LIMIT 1",
|
||||
nativeQuery = true)
|
||||
Optional<Story> findRandomStoryByTextSearchAndTags(String searchPattern, List<String> upperCaseTagNames, int tagCount, long offset);
|
||||
|
||||
}
|
||||
@@ -0,0 +1,60 @@
|
||||
package com.storycove.repository;
|
||||
|
||||
import com.storycove.entity.TagAlias;
|
||||
import com.storycove.entity.Tag;
|
||||
import org.springframework.data.jpa.repository.JpaRepository;
|
||||
import org.springframework.data.jpa.repository.Query;
|
||||
import org.springframework.data.repository.query.Param;
|
||||
import org.springframework.stereotype.Repository;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
@Repository
|
||||
public interface TagAliasRepository extends JpaRepository<TagAlias, UUID> {
|
||||
|
||||
/**
|
||||
* Find alias by exact alias name (case-insensitive)
|
||||
*/
|
||||
@Query("SELECT ta FROM TagAlias ta WHERE LOWER(ta.aliasName) = LOWER(:aliasName)")
|
||||
Optional<TagAlias> findByAliasNameIgnoreCase(@Param("aliasName") String aliasName);
|
||||
|
||||
/**
|
||||
* Find all aliases for a specific canonical tag
|
||||
*/
|
||||
List<TagAlias> findByCanonicalTag(Tag canonicalTag);
|
||||
|
||||
/**
|
||||
* Find all aliases for a specific canonical tag ID
|
||||
*/
|
||||
@Query("SELECT ta FROM TagAlias ta WHERE ta.canonicalTag.id = :tagId")
|
||||
List<TagAlias> findByCanonicalTagId(@Param("tagId") UUID tagId);
|
||||
|
||||
/**
|
||||
* Find aliases created from merge operations
|
||||
*/
|
||||
List<TagAlias> findByCreatedFromMergeTrue();
|
||||
|
||||
/**
|
||||
* Check if an alias name already exists
|
||||
*/
|
||||
boolean existsByAliasNameIgnoreCase(String aliasName);
|
||||
|
||||
/**
|
||||
* Delete all aliases for a specific tag
|
||||
*/
|
||||
void deleteByCanonicalTag(Tag canonicalTag);
|
||||
|
||||
/**
|
||||
* Count aliases for a specific tag
|
||||
*/
|
||||
@Query("SELECT COUNT(ta) FROM TagAlias ta WHERE ta.canonicalTag.id = :tagId")
|
||||
long countByCanonicalTagId(@Param("tagId") UUID tagId);
|
||||
|
||||
/**
|
||||
* Find aliases that start with the given prefix (case-insensitive)
|
||||
*/
|
||||
@Query("SELECT ta FROM TagAlias ta WHERE LOWER(ta.aliasName) LIKE LOWER(CONCAT(:prefix, '%'))")
|
||||
List<TagAlias> findByAliasNameStartingWithIgnoreCase(@Param("prefix") String prefix);
|
||||
}
|
||||
@@ -17,8 +17,12 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
|
||||
|
||||
Optional<Tag> findByName(String name);
|
||||
|
||||
Optional<Tag> findByNameIgnoreCase(String name);
|
||||
|
||||
boolean existsByName(String name);
|
||||
|
||||
boolean existsByNameIgnoreCase(String name);
|
||||
|
||||
List<Tag> findByNameContainingIgnoreCase(String name);
|
||||
|
||||
Page<Tag> findByNameContainingIgnoreCase(String name, Pageable pageable);
|
||||
@@ -54,4 +58,7 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
|
||||
|
||||
@Query("SELECT COUNT(t) FROM Tag t WHERE SIZE(t.stories) > 0")
|
||||
long countUsedTags();
|
||||
|
||||
@Query("SELECT t FROM Tag t WHERE SIZE(t.collections) > 0 ORDER BY SIZE(t.collections) DESC, t.name ASC")
|
||||
List<Tag> findTagsUsedByCollections();
|
||||
}
|
||||
@@ -1,84 +0,0 @@
|
||||
package com.storycove.scheduled;
|
||||
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.service.StoryService;
|
||||
import com.storycove.service.TypesenseService;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.scheduling.annotation.Scheduled;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.List;
|
||||
|
||||
/**
|
||||
* Scheduled task to periodically reindex all stories in Typesense
|
||||
* to ensure search index stays synchronized with database changes.
|
||||
*/
|
||||
@Component
|
||||
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
|
||||
public class TypesenseIndexScheduler {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TypesenseIndexScheduler.class);
|
||||
|
||||
private final StoryService storyService;
|
||||
private final TypesenseService typesenseService;
|
||||
|
||||
@Autowired
|
||||
public TypesenseIndexScheduler(StoryService storyService,
|
||||
@Autowired(required = false) TypesenseService typesenseService) {
|
||||
this.storyService = storyService;
|
||||
this.typesenseService = typesenseService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Scheduled task that runs periodically to reindex all stories in Typesense.
|
||||
* This ensures the search index stays synchronized with any database changes
|
||||
* that might have occurred outside of the normal story update flow.
|
||||
*
|
||||
* Interval is configurable via storycove.typesense.reindex-interval property (default: 1 hour).
|
||||
*/
|
||||
@Scheduled(fixedRateString = "${storycove.typesense.reindex-interval:3600000}")
|
||||
public void reindexAllStories() {
|
||||
if (typesenseService == null) {
|
||||
logger.debug("TypesenseService is not available, skipping scheduled reindexing");
|
||||
return;
|
||||
}
|
||||
|
||||
logger.info("Starting scheduled Typesense reindexing at {}", LocalDateTime.now());
|
||||
|
||||
try {
|
||||
long startTime = System.currentTimeMillis();
|
||||
|
||||
// Get all stories from database with eagerly loaded associations
|
||||
List<Story> allStories = storyService.findAllWithAssociations();
|
||||
|
||||
if (allStories.isEmpty()) {
|
||||
logger.info("No stories found in database, skipping reindexing");
|
||||
return;
|
||||
}
|
||||
|
||||
// Perform full reindex
|
||||
typesenseService.reindexAllStories(allStories);
|
||||
|
||||
long endTime = System.currentTimeMillis();
|
||||
long duration = endTime - startTime;
|
||||
|
||||
logger.info("Completed scheduled Typesense reindexing of {} stories in {}ms",
|
||||
allStories.size(), duration);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to complete scheduled Typesense reindexing", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual trigger for reindexing - can be called from other services or endpoints if needed
|
||||
*/
|
||||
public void triggerManualReindex() {
|
||||
logger.info("Manual Typesense reindexing triggered");
|
||||
reindexAllStories();
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ package com.storycove.security;
|
||||
import com.storycove.util.JwtUtil;
|
||||
import jakarta.servlet.FilterChain;
|
||||
import jakarta.servlet.ServletException;
|
||||
import jakarta.servlet.http.Cookie;
|
||||
import jakarta.servlet.http.HttpServletRequest;
|
||||
import jakarta.servlet.http.HttpServletResponse;
|
||||
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
|
||||
@@ -28,13 +29,27 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
|
||||
HttpServletResponse response,
|
||||
FilterChain filterChain) throws ServletException, IOException {
|
||||
|
||||
String authHeader = request.getHeader("Authorization");
|
||||
String token = null;
|
||||
|
||||
// First try to get token from Authorization header
|
||||
String authHeader = request.getHeader("Authorization");
|
||||
if (authHeader != null && authHeader.startsWith("Bearer ")) {
|
||||
token = authHeader.substring(7);
|
||||
}
|
||||
|
||||
// If no token in header, try to get from cookies
|
||||
if (token == null) {
|
||||
Cookie[] cookies = request.getCookies();
|
||||
if (cookies != null) {
|
||||
for (Cookie cookie : cookies) {
|
||||
if ("token".equals(cookie.getName())) {
|
||||
token = cookie.getValue();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (token != null && jwtUtil.validateToken(token) && !jwtUtil.isTokenExpired(token)) {
|
||||
String subject = jwtUtil.getSubjectFromToken(token);
|
||||
|
||||
|
||||
@@ -11,21 +11,21 @@ import org.springframework.stereotype.Component;
|
||||
import java.util.List;
|
||||
|
||||
@Component
|
||||
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
|
||||
@ConditionalOnProperty(name = "storycove.search.enabled", havingValue = "true", matchIfMissing = true)
|
||||
public class AuthorIndexScheduler {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(AuthorIndexScheduler.class);
|
||||
|
||||
private final AuthorService authorService;
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
|
||||
@Autowired
|
||||
public AuthorIndexScheduler(AuthorService authorService, TypesenseService typesenseService) {
|
||||
public AuthorIndexScheduler(AuthorService authorService, SearchServiceAdapter searchServiceAdapter) {
|
||||
this.authorService = authorService;
|
||||
this.typesenseService = typesenseService;
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
}
|
||||
|
||||
@Scheduled(fixedRateString = "${storycove.typesense.author-reindex-interval:7200000}") // 2 hours default
|
||||
@Scheduled(fixedRateString = "${storycove.search.author-reindex-interval:7200000}") // 2 hours default
|
||||
public void reindexAllAuthors() {
|
||||
try {
|
||||
logger.info("Starting scheduled author reindexing...");
|
||||
@@ -34,7 +34,7 @@ public class AuthorIndexScheduler {
|
||||
logger.info("Found {} authors to reindex", allAuthors.size());
|
||||
|
||||
if (!allAuthors.isEmpty()) {
|
||||
typesenseService.reindexAllAuthors(allAuthors);
|
||||
searchServiceAdapter.bulkIndexAuthors(allAuthors);
|
||||
logger.info("Successfully completed scheduled author reindexing");
|
||||
} else {
|
||||
logger.info("No authors found to reindex");
|
||||
|
||||
@@ -28,12 +28,12 @@ public class AuthorService {
|
||||
private static final Logger logger = LoggerFactory.getLogger(AuthorService.class);
|
||||
|
||||
private final AuthorRepository authorRepository;
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
|
||||
@Autowired
|
||||
public AuthorService(AuthorRepository authorRepository, TypesenseService typesenseService) {
|
||||
public AuthorService(AuthorRepository authorRepository, SearchServiceAdapter searchServiceAdapter) {
|
||||
this.authorRepository = authorRepository;
|
||||
this.typesenseService = typesenseService;
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
@@ -132,12 +132,8 @@ public class AuthorService {
|
||||
validateAuthorForCreate(author);
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Index in Typesense
|
||||
try {
|
||||
typesenseService.indexAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to index author in Typesense: " + savedAuthor.getName(), e);
|
||||
}
|
||||
// Index in OpenSearch
|
||||
searchServiceAdapter.indexAuthor(savedAuthor);
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -154,12 +150,8 @@ public class AuthorService {
|
||||
updateAuthorFields(existingAuthor, authorUpdates);
|
||||
Author savedAuthor = authorRepository.save(existingAuthor);
|
||||
|
||||
// Update in Typesense
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense: " + savedAuthor.getName(), e);
|
||||
}
|
||||
// Update in OpenSearch
|
||||
searchServiceAdapter.updateAuthor(savedAuthor);
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -174,12 +166,8 @@ public class AuthorService {
|
||||
|
||||
authorRepository.delete(author);
|
||||
|
||||
// Remove from Typesense
|
||||
try {
|
||||
typesenseService.deleteAuthor(id.toString());
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to delete author from Typesense: " + author.getName(), e);
|
||||
}
|
||||
// Remove from OpenSearch
|
||||
searchServiceAdapter.deleteAuthor(id);
|
||||
}
|
||||
|
||||
public Author addUrl(UUID id, String url) {
|
||||
@@ -187,12 +175,8 @@ public class AuthorService {
|
||||
author.addUrl(url);
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after adding URL: " + savedAuthor.getName(), e);
|
||||
}
|
||||
// Update in OpenSearch
|
||||
searchServiceAdapter.updateAuthor(savedAuthor);
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -202,12 +186,8 @@ public class AuthorService {
|
||||
author.removeUrl(url);
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after removing URL: " + savedAuthor.getName(), e);
|
||||
}
|
||||
// Update in OpenSearch
|
||||
searchServiceAdapter.updateAuthor(savedAuthor);
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -232,7 +212,7 @@ public class AuthorService {
|
||||
rating, author.getName(), author.getAuthorRating());
|
||||
|
||||
author.setAuthorRating(rating);
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
authorRepository.save(author);
|
||||
|
||||
// Flush and refresh to ensure the entity is up-to-date
|
||||
authorRepository.flush();
|
||||
@@ -241,12 +221,8 @@ public class AuthorService {
|
||||
logger.debug("Saved author rating: {} for author: {}",
|
||||
refreshedAuthor.getAuthorRating(), refreshedAuthor.getName());
|
||||
|
||||
// Update in Typesense
|
||||
try {
|
||||
typesenseService.updateAuthor(refreshedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after rating: " + refreshedAuthor.getName(), e);
|
||||
}
|
||||
// Update in OpenSearch
|
||||
searchServiceAdapter.updateAuthor(refreshedAuthor);
|
||||
|
||||
return refreshedAuthor;
|
||||
}
|
||||
@@ -289,12 +265,8 @@ public class AuthorService {
|
||||
author.setAvatarImagePath(avatarPath);
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after setting avatar: " + savedAuthor.getName(), e);
|
||||
}
|
||||
// Update in OpenSearch
|
||||
searchServiceAdapter.updateAuthor(savedAuthor);
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -304,12 +276,8 @@ public class AuthorService {
|
||||
author.setAvatarImagePath(null);
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after removing avatar: " + savedAuthor.getName(), e);
|
||||
}
|
||||
// Update in OpenSearch
|
||||
searchServiceAdapter.updateAuthor(savedAuthor);
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ public class CollectionSearchResult extends Collection {
|
||||
|
||||
private Integer storedStoryCount;
|
||||
private Integer storedTotalWordCount;
|
||||
private int wordsPerMinute = 200; // Default, can be overridden
|
||||
|
||||
public CollectionSearchResult(Collection collection) {
|
||||
this.setId(collection.getId());
|
||||
@@ -20,6 +21,7 @@ public class CollectionSearchResult extends Collection {
|
||||
this.setCreatedAt(collection.getCreatedAt());
|
||||
this.setUpdatedAt(collection.getUpdatedAt());
|
||||
this.setCoverImagePath(collection.getCoverImagePath());
|
||||
this.setTagNames(collection.getTagNames()); // Copy tag names for search results
|
||||
// Note: don't copy collectionStories or tags to avoid lazy loading issues
|
||||
}
|
||||
|
||||
@@ -31,6 +33,10 @@ public class CollectionSearchResult extends Collection {
|
||||
this.storedTotalWordCount = totalWordCount;
|
||||
}
|
||||
|
||||
public void setWordsPerMinute(int wordsPerMinute) {
|
||||
this.wordsPerMinute = wordsPerMinute;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getStoryCount() {
|
||||
return storedStoryCount != null ? storedStoryCount : 0;
|
||||
@@ -43,8 +49,7 @@ public class CollectionSearchResult extends Collection {
|
||||
|
||||
@Override
|
||||
public int getEstimatedReadingTime() {
|
||||
// Assuming 200 words per minute reading speed
|
||||
return Math.max(1, getTotalWordCount() / 200);
|
||||
return Math.max(1, getTotalWordCount() / wordsPerMinute);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.SearchResultDto;
|
||||
import com.storycove.dto.StoryReadingDto;
|
||||
import com.storycove.dto.TagDto;
|
||||
import com.storycove.entity.Collection;
|
||||
import com.storycove.entity.CollectionStory;
|
||||
import com.storycove.entity.Story;
|
||||
@@ -9,14 +11,10 @@ import com.storycove.repository.CollectionRepository;
|
||||
import com.storycove.repository.CollectionStoryRepository;
|
||||
import com.storycove.repository.StoryRepository;
|
||||
import com.storycove.repository.TagRepository;
|
||||
import com.storycove.service.exception.DuplicateResourceException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.PageRequest;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
@@ -33,19 +31,22 @@ public class CollectionService {
|
||||
private final CollectionStoryRepository collectionStoryRepository;
|
||||
private final StoryRepository storyRepository;
|
||||
private final TagRepository tagRepository;
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
|
||||
@Autowired
|
||||
public CollectionService(CollectionRepository collectionRepository,
|
||||
CollectionStoryRepository collectionStoryRepository,
|
||||
StoryRepository storyRepository,
|
||||
TagRepository tagRepository,
|
||||
@Autowired(required = false) TypesenseService typesenseService) {
|
||||
SearchServiceAdapter searchServiceAdapter,
|
||||
ReadingTimeService readingTimeService) {
|
||||
this.collectionRepository = collectionRepository;
|
||||
this.collectionStoryRepository = collectionStoryRepository;
|
||||
this.storyRepository = storyRepository;
|
||||
this.tagRepository = tagRepository;
|
||||
this.typesenseService = typesenseService;
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
this.readingTimeService = readingTimeService;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -53,13 +54,10 @@ public class CollectionService {
|
||||
* This method MUST be used instead of JPA queries for listing collections
|
||||
*/
|
||||
public SearchResultDto<Collection> searchCollections(String query, List<String> tags, boolean includeArchived, int page, int limit) {
|
||||
if (typesenseService == null) {
|
||||
logger.warn("Typesense service not available, returning empty results");
|
||||
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
|
||||
}
|
||||
|
||||
// Delegate to TypesenseService for all search operations
|
||||
return typesenseService.searchCollections(query, tags, includeArchived, page, limit);
|
||||
// Collections are currently handled at database level, not indexed in search engine
|
||||
// Return empty result for now as collections search is not implemented in OpenSearch
|
||||
logger.warn("Collections search not yet implemented in OpenSearch, returning empty results");
|
||||
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,6 +76,13 @@ public class CollectionService {
|
||||
.orElseThrow(() -> new ResourceNotFoundException("Collection not found with id: " + id));
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all collections with tags for reindexing
|
||||
*/
|
||||
public List<Collection> findAllWithTags() {
|
||||
return collectionRepository.findAllWithTags();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new collection with optional initial stories
|
||||
*/
|
||||
@@ -99,10 +104,7 @@ public class CollectionService {
|
||||
savedCollection = findById(savedCollection.getId());
|
||||
}
|
||||
|
||||
// Index in Typesense
|
||||
if (typesenseService != null) {
|
||||
typesenseService.indexCollection(savedCollection);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
logger.info("Created collection: {} with {} stories", name, initialStoryIds != null ? initialStoryIds.size() : 0);
|
||||
return savedCollection;
|
||||
@@ -132,10 +134,7 @@ public class CollectionService {
|
||||
|
||||
Collection savedCollection = collectionRepository.save(collection);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
typesenseService.indexCollection(savedCollection);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
logger.info("Updated collection: {}", id);
|
||||
return savedCollection;
|
||||
@@ -147,10 +146,7 @@ public class CollectionService {
|
||||
public void deleteCollection(UUID id) {
|
||||
Collection collection = findByIdBasic(id);
|
||||
|
||||
// Remove from Typesense first
|
||||
if (typesenseService != null) {
|
||||
typesenseService.removeCollection(id);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
collectionRepository.delete(collection);
|
||||
logger.info("Deleted collection: {}", id);
|
||||
@@ -165,10 +161,7 @@ public class CollectionService {
|
||||
|
||||
Collection savedCollection = collectionRepository.save(collection);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
typesenseService.indexCollection(savedCollection);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
logger.info("{} collection: {}", archived ? "Archived" : "Unarchived", id);
|
||||
return savedCollection;
|
||||
@@ -213,10 +206,7 @@ public class CollectionService {
|
||||
}
|
||||
|
||||
// Update collection in Typesense
|
||||
if (typesenseService != null) {
|
||||
Collection updatedCollection = findById(collectionId);
|
||||
typesenseService.indexCollection(updatedCollection);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
long totalStories = collectionStoryRepository.countByCollectionId(collectionId);
|
||||
|
||||
@@ -241,10 +231,7 @@ public class CollectionService {
|
||||
collectionStoryRepository.delete(collectionStory);
|
||||
|
||||
// Update collection in Typesense
|
||||
if (typesenseService != null) {
|
||||
Collection updatedCollection = findById(collectionId);
|
||||
typesenseService.indexCollection(updatedCollection);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
logger.info("Removed story {} from collection {}", storyId, collectionId);
|
||||
}
|
||||
@@ -254,7 +241,7 @@ public class CollectionService {
|
||||
*/
|
||||
@Transactional
|
||||
public void reorderStories(UUID collectionId, List<Map<String, Object>> storyOrders) {
|
||||
Collection collection = findByIdBasic(collectionId);
|
||||
findByIdBasic(collectionId); // Validate collection exists
|
||||
|
||||
// Two-phase update to avoid unique constraint violations:
|
||||
// Phase 1: Set all positions to negative values (temporary)
|
||||
@@ -277,10 +264,7 @@ public class CollectionService {
|
||||
}
|
||||
|
||||
// Update collection in Typesense
|
||||
if (typesenseService != null) {
|
||||
Collection updatedCollection = findById(collectionId);
|
||||
typesenseService.indexCollection(updatedCollection);
|
||||
}
|
||||
// Collections are not indexed in search engine yet
|
||||
|
||||
logger.info("Reordered {} stories in collection {}", storyOrders.size(), collectionId);
|
||||
}
|
||||
@@ -326,7 +310,7 @@ public class CollectionService {
|
||||
);
|
||||
|
||||
return Map.of(
|
||||
"story", story,
|
||||
"story", convertToReadingDto(story),
|
||||
"collection", collectionContext
|
||||
);
|
||||
}
|
||||
@@ -344,7 +328,7 @@ public class CollectionService {
|
||||
int totalWordCount = collectionStories.stream()
|
||||
.mapToInt(cs -> cs.getStory().getWordCount() != null ? cs.getStory().getWordCount() : 0)
|
||||
.sum();
|
||||
int estimatedReadingTime = Math.max(1, totalWordCount / 200); // 200 words per minute
|
||||
int estimatedReadingTime = readingTimeService.calculateReadingTime(totalWordCount);
|
||||
|
||||
double averageStoryRating = collectionStories.stream()
|
||||
.filter(cs -> cs.getStory().getRating() != null)
|
||||
@@ -415,9 +399,54 @@ public class CollectionService {
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all collections for indexing (used by TypesenseService)
|
||||
* Get all collections for indexing (used by SearchServiceAdapter)
|
||||
*/
|
||||
public List<Collection> findAllForIndexing() {
|
||||
return collectionRepository.findAllActiveCollections();
|
||||
}
|
||||
|
||||
private StoryReadingDto convertToReadingDto(Story story) {
|
||||
StoryReadingDto dto = new StoryReadingDto();
|
||||
dto.setId(story.getId());
|
||||
dto.setTitle(story.getTitle());
|
||||
dto.setSummary(story.getSummary());
|
||||
dto.setDescription(story.getDescription());
|
||||
dto.setContentHtml(story.getContentHtml());
|
||||
dto.setSourceUrl(story.getSourceUrl());
|
||||
dto.setCoverPath(story.getCoverPath());
|
||||
dto.setWordCount(story.getWordCount());
|
||||
dto.setRating(story.getRating());
|
||||
dto.setVolume(story.getVolume());
|
||||
dto.setCreatedAt(story.getCreatedAt());
|
||||
dto.setUpdatedAt(story.getUpdatedAt());
|
||||
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
dto.setAuthorId(story.getAuthor().getId());
|
||||
dto.setAuthorName(story.getAuthor().getName());
|
||||
}
|
||||
|
||||
if (story.getSeries() != null) {
|
||||
dto.setSeriesId(story.getSeries().getId());
|
||||
dto.setSeriesName(story.getSeries().getName());
|
||||
}
|
||||
|
||||
dto.setTags(story.getTags().stream()
|
||||
.map(this::convertTagToDto)
|
||||
.collect(Collectors.toList()));
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
private TagDto convertTagToDto(Tag tag) {
|
||||
TagDto dto = new TagDto();
|
||||
dto.setId(tag.getId());
|
||||
dto.setName(tag.getName());
|
||||
dto.setStoryCount(tag.getStories().size());
|
||||
return dto;
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,584 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.EPUBExportRequest;
|
||||
import com.storycove.entity.Collection;
|
||||
import com.storycove.entity.ReadingPosition;
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.repository.ReadingPositionRepository;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
|
||||
import nl.siegmann.epublib.domain.*;
|
||||
import nl.siegmann.epublib.epub.EpubWriter;
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.jsoup.nodes.Element;
|
||||
import org.jsoup.select.Elements;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.core.io.ByteArrayResource;
|
||||
import org.springframework.core.io.Resource;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.time.LocalDateTime;
|
||||
import java.time.format.DateTimeFormatter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Service
|
||||
@Transactional
|
||||
public class EPUBExportService {
|
||||
|
||||
private final StoryService storyService;
|
||||
private final ReadingPositionRepository readingPositionRepository;
|
||||
private final CollectionService collectionService;
|
||||
|
||||
@Autowired
|
||||
public EPUBExportService(StoryService storyService,
|
||||
ReadingPositionRepository readingPositionRepository,
|
||||
CollectionService collectionService) {
|
||||
this.storyService = storyService;
|
||||
this.readingPositionRepository = readingPositionRepository;
|
||||
this.collectionService = collectionService;
|
||||
}
|
||||
|
||||
public Resource exportStoryAsEPUB(EPUBExportRequest request) throws IOException {
|
||||
Story story = storyService.findById(request.getStoryId());
|
||||
|
||||
Book book = createEPUBBook(story, request);
|
||||
|
||||
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
|
||||
EpubWriter epubWriter = new EpubWriter();
|
||||
epubWriter.write(book, outputStream);
|
||||
|
||||
return new ByteArrayResource(outputStream.toByteArray());
|
||||
}
|
||||
|
||||
public Resource exportCollectionAsEPUB(UUID collectionId, EPUBExportRequest request) throws IOException {
|
||||
Collection collection = collectionService.findById(collectionId);
|
||||
List<Story> stories = collection.getCollectionStories().stream()
|
||||
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
|
||||
.map(cs -> cs.getStory())
|
||||
.collect(Collectors.toList());
|
||||
|
||||
if (stories.isEmpty()) {
|
||||
throw new ResourceNotFoundException("Collection contains no stories to export");
|
||||
}
|
||||
|
||||
Book book = createCollectionEPUBBook(collection, stories, request);
|
||||
|
||||
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
|
||||
EpubWriter epubWriter = new EpubWriter();
|
||||
epubWriter.write(book, outputStream);
|
||||
|
||||
return new ByteArrayResource(outputStream.toByteArray());
|
||||
}
|
||||
|
||||
private Book createEPUBBook(Story story, EPUBExportRequest request) throws IOException {
|
||||
Book book = new Book();
|
||||
|
||||
setupMetadata(book, story, request);
|
||||
|
||||
addCoverImage(book, story, request);
|
||||
|
||||
addContent(book, story, request);
|
||||
|
||||
addReadingPosition(book, story, request);
|
||||
|
||||
return book;
|
||||
}
|
||||
|
||||
private Book createCollectionEPUBBook(Collection collection, List<Story> stories, EPUBExportRequest request) throws IOException {
|
||||
Book book = new Book();
|
||||
|
||||
setupCollectionMetadata(book, collection, stories, request);
|
||||
|
||||
addCollectionCoverImage(book, collection, request);
|
||||
|
||||
addCollectionContent(book, stories, request);
|
||||
|
||||
return book;
|
||||
}
|
||||
|
||||
private void setupMetadata(Book book, Story story, EPUBExportRequest request) {
|
||||
Metadata metadata = book.getMetadata();
|
||||
|
||||
String title = request.getCustomTitle() != null ?
|
||||
request.getCustomTitle() : story.getTitle();
|
||||
metadata.addTitle(title);
|
||||
|
||||
String authorName = request.getCustomAuthor() != null ?
|
||||
request.getCustomAuthor() :
|
||||
(story.getAuthor() != null ? story.getAuthor().getName() : "Unknown Author");
|
||||
metadata.addAuthor(new Author(authorName));
|
||||
|
||||
metadata.setLanguage(request.getLanguage() != null ? request.getLanguage() : "en");
|
||||
|
||||
metadata.addIdentifier(new Identifier("storycove", story.getId().toString()));
|
||||
|
||||
if (story.getDescription() != null) {
|
||||
metadata.addDescription(story.getDescription());
|
||||
}
|
||||
|
||||
if (request.getIncludeMetadata()) {
|
||||
metadata.addDate(new Date(java.util.Date.from(
|
||||
story.getCreatedAt().atZone(java.time.ZoneId.systemDefault()).toInstant()
|
||||
), Date.Event.CREATION));
|
||||
|
||||
if (story.getSeries() != null) {
|
||||
// Add series and metadata info to description instead of using addMeta
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (story.getDescription() != null) {
|
||||
description.append(story.getDescription()).append("\n\n");
|
||||
}
|
||||
|
||||
description.append("Series: ").append(story.getSeries().getName());
|
||||
if (story.getVolume() != null) {
|
||||
description.append(" (Volume ").append(story.getVolume()).append(")");
|
||||
}
|
||||
description.append("\n");
|
||||
|
||||
if (story.getWordCount() != null) {
|
||||
description.append("Word Count: ").append(story.getWordCount()).append("\n");
|
||||
}
|
||||
|
||||
if (story.getRating() != null) {
|
||||
description.append("Rating: ").append(story.getRating()).append("/5\n");
|
||||
}
|
||||
|
||||
if (!story.getTags().isEmpty()) {
|
||||
String tags = story.getTags().stream()
|
||||
.map(tag -> tag.getName())
|
||||
.reduce((a, b) -> a + ", " + b)
|
||||
.orElse("");
|
||||
description.append("Tags: ").append(tags).append("\n");
|
||||
}
|
||||
|
||||
description.append("\nGenerated by StoryCove on ")
|
||||
.append(LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
|
||||
|
||||
metadata.addDescription(description.toString());
|
||||
}
|
||||
}
|
||||
|
||||
if (request.getCustomMetadata() != null && !request.getCustomMetadata().isEmpty()) {
|
||||
// Add custom metadata to description since addMeta doesn't exist
|
||||
StringBuilder customDesc = new StringBuilder();
|
||||
for (String customMeta : request.getCustomMetadata()) {
|
||||
String[] parts = customMeta.split(":", 2);
|
||||
if (parts.length == 2) {
|
||||
customDesc.append(parts[0].trim()).append(": ").append(parts[1].trim()).append("\n");
|
||||
}
|
||||
}
|
||||
if (customDesc.length() > 0) {
|
||||
String existingDesc = metadata.getDescriptions().isEmpty() ? "" : metadata.getDescriptions().get(0);
|
||||
metadata.addDescription(existingDesc + "\n" + customDesc.toString());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private void addCoverImage(Book book, Story story, EPUBExportRequest request) {
|
||||
if (!request.getIncludeCoverImage() || story.getCoverPath() == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
Path coverPath = Paths.get(story.getCoverPath());
|
||||
if (Files.exists(coverPath)) {
|
||||
byte[] coverImageData = Files.readAllBytes(coverPath);
|
||||
String mimeType = Files.probeContentType(coverPath);
|
||||
if (mimeType == null) {
|
||||
mimeType = "image/jpeg";
|
||||
}
|
||||
|
||||
nl.siegmann.epublib.domain.Resource coverResource =
|
||||
new nl.siegmann.epublib.domain.Resource(coverImageData, "cover.jpg");
|
||||
|
||||
book.setCoverImage(coverResource);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// Skip cover image on error
|
||||
}
|
||||
}
|
||||
|
||||
private void addContent(Book book, Story story, EPUBExportRequest request) {
|
||||
String content = story.getContentHtml();
|
||||
if (content == null) {
|
||||
content = story.getContentPlain() != null ?
|
||||
"<p>" + story.getContentPlain().replace("\n", "</p><p>") + "</p>" :
|
||||
"<p>No content available</p>";
|
||||
}
|
||||
|
||||
if (request.getSplitByChapters()) {
|
||||
addChapterizedContent(book, content, request);
|
||||
} else {
|
||||
addSingleChapterContent(book, content, story);
|
||||
}
|
||||
}
|
||||
|
||||
private void addSingleChapterContent(Book book, String content, Story story) {
|
||||
String html = createChapterHTML(story.getTitle(), content);
|
||||
|
||||
nl.siegmann.epublib.domain.Resource chapterResource =
|
||||
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter.html");
|
||||
|
||||
book.addSection(story.getTitle(), chapterResource);
|
||||
}
|
||||
|
||||
private void addChapterizedContent(Book book, String content, EPUBExportRequest request) {
|
||||
Document doc = Jsoup.parse(content);
|
||||
Elements chapters = doc.select("div.chapter, h1, h2, h3");
|
||||
|
||||
if (chapters.isEmpty()) {
|
||||
List<String> paragraphs = splitByWords(content,
|
||||
request.getMaxWordsPerChapter() != null ? request.getMaxWordsPerChapter() : 2000);
|
||||
|
||||
for (int i = 0; i < paragraphs.size(); i++) {
|
||||
String chapterTitle = "Chapter " + (i + 1);
|
||||
String html = createChapterHTML(chapterTitle, paragraphs.get(i));
|
||||
|
||||
nl.siegmann.epublib.domain.Resource chapterResource =
|
||||
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter" + (i + 1) + ".html");
|
||||
|
||||
book.addSection(chapterTitle, chapterResource);
|
||||
}
|
||||
} else {
|
||||
for (int i = 0; i < chapters.size(); i++) {
|
||||
Element chapter = chapters.get(i);
|
||||
String chapterTitle = chapter.text();
|
||||
if (chapterTitle.trim().isEmpty()) {
|
||||
chapterTitle = "Chapter " + (i + 1);
|
||||
}
|
||||
|
||||
String chapterContent = chapter.html();
|
||||
String html = createChapterHTML(chapterTitle, chapterContent);
|
||||
|
||||
nl.siegmann.epublib.domain.Resource chapterResource =
|
||||
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter" + (i + 1) + ".html");
|
||||
|
||||
book.addSection(chapterTitle, chapterResource);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private List<String> splitByWords(String content, int maxWordsPerChapter) {
|
||||
String[] words = content.split("\\s+");
|
||||
List<String> chapters = new ArrayList<>();
|
||||
StringBuilder currentChapter = new StringBuilder();
|
||||
int wordCount = 0;
|
||||
|
||||
for (String word : words) {
|
||||
currentChapter.append(word).append(" ");
|
||||
wordCount++;
|
||||
|
||||
if (wordCount >= maxWordsPerChapter) {
|
||||
chapters.add(currentChapter.toString().trim());
|
||||
currentChapter = new StringBuilder();
|
||||
wordCount = 0;
|
||||
}
|
||||
}
|
||||
|
||||
if (currentChapter.length() > 0) {
|
||||
chapters.add(currentChapter.toString().trim());
|
||||
}
|
||||
|
||||
return chapters;
|
||||
}
|
||||
|
||||
private String createChapterHTML(String title, String content) {
|
||||
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
|
||||
"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1//EN\" " +
|
||||
"\"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd\">" +
|
||||
"<html xmlns=\"http://www.w3.org/1999/xhtml\">" +
|
||||
"<head>" +
|
||||
"<title>" + escapeHtml(title) + "</title>" +
|
||||
"<style type=\"text/css\">" +
|
||||
"body { font-family: serif; margin: 1em; }" +
|
||||
"h1 { text-align: center; }" +
|
||||
"p { text-indent: 1em; margin: 0.5em 0; }" +
|
||||
"</style>" +
|
||||
"</head>" +
|
||||
"<body>" +
|
||||
"<h1>" + escapeHtml(title) + "</h1>" +
|
||||
fixHtmlForXhtml(content) +
|
||||
"</body>" +
|
||||
"</html>";
|
||||
}
|
||||
|
||||
private void addReadingPosition(Book book, Story story, EPUBExportRequest request) {
|
||||
if (!request.getIncludeReadingPosition()) {
|
||||
return;
|
||||
}
|
||||
|
||||
Optional<ReadingPosition> positionOpt = readingPositionRepository.findByStoryId(story.getId());
|
||||
if (positionOpt.isPresent()) {
|
||||
ReadingPosition position = positionOpt.get();
|
||||
Metadata metadata = book.getMetadata();
|
||||
|
||||
// Add reading position to description since addMeta doesn't exist
|
||||
StringBuilder positionDesc = new StringBuilder();
|
||||
if (position.getEpubCfi() != null) {
|
||||
positionDesc.append("EPUB CFI: ").append(position.getEpubCfi()).append("\n");
|
||||
}
|
||||
|
||||
if (position.getChapterIndex() != null && position.getWordPosition() != null) {
|
||||
positionDesc.append("Reading Position: Chapter ")
|
||||
.append(position.getChapterIndex())
|
||||
.append(", Word ").append(position.getWordPosition()).append("\n");
|
||||
}
|
||||
|
||||
if (position.getPercentageComplete() != null) {
|
||||
positionDesc.append("Reading Progress: ")
|
||||
.append(String.format("%.1f%%", position.getPercentageComplete())).append("\n");
|
||||
}
|
||||
|
||||
positionDesc.append("Last Read: ")
|
||||
.append(position.getUpdatedAt().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
|
||||
|
||||
String existingDesc = metadata.getDescriptions().isEmpty() ? "" : metadata.getDescriptions().get(0);
|
||||
metadata.addDescription(existingDesc + "\n\n--- Reading Position ---\n" + positionDesc.toString());
|
||||
}
|
||||
}
|
||||
|
||||
private String fixHtmlForXhtml(String html) {
|
||||
if (html == null) return "";
|
||||
|
||||
// Fix common XHTML validation issues
|
||||
String fixed = html
|
||||
// Fix self-closing tags to be XHTML compliant
|
||||
.replaceAll("<br>", "<br />")
|
||||
.replaceAll("<hr>", "<hr />")
|
||||
.replaceAll("<img([^>]*)>", "<img$1 />")
|
||||
.replaceAll("<input([^>]*)>", "<input$1 />")
|
||||
.replaceAll("<area([^>]*)>", "<area$1 />")
|
||||
.replaceAll("<base([^>]*)>", "<base$1 />")
|
||||
.replaceAll("<col([^>]*)>", "<col$1 />")
|
||||
.replaceAll("<embed([^>]*)>", "<embed$1 />")
|
||||
.replaceAll("<link([^>]*)>", "<link$1 />")
|
||||
.replaceAll("<meta([^>]*)>", "<meta$1 />")
|
||||
.replaceAll("<param([^>]*)>", "<param$1 />")
|
||||
.replaceAll("<source([^>]*)>", "<source$1 />")
|
||||
.replaceAll("<track([^>]*)>", "<track$1 />")
|
||||
.replaceAll("<wbr([^>]*)>", "<wbr$1 />");
|
||||
|
||||
return fixed;
|
||||
}
|
||||
|
||||
private String escapeHtml(String text) {
|
||||
if (text == null) return "";
|
||||
return text.replace("&", "&")
|
||||
.replace("<", "<")
|
||||
.replace(">", ">")
|
||||
.replace("\"", """)
|
||||
.replace("'", "'");
|
||||
}
|
||||
|
||||
public String getEPUBFilename(Story story) {
|
||||
StringBuilder filename = new StringBuilder();
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
filename.append(sanitizeFilename(story.getAuthor().getName()))
|
||||
.append(" - ");
|
||||
}
|
||||
|
||||
filename.append(sanitizeFilename(story.getTitle()));
|
||||
|
||||
if (story.getSeries() != null && story.getVolume() != null) {
|
||||
filename.append(" (")
|
||||
.append(sanitizeFilename(story.getSeries().getName()))
|
||||
.append(" ")
|
||||
.append(story.getVolume())
|
||||
.append(")");
|
||||
}
|
||||
|
||||
filename.append(".epub");
|
||||
|
||||
return filename.toString();
|
||||
}
|
||||
|
||||
private String sanitizeFilename(String filename) {
|
||||
if (filename == null) return "unknown";
|
||||
return filename.replaceAll("[^a-zA-Z0-9._\\- ]", "")
|
||||
.trim()
|
||||
.replaceAll("\\s+", "_");
|
||||
}
|
||||
|
||||
private void setupCollectionMetadata(Book book, Collection collection, List<Story> stories, EPUBExportRequest request) {
|
||||
Metadata metadata = book.getMetadata();
|
||||
|
||||
String title = request.getCustomTitle() != null ?
|
||||
request.getCustomTitle() : collection.getName();
|
||||
metadata.addTitle(title);
|
||||
|
||||
// Use collection creator as author, or combine story authors
|
||||
String authorName = "Collection";
|
||||
if (stories.size() == 1) {
|
||||
Story story = stories.get(0);
|
||||
authorName = story.getAuthor() != null ? story.getAuthor().getName() : "Unknown Author";
|
||||
} else {
|
||||
// For multiple stories, use "Various Authors" or collection name
|
||||
authorName = "Various Authors";
|
||||
}
|
||||
|
||||
if (request.getCustomAuthor() != null) {
|
||||
authorName = request.getCustomAuthor();
|
||||
}
|
||||
|
||||
metadata.addAuthor(new Author(authorName));
|
||||
metadata.setLanguage(request.getLanguage() != null ? request.getLanguage() : "en");
|
||||
metadata.addIdentifier(new Identifier("storycove-collection", collection.getId().toString()));
|
||||
|
||||
// Create description from collection description and story list
|
||||
StringBuilder description = new StringBuilder();
|
||||
if (collection.getDescription() != null && !collection.getDescription().trim().isEmpty()) {
|
||||
description.append(collection.getDescription()).append("\n\n");
|
||||
}
|
||||
|
||||
description.append("This collection contains ").append(stories.size()).append(" stories:\n");
|
||||
for (int i = 0; i < stories.size() && i < 10; i++) {
|
||||
Story story = stories.get(i);
|
||||
description.append((i + 1)).append(". ").append(story.getTitle());
|
||||
if (story.getAuthor() != null) {
|
||||
description.append(" by ").append(story.getAuthor().getName());
|
||||
}
|
||||
description.append("\n");
|
||||
}
|
||||
if (stories.size() > 10) {
|
||||
description.append("... and ").append(stories.size() - 10).append(" more stories.");
|
||||
}
|
||||
|
||||
metadata.addDescription(description.toString());
|
||||
|
||||
if (request.getIncludeMetadata()) {
|
||||
metadata.addDate(new Date(java.util.Date.from(
|
||||
collection.getCreatedAt().atZone(java.time.ZoneId.systemDefault()).toInstant()
|
||||
), Date.Event.CREATION));
|
||||
|
||||
// Add collection statistics to description
|
||||
int totalWordCount = stories.stream().mapToInt(s -> s.getWordCount() != null ? s.getWordCount() : 0).sum();
|
||||
description.append("\n\nTotal Word Count: ").append(totalWordCount);
|
||||
description.append("\nGenerated by StoryCove on ")
|
||||
.append(LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
|
||||
|
||||
metadata.addDescription(description.toString());
|
||||
}
|
||||
}
|
||||
|
||||
private void addCollectionCoverImage(Book book, Collection collection, EPUBExportRequest request) {
|
||||
if (!request.getIncludeCoverImage()) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
// Try to use collection cover first
|
||||
if (collection.getCoverImagePath() != null) {
|
||||
Path coverPath = Paths.get(collection.getCoverImagePath());
|
||||
if (Files.exists(coverPath)) {
|
||||
byte[] coverImageData = Files.readAllBytes(coverPath);
|
||||
String mimeType = Files.probeContentType(coverPath);
|
||||
if (mimeType == null) {
|
||||
mimeType = "image/jpeg";
|
||||
}
|
||||
|
||||
nl.siegmann.epublib.domain.Resource coverResource =
|
||||
new nl.siegmann.epublib.domain.Resource(coverImageData, "collection-cover.jpg");
|
||||
|
||||
book.setCoverImage(coverResource);
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: Could generate a composite cover from story covers
|
||||
// For now, skip cover if collection doesn't have one
|
||||
|
||||
} catch (IOException e) {
|
||||
// Skip cover image on error
|
||||
}
|
||||
}
|
||||
|
||||
private void addCollectionContent(Book book, List<Story> stories, EPUBExportRequest request) {
|
||||
// Create table of contents chapter
|
||||
StringBuilder tocContent = new StringBuilder();
|
||||
tocContent.append("<h1>Table of Contents</h1>\n<ul>\n");
|
||||
|
||||
for (int i = 0; i < stories.size(); i++) {
|
||||
Story story = stories.get(i);
|
||||
tocContent.append("<li><a href=\"#story").append(i + 1).append("\">")
|
||||
.append(escapeHtml(story.getTitle()));
|
||||
if (story.getAuthor() != null) {
|
||||
tocContent.append(" by ").append(escapeHtml(story.getAuthor().getName()));
|
||||
}
|
||||
tocContent.append("</a></li>\n");
|
||||
}
|
||||
|
||||
tocContent.append("</ul>\n");
|
||||
|
||||
String tocHtml = createChapterHTML("Table of Contents", tocContent.toString());
|
||||
nl.siegmann.epublib.domain.Resource tocResource =
|
||||
new nl.siegmann.epublib.domain.Resource(tocHtml.getBytes(), "toc.html");
|
||||
book.addSection("Table of Contents", tocResource);
|
||||
|
||||
// Add each story as a chapter
|
||||
for (int i = 0; i < stories.size(); i++) {
|
||||
Story story = stories.get(i);
|
||||
String storyContent = story.getContentHtml();
|
||||
|
||||
if (storyContent == null) {
|
||||
storyContent = story.getContentPlain() != null ?
|
||||
"<p>" + story.getContentPlain().replace("\n", "</p><p>") + "</p>" :
|
||||
"<p>No content available</p>";
|
||||
}
|
||||
|
||||
// Add story metadata header
|
||||
StringBuilder storyHtml = new StringBuilder();
|
||||
storyHtml.append("<div id=\"story").append(i + 1).append("\">\n");
|
||||
storyHtml.append("<h1>").append(escapeHtml(story.getTitle())).append("</h1>\n");
|
||||
if (story.getAuthor() != null) {
|
||||
storyHtml.append("<p><em>by ").append(escapeHtml(story.getAuthor().getName())).append("</em></p>\n");
|
||||
}
|
||||
if (story.getDescription() != null && !story.getDescription().trim().isEmpty()) {
|
||||
storyHtml.append("<div class=\"summary\">\n")
|
||||
.append("<p>").append(escapeHtml(story.getDescription())).append("</p>\n")
|
||||
.append("</div>\n");
|
||||
}
|
||||
storyHtml.append("<hr />\n");
|
||||
storyHtml.append(fixHtmlForXhtml(storyContent));
|
||||
storyHtml.append("</div>\n");
|
||||
|
||||
String chapterTitle = story.getTitle();
|
||||
if (story.getAuthor() != null) {
|
||||
chapterTitle += " by " + story.getAuthor().getName();
|
||||
}
|
||||
|
||||
String html = createChapterHTML(chapterTitle, storyHtml.toString());
|
||||
nl.siegmann.epublib.domain.Resource storyResource =
|
||||
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "story" + (i + 1) + ".html");
|
||||
|
||||
book.addSection(chapterTitle, storyResource);
|
||||
}
|
||||
}
|
||||
|
||||
public boolean canExportStory(UUID storyId) {
|
||||
try {
|
||||
Story story = storyService.findById(storyId);
|
||||
return story.getContentHtml() != null || story.getContentPlain() != null;
|
||||
} catch (ResourceNotFoundException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
public String getCollectionEPUBFilename(Collection collection) {
|
||||
StringBuilder filename = new StringBuilder();
|
||||
filename.append(sanitizeFilename(collection.getName()));
|
||||
filename.append("_collection.epub");
|
||||
return filename.toString();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,548 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.EPUBImportRequest;
|
||||
import com.storycove.dto.EPUBImportResponse;
|
||||
import com.storycove.dto.ReadingPositionDto;
|
||||
import com.storycove.entity.*;
|
||||
import com.storycove.repository.ReadingPositionRepository;
|
||||
import com.storycove.service.exception.InvalidFileException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
|
||||
import nl.siegmann.epublib.domain.Book;
|
||||
import nl.siegmann.epublib.domain.Metadata;
|
||||
import nl.siegmann.epublib.domain.Resource;
|
||||
import nl.siegmann.epublib.domain.SpineReference;
|
||||
import nl.siegmann.epublib.epub.EpubReader;
|
||||
|
||||
import org.jsoup.Jsoup;
|
||||
import org.jsoup.nodes.Document;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
|
||||
@Service
|
||||
@Transactional
|
||||
public class EPUBImportService {
|
||||
|
||||
private final StoryService storyService;
|
||||
private final AuthorService authorService;
|
||||
private final SeriesService seriesService;
|
||||
private final TagService tagService;
|
||||
private final ReadingPositionRepository readingPositionRepository;
|
||||
private final HtmlSanitizationService sanitizationService;
|
||||
private final ImageService imageService;
|
||||
|
||||
@Autowired
|
||||
public EPUBImportService(StoryService storyService,
|
||||
AuthorService authorService,
|
||||
SeriesService seriesService,
|
||||
TagService tagService,
|
||||
ReadingPositionRepository readingPositionRepository,
|
||||
HtmlSanitizationService sanitizationService,
|
||||
ImageService imageService) {
|
||||
this.storyService = storyService;
|
||||
this.authorService = authorService;
|
||||
this.seriesService = seriesService;
|
||||
this.tagService = tagService;
|
||||
this.readingPositionRepository = readingPositionRepository;
|
||||
this.sanitizationService = sanitizationService;
|
||||
this.imageService = imageService;
|
||||
}
|
||||
|
||||
public EPUBImportResponse importEPUB(EPUBImportRequest request) {
|
||||
try {
|
||||
MultipartFile epubFile = request.getEpubFile();
|
||||
|
||||
if (epubFile == null || epubFile.isEmpty()) {
|
||||
return EPUBImportResponse.error("EPUB file is required");
|
||||
}
|
||||
|
||||
if (!isValidEPUBFile(epubFile)) {
|
||||
return EPUBImportResponse.error("Invalid EPUB file format");
|
||||
}
|
||||
|
||||
Book book = parseEPUBFile(epubFile);
|
||||
|
||||
Story story = createStoryFromEPUB(book, request);
|
||||
|
||||
Story savedStory = storyService.create(story);
|
||||
|
||||
// Process embedded images if content contains any
|
||||
String originalContent = story.getContentHtml();
|
||||
if (originalContent != null && originalContent.contains("<img")) {
|
||||
try {
|
||||
ImageService.ContentImageProcessingResult imageResult =
|
||||
imageService.processContentImages(originalContent, savedStory.getId());
|
||||
|
||||
// Update story content with processed images if changed
|
||||
if (!imageResult.getProcessedContent().equals(originalContent)) {
|
||||
savedStory.setContentHtml(imageResult.getProcessedContent());
|
||||
savedStory = storyService.update(savedStory.getId(), savedStory);
|
||||
|
||||
// Log the image processing results
|
||||
System.out.println("EPUB Import - Image processing completed for story " + savedStory.getId() +
|
||||
". Downloaded " + imageResult.getDownloadedImages().size() + " images.");
|
||||
|
||||
if (imageResult.hasWarnings()) {
|
||||
System.out.println("EPUB Import - Image processing warnings: " +
|
||||
String.join(", ", imageResult.getWarnings()));
|
||||
}
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// Log error but don't fail the import
|
||||
System.err.println("EPUB Import - Failed to process embedded images for story " +
|
||||
savedStory.getId() + ": " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
EPUBImportResponse response = EPUBImportResponse.success(savedStory.getId(), savedStory.getTitle());
|
||||
response.setWordCount(savedStory.getWordCount());
|
||||
response.setTotalChapters(book.getSpine().size());
|
||||
|
||||
if (request.getPreserveReadingPosition() != null && request.getPreserveReadingPosition()) {
|
||||
ReadingPosition readingPosition = extractReadingPosition(book, savedStory);
|
||||
if (readingPosition != null) {
|
||||
ReadingPosition savedPosition = readingPositionRepository.save(readingPosition);
|
||||
response.setReadingPosition(convertToDto(savedPosition));
|
||||
}
|
||||
}
|
||||
|
||||
return response;
|
||||
|
||||
} catch (Exception e) {
|
||||
return EPUBImportResponse.error("Failed to import EPUB: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private boolean isValidEPUBFile(MultipartFile file) {
|
||||
String filename = file.getOriginalFilename();
|
||||
if (filename == null || !filename.toLowerCase().endsWith(".epub")) {
|
||||
return false;
|
||||
}
|
||||
|
||||
String contentType = file.getContentType();
|
||||
return "application/epub+zip".equals(contentType) ||
|
||||
"application/zip".equals(contentType) ||
|
||||
contentType == null;
|
||||
}
|
||||
|
||||
private Book parseEPUBFile(MultipartFile epubFile) throws IOException {
|
||||
try (InputStream inputStream = epubFile.getInputStream()) {
|
||||
EpubReader epubReader = new EpubReader();
|
||||
return epubReader.readEpub(inputStream);
|
||||
} catch (Exception e) {
|
||||
throw new InvalidFileException("Failed to parse EPUB file: " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
private Story createStoryFromEPUB(Book book, EPUBImportRequest request) {
|
||||
Metadata metadata = book.getMetadata();
|
||||
|
||||
String title = extractTitle(metadata);
|
||||
String authorName = extractAuthorName(metadata, request);
|
||||
String description = extractDescription(metadata);
|
||||
String content = extractContent(book);
|
||||
|
||||
Story story = new Story();
|
||||
story.setTitle(title);
|
||||
story.setDescription(description);
|
||||
story.setContentHtml(sanitizationService.sanitize(content));
|
||||
|
||||
// Extract and process cover image
|
||||
if (request.getExtractCover() == null || request.getExtractCover()) {
|
||||
String coverPath = extractAndSaveCoverImage(book);
|
||||
if (coverPath != null) {
|
||||
story.setCoverPath(coverPath);
|
||||
}
|
||||
}
|
||||
|
||||
if (request.getAuthorId() != null) {
|
||||
try {
|
||||
Author author = authorService.findById(request.getAuthorId());
|
||||
story.setAuthor(author);
|
||||
} catch (ResourceNotFoundException e) {
|
||||
if (request.getCreateMissingAuthor()) {
|
||||
Author newAuthor = createAuthor(authorName);
|
||||
story.setAuthor(newAuthor);
|
||||
}
|
||||
}
|
||||
} else if (authorName != null && request.getCreateMissingAuthor()) {
|
||||
Author author = findOrCreateAuthor(authorName);
|
||||
story.setAuthor(author);
|
||||
}
|
||||
|
||||
if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
|
||||
try {
|
||||
Series series = seriesService.findById(request.getSeriesId());
|
||||
story.setSeries(series);
|
||||
story.setVolume(request.getSeriesVolume());
|
||||
} catch (ResourceNotFoundException e) {
|
||||
if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
|
||||
Series newSeries = createSeries(request.getSeriesName());
|
||||
story.setSeries(newSeries);
|
||||
story.setVolume(request.getSeriesVolume());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Handle tags from request or extract from EPUB metadata
|
||||
List<String> allTags = new ArrayList<>();
|
||||
if (request.getTags() != null && !request.getTags().isEmpty()) {
|
||||
allTags.addAll(request.getTags());
|
||||
}
|
||||
|
||||
// Extract subjects/keywords from EPUB metadata
|
||||
List<String> epubTags = extractTags(metadata);
|
||||
if (epubTags != null && !epubTags.isEmpty()) {
|
||||
allTags.addAll(epubTags);
|
||||
}
|
||||
|
||||
// Remove duplicates and create tags
|
||||
allTags.stream()
|
||||
.distinct()
|
||||
.forEach(tagName -> {
|
||||
Tag tag = tagService.findOrCreate(tagName.trim());
|
||||
story.addTag(tag);
|
||||
});
|
||||
|
||||
// Extract additional metadata for potential future use
|
||||
extractAdditionalMetadata(metadata, story);
|
||||
|
||||
return story;
|
||||
}
|
||||
|
||||
private String extractTitle(Metadata metadata) {
|
||||
List<String> titles = metadata.getTitles();
|
||||
if (titles != null && !titles.isEmpty()) {
|
||||
return titles.get(0);
|
||||
}
|
||||
return "Untitled EPUB";
|
||||
}
|
||||
|
||||
private String extractAuthorName(Metadata metadata, EPUBImportRequest request) {
|
||||
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty()) {
|
||||
return request.getAuthorName().trim();
|
||||
}
|
||||
|
||||
if (metadata.getAuthors() != null && !metadata.getAuthors().isEmpty()) {
|
||||
return metadata.getAuthors().get(0).getFirstname() + " " + metadata.getAuthors().get(0).getLastname();
|
||||
}
|
||||
|
||||
return "Unknown Author";
|
||||
}
|
||||
|
||||
private String extractDescription(Metadata metadata) {
|
||||
List<String> descriptions = metadata.getDescriptions();
|
||||
if (descriptions != null && !descriptions.isEmpty()) {
|
||||
return descriptions.get(0);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private List<String> extractTags(Metadata metadata) {
|
||||
List<String> tags = new ArrayList<>();
|
||||
|
||||
// Extract subjects (main source of tags in EPUB)
|
||||
List<String> subjects = metadata.getSubjects();
|
||||
if (subjects != null && !subjects.isEmpty()) {
|
||||
tags.addAll(subjects);
|
||||
}
|
||||
|
||||
// Extract keywords from meta tags
|
||||
String keywords = metadata.getMetaAttribute("keywords");
|
||||
if (keywords != null && !keywords.trim().isEmpty()) {
|
||||
String[] keywordArray = keywords.split("[,;]");
|
||||
for (String keyword : keywordArray) {
|
||||
String trimmed = keyword.trim();
|
||||
if (!trimmed.isEmpty()) {
|
||||
tags.add(trimmed);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Extract genre information
|
||||
String genre = metadata.getMetaAttribute("genre");
|
||||
if (genre != null && !genre.trim().isEmpty()) {
|
||||
tags.add(genre.trim());
|
||||
}
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
private void extractAdditionalMetadata(Metadata metadata, Story story) {
|
||||
// Extract language (could be useful for future i18n)
|
||||
String language = metadata.getLanguage();
|
||||
if (language != null && !language.trim().isEmpty()) {
|
||||
// Store as metadata in story description if needed
|
||||
// For now, we'll just log it for potential future use
|
||||
System.out.println("EPUB Language: " + language);
|
||||
}
|
||||
|
||||
// Extract publisher information
|
||||
List<String> publishers = metadata.getPublishers();
|
||||
if (publishers != null && !publishers.isEmpty()) {
|
||||
String publisher = publishers.get(0);
|
||||
// Could append to description or store separately in future
|
||||
System.out.println("EPUB Publisher: " + publisher);
|
||||
}
|
||||
|
||||
// Extract publication date
|
||||
List<nl.siegmann.epublib.domain.Date> dates = metadata.getDates();
|
||||
if (dates != null && !dates.isEmpty()) {
|
||||
for (nl.siegmann.epublib.domain.Date date : dates) {
|
||||
System.out.println("EPUB Date (" + date.getEvent() + "): " + date.getValue());
|
||||
}
|
||||
}
|
||||
|
||||
// Extract ISBN or other identifiers
|
||||
List<nl.siegmann.epublib.domain.Identifier> identifiers = metadata.getIdentifiers();
|
||||
if (identifiers != null && !identifiers.isEmpty()) {
|
||||
for (nl.siegmann.epublib.domain.Identifier identifier : identifiers) {
|
||||
System.out.println("EPUB Identifier (" + identifier.getScheme() + "): " + identifier.getValue());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private String extractContent(Book book) {
|
||||
StringBuilder contentBuilder = new StringBuilder();
|
||||
|
||||
List<SpineReference> spine = book.getSpine().getSpineReferences();
|
||||
for (SpineReference spineRef : spine) {
|
||||
try {
|
||||
Resource resource = spineRef.getResource();
|
||||
if (resource != null && resource.getData() != null) {
|
||||
String html = new String(resource.getData(), "UTF-8");
|
||||
|
||||
Document doc = Jsoup.parse(html);
|
||||
doc.select("script, style").remove();
|
||||
|
||||
String chapterContent = doc.body() != null ? doc.body().html() : doc.html();
|
||||
|
||||
contentBuilder.append("<div class=\"chapter\">")
|
||||
.append(chapterContent)
|
||||
.append("</div>");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// Skip this chapter on error
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
return contentBuilder.toString();
|
||||
}
|
||||
|
||||
private Author findOrCreateAuthor(String authorName) {
|
||||
Optional<Author> existingAuthor = authorService.findByNameOptional(authorName);
|
||||
if (existingAuthor.isPresent()) {
|
||||
return existingAuthor.get();
|
||||
}
|
||||
return createAuthor(authorName);
|
||||
}
|
||||
|
||||
private Author createAuthor(String authorName) {
|
||||
Author author = new Author();
|
||||
author.setName(authorName);
|
||||
return authorService.create(author);
|
||||
}
|
||||
|
||||
private Series createSeries(String seriesName) {
|
||||
Series series = new Series();
|
||||
series.setName(seriesName);
|
||||
return seriesService.create(series);
|
||||
}
|
||||
|
||||
private ReadingPosition extractReadingPosition(Book book, Story story) {
|
||||
try {
|
||||
Metadata metadata = book.getMetadata();
|
||||
|
||||
String positionMeta = metadata.getMetaAttribute("reading-position");
|
||||
String cfiMeta = metadata.getMetaAttribute("epub-cfi");
|
||||
|
||||
ReadingPosition position = new ReadingPosition(story);
|
||||
|
||||
if (cfiMeta != null) {
|
||||
position.setEpubCfi(cfiMeta);
|
||||
}
|
||||
|
||||
if (positionMeta != null) {
|
||||
try {
|
||||
String[] parts = positionMeta.split(":");
|
||||
if (parts.length >= 2) {
|
||||
position.setChapterIndex(Integer.parseInt(parts[0]));
|
||||
position.setWordPosition(Integer.parseInt(parts[1]));
|
||||
}
|
||||
} catch (NumberFormatException e) {
|
||||
// Ignore invalid position format
|
||||
}
|
||||
}
|
||||
|
||||
return position;
|
||||
|
||||
} catch (Exception e) {
|
||||
// Return null if no reading position found
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
private String extractAndSaveCoverImage(Book book) {
|
||||
try {
|
||||
Resource coverResource = book.getCoverImage();
|
||||
if (coverResource != null && coverResource.getData() != null) {
|
||||
// Create a temporary MultipartFile from the EPUB cover data
|
||||
byte[] imageData = coverResource.getData();
|
||||
String mediaType = coverResource.getMediaType() != null ?
|
||||
coverResource.getMediaType().toString() : "image/jpeg";
|
||||
|
||||
// Determine file extension from media type
|
||||
String extension = getExtensionFromMediaType(mediaType);
|
||||
String filename = "epub_cover_" + System.currentTimeMillis() + "." + extension;
|
||||
|
||||
// Create a custom MultipartFile implementation for the cover image
|
||||
MultipartFile coverFile = new EPUBCoverMultipartFile(imageData, filename, mediaType);
|
||||
|
||||
// Use ImageService to process and save the cover
|
||||
return imageService.uploadImage(coverFile, ImageService.ImageType.COVER);
|
||||
}
|
||||
} catch (Exception e) {
|
||||
// Log error but don't fail the import
|
||||
System.err.println("Failed to extract cover image: " + e.getMessage());
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
private String getExtensionFromMediaType(String mediaType) {
|
||||
switch (mediaType.toLowerCase()) {
|
||||
case "image/jpeg":
|
||||
case "image/jpg":
|
||||
return "jpg";
|
||||
case "image/png":
|
||||
return "png";
|
||||
case "image/gif":
|
||||
return "gif";
|
||||
case "image/webp":
|
||||
return "webp";
|
||||
default:
|
||||
return "jpg"; // Default fallback
|
||||
}
|
||||
}
|
||||
|
||||
private ReadingPositionDto convertToDto(ReadingPosition position) {
|
||||
if (position == null) return null;
|
||||
|
||||
ReadingPositionDto dto = new ReadingPositionDto();
|
||||
dto.setId(position.getId());
|
||||
dto.setStoryId(position.getStory().getId());
|
||||
dto.setChapterIndex(position.getChapterIndex());
|
||||
dto.setChapterTitle(position.getChapterTitle());
|
||||
dto.setWordPosition(position.getWordPosition());
|
||||
dto.setCharacterPosition(position.getCharacterPosition());
|
||||
dto.setPercentageComplete(position.getPercentageComplete());
|
||||
dto.setEpubCfi(position.getEpubCfi());
|
||||
dto.setContextBefore(position.getContextBefore());
|
||||
dto.setContextAfter(position.getContextAfter());
|
||||
dto.setCreatedAt(position.getCreatedAt());
|
||||
dto.setUpdatedAt(position.getUpdatedAt());
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
public List<String> validateEPUBFile(MultipartFile file) {
|
||||
List<String> errors = new ArrayList<>();
|
||||
|
||||
if (file == null || file.isEmpty()) {
|
||||
errors.add("EPUB file is required");
|
||||
return errors;
|
||||
}
|
||||
|
||||
if (!isValidEPUBFile(file)) {
|
||||
errors.add("Invalid EPUB file format. Only .epub files are supported");
|
||||
}
|
||||
|
||||
if (file.getSize() > 100 * 1024 * 1024) { // 100MB limit
|
||||
errors.add("EPUB file size exceeds 100MB limit");
|
||||
}
|
||||
|
||||
try {
|
||||
Book book = parseEPUBFile(file);
|
||||
if (book.getMetadata() == null) {
|
||||
errors.add("EPUB file contains no metadata");
|
||||
}
|
||||
if (book.getSpine() == null || book.getSpine().isEmpty()) {
|
||||
errors.add("EPUB file contains no readable content");
|
||||
}
|
||||
} catch (Exception e) {
|
||||
errors.add("Failed to parse EPUB file: " + e.getMessage());
|
||||
}
|
||||
|
||||
return errors;
|
||||
}
|
||||
|
||||
/**
|
||||
* Custom MultipartFile implementation for EPUB cover images
|
||||
*/
|
||||
private static class EPUBCoverMultipartFile implements MultipartFile {
|
||||
private final byte[] data;
|
||||
private final String filename;
|
||||
private final String contentType;
|
||||
|
||||
public EPUBCoverMultipartFile(byte[] data, String filename, String contentType) {
|
||||
this.data = data;
|
||||
this.filename = filename;
|
||||
this.contentType = contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "coverImage";
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getOriginalFilename() {
|
||||
return filename;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getContentType() {
|
||||
return contentType;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean isEmpty() {
|
||||
return data == null || data.length == 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public long getSize() {
|
||||
return data != null ? data.length : 0;
|
||||
}
|
||||
|
||||
@Override
|
||||
public byte[] getBytes() {
|
||||
return data;
|
||||
}
|
||||
|
||||
@Override
|
||||
public InputStream getInputStream() {
|
||||
return new java.io.ByteArrayInputStream(data);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferTo(java.io.File dest) throws IOException {
|
||||
try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
|
||||
fos.write(data);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void transferTo(java.nio.file.Path dest) throws IOException {
|
||||
java.nio.file.Files.write(dest, data);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -54,7 +54,7 @@ public class HtmlSanitizationService {
|
||||
"p", "br", "div", "span", "h1", "h2", "h3", "h4", "h5", "h6",
|
||||
"b", "strong", "i", "em", "u", "s", "strike", "del", "ins",
|
||||
"sup", "sub", "small", "big", "mark", "pre", "code",
|
||||
"ul", "ol", "li", "dl", "dt", "dd", "a",
|
||||
"ul", "ol", "li", "dl", "dt", "dd", "a", "img",
|
||||
"table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption",
|
||||
"blockquote", "cite", "q", "hr"
|
||||
));
|
||||
@@ -65,13 +65,13 @@ public class HtmlSanitizationService {
|
||||
}
|
||||
|
||||
private void createSafelist() {
|
||||
this.allowlist = new Safelist();
|
||||
|
||||
this.allowlist = Safelist.relaxed();
|
||||
|
||||
// Add allowed tags
|
||||
if (config.getAllowedTags() != null) {
|
||||
config.getAllowedTags().forEach(allowlist::addTags);
|
||||
}
|
||||
|
||||
|
||||
// Add allowed attributes
|
||||
if (config.getAllowedAttributes() != null) {
|
||||
for (Map.Entry<String, List<String>> entry : config.getAllowedAttributes().entrySet()) {
|
||||
@@ -82,8 +82,35 @@ public class HtmlSanitizationService {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Special handling for img tags - allow all src attributes and validate later
|
||||
allowlist.removeProtocols("img", "src", "http", "https");
|
||||
// This is the key: preserve relative URLs by not restricting them
|
||||
allowlist.preserveRelativeLinks(true);
|
||||
|
||||
// Configure allowed protocols for other attributes
|
||||
if (config.getAllowedProtocols() != null) {
|
||||
for (Map.Entry<String, Map<String, List<String>>> tagEntry : config.getAllowedProtocols().entrySet()) {
|
||||
String tag = tagEntry.getKey();
|
||||
Map<String, List<String>> attributeProtocols = tagEntry.getValue();
|
||||
|
||||
if (attributeProtocols != null) {
|
||||
for (Map.Entry<String, List<String>> attrEntry : attributeProtocols.entrySet()) {
|
||||
String attribute = attrEntry.getKey();
|
||||
List<String> protocols = attrEntry.getValue();
|
||||
|
||||
if (protocols != null && !("img".equals(tag) && "src".equals(attribute))) {
|
||||
// Skip img src since we handled it above
|
||||
allowlist.addProtocols(tag, attribute, protocols.toArray(new String[0]));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Configured Jsoup Safelist with preserveRelativeLinks=true for local image URLs");
|
||||
|
||||
// Remove specific attributes (like href from links for security)
|
||||
// Remove specific attributes if needed (deprecated in favor of protocol control)
|
||||
if (config.getRemovedAttributes() != null) {
|
||||
for (Map.Entry<String, List<String>> entry : config.getRemovedAttributes().entrySet()) {
|
||||
String tag = entry.getKey();
|
||||
@@ -114,8 +141,10 @@ public class HtmlSanitizationService {
|
||||
if (html == null || html.trim().isEmpty()) {
|
||||
return "";
|
||||
}
|
||||
|
||||
return Jsoup.clean(html, allowlist);
|
||||
logger.info("Content before sanitization: "+html);
|
||||
String saniztedHtml = Jsoup.clean(html, allowlist.preserveRelativeLinks(true));
|
||||
logger.info("Content after sanitization: "+saniztedHtml);
|
||||
return saniztedHtml;
|
||||
}
|
||||
|
||||
public String extractPlainText(String html) {
|
||||
|
||||
@@ -1,5 +1,8 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
import org.springframework.web.multipart.MultipartFile;
|
||||
@@ -7,28 +10,43 @@ import org.springframework.web.multipart.MultipartFile;
|
||||
import javax.imageio.ImageIO;
|
||||
import java.awt.*;
|
||||
import java.awt.image.BufferedImage;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.*;
|
||||
import java.net.HttpURLConnection;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.*;
|
||||
import java.util.List;
|
||||
import java.util.regex.Matcher;
|
||||
import java.util.regex.Pattern;
|
||||
|
||||
@Service
|
||||
public class ImageService {
|
||||
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(ImageService.class);
|
||||
|
||||
private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of(
|
||||
"image/jpeg", "image/jpg", "image/png", "image/webp"
|
||||
"image/jpeg", "image/jpg", "image/png"
|
||||
);
|
||||
|
||||
private static final Set<String> ALLOWED_EXTENSIONS = Set.of(
|
||||
"jpg", "jpeg", "png", "webp"
|
||||
"jpg", "jpeg", "png"
|
||||
);
|
||||
|
||||
@Value("${storycove.images.upload-dir:/app/images}")
|
||||
private String uploadDir;
|
||||
private String baseUploadDir;
|
||||
|
||||
@Autowired
|
||||
private LibraryService libraryService;
|
||||
|
||||
@Autowired
|
||||
private StoryService storyService;
|
||||
|
||||
private String getUploadDir() {
|
||||
String libraryPath = libraryService.getCurrentImagePath();
|
||||
return baseUploadDir + libraryPath;
|
||||
}
|
||||
|
||||
@Value("${storycove.images.cover.max-width:800}")
|
||||
private int coverMaxWidth;
|
||||
@@ -44,14 +62,15 @@ public class ImageService {
|
||||
|
||||
public enum ImageType {
|
||||
COVER("covers"),
|
||||
AVATAR("avatars");
|
||||
|
||||
AVATAR("avatars"),
|
||||
CONTENT("content");
|
||||
|
||||
private final String directory;
|
||||
|
||||
|
||||
ImageType(String directory) {
|
||||
this.directory = directory;
|
||||
}
|
||||
|
||||
|
||||
public String getDirectory() {
|
||||
return directory;
|
||||
}
|
||||
@@ -61,7 +80,7 @@ public class ImageService {
|
||||
validateFile(file);
|
||||
|
||||
// Create directories if they don't exist
|
||||
Path typeDir = Paths.get(uploadDir, imageType.getDirectory());
|
||||
Path typeDir = Paths.get(getUploadDir(), imageType.getDirectory());
|
||||
Files.createDirectories(typeDir);
|
||||
|
||||
// Generate unique filename
|
||||
@@ -88,7 +107,7 @@ public class ImageService {
|
||||
}
|
||||
|
||||
try {
|
||||
Path fullPath = Paths.get(uploadDir, imagePath);
|
||||
Path fullPath = Paths.get(getUploadDir(), imagePath);
|
||||
return Files.deleteIfExists(fullPath);
|
||||
} catch (IOException e) {
|
||||
return false;
|
||||
@@ -96,7 +115,7 @@ public class ImageService {
|
||||
}
|
||||
|
||||
public Path getImagePath(String imagePath) {
|
||||
return Paths.get(uploadDir, imagePath);
|
||||
return Paths.get(getUploadDir(), imagePath);
|
||||
}
|
||||
|
||||
public boolean imageExists(String imagePath) {
|
||||
@@ -107,6 +126,19 @@ public class ImageService {
|
||||
return Files.exists(getImagePath(imagePath));
|
||||
}
|
||||
|
||||
public boolean imageExistsInLibrary(String imagePath, String libraryId) {
|
||||
if (imagePath == null || imagePath.trim().isEmpty() || libraryId == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return Files.exists(getImagePathInLibrary(imagePath, libraryId));
|
||||
}
|
||||
|
||||
public Path getImagePathInLibrary(String imagePath, String libraryId) {
|
||||
String libraryPath = libraryService.getImagePathForLibrary(libraryId);
|
||||
return Paths.get(baseUploadDir + libraryPath, imagePath);
|
||||
}
|
||||
|
||||
private void validateFile(MultipartFile file) throws IOException {
|
||||
if (file == null || file.isEmpty()) {
|
||||
throw new IllegalArgumentException("File is empty");
|
||||
@@ -160,6 +192,9 @@ public class ImageService {
|
||||
maxWidth = avatarMaxSize;
|
||||
maxHeight = avatarMaxSize;
|
||||
break;
|
||||
case CONTENT:
|
||||
// Content images are not resized
|
||||
return new Dimension(originalWidth, originalHeight);
|
||||
default:
|
||||
return new Dimension(originalWidth, originalHeight);
|
||||
}
|
||||
@@ -206,4 +241,504 @@ public class ImageService {
|
||||
String extension = getFileExtension(filename);
|
||||
return ALLOWED_EXTENSIONS.contains(extension);
|
||||
}
|
||||
|
||||
// Content image processing methods
|
||||
|
||||
/**
|
||||
* Process HTML content and download all referenced images, replacing URLs with local paths
|
||||
*/
|
||||
public ContentImageProcessingResult processContentImages(String htmlContent, UUID storyId) {
|
||||
logger.info("Processing content images for story: {}, content length: {}", storyId,
|
||||
htmlContent != null ? htmlContent.length() : 0);
|
||||
|
||||
List<String> warnings = new ArrayList<>();
|
||||
List<String> downloadedImages = new ArrayList<>();
|
||||
|
||||
if (htmlContent == null || htmlContent.trim().isEmpty()) {
|
||||
logger.info("No content to process for story: {}", storyId);
|
||||
return new ContentImageProcessingResult(htmlContent, warnings, downloadedImages);
|
||||
}
|
||||
|
||||
// Find all img tags with src attributes
|
||||
Pattern imgPattern = Pattern.compile("<img[^>]+src=[\"']([^\"']+)[\"'][^>]*>", Pattern.CASE_INSENSITIVE);
|
||||
Matcher matcher = imgPattern.matcher(htmlContent);
|
||||
|
||||
int imageCount = 0;
|
||||
int externalImageCount = 0;
|
||||
|
||||
StringBuffer processedContent = new StringBuffer();
|
||||
|
||||
while (matcher.find()) {
|
||||
String fullImgTag = matcher.group(0);
|
||||
String imageUrl = matcher.group(1);
|
||||
imageCount++;
|
||||
|
||||
logger.info("Found image #{}: {} in tag: {}", imageCount, imageUrl, fullImgTag);
|
||||
|
||||
try {
|
||||
// Skip if it's already a local path or data URL
|
||||
if (imageUrl.startsWith("/") || imageUrl.startsWith("data:")) {
|
||||
logger.info("Skipping local/data URL: {}", imageUrl);
|
||||
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(fullImgTag));
|
||||
continue;
|
||||
}
|
||||
|
||||
externalImageCount++;
|
||||
logger.info("Processing external image #{}: {}", externalImageCount, imageUrl);
|
||||
|
||||
// Download and store the image
|
||||
String localPath = downloadImageFromUrl(imageUrl, storyId);
|
||||
downloadedImages.add(localPath);
|
||||
|
||||
// Generate local URL
|
||||
String localUrl = getLocalImageUrl(storyId, localPath);
|
||||
logger.info("Downloaded image: {} -> {}", imageUrl, localUrl);
|
||||
|
||||
// Replace the src attribute with the local path - handle both single and double quotes
|
||||
String newImgTag = fullImgTag
|
||||
.replaceFirst("src=\"" + Pattern.quote(imageUrl) + "\"", "src=\"" + localUrl + "\"")
|
||||
.replaceFirst("src='" + Pattern.quote(imageUrl) + "'", "src=\"" + localUrl + "\"");
|
||||
|
||||
// If replacement didn't work, try a more generic approach
|
||||
if (newImgTag.equals(fullImgTag)) {
|
||||
logger.warn("Standard replacement failed for image URL: {}, trying generic replacement", imageUrl);
|
||||
newImgTag = fullImgTag.replaceAll("src\\s*=\\s*[\"']?" + Pattern.quote(imageUrl) + "[\"']?", "src=\"" + localUrl + "\"");
|
||||
}
|
||||
|
||||
logger.info("Replaced img tag: {} -> {}", fullImgTag, newImgTag);
|
||||
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(newImgTag));
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to download image: {} - {}", imageUrl, e.getMessage(), e);
|
||||
warnings.add("Failed to download image: " + imageUrl + " - " + e.getMessage());
|
||||
// Keep original URL in case of failure
|
||||
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(fullImgTag));
|
||||
}
|
||||
}
|
||||
|
||||
matcher.appendTail(processedContent);
|
||||
|
||||
logger.info("Finished processing images for story: {}. Found {} total images, {} external. Downloaded {} images, {} warnings",
|
||||
storyId, imageCount, externalImageCount, downloadedImages.size(), warnings.size());
|
||||
|
||||
return new ContentImageProcessingResult(processedContent.toString(), warnings, downloadedImages);
|
||||
}
|
||||
|
||||
/**
|
||||
* Download an image from a URL and store it locally
|
||||
*/
|
||||
private String downloadImageFromUrl(String imageUrl, UUID storyId) throws IOException {
|
||||
URL url = new URL(imageUrl);
|
||||
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
|
||||
|
||||
// Set a reasonable user agent to avoid blocks
|
||||
connection.setRequestProperty("User-Agent", "Mozilla/5.0 (StoryCove Image Processor)");
|
||||
connection.setConnectTimeout(30000); // 30 seconds
|
||||
connection.setReadTimeout(30000);
|
||||
|
||||
try (InputStream inputStream = connection.getInputStream()) {
|
||||
// Get content type to determine file extension
|
||||
String contentType = connection.getContentType();
|
||||
String extension = getExtensionFromContentType(contentType);
|
||||
|
||||
if (extension == null) {
|
||||
// Try to extract from URL
|
||||
extension = getExtensionFromUrl(imageUrl);
|
||||
}
|
||||
|
||||
if (extension == null || !ALLOWED_EXTENSIONS.contains(extension.toLowerCase())) {
|
||||
throw new IllegalArgumentException("Unsupported image format: " + contentType);
|
||||
}
|
||||
|
||||
// Create directories for content images
|
||||
Path contentDir = Paths.get(getUploadDir(), ImageType.CONTENT.getDirectory(), storyId.toString());
|
||||
Files.createDirectories(contentDir);
|
||||
|
||||
// Generate unique filename
|
||||
String filename = UUID.randomUUID().toString() + "." + extension.toLowerCase();
|
||||
Path filePath = contentDir.resolve(filename);
|
||||
|
||||
// Read and validate the image
|
||||
byte[] imageData = inputStream.readAllBytes();
|
||||
ByteArrayInputStream bais = new ByteArrayInputStream(imageData);
|
||||
BufferedImage image = ImageIO.read(bais);
|
||||
|
||||
if (image == null) {
|
||||
throw new IOException("Invalid image format");
|
||||
}
|
||||
|
||||
// Save the image
|
||||
Files.write(filePath, imageData);
|
||||
|
||||
// Return relative path
|
||||
return ImageType.CONTENT.getDirectory() + "/" + storyId.toString() + "/" + filename;
|
||||
|
||||
} finally {
|
||||
connection.disconnect();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate local image URL for serving
|
||||
*/
|
||||
private String getLocalImageUrl(UUID storyId, String imagePath) {
|
||||
String currentLibraryId = libraryService.getCurrentLibraryId();
|
||||
if (currentLibraryId == null || currentLibraryId.trim().isEmpty()) {
|
||||
logger.warn("Current library ID is null or empty when generating local image URL for story: {}", storyId);
|
||||
return "/api/files/images/default/" + imagePath;
|
||||
}
|
||||
String localUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
|
||||
logger.info("Generated local image URL: {} for story: {}", localUrl, storyId);
|
||||
return localUrl;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get file extension from content type
|
||||
*/
|
||||
private String getExtensionFromContentType(String contentType) {
|
||||
if (contentType == null) return null;
|
||||
|
||||
switch (contentType.toLowerCase()) {
|
||||
case "image/jpeg":
|
||||
case "image/jpg":
|
||||
return "jpg";
|
||||
case "image/png":
|
||||
return "png";
|
||||
default:
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract file extension from URL
|
||||
*/
|
||||
private String getExtensionFromUrl(String url) {
|
||||
try {
|
||||
String path = new URL(url).getPath();
|
||||
int lastDot = path.lastIndexOf('.');
|
||||
if (lastDot > 0 && lastDot < path.length() - 1) {
|
||||
return path.substring(lastDot + 1).toLowerCase();
|
||||
}
|
||||
} catch (Exception ignored) {
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Cleanup orphaned content images that are no longer referenced in any story
|
||||
*/
|
||||
public ContentImageCleanupResult cleanupOrphanedContentImages(boolean dryRun) {
|
||||
logger.info("Starting orphaned content image cleanup (dryRun: {})", dryRun);
|
||||
|
||||
final Set<String> referencedImages;
|
||||
List<String> orphanedImages = new ArrayList<>();
|
||||
List<String> errors = new ArrayList<>();
|
||||
long totalSizeBytes = 0;
|
||||
int foldersToDelete = 0;
|
||||
|
||||
// Step 1: Collect all image references from all story content
|
||||
logger.info("Scanning all story content for image references...");
|
||||
referencedImages = collectAllImageReferences();
|
||||
logger.info("Found {} unique image references in story content", referencedImages.size());
|
||||
|
||||
try {
|
||||
// Step 2: Scan the content images directory
|
||||
Path contentImagesDir = Paths.get(getUploadDir(), ImageType.CONTENT.getDirectory());
|
||||
|
||||
if (!Files.exists(contentImagesDir)) {
|
||||
logger.info("Content images directory does not exist: {}", contentImagesDir);
|
||||
return new ContentImageCleanupResult(orphanedImages, 0, 0, referencedImages.size(), errors, dryRun);
|
||||
}
|
||||
|
||||
logger.info("Scanning content images directory: {}", contentImagesDir);
|
||||
|
||||
// Walk through all story directories
|
||||
Files.walk(contentImagesDir, 2)
|
||||
.filter(Files::isDirectory)
|
||||
.filter(path -> !path.equals(contentImagesDir)) // Skip the root content directory
|
||||
.forEach(storyDir -> {
|
||||
try {
|
||||
String storyId = storyDir.getFileName().toString();
|
||||
logger.debug("Checking story directory: {}", storyId);
|
||||
|
||||
// Check if this story still exists
|
||||
boolean storyExists = storyService.findByIdOptional(UUID.fromString(storyId)).isPresent();
|
||||
|
||||
if (!storyExists) {
|
||||
logger.info("Found orphaned story directory (story deleted): {}", storyId);
|
||||
// Mark entire directory for deletion
|
||||
try {
|
||||
Files.walk(storyDir)
|
||||
.filter(Files::isRegularFile)
|
||||
.forEach(file -> {
|
||||
try {
|
||||
long size = Files.size(file);
|
||||
orphanedImages.add(file.toString());
|
||||
// Add to total size (will be updated in main scope)
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to get size for " + file + ": " + e.getMessage());
|
||||
}
|
||||
});
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to scan orphaned story directory " + storyDir + ": " + e.getMessage());
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
// Check individual files in the story directory
|
||||
try {
|
||||
Files.walk(storyDir)
|
||||
.filter(Files::isRegularFile)
|
||||
.forEach(imageFile -> {
|
||||
try {
|
||||
String imagePath = getRelativeImagePath(imageFile);
|
||||
|
||||
if (!referencedImages.contains(imagePath)) {
|
||||
logger.debug("Found orphaned image: {}", imagePath);
|
||||
orphanedImages.add(imageFile.toString());
|
||||
}
|
||||
} catch (Exception e) {
|
||||
errors.add("Error checking image file " + imageFile + ": " + e.getMessage());
|
||||
}
|
||||
});
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to scan story directory " + storyDir + ": " + e.getMessage());
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
errors.add("Error processing story directory " + storyDir + ": " + e.getMessage());
|
||||
}
|
||||
});
|
||||
|
||||
// Calculate total size and count empty directories
|
||||
for (String orphanedImage : orphanedImages) {
|
||||
try {
|
||||
Path imagePath = Paths.get(orphanedImage);
|
||||
if (Files.exists(imagePath)) {
|
||||
totalSizeBytes += Files.size(imagePath);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to get size for " + orphanedImage + ": " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// Count empty directories that would be removed
|
||||
try {
|
||||
foldersToDelete = (int) Files.walk(contentImagesDir)
|
||||
.filter(Files::isDirectory)
|
||||
.filter(path -> !path.equals(contentImagesDir))
|
||||
.filter(this::isDirectoryEmptyOrWillBeEmpty)
|
||||
.count();
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to count empty directories: " + e.getMessage());
|
||||
}
|
||||
|
||||
// Step 3: Delete orphaned files if not dry run
|
||||
if (!dryRun && !orphanedImages.isEmpty()) {
|
||||
logger.info("Deleting {} orphaned images...", orphanedImages.size());
|
||||
|
||||
Set<Path> directoriesToCheck = new HashSet<>();
|
||||
|
||||
for (String orphanedImage : orphanedImages) {
|
||||
try {
|
||||
Path imagePath = Paths.get(orphanedImage);
|
||||
if (Files.exists(imagePath)) {
|
||||
directoriesToCheck.add(imagePath.getParent());
|
||||
Files.delete(imagePath);
|
||||
logger.debug("Deleted orphaned image: {}", imagePath);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to delete " + orphanedImage + ": " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
// Clean up empty directories
|
||||
for (Path dir : directoriesToCheck) {
|
||||
try {
|
||||
if (Files.exists(dir) && isDirEmpty(dir)) {
|
||||
Files.delete(dir);
|
||||
logger.info("Deleted empty story directory: {}", dir);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
errors.add("Failed to delete empty directory " + dir + ": " + e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
logger.info("Orphaned content image cleanup completed. Found {} orphaned files ({} bytes)",
|
||||
orphanedImages.size(), totalSizeBytes);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error during orphaned content image cleanup", e);
|
||||
errors.add("General cleanup error: " + e.getMessage());
|
||||
}
|
||||
|
||||
return new ContentImageCleanupResult(orphanedImages, totalSizeBytes, foldersToDelete, referencedImages.size(), errors, dryRun);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect all image references from all story content
|
||||
*/
|
||||
private Set<String> collectAllImageReferences() {
|
||||
Set<String> referencedImages = new HashSet<>();
|
||||
|
||||
try {
|
||||
// Get all stories
|
||||
List<com.storycove.entity.Story> allStories = storyService.findAllWithAssociations();
|
||||
|
||||
// Pattern to match local image URLs in content
|
||||
Pattern imagePattern = Pattern.compile("src=[\"']([^\"']*(?:content/[^\"']*\\.(jpg|jpeg|png)))[\"']", Pattern.CASE_INSENSITIVE);
|
||||
|
||||
for (com.storycove.entity.Story story : allStories) {
|
||||
if (story.getContentHtml() != null) {
|
||||
Matcher matcher = imagePattern.matcher(story.getContentHtml());
|
||||
|
||||
while (matcher.find()) {
|
||||
String imageSrc = matcher.group(1);
|
||||
|
||||
// Convert to relative path format that matches our file system
|
||||
String relativePath = convertSrcToRelativePath(imageSrc);
|
||||
if (relativePath != null) {
|
||||
referencedImages.add(relativePath);
|
||||
logger.debug("Found image reference in story {}: {}", story.getId(), relativePath);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Error collecting image references from stories", e);
|
||||
}
|
||||
|
||||
return referencedImages;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert an image src attribute to relative file path
|
||||
*/
|
||||
private String convertSrcToRelativePath(String src) {
|
||||
try {
|
||||
// Handle both /api/files/images/libraryId/content/... and relative content/... paths
|
||||
if (src.contains("/content/")) {
|
||||
int contentIndex = src.indexOf("/content/");
|
||||
return src.substring(contentIndex + 1); // Remove leading slash, keep "content/..."
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.debug("Failed to convert src to relative path: {}", src);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get relative image path from absolute file path
|
||||
*/
|
||||
private String getRelativeImagePath(Path imageFile) {
|
||||
try {
|
||||
Path uploadDir = Paths.get(getUploadDir());
|
||||
Path relativePath = uploadDir.relativize(imageFile);
|
||||
return relativePath.toString().replace('\\', '/'); // Normalize path separators
|
||||
} catch (Exception e) {
|
||||
logger.debug("Failed to get relative path for: {}", imageFile);
|
||||
return imageFile.toString();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if directory is empty or will be empty after cleanup
|
||||
*/
|
||||
private boolean isDirectoryEmptyOrWillBeEmpty(Path dir) {
|
||||
try {
|
||||
return Files.walk(dir)
|
||||
.filter(Files::isRegularFile)
|
||||
.count() == 0;
|
||||
} catch (IOException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if directory is empty
|
||||
*/
|
||||
private boolean isDirEmpty(Path dir) {
|
||||
try {
|
||||
return Files.list(dir).count() == 0;
|
||||
} catch (IOException e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clean up content images for a story
|
||||
*/
|
||||
public void deleteContentImages(UUID storyId) {
|
||||
try {
|
||||
Path contentDir = Paths.get(getUploadDir(), ImageType.CONTENT.getDirectory(), storyId.toString());
|
||||
if (Files.exists(contentDir)) {
|
||||
Files.walk(contentDir)
|
||||
.sorted(Comparator.reverseOrder())
|
||||
.map(Path::toFile)
|
||||
.forEach(java.io.File::delete);
|
||||
}
|
||||
} catch (IOException e) {
|
||||
// Log but don't throw - this is cleanup
|
||||
System.err.println("Failed to clean up content images for story " + storyId + ": " + e.getMessage());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Result class for content image processing
|
||||
*/
|
||||
public static class ContentImageProcessingResult {
|
||||
private final String processedContent;
|
||||
private final List<String> warnings;
|
||||
private final List<String> downloadedImages;
|
||||
|
||||
public ContentImageProcessingResult(String processedContent, List<String> warnings, List<String> downloadedImages) {
|
||||
this.processedContent = processedContent;
|
||||
this.warnings = warnings;
|
||||
this.downloadedImages = downloadedImages;
|
||||
}
|
||||
|
||||
public String getProcessedContent() { return processedContent; }
|
||||
public List<String> getWarnings() { return warnings; }
|
||||
public List<String> getDownloadedImages() { return downloadedImages; }
|
||||
public boolean hasWarnings() { return !warnings.isEmpty(); }
|
||||
}
|
||||
|
||||
/**
|
||||
* Result class for orphaned image cleanup
|
||||
*/
|
||||
public static class ContentImageCleanupResult {
|
||||
private final List<String> orphanedImages;
|
||||
private final long totalSizeBytes;
|
||||
private final int foldersToDelete;
|
||||
private final int totalReferencedImages;
|
||||
private final List<String> errors;
|
||||
private final boolean dryRun;
|
||||
|
||||
public ContentImageCleanupResult(List<String> orphanedImages, long totalSizeBytes, int foldersToDelete,
|
||||
int totalReferencedImages, List<String> errors, boolean dryRun) {
|
||||
this.orphanedImages = orphanedImages;
|
||||
this.totalSizeBytes = totalSizeBytes;
|
||||
this.foldersToDelete = foldersToDelete;
|
||||
this.totalReferencedImages = totalReferencedImages;
|
||||
this.errors = errors;
|
||||
this.dryRun = dryRun;
|
||||
}
|
||||
|
||||
public List<String> getOrphanedImages() { return orphanedImages; }
|
||||
public long getTotalSizeBytes() { return totalSizeBytes; }
|
||||
public int getFoldersToDelete() { return foldersToDelete; }
|
||||
public int getTotalReferencedImages() { return totalReferencedImages; }
|
||||
public List<String> getErrors() { return errors; }
|
||||
public boolean isDryRun() { return dryRun; }
|
||||
public boolean hasErrors() { return !errors.isEmpty(); }
|
||||
|
||||
public String getFormattedSize() {
|
||||
if (totalSizeBytes < 1024) return totalSizeBytes + " B";
|
||||
if (totalSizeBytes < 1024 * 1024) return String.format("%.1f KB", totalSizeBytes / 1024.0);
|
||||
if (totalSizeBytes < 1024 * 1024 * 1024) return String.format("%.1f MB", totalSizeBytes / (1024.0 * 1024.0));
|
||||
return String.format("%.1f GB", totalSizeBytes / (1024.0 * 1024.0 * 1024.0));
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Qualifier;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import javax.sql.DataSource;
|
||||
import java.sql.Connection;
|
||||
import java.sql.SQLException;
|
||||
|
||||
/**
|
||||
* Base service class that provides library-aware database access.
|
||||
*
|
||||
* This approach is safer than routing at the datasource level because:
|
||||
* 1. It doesn't interfere with Spring's initialization process
|
||||
* 2. It allows fine-grained control over which operations are library-aware
|
||||
* 3. It provides clear separation between authentication (uses default DB) and library operations
|
||||
*/
|
||||
@Component
|
||||
public class LibraryAwareService {
|
||||
|
||||
@Autowired
|
||||
private LibraryService libraryService;
|
||||
|
||||
@Autowired
|
||||
@Qualifier("dataSource")
|
||||
private DataSource defaultDataSource;
|
||||
|
||||
/**
|
||||
* Get a database connection for the current active library.
|
||||
* Falls back to default datasource if no library is active.
|
||||
*/
|
||||
public Connection getCurrentLibraryConnection() throws SQLException {
|
||||
try {
|
||||
// Try to get library-specific connection
|
||||
DataSource libraryDataSource = libraryService.getCurrentDataSource();
|
||||
return libraryDataSource.getConnection();
|
||||
} catch (IllegalStateException e) {
|
||||
// No active library - use default datasource
|
||||
return defaultDataSource.getConnection();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get a database connection for the default/fallback database.
|
||||
* Use this for authentication and system-level operations.
|
||||
*/
|
||||
public Connection getDefaultConnection() throws SQLException {
|
||||
return defaultDataSource.getConnection();
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if a library is currently active
|
||||
*/
|
||||
public boolean hasActiveLibrary() {
|
||||
try {
|
||||
return libraryService.getCurrentLibraryId() != null;
|
||||
} catch (Exception e) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current active library ID, or null if none
|
||||
*/
|
||||
public String getCurrentLibraryId() {
|
||||
try {
|
||||
return libraryService.getCurrentLibraryId();
|
||||
} catch (Exception e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
830
backend/src/main/java/com/storycove/service/LibraryService.java
Normal file
830
backend/src/main/java/com/storycove/service/LibraryService.java
Normal file
@@ -0,0 +1,830 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.entity.Library;
|
||||
import com.storycove.dto.LibraryDto;
|
||||
import com.fasterxml.jackson.core.type.TypeReference;
|
||||
import com.fasterxml.jackson.databind.ObjectMapper;
|
||||
import com.zaxxer.hikari.HikariConfig;
|
||||
import com.zaxxer.hikari.HikariDataSource;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.context.ApplicationContext;
|
||||
import org.springframework.context.ApplicationContextAware;
|
||||
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import jakarta.annotation.PreDestroy;
|
||||
import javax.sql.DataSource;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.charset.StandardCharsets;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.Path;
|
||||
import java.nio.file.Paths;
|
||||
import java.sql.SQLException;
|
||||
import java.util.*;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
@Service
|
||||
public class LibraryService implements ApplicationContextAware {
|
||||
private static final Logger logger = LoggerFactory.getLogger(LibraryService.class);
|
||||
|
||||
@Value("${spring.datasource.url}")
|
||||
private String baseDbUrl;
|
||||
|
||||
@Value("${spring.datasource.username}")
|
||||
private String dbUsername;
|
||||
|
||||
@Value("${spring.datasource.password}")
|
||||
private String dbPassword;
|
||||
|
||||
|
||||
private final ObjectMapper objectMapper = new ObjectMapper();
|
||||
private final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
|
||||
private final Map<String, Library> libraries = new ConcurrentHashMap<>();
|
||||
|
||||
// Spring ApplicationContext for accessing other services without circular dependencies
|
||||
private ApplicationContext applicationContext;
|
||||
|
||||
// Current active resources
|
||||
private volatile String currentLibraryId;
|
||||
|
||||
// Security: Track if user has explicitly authenticated in this session
|
||||
private volatile boolean explicitlyAuthenticated = false;
|
||||
|
||||
private static final String LIBRARIES_CONFIG_PATH = "/app/config/libraries.json";
|
||||
private static final Path libraryConfigDir = Paths.get("/app/config");
|
||||
|
||||
@Override
|
||||
public void setApplicationContext(ApplicationContext applicationContext) {
|
||||
this.applicationContext = applicationContext;
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
public void initialize() {
|
||||
loadLibrariesFromFile();
|
||||
|
||||
// If no libraries exist, create a default one
|
||||
if (libraries.isEmpty()) {
|
||||
createDefaultLibrary();
|
||||
}
|
||||
|
||||
// Security: Do NOT automatically switch to any library on startup
|
||||
// Users must authenticate before accessing any library
|
||||
explicitlyAuthenticated = false;
|
||||
currentLibraryId = null;
|
||||
|
||||
if (!libraries.isEmpty()) {
|
||||
logger.info("Loaded {} libraries. Authentication required to access any library.", libraries.size());
|
||||
} else {
|
||||
logger.info("No libraries found. A default library will be created on first authentication.");
|
||||
}
|
||||
|
||||
logger.info("Security: Application startup completed. All users must re-authenticate.");
|
||||
}
|
||||
|
||||
@PreDestroy
|
||||
public void cleanup() {
|
||||
currentLibraryId = null;
|
||||
explicitlyAuthenticated = false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear authentication state (for logout)
|
||||
*/
|
||||
public void clearAuthentication() {
|
||||
explicitlyAuthenticated = false;
|
||||
currentLibraryId = null;
|
||||
logger.info("Authentication cleared - user must re-authenticate to access libraries");
|
||||
}
|
||||
|
||||
|
||||
public String authenticateAndGetLibrary(String password) {
|
||||
for (Library library : libraries.values()) {
|
||||
if (passwordEncoder.matches(password, library.getPasswordHash())) {
|
||||
// Mark as explicitly authenticated for this session
|
||||
explicitlyAuthenticated = true;
|
||||
logger.info("User explicitly authenticated for library: {}", library.getId());
|
||||
return library.getId();
|
||||
}
|
||||
}
|
||||
return null; // Authentication failed
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch to library after authentication with forced reindexing
|
||||
* This ensures OpenSearch is always up-to-date after login
|
||||
*/
|
||||
public synchronized void switchToLibraryAfterAuthentication(String libraryId) throws Exception {
|
||||
logger.info("Switching to library after authentication: {} (forcing reindex)", libraryId);
|
||||
switchToLibrary(libraryId, true);
|
||||
}
|
||||
|
||||
public synchronized void switchToLibrary(String libraryId) throws Exception {
|
||||
switchToLibrary(libraryId, false);
|
||||
}
|
||||
|
||||
public synchronized void switchToLibrary(String libraryId, boolean forceReindex) throws Exception {
|
||||
// Security: Only allow library switching after explicit authentication
|
||||
if (!explicitlyAuthenticated) {
|
||||
throw new IllegalStateException("Library switching requires explicit authentication. Please log in first.");
|
||||
}
|
||||
|
||||
if (libraryId.equals(currentLibraryId) && !forceReindex) {
|
||||
return; // Already active and no forced reindex requested
|
||||
}
|
||||
|
||||
Library library = libraries.get(libraryId);
|
||||
if (library == null) {
|
||||
throw new IllegalArgumentException("Library not found: " + libraryId);
|
||||
}
|
||||
|
||||
String previousLibraryId = currentLibraryId;
|
||||
|
||||
if (libraryId.equals(currentLibraryId) && forceReindex) {
|
||||
logger.info("Forcing reindex for current library: {} ({})", library.getName(), libraryId);
|
||||
} else {
|
||||
logger.info("Switching to library: {} ({})", library.getName(), libraryId);
|
||||
}
|
||||
|
||||
// Close current resources
|
||||
closeCurrentResources();
|
||||
|
||||
// Set new active library (datasource routing handled by SmartRoutingDataSource)
|
||||
currentLibraryId = libraryId;
|
||||
// OpenSearch indexes are global - no per-library initialization needed
|
||||
logger.info("Library switched to OpenSearch mode for library: {}", libraryId);
|
||||
|
||||
logger.info("Successfully switched to library: {}", library.getName());
|
||||
|
||||
// Perform complete reindex AFTER library switch is fully complete
|
||||
// This ensures database routing is properly established
|
||||
if (forceReindex || !libraryId.equals(previousLibraryId)) {
|
||||
logger.info("Starting post-switch OpenSearch reindex for library: {}", libraryId);
|
||||
|
||||
// Run reindex asynchronously to avoid blocking authentication response
|
||||
// and allow time for database routing to fully stabilize
|
||||
String finalLibraryId = libraryId;
|
||||
new Thread(() -> {
|
||||
try {
|
||||
// Give routing time to stabilize
|
||||
Thread.sleep(500);
|
||||
logger.info("Starting async OpenSearch reindex for library: {}", finalLibraryId);
|
||||
|
||||
SearchServiceAdapter searchService = applicationContext.getBean(SearchServiceAdapter.class);
|
||||
// Get all stories and authors for reindexing
|
||||
StoryService storyService = applicationContext.getBean(StoryService.class);
|
||||
AuthorService authorService = applicationContext.getBean(AuthorService.class);
|
||||
|
||||
var allStories = storyService.findAllWithAssociations();
|
||||
var allAuthors = authorService.findAllWithStories();
|
||||
|
||||
searchService.bulkIndexStories(allStories);
|
||||
searchService.bulkIndexAuthors(allAuthors);
|
||||
|
||||
logger.info("Completed async OpenSearch reindexing for library: {} ({} stories, {} authors)",
|
||||
finalLibraryId, allStories.size(), allAuthors.size());
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to async reindex OpenSearch for library {}: {}", finalLibraryId, e.getMessage());
|
||||
}
|
||||
}, "OpenSearchReindex-" + libraryId).start();
|
||||
}
|
||||
}
|
||||
|
||||
public DataSource getCurrentDataSource() {
|
||||
if (currentLibraryId == null) {
|
||||
throw new IllegalStateException("No active library - please authenticate first");
|
||||
}
|
||||
// Return the Spring-managed primary datasource which handles routing automatically
|
||||
try {
|
||||
return applicationContext.getBean("dataSource", DataSource.class);
|
||||
} catch (Exception e) {
|
||||
throw new IllegalStateException("Failed to get routing datasource", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public String getCurrentLibraryId() {
|
||||
return currentLibraryId;
|
||||
}
|
||||
|
||||
public Library getCurrentLibrary() {
|
||||
if (currentLibraryId == null) {
|
||||
return null;
|
||||
}
|
||||
return libraries.get(currentLibraryId);
|
||||
}
|
||||
|
||||
public List<LibraryDto> getAllLibraries() {
|
||||
List<LibraryDto> result = new ArrayList<>();
|
||||
for (Library library : libraries.values()) {
|
||||
boolean isActive = library.getId().equals(currentLibraryId);
|
||||
result.add(new LibraryDto(
|
||||
library.getId(),
|
||||
library.getName(),
|
||||
library.getDescription(),
|
||||
isActive,
|
||||
library.isInitialized()
|
||||
));
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
public LibraryDto getLibraryById(String libraryId) {
|
||||
Library library = libraries.get(libraryId);
|
||||
if (library != null) {
|
||||
boolean isActive = library.getId().equals(currentLibraryId);
|
||||
return new LibraryDto(
|
||||
library.getId(),
|
||||
library.getName(),
|
||||
library.getDescription(),
|
||||
isActive,
|
||||
library.isInitialized()
|
||||
);
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
public String getCurrentImagePath() {
|
||||
Library current = getCurrentLibrary();
|
||||
return current != null ? current.getImagePath() : "/images/default";
|
||||
}
|
||||
|
||||
public String getImagePathForLibrary(String libraryId) {
|
||||
if (libraryId == null) {
|
||||
return "/images/default";
|
||||
}
|
||||
|
||||
Library library = libraries.get(libraryId);
|
||||
return library != null ? library.getImagePath() : "/images/default";
|
||||
}
|
||||
|
||||
public boolean changeLibraryPassword(String libraryId, String currentPassword, String newPassword) {
|
||||
Library library = libraries.get(libraryId);
|
||||
if (library == null) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Verify current password
|
||||
if (!passwordEncoder.matches(currentPassword, library.getPasswordHash())) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Update password
|
||||
library.setPasswordHash(passwordEncoder.encode(newPassword));
|
||||
saveLibrariesToFile();
|
||||
|
||||
logger.info("Password changed for library: {}", library.getName());
|
||||
return true;
|
||||
}
|
||||
|
||||
public Library createNewLibrary(String name, String description, String password) {
|
||||
// Generate unique ID
|
||||
String id = name.toLowerCase().replaceAll("[^a-z0-9]", "");
|
||||
int counter = 1;
|
||||
String originalId = id;
|
||||
while (libraries.containsKey(id)) {
|
||||
id = originalId + counter++;
|
||||
}
|
||||
|
||||
Library newLibrary = new Library(
|
||||
id,
|
||||
name,
|
||||
description,
|
||||
passwordEncoder.encode(password),
|
||||
"storycove_" + id
|
||||
);
|
||||
|
||||
try {
|
||||
// Test database creation by creating a connection
|
||||
DataSource testDs = createDataSource(newLibrary.getDbName());
|
||||
testDs.getConnection().close(); // This will create the database and schema if it doesn't exist
|
||||
|
||||
// Initialize library resources (image directories)
|
||||
initializeNewLibraryResources(id);
|
||||
|
||||
newLibrary.setInitialized(true);
|
||||
logger.info("Database and resources created for library: {}", newLibrary.getDbName());
|
||||
} catch (Exception e) {
|
||||
logger.warn("Database/resource creation failed for library {}: {}", id, e.getMessage());
|
||||
// Continue anyway - resources will be created when needed
|
||||
}
|
||||
|
||||
libraries.put(id, newLibrary);
|
||||
saveLibrariesToFile();
|
||||
|
||||
logger.info("Created new library: {} ({})", name, id);
|
||||
return newLibrary;
|
||||
}
|
||||
|
||||
private void loadLibrariesFromFile() {
|
||||
try {
|
||||
File configFile = new File(LIBRARIES_CONFIG_PATH);
|
||||
if (configFile.exists()) {
|
||||
String content = Files.readString(Paths.get(LIBRARIES_CONFIG_PATH));
|
||||
Map<String, Object> config = objectMapper.readValue(content, new TypeReference<Map<String, Object>>() {});
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
Map<String, Map<String, Object>> librariesData = (Map<String, Map<String, Object>>) config.get("libraries");
|
||||
|
||||
for (Map.Entry<String, Map<String, Object>> entry : librariesData.entrySet()) {
|
||||
String id = entry.getKey();
|
||||
Map<String, Object> data = entry.getValue();
|
||||
|
||||
Library library = new Library();
|
||||
library.setId(id);
|
||||
library.setName((String) data.get("name"));
|
||||
library.setDescription((String) data.get("description"));
|
||||
library.setPasswordHash((String) data.get("passwordHash"));
|
||||
library.setDbName((String) data.get("dbName"));
|
||||
library.setInitialized((Boolean) data.getOrDefault("initialized", false));
|
||||
|
||||
libraries.put(id, library);
|
||||
logger.info("Loaded library: {} ({})", library.getName(), id);
|
||||
}
|
||||
} else {
|
||||
logger.info("No libraries configuration file found, will create default");
|
||||
}
|
||||
} catch (IOException e) {
|
||||
logger.error("Failed to load libraries configuration", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void createDefaultLibrary() {
|
||||
// Check if we're migrating from the old single-library system
|
||||
String existingDbName = extractDatabaseName(baseDbUrl);
|
||||
|
||||
Library defaultLibrary = new Library(
|
||||
"main",
|
||||
"Main Library",
|
||||
"Your existing story collection (migrated)",
|
||||
passwordEncoder.encode("temp-password-change-me"), // Temporary password
|
||||
existingDbName // Use existing database name
|
||||
);
|
||||
defaultLibrary.setInitialized(true); // Mark as initialized since it has existing data
|
||||
|
||||
libraries.put("main", defaultLibrary);
|
||||
saveLibrariesToFile();
|
||||
|
||||
logger.warn("=".repeat(80));
|
||||
logger.warn("MIGRATION: Created 'Main Library' for your existing data");
|
||||
logger.warn("Temporary password: 'temp-password-change-me'");
|
||||
logger.warn("IMPORTANT: Please set a proper password in Settings > Library Settings");
|
||||
logger.warn("=".repeat(80));
|
||||
}
|
||||
|
||||
private String extractDatabaseName(String jdbcUrl) {
|
||||
// Extract database name from JDBC URL like "jdbc:postgresql://db:5432/storycove"
|
||||
int lastSlash = jdbcUrl.lastIndexOf('/');
|
||||
if (lastSlash != -1 && lastSlash < jdbcUrl.length() - 1) {
|
||||
String dbPart = jdbcUrl.substring(lastSlash + 1);
|
||||
// Remove any query parameters
|
||||
int queryStart = dbPart.indexOf('?');
|
||||
return queryStart != -1 ? dbPart.substring(0, queryStart) : dbPart;
|
||||
}
|
||||
return "storycove"; // fallback
|
||||
}
|
||||
|
||||
private void saveLibrariesToFile() {
|
||||
try {
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
Map<String, Map<String, Object>> librariesData = new HashMap<>();
|
||||
|
||||
for (Library library : libraries.values()) {
|
||||
Map<String, Object> data = new HashMap<>();
|
||||
data.put("name", library.getName());
|
||||
data.put("description", library.getDescription());
|
||||
data.put("passwordHash", library.getPasswordHash());
|
||||
data.put("dbName", library.getDbName());
|
||||
data.put("initialized", library.isInitialized());
|
||||
|
||||
librariesData.put(library.getId(), data);
|
||||
}
|
||||
|
||||
config.put("libraries", librariesData);
|
||||
|
||||
// Ensure config directory exists
|
||||
new File("/app/config").mkdirs();
|
||||
|
||||
String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
|
||||
Files.writeString(Paths.get(LIBRARIES_CONFIG_PATH), json);
|
||||
|
||||
logger.info("Saved libraries configuration");
|
||||
} catch (IOException e) {
|
||||
logger.error("Failed to save libraries configuration", e);
|
||||
}
|
||||
}
|
||||
|
||||
private DataSource createDataSource(String dbName) {
|
||||
String url = baseDbUrl.replaceAll("/[^/]*$", "/" + dbName);
|
||||
logger.info("Creating DataSource for: {}", url);
|
||||
|
||||
// First, ensure the database exists
|
||||
ensureDatabaseExists(dbName);
|
||||
|
||||
HikariConfig config = new HikariConfig();
|
||||
config.setJdbcUrl(url);
|
||||
config.setUsername(dbUsername);
|
||||
config.setPassword(dbPassword);
|
||||
config.setDriverClassName("org.postgresql.Driver");
|
||||
config.setMaximumPoolSize(10);
|
||||
config.setConnectionTimeout(30000);
|
||||
|
||||
return new HikariDataSource(config);
|
||||
}
|
||||
|
||||
private void ensureDatabaseExists(String dbName) {
|
||||
// Connect to the 'postgres' database to create the new database
|
||||
String adminUrl = baseDbUrl.replaceAll("/[^/]*$", "/postgres");
|
||||
|
||||
HikariConfig adminConfig = new HikariConfig();
|
||||
adminConfig.setJdbcUrl(adminUrl);
|
||||
adminConfig.setUsername(dbUsername);
|
||||
adminConfig.setPassword(dbPassword);
|
||||
adminConfig.setDriverClassName("org.postgresql.Driver");
|
||||
adminConfig.setMaximumPoolSize(1);
|
||||
adminConfig.setConnectionTimeout(30000);
|
||||
|
||||
boolean databaseCreated = false;
|
||||
|
||||
try (HikariDataSource adminDataSource = new HikariDataSource(adminConfig);
|
||||
var connection = adminDataSource.getConnection();
|
||||
var statement = connection.createStatement()) {
|
||||
|
||||
// Check if database exists
|
||||
String checkQuery = "SELECT 1 FROM pg_database WHERE datname = ?";
|
||||
try (var preparedStatement = connection.prepareStatement(checkQuery)) {
|
||||
preparedStatement.setString(1, dbName);
|
||||
try (var resultSet = preparedStatement.executeQuery()) {
|
||||
if (resultSet.next()) {
|
||||
logger.info("Database {} already exists", dbName);
|
||||
return; // Database exists, nothing to do
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Create database if it doesn't exist
|
||||
// Note: Database names cannot be parameterized, but we validate the name is safe
|
||||
if (!dbName.matches("^[a-zA-Z][a-zA-Z0-9_]*$")) {
|
||||
throw new IllegalArgumentException("Invalid database name: " + dbName);
|
||||
}
|
||||
|
||||
String createQuery = "CREATE DATABASE " + dbName;
|
||||
statement.executeUpdate(createQuery);
|
||||
logger.info("Created database: {}", dbName);
|
||||
databaseCreated = true;
|
||||
|
||||
} catch (SQLException e) {
|
||||
logger.error("Failed to ensure database {} exists: {}", dbName, e.getMessage());
|
||||
throw new RuntimeException("Database creation failed", e);
|
||||
}
|
||||
|
||||
// If we just created the database, initialize its schema
|
||||
if (databaseCreated) {
|
||||
initializeNewDatabaseSchema(dbName);
|
||||
}
|
||||
}
|
||||
|
||||
private void initializeNewDatabaseSchema(String dbName) {
|
||||
logger.info("Initializing schema for new database: {}", dbName);
|
||||
|
||||
// Create a temporary DataSource for the new database to initialize schema
|
||||
String newDbUrl = baseDbUrl.replaceAll("/[^/]*$", "/" + dbName);
|
||||
|
||||
HikariConfig config = new HikariConfig();
|
||||
config.setJdbcUrl(newDbUrl);
|
||||
config.setUsername(dbUsername);
|
||||
config.setPassword(dbPassword);
|
||||
config.setDriverClassName("org.postgresql.Driver");
|
||||
config.setMaximumPoolSize(1);
|
||||
config.setConnectionTimeout(30000);
|
||||
|
||||
try (HikariDataSource tempDataSource = new HikariDataSource(config)) {
|
||||
// Use Hibernate to create the schema
|
||||
// This mimics what Spring Boot does during startup
|
||||
createSchemaUsingHibernate(tempDataSource);
|
||||
logger.info("Schema initialized for database: {}", dbName);
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to initialize schema for database {}: {}", dbName, e.getMessage());
|
||||
throw new RuntimeException("Schema initialization failed", e);
|
||||
}
|
||||
}
|
||||
|
||||
public void initializeNewLibraryResources(String libraryId) {
|
||||
Library library = libraries.get(libraryId);
|
||||
if (library == null) {
|
||||
throw new IllegalArgumentException("Library not found: " + libraryId);
|
||||
}
|
||||
|
||||
try {
|
||||
logger.info("Initializing resources for new library: {}", library.getName());
|
||||
|
||||
// 1. Create image directory structure
|
||||
initializeImageDirectories(library);
|
||||
|
||||
// 2. OpenSearch indexes are global and managed automatically
|
||||
// No per-library initialization needed for OpenSearch
|
||||
|
||||
logger.info("Successfully initialized resources for library: {}", library.getName());
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to initialize resources for library {}: {}", libraryId, e.getMessage());
|
||||
throw new RuntimeException("Library resource initialization failed", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void initializeImageDirectories(Library library) {
|
||||
try {
|
||||
// Create the library-specific image directory
|
||||
String imagePath = "/app/images/" + library.getId();
|
||||
java.nio.file.Path libraryImagePath = java.nio.file.Paths.get(imagePath);
|
||||
|
||||
if (!java.nio.file.Files.exists(libraryImagePath)) {
|
||||
java.nio.file.Files.createDirectories(libraryImagePath);
|
||||
logger.info("Created image directory: {}", imagePath);
|
||||
|
||||
// Create subdirectories for different image types
|
||||
java.nio.file.Files.createDirectories(libraryImagePath.resolve("stories"));
|
||||
java.nio.file.Files.createDirectories(libraryImagePath.resolve("authors"));
|
||||
java.nio.file.Files.createDirectories(libraryImagePath.resolve("collections"));
|
||||
|
||||
logger.info("Created image subdirectories for library: {}", library.getId());
|
||||
} else {
|
||||
logger.info("Image directory already exists: {}", imagePath);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to create image directories for library {}: {}", library.getId(), e.getMessage());
|
||||
throw new RuntimeException("Image directory creation failed", e);
|
||||
}
|
||||
}
|
||||
|
||||
private void createSchemaUsingHibernate(DataSource dataSource) {
|
||||
// Create the essential tables manually using the same DDL that Hibernate would generate
|
||||
// This is simpler than setting up a full Hibernate configuration for schema creation
|
||||
|
||||
String[] createTableStatements = {
|
||||
// Authors table
|
||||
"""
|
||||
CREATE TABLE authors (
|
||||
author_rating integer,
|
||||
created_at timestamp(6) not null,
|
||||
updated_at timestamp(6) not null,
|
||||
id uuid not null,
|
||||
avatar_image_path varchar(255),
|
||||
name varchar(255) not null,
|
||||
notes TEXT,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Author URLs table
|
||||
"""
|
||||
CREATE TABLE author_urls (
|
||||
author_id uuid not null,
|
||||
url varchar(255)
|
||||
)
|
||||
""",
|
||||
|
||||
// Series table
|
||||
"""
|
||||
CREATE TABLE series (
|
||||
created_at timestamp(6) not null,
|
||||
id uuid not null,
|
||||
description varchar(1000),
|
||||
name varchar(255) not null,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Tags table
|
||||
"""
|
||||
CREATE TABLE tags (
|
||||
color varchar(7),
|
||||
created_at timestamp(6) not null,
|
||||
id uuid not null,
|
||||
description varchar(500),
|
||||
name varchar(255) not null unique,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Tag aliases table
|
||||
"""
|
||||
CREATE TABLE tag_aliases (
|
||||
created_from_merge boolean not null,
|
||||
created_at timestamp(6) not null,
|
||||
canonical_tag_id uuid not null,
|
||||
id uuid not null,
|
||||
alias_name varchar(255) not null unique,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Collections table
|
||||
"""
|
||||
CREATE TABLE collections (
|
||||
is_archived boolean not null,
|
||||
rating integer,
|
||||
created_at timestamp(6) not null,
|
||||
updated_at timestamp(6) not null,
|
||||
id uuid not null,
|
||||
cover_image_path varchar(500),
|
||||
name varchar(500) not null,
|
||||
description TEXT,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Stories table
|
||||
"""
|
||||
CREATE TABLE stories (
|
||||
is_read boolean,
|
||||
rating integer,
|
||||
reading_position integer,
|
||||
volume integer,
|
||||
word_count integer,
|
||||
created_at timestamp(6) not null,
|
||||
last_read_at timestamp(6),
|
||||
updated_at timestamp(6) not null,
|
||||
author_id uuid,
|
||||
id uuid not null,
|
||||
series_id uuid,
|
||||
description varchar(1000),
|
||||
content_html TEXT,
|
||||
content_plain TEXT,
|
||||
cover_path varchar(255),
|
||||
source_url varchar(255),
|
||||
summary TEXT,
|
||||
title varchar(255) not null,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Reading positions table
|
||||
"""
|
||||
CREATE TABLE reading_positions (
|
||||
chapter_index integer,
|
||||
character_position integer,
|
||||
percentage_complete float(53),
|
||||
word_position integer,
|
||||
created_at timestamp(6) not null,
|
||||
updated_at timestamp(6) not null,
|
||||
id uuid not null,
|
||||
story_id uuid not null,
|
||||
context_after varchar(500),
|
||||
context_before varchar(500),
|
||||
chapter_title varchar(255),
|
||||
epub_cfi TEXT,
|
||||
primary key (id)
|
||||
)
|
||||
""",
|
||||
|
||||
// Junction tables
|
||||
"""
|
||||
CREATE TABLE story_tags (
|
||||
story_id uuid not null,
|
||||
tag_id uuid not null,
|
||||
primary key (story_id, tag_id)
|
||||
)
|
||||
""",
|
||||
|
||||
"""
|
||||
CREATE TABLE collection_stories (
|
||||
position integer not null,
|
||||
added_at timestamp(6) not null,
|
||||
collection_id uuid not null,
|
||||
story_id uuid not null,
|
||||
primary key (collection_id, story_id),
|
||||
unique (collection_id, position)
|
||||
)
|
||||
""",
|
||||
|
||||
"""
|
||||
CREATE TABLE collection_tags (
|
||||
collection_id uuid not null,
|
||||
tag_id uuid not null,
|
||||
primary key (collection_id, tag_id)
|
||||
)
|
||||
"""
|
||||
};
|
||||
|
||||
String[] createIndexStatements = {
|
||||
"CREATE INDEX idx_reading_position_story ON reading_positions (story_id)"
|
||||
};
|
||||
|
||||
String[] createConstraintStatements = {
|
||||
// Foreign key constraints
|
||||
"ALTER TABLE author_urls ADD CONSTRAINT FKdqhp51m0uveybsts098gd79uo FOREIGN KEY (author_id) REFERENCES authors",
|
||||
"ALTER TABLE stories ADD CONSTRAINT FKhwecpqeaxy40ftrctef1u7gw7 FOREIGN KEY (author_id) REFERENCES authors",
|
||||
"ALTER TABLE stories ADD CONSTRAINT FK1kulyvy7wwcolp2gkndt57cp7 FOREIGN KEY (series_id) REFERENCES series",
|
||||
"ALTER TABLE reading_positions ADD CONSTRAINT FKglfhdhflan3pgyr2u0gxi21i5 FOREIGN KEY (story_id) REFERENCES stories",
|
||||
"ALTER TABLE story_tags ADD CONSTRAINT FKmans33ijt0nf65t0sng2r848j FOREIGN KEY (tag_id) REFERENCES tags",
|
||||
"ALTER TABLE story_tags ADD CONSTRAINT FKq9guid7swnjxwdpgxj3jo1rsi FOREIGN KEY (story_id) REFERENCES stories",
|
||||
"ALTER TABLE tag_aliases ADD CONSTRAINT FKqfsawmcj3ey4yycb6958y24ch FOREIGN KEY (canonical_tag_id) REFERENCES tags",
|
||||
"ALTER TABLE collection_stories ADD CONSTRAINT FKr55ho4vhj0wp03x13iskr1jds FOREIGN KEY (collection_id) REFERENCES collections",
|
||||
"ALTER TABLE collection_stories ADD CONSTRAINT FK7n41tbbrt7r2e81hpu3612r1o FOREIGN KEY (story_id) REFERENCES stories",
|
||||
"ALTER TABLE collection_tags ADD CONSTRAINT FKceq7ggev8n8ibjui1x5yo4x67 FOREIGN KEY (tag_id) REFERENCES tags",
|
||||
"ALTER TABLE collection_tags ADD CONSTRAINT FKq9sa5s8csdpbphrvb48tts8jt FOREIGN KEY (collection_id) REFERENCES collections"
|
||||
};
|
||||
|
||||
try (var connection = dataSource.getConnection();
|
||||
var statement = connection.createStatement()) {
|
||||
|
||||
// Create tables
|
||||
for (String sql : createTableStatements) {
|
||||
statement.executeUpdate(sql);
|
||||
}
|
||||
|
||||
// Create indexes
|
||||
for (String sql : createIndexStatements) {
|
||||
statement.executeUpdate(sql);
|
||||
}
|
||||
|
||||
// Create constraints
|
||||
for (String sql : createConstraintStatements) {
|
||||
statement.executeUpdate(sql);
|
||||
}
|
||||
|
||||
logger.info("Successfully created all database tables and constraints");
|
||||
|
||||
} catch (SQLException e) {
|
||||
logger.error("Failed to create database schema", e);
|
||||
throw new RuntimeException("Schema creation failed", e);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void closeCurrentResources() {
|
||||
// No need to close datasource - SmartRoutingDataSource handles this
|
||||
// OpenSearch service is managed by Spring - no explicit cleanup needed
|
||||
// Don't clear currentLibraryId here - only when explicitly switching
|
||||
}
|
||||
|
||||
/**
|
||||
* Update library metadata (name and description)
|
||||
*/
|
||||
public synchronized void updateLibraryMetadata(String libraryId, String newName, String newDescription) throws Exception {
|
||||
if (libraryId == null || libraryId.trim().isEmpty()) {
|
||||
throw new IllegalArgumentException("Library ID cannot be null or empty");
|
||||
}
|
||||
|
||||
Library library = libraries.get(libraryId);
|
||||
if (library == null) {
|
||||
throw new IllegalArgumentException("Library not found: " + libraryId);
|
||||
}
|
||||
|
||||
// Validate new name
|
||||
if (newName == null || newName.trim().isEmpty()) {
|
||||
throw new IllegalArgumentException("Library name cannot be null or empty");
|
||||
}
|
||||
|
||||
String oldName = library.getName();
|
||||
String oldDescription = library.getDescription();
|
||||
|
||||
// Update the library object
|
||||
library.setName(newName.trim());
|
||||
library.setDescription(newDescription != null ? newDescription.trim() : "");
|
||||
|
||||
try {
|
||||
// Save to configuration file
|
||||
saveLibraryConfiguration(library);
|
||||
|
||||
logger.info("Updated library metadata - ID: {}, Name: '{}' -> '{}', Description: '{}' -> '{}'",
|
||||
libraryId, oldName, newName, oldDescription, library.getDescription());
|
||||
|
||||
} catch (Exception e) {
|
||||
// Rollback changes on failure
|
||||
library.setName(oldName);
|
||||
library.setDescription(oldDescription);
|
||||
throw new RuntimeException("Failed to update library metadata: " + e.getMessage(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Save library configuration to file
|
||||
*/
|
||||
private void saveLibraryConfiguration(Library library) throws Exception {
|
||||
Path libraryConfigPath = libraryConfigDir.resolve(library.getId() + ".json");
|
||||
|
||||
// Create library configuration object
|
||||
Map<String, Object> config = new HashMap<>();
|
||||
config.put("id", library.getId());
|
||||
config.put("name", library.getName());
|
||||
config.put("description", library.getDescription());
|
||||
config.put("passwordHash", library.getPasswordHash());
|
||||
config.put("dbName", library.getDbName());
|
||||
config.put("imagePath", library.getImagePath());
|
||||
config.put("initialized", library.isInitialized());
|
||||
|
||||
// Write to file
|
||||
ObjectMapper mapper = new ObjectMapper();
|
||||
String configJson = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
|
||||
Files.writeString(libraryConfigPath, configJson, StandardCharsets.UTF_8);
|
||||
|
||||
logger.debug("Saved library configuration to: {}", libraryConfigPath);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,133 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.config.OpenSearchProperties;
|
||||
import org.opensearch.client.opensearch.OpenSearchClient;
|
||||
import org.opensearch.client.opensearch.cluster.HealthRequest;
|
||||
import org.opensearch.client.opensearch.cluster.HealthResponse;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.actuate.health.Health;
|
||||
import org.springframework.boot.actuate.health.HealthIndicator;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
|
||||
import org.springframework.scheduling.annotation.Scheduled;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.concurrent.atomic.AtomicReference;
|
||||
|
||||
@Service
|
||||
@ConditionalOnProperty(name = "storycove.search.engine", havingValue = "opensearch")
|
||||
public class OpenSearchHealthService implements HealthIndicator {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(OpenSearchHealthService.class);
|
||||
|
||||
private final OpenSearchClient openSearchClient;
|
||||
private final OpenSearchProperties properties;
|
||||
|
||||
private final AtomicReference<Health> lastKnownHealth = new AtomicReference<>(Health.unknown().build());
|
||||
private LocalDateTime lastCheckTime = LocalDateTime.now();
|
||||
|
||||
@Autowired
|
||||
public OpenSearchHealthService(OpenSearchClient openSearchClient, OpenSearchProperties properties) {
|
||||
this.openSearchClient = openSearchClient;
|
||||
this.properties = properties;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Health health() {
|
||||
return lastKnownHealth.get();
|
||||
}
|
||||
|
||||
@Scheduled(fixedDelayString = "#{@openSearchProperties.health.checkInterval}")
|
||||
public void performHealthCheck() {
|
||||
try {
|
||||
HealthResponse clusterHealth = openSearchClient.cluster().health(
|
||||
HealthRequest.of(h -> h.timeout(t -> t.time("10s")))
|
||||
);
|
||||
|
||||
Health.Builder healthBuilder = Health.up()
|
||||
.withDetail("cluster_name", clusterHealth.clusterName())
|
||||
.withDetail("status", clusterHealth.status().jsonValue())
|
||||
.withDetail("number_of_nodes", clusterHealth.numberOfNodes())
|
||||
.withDetail("number_of_data_nodes", clusterHealth.numberOfDataNodes())
|
||||
.withDetail("active_primary_shards", clusterHealth.activePrimaryShards())
|
||||
.withDetail("active_shards", clusterHealth.activeShards())
|
||||
.withDetail("relocating_shards", clusterHealth.relocatingShards())
|
||||
.withDetail("initializing_shards", clusterHealth.initializingShards())
|
||||
.withDetail("unassigned_shards", clusterHealth.unassignedShards())
|
||||
.withDetail("last_check", LocalDateTime.now());
|
||||
|
||||
// Check if cluster status is concerning
|
||||
switch (clusterHealth.status()) {
|
||||
case Red:
|
||||
healthBuilder = Health.down()
|
||||
.withDetail("reason", "Cluster status is RED - some primary shards are unassigned");
|
||||
break;
|
||||
case Yellow:
|
||||
if (isProduction()) {
|
||||
healthBuilder = Health.down()
|
||||
.withDetail("reason", "Cluster status is YELLOW - some replica shards are unassigned (critical in production)");
|
||||
} else {
|
||||
// Yellow is acceptable in development (single node clusters)
|
||||
healthBuilder.withDetail("warning", "Cluster status is YELLOW - acceptable for development");
|
||||
}
|
||||
break;
|
||||
case Green:
|
||||
// All good
|
||||
break;
|
||||
}
|
||||
|
||||
lastKnownHealth.set(healthBuilder.build());
|
||||
lastCheckTime = LocalDateTime.now();
|
||||
|
||||
if (properties.getHealth().isEnableMetrics()) {
|
||||
logMetrics(clusterHealth);
|
||||
}
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("OpenSearch health check failed", e);
|
||||
Health unhealthyStatus = Health.down()
|
||||
.withDetail("error", e.getMessage())
|
||||
.withDetail("last_successful_check", lastCheckTime)
|
||||
.withDetail("current_time", LocalDateTime.now())
|
||||
.build();
|
||||
lastKnownHealth.set(unhealthyStatus);
|
||||
}
|
||||
}
|
||||
|
||||
private void logMetrics(HealthResponse clusterHealth) {
|
||||
logger.info("OpenSearch Cluster Metrics - Status: {}, Nodes: {}, Active Shards: {}, Unassigned: {}",
|
||||
clusterHealth.status().jsonValue(),
|
||||
clusterHealth.numberOfNodes(),
|
||||
clusterHealth.activeShards(),
|
||||
clusterHealth.unassignedShards());
|
||||
}
|
||||
|
||||
private boolean isProduction() {
|
||||
return "production".equalsIgnoreCase(properties.getProfile());
|
||||
}
|
||||
|
||||
/**
|
||||
* Manual health check for immediate status
|
||||
*/
|
||||
public boolean isClusterHealthy() {
|
||||
Health currentHealth = lastKnownHealth.get();
|
||||
return currentHealth.getStatus() == org.springframework.boot.actuate.health.Status.UP;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get detailed cluster information
|
||||
*/
|
||||
public String getClusterInfo() {
|
||||
try {
|
||||
var info = openSearchClient.info();
|
||||
return String.format("OpenSearch %s (Cluster: %s, Lucene: %s)",
|
||||
info.version().number(),
|
||||
info.clusterName(),
|
||||
info.version().luceneVersion());
|
||||
} catch (Exception e) {
|
||||
return "Unable to retrieve cluster information: " + e.getMessage();
|
||||
}
|
||||
}
|
||||
}
|
||||
1077
backend/src/main/java/com/storycove/service/OpenSearchService.java
Normal file
1077
backend/src/main/java/com/storycove/service/OpenSearchService.java
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,36 +1,83 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import com.storycove.util.JwtUtil;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.security.crypto.password.PasswordEncoder;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Service
|
||||
public class PasswordAuthenticationService {
|
||||
|
||||
@Value("${storycove.auth.password}")
|
||||
private String applicationPassword;
|
||||
private static final Logger logger = LoggerFactory.getLogger(PasswordAuthenticationService.class);
|
||||
|
||||
private final PasswordEncoder passwordEncoder;
|
||||
private final LibraryService libraryService;
|
||||
private final JwtUtil jwtUtil;
|
||||
|
||||
public PasswordAuthenticationService(PasswordEncoder passwordEncoder) {
|
||||
@Autowired
|
||||
public PasswordAuthenticationService(
|
||||
PasswordEncoder passwordEncoder,
|
||||
LibraryService libraryService,
|
||||
JwtUtil jwtUtil) {
|
||||
this.passwordEncoder = passwordEncoder;
|
||||
this.libraryService = libraryService;
|
||||
this.jwtUtil = jwtUtil;
|
||||
}
|
||||
|
||||
public boolean authenticate(String providedPassword) {
|
||||
/**
|
||||
* Authenticate user and switch to the appropriate library
|
||||
* Returns JWT token if authentication successful, null otherwise
|
||||
*/
|
||||
public String authenticateAndSwitchLibrary(String providedPassword) {
|
||||
if (providedPassword == null || providedPassword.trim().isEmpty()) {
|
||||
return false;
|
||||
return null;
|
||||
}
|
||||
|
||||
// If application password starts with {bcrypt}, it's already encoded
|
||||
if (applicationPassword.startsWith("{bcrypt}") || applicationPassword.startsWith("$2")) {
|
||||
return passwordEncoder.matches(providedPassword, applicationPassword);
|
||||
// Find which library this password belongs to
|
||||
String libraryId = libraryService.authenticateAndGetLibrary(providedPassword);
|
||||
if (libraryId == null) {
|
||||
logger.warn("Authentication failed - invalid password");
|
||||
return null;
|
||||
}
|
||||
|
||||
// Otherwise, compare directly (for development/testing)
|
||||
return applicationPassword.equals(providedPassword);
|
||||
try {
|
||||
// Switch to the authenticated library with forced reindexing (may take 2-3 seconds)
|
||||
libraryService.switchToLibraryAfterAuthentication(libraryId);
|
||||
|
||||
// Generate JWT token with library context
|
||||
String token = jwtUtil.generateToken("user", libraryId);
|
||||
|
||||
logger.info("Successfully authenticated and switched to library: {}", libraryId);
|
||||
return token;
|
||||
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to switch to library: {}", libraryId, e);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Legacy method - kept for backward compatibility
|
||||
*/
|
||||
@Deprecated
|
||||
public boolean authenticate(String providedPassword) {
|
||||
return authenticateAndSwitchLibrary(providedPassword) != null;
|
||||
}
|
||||
|
||||
public String encodePassword(String rawPassword) {
|
||||
return passwordEncoder.encode(rawPassword);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current library info for authenticated user
|
||||
*/
|
||||
public String getCurrentLibraryInfo() {
|
||||
var library = libraryService.getCurrentLibrary();
|
||||
if (library != null) {
|
||||
return String.format("Library: %s (%s)", library.getName(), library.getId());
|
||||
}
|
||||
return "No library active";
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,28 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Service
|
||||
public class ReadingTimeService {
|
||||
|
||||
@Value("${app.reading.speed.default:200}")
|
||||
private int defaultWordsPerMinute;
|
||||
|
||||
/**
|
||||
* Calculate estimated reading time in minutes for the given word count
|
||||
* @param wordCount the number of words to read
|
||||
* @return estimated reading time in minutes (minimum 1 minute)
|
||||
*/
|
||||
public int calculateReadingTime(int wordCount) {
|
||||
return Math.max(1, wordCount / defaultWordsPerMinute);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current words per minute setting
|
||||
* @return words per minute reading speed
|
||||
*/
|
||||
public int getWordsPerMinute() {
|
||||
return defaultWordsPerMinute;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,278 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.AuthorSearchDto;
|
||||
import com.storycove.dto.SearchResultDto;
|
||||
import com.storycove.dto.StorySearchDto;
|
||||
import com.storycove.entity.Author;
|
||||
import com.storycove.entity.Story;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.UUID;
|
||||
|
||||
/**
|
||||
* Service adapter that provides a unified interface for search operations.
|
||||
*
|
||||
* This adapter directly delegates to OpenSearchService.
|
||||
*/
|
||||
@Service
|
||||
public class SearchServiceAdapter {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SearchServiceAdapter.class);
|
||||
|
||||
@Autowired
|
||||
private OpenSearchService openSearchService;
|
||||
|
||||
// ===============================
|
||||
// SEARCH OPERATIONS
|
||||
// ===============================
|
||||
|
||||
/**
|
||||
* Search stories with unified interface
|
||||
*/
|
||||
public SearchResultDto<StorySearchDto> searchStories(String query, List<String> tags, String author,
|
||||
String series, Integer minWordCount, Integer maxWordCount,
|
||||
Float minRating, Boolean isRead, Boolean isFavorite,
|
||||
String sortBy, String sortOrder, int page, int size,
|
||||
List<String> facetBy,
|
||||
// Advanced filters
|
||||
String createdAfter, String createdBefore,
|
||||
String lastReadAfter, String lastReadBefore,
|
||||
Boolean unratedOnly, String readingStatus,
|
||||
Boolean hasReadingProgress, Boolean hasCoverImage,
|
||||
String sourceDomain, String seriesFilter,
|
||||
Integer minTagCount, Boolean popularOnly,
|
||||
Boolean hiddenGemsOnly) {
|
||||
return openSearchService.searchStories(query, tags, author, series, minWordCount, maxWordCount,
|
||||
minRating, isRead, isFavorite, sortBy, sortOrder, page, size, facetBy,
|
||||
createdAfter, createdBefore, lastReadAfter, lastReadBefore, unratedOnly, readingStatus,
|
||||
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter, minTagCount, popularOnly,
|
||||
hiddenGemsOnly);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get random stories with unified interface
|
||||
*/
|
||||
public List<StorySearchDto> getRandomStories(int count, List<String> tags, String author,
|
||||
String series, Integer minWordCount, Integer maxWordCount,
|
||||
Float minRating, Boolean isRead, Boolean isFavorite,
|
||||
Long seed) {
|
||||
return openSearchService.getRandomStories(count, tags, author, series, minWordCount, maxWordCount,
|
||||
minRating, isRead, isFavorite, seed);
|
||||
}
|
||||
|
||||
/**
|
||||
* Recreate search indices
|
||||
*/
|
||||
public void recreateIndices() {
|
||||
try {
|
||||
openSearchService.recreateIndices();
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to recreate search indices", e);
|
||||
throw new RuntimeException("Failed to recreate search indices", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Perform complete reindex of all data
|
||||
*/
|
||||
public void performCompleteReindex() {
|
||||
try {
|
||||
recreateIndices();
|
||||
logger.info("Search indices recreated successfully");
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to perform complete reindex", e);
|
||||
throw new RuntimeException("Failed to perform complete reindex", e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get random story ID with unified interface
|
||||
*/
|
||||
public String getRandomStoryId(Long seed) {
|
||||
return openSearchService.getRandomStoryId(seed);
|
||||
}
|
||||
|
||||
/**
|
||||
* Search authors with unified interface
|
||||
*/
|
||||
public List<AuthorSearchDto> searchAuthors(String query, int limit) {
|
||||
return openSearchService.searchAuthors(query, limit);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get tag suggestions with unified interface
|
||||
*/
|
||||
public List<String> getTagSuggestions(String query, int limit) {
|
||||
return openSearchService.getTagSuggestions(query, limit);
|
||||
}
|
||||
|
||||
// ===============================
|
||||
// INDEX OPERATIONS
|
||||
// ===============================
|
||||
|
||||
/**
|
||||
* Index a story in OpenSearch
|
||||
*/
|
||||
public void indexStory(Story story) {
|
||||
try {
|
||||
openSearchService.indexStory(story);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to index story {}", story.getId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update a story in OpenSearch
|
||||
*/
|
||||
public void updateStory(Story story) {
|
||||
try {
|
||||
openSearchService.updateStory(story);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to update story {}", story.getId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete a story from OpenSearch
|
||||
*/
|
||||
public void deleteStory(UUID storyId) {
|
||||
try {
|
||||
openSearchService.deleteStory(storyId);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to delete story {}", storyId, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Index an author in OpenSearch
|
||||
*/
|
||||
public void indexAuthor(Author author) {
|
||||
try {
|
||||
openSearchService.indexAuthor(author);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to index author {}", author.getId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update an author in OpenSearch
|
||||
*/
|
||||
public void updateAuthor(Author author) {
|
||||
try {
|
||||
openSearchService.updateAuthor(author);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to update author {}", author.getId(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete an author from OpenSearch
|
||||
*/
|
||||
public void deleteAuthor(UUID authorId) {
|
||||
try {
|
||||
openSearchService.deleteAuthor(authorId);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to delete author {}", authorId, e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk index stories in OpenSearch
|
||||
*/
|
||||
public void bulkIndexStories(List<Story> stories) {
|
||||
try {
|
||||
openSearchService.bulkIndexStories(stories);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to bulk index {} stories", stories.size(), e);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bulk index authors in OpenSearch
|
||||
*/
|
||||
public void bulkIndexAuthors(List<Author> authors) {
|
||||
try {
|
||||
openSearchService.bulkIndexAuthors(authors);
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to bulk index {} authors", authors.size(), e);
|
||||
}
|
||||
}
|
||||
|
||||
// ===============================
|
||||
// UTILITY METHODS
|
||||
// ===============================
|
||||
|
||||
/**
|
||||
* Check if search service is available and healthy
|
||||
*/
|
||||
public boolean isSearchServiceAvailable() {
|
||||
return openSearchService.testConnection();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current search engine name
|
||||
*/
|
||||
public String getCurrentSearchEngine() {
|
||||
return "opensearch";
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if dual-write is enabled
|
||||
*/
|
||||
public boolean isDualWriteEnabled() {
|
||||
return false; // No longer supported
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if we can switch to OpenSearch
|
||||
*/
|
||||
public boolean canSwitchToOpenSearch() {
|
||||
return true; // Already using OpenSearch
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if we can switch to Typesense
|
||||
*/
|
||||
public boolean canSwitchToTypesense() {
|
||||
return false; // Typesense no longer available
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current search status for admin interface
|
||||
*/
|
||||
public SearchStatus getSearchStatus() {
|
||||
return new SearchStatus(
|
||||
"opensearch",
|
||||
false, // no dual-write
|
||||
false, // no typesense
|
||||
openSearchService.testConnection()
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* DTO for search status
|
||||
*/
|
||||
public static class SearchStatus {
|
||||
private final String primaryEngine;
|
||||
private final boolean dualWrite;
|
||||
private final boolean typesenseAvailable;
|
||||
private final boolean openSearchAvailable;
|
||||
|
||||
public SearchStatus(String primaryEngine, boolean dualWrite,
|
||||
boolean typesenseAvailable, boolean openSearchAvailable) {
|
||||
this.primaryEngine = primaryEngine;
|
||||
this.dualWrite = dualWrite;
|
||||
this.typesenseAvailable = typesenseAvailable;
|
||||
this.openSearchAvailable = openSearchAvailable;
|
||||
}
|
||||
|
||||
public String getPrimaryEngine() { return primaryEngine; }
|
||||
public boolean isDualWrite() { return dualWrite; }
|
||||
public boolean isTypesenseAvailable() { return typesenseAvailable; }
|
||||
public boolean isOpenSearchAvailable() { return openSearchAvailable; }
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,8 @@ import com.storycove.repository.SeriesRepository;
|
||||
import com.storycove.service.exception.DuplicateResourceException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -20,6 +22,8 @@ import java.util.UUID;
|
||||
@Validated
|
||||
@Transactional
|
||||
public class SeriesService {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(SeriesService.class);
|
||||
|
||||
private final SeriesRepository seriesRepository;
|
||||
|
||||
|
||||
@@ -4,13 +4,15 @@ import com.storycove.entity.Author;
|
||||
import com.storycove.entity.Series;
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.entity.Tag;
|
||||
import com.storycove.repository.ReadingPositionRepository;
|
||||
import com.storycove.repository.StoryRepository;
|
||||
import com.storycove.repository.TagRepository;
|
||||
import com.storycove.service.exception.DuplicateResourceException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
import org.springframework.stereotype.Service;
|
||||
@@ -18,40 +20,47 @@ import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
import java.util.stream.Collectors;
|
||||
|
||||
@Service
|
||||
@Validated
|
||||
@Transactional
|
||||
public class StoryService {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(StoryService.class);
|
||||
|
||||
private final StoryRepository storyRepository;
|
||||
private final TagRepository tagRepository;
|
||||
private final ReadingPositionRepository readingPositionRepository;
|
||||
private final AuthorService authorService;
|
||||
private final TagService tagService;
|
||||
private final SeriesService seriesService;
|
||||
private final HtmlSanitizationService sanitizationService;
|
||||
private final TypesenseService typesenseService;
|
||||
private final SearchServiceAdapter searchServiceAdapter;
|
||||
|
||||
@Autowired
|
||||
public StoryService(StoryRepository storyRepository,
|
||||
TagRepository tagRepository,
|
||||
ReadingPositionRepository readingPositionRepository,
|
||||
AuthorService authorService,
|
||||
TagService tagService,
|
||||
SeriesService seriesService,
|
||||
HtmlSanitizationService sanitizationService,
|
||||
@Autowired(required = false) TypesenseService typesenseService) {
|
||||
SearchServiceAdapter searchServiceAdapter) {
|
||||
this.storyRepository = storyRepository;
|
||||
this.tagRepository = tagRepository;
|
||||
this.readingPositionRepository = readingPositionRepository;
|
||||
this.authorService = authorService;
|
||||
this.tagService = tagService;
|
||||
this.seriesService = seriesService;
|
||||
this.sanitizationService = sanitizationService;
|
||||
this.typesenseService = typesenseService;
|
||||
this.searchServiceAdapter = searchServiceAdapter;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
@@ -74,11 +83,13 @@ public class StoryService {
|
||||
return storyRepository.findById(id)
|
||||
.orElseThrow(() -> new ResourceNotFoundException("Story", id.toString()));
|
||||
}
|
||||
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public Optional<Story> findByIdOptional(UUID id) {
|
||||
return storyRepository.findById(id);
|
||||
}
|
||||
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public Optional<Story> findByTitle(String title) {
|
||||
@@ -114,7 +125,7 @@ public class StoryService {
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Story> findBySeries(UUID seriesId) {
|
||||
Series series = seriesService.findById(seriesId);
|
||||
seriesService.findById(seriesId); // Validate series exists
|
||||
return storyRepository.findBySeriesOrderByVolume(seriesId);
|
||||
}
|
||||
|
||||
@@ -228,10 +239,8 @@ public class StoryService {
|
||||
story.addTag(tag);
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update Typesense index with new tag information
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(savedStory);
|
||||
}
|
||||
// Update search index with new tag information
|
||||
searchServiceAdapter.updateStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
@@ -245,10 +254,8 @@ public class StoryService {
|
||||
story.removeTag(tag);
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update Typesense index with updated tag information
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(savedStory);
|
||||
}
|
||||
// Update search index with updated tag information
|
||||
searchServiceAdapter.updateStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
@@ -263,11 +270,44 @@ public class StoryService {
|
||||
story.setRating(rating);
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update Typesense index with new rating
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(savedStory);
|
||||
// Update search index with new rating
|
||||
searchServiceAdapter.updateStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Story updateReadingProgress(UUID id, Integer position) {
|
||||
if (position != null && position < 0) {
|
||||
throw new IllegalArgumentException("Reading position must be non-negative");
|
||||
}
|
||||
|
||||
Story story = findById(id);
|
||||
story.updateReadingProgress(position);
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update search index with new reading progress
|
||||
searchServiceAdapter.updateStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Story updateReadingStatus(UUID id, Boolean isRead) {
|
||||
Story story = findById(id);
|
||||
|
||||
if (Boolean.TRUE.equals(isRead)) {
|
||||
story.markAsRead();
|
||||
} else {
|
||||
story.setIsRead(false);
|
||||
story.setLastReadAt(LocalDateTime.now());
|
||||
}
|
||||
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update search index with new reading status
|
||||
searchServiceAdapter.updateStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
|
||||
@@ -308,10 +348,8 @@ public class StoryService {
|
||||
updateStoryTags(savedStory, story.getTags());
|
||||
}
|
||||
|
||||
// Index in Typesense (if available)
|
||||
if (typesenseService != null) {
|
||||
typesenseService.indexStory(savedStory);
|
||||
}
|
||||
// Index in search engine
|
||||
searchServiceAdapter.indexStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
@@ -338,10 +376,8 @@ public class StoryService {
|
||||
updateStoryTagsByNames(savedStory, tagNames);
|
||||
}
|
||||
|
||||
// Index in Typesense (if available)
|
||||
if (typesenseService != null) {
|
||||
typesenseService.indexStory(savedStory);
|
||||
}
|
||||
// Index in search engine
|
||||
searchServiceAdapter.indexStory(savedStory);
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
@@ -359,10 +395,8 @@ public class StoryService {
|
||||
updateStoryFields(existingStory, storyUpdates);
|
||||
Story updatedStory = storyRepository.save(existingStory);
|
||||
|
||||
// Update in Typesense (if available)
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(updatedStory);
|
||||
}
|
||||
// Update in search engine
|
||||
searchServiceAdapter.updateStory(updatedStory);
|
||||
|
||||
return updatedStory;
|
||||
}
|
||||
@@ -382,10 +416,8 @@ public class StoryService {
|
||||
|
||||
Story updatedStory = storyRepository.save(existingStory);
|
||||
|
||||
// Update in Typesense (if available)
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(updatedStory);
|
||||
}
|
||||
// Update in search engine
|
||||
searchServiceAdapter.updateStory(updatedStory);
|
||||
|
||||
return updatedStory;
|
||||
}
|
||||
@@ -393,18 +425,20 @@ public class StoryService {
|
||||
public void delete(UUID id) {
|
||||
Story story = findById(id);
|
||||
|
||||
// Clean up reading positions first (to avoid foreign key constraint violations)
|
||||
readingPositionRepository.deleteByStoryId(id);
|
||||
|
||||
// Remove from series if part of one
|
||||
if (story.getSeries() != null) {
|
||||
story.getSeries().removeStory(story);
|
||||
}
|
||||
|
||||
// Remove tags (this will update tag usage counts)
|
||||
story.getTags().forEach(tag -> story.removeTag(tag));
|
||||
// Create a copy to avoid ConcurrentModificationException
|
||||
new ArrayList<>(story.getTags()).forEach(tag -> story.removeTag(tag));
|
||||
|
||||
// Delete from Typesense first (if available)
|
||||
if (typesenseService != null) {
|
||||
typesenseService.deleteStory(story.getId().toString());
|
||||
}
|
||||
// Delete from search engine first
|
||||
searchServiceAdapter.deleteStory(story.getId());
|
||||
|
||||
storyRepository.delete(story);
|
||||
}
|
||||
@@ -562,17 +596,33 @@ public class StoryService {
|
||||
if (updateReq.getVolume() != null) {
|
||||
story.setVolume(updateReq.getVolume());
|
||||
}
|
||||
// Handle author - either by ID or by name
|
||||
if (updateReq.getAuthorId() != null) {
|
||||
Author author = authorService.findById(updateReq.getAuthorId());
|
||||
story.setAuthor(author);
|
||||
}
|
||||
// Handle series - either by ID or by name
|
||||
if (updateReq.getSeriesId() != null) {
|
||||
Series series = seriesService.findById(updateReq.getSeriesId());
|
||||
story.setSeries(series);
|
||||
} else if (updateReq.getSeriesName() != null) {
|
||||
if (updateReq.getSeriesName().trim().isEmpty()) {
|
||||
// Empty series name means remove from series
|
||||
story.setSeries(null);
|
||||
} else {
|
||||
// Find or create series by name
|
||||
Series series = seriesService.findByNameOptional(updateReq.getSeriesName().trim())
|
||||
.orElseGet(() -> {
|
||||
Series newSeries = new Series();
|
||||
newSeries.setName(updateReq.getSeriesName().trim());
|
||||
return seriesService.create(newSeries);
|
||||
});
|
||||
story.setSeries(series);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
private void updateStoryTagsByNames(Story story, java.util.List<String> tagNames) {
|
||||
// Clear existing tags first
|
||||
Set<Tag> existingTags = new HashSet<>(story.getTags());
|
||||
@@ -593,4 +643,140 @@ public class StoryService {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Story> findPotentialDuplicates(String title, String authorName) {
|
||||
if (title == null || title.trim().isEmpty() || authorName == null || authorName.trim().isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
return storyRepository.findByTitleAndAuthorNameIgnoreCase(title.trim(), authorName.trim());
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a random story based on optional filters.
|
||||
* Uses search service for consistency with Library search functionality.
|
||||
* Supports text search and multiple tags using the same logic as the Library view.
|
||||
* @param searchQuery Optional search query
|
||||
* @param tags Optional list of tags to filter by
|
||||
* @return Optional containing the random story if found
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public Optional<Story> findRandomStory(String searchQuery, List<String> tags) {
|
||||
return findRandomStory(searchQuery, tags, null, null, null, null, null, null, null,
|
||||
null, null, null, null, null, null, null, null, null, null, null);
|
||||
}
|
||||
|
||||
public Optional<Story> findRandomStory(String searchQuery, List<String> tags, Long seed) {
|
||||
return findRandomStory(searchQuery, tags, seed, null, null, null, null, null, null,
|
||||
null, null, null, null, null, null, null, null, null, null, null);
|
||||
}
|
||||
|
||||
/**
|
||||
* Find a random story based on optional filters with seed support.
|
||||
* Uses search service for consistency with Library search functionality.
|
||||
* Supports text search and multiple tags using the same logic as the Library view.
|
||||
* @param searchQuery Optional search query
|
||||
* @param tags Optional list of tags to filter by
|
||||
* @param seed Optional seed for consistent randomization (null for truly random)
|
||||
* @return Optional containing the random story if found
|
||||
*/
|
||||
@Transactional(readOnly = true)
|
||||
public Optional<Story> findRandomStory(String searchQuery, List<String> tags, Long seed,
|
||||
Integer minWordCount, Integer maxWordCount,
|
||||
String createdAfter, String createdBefore,
|
||||
String lastReadAfter, String lastReadBefore,
|
||||
Integer minRating, Integer maxRating, Boolean unratedOnly,
|
||||
String readingStatus, Boolean hasReadingProgress,
|
||||
Boolean hasCoverImage, String sourceDomain,
|
||||
String seriesFilter, Integer minTagCount,
|
||||
Boolean popularOnly, Boolean hiddenGemsOnly) {
|
||||
|
||||
// Use search service for consistency with Library search
|
||||
try {
|
||||
String randomStoryId = searchServiceAdapter.getRandomStoryId(seed);
|
||||
if (randomStoryId != null) {
|
||||
return storyRepository.findById(UUID.fromString(randomStoryId));
|
||||
}
|
||||
return Optional.empty();
|
||||
} catch (Exception e) {
|
||||
// Fallback to database queries if search service fails
|
||||
logger.warn("Search service random story lookup failed, falling back to database queries", e);
|
||||
}
|
||||
|
||||
// Fallback to repository-based implementation (global routing handles library selection)
|
||||
return findRandomStoryFromRepository(searchQuery, tags);
|
||||
}
|
||||
|
||||
|
||||
/**
|
||||
* Find random story using repository methods (for default database or when library-aware fails)
|
||||
*/
|
||||
private Optional<Story> findRandomStoryFromRepository(String searchQuery, List<String> tags) {
|
||||
// Clean up inputs
|
||||
String cleanSearchQuery = (searchQuery != null && !searchQuery.trim().isEmpty()) ? searchQuery.trim() : null;
|
||||
List<String> cleanTags = (tags != null) ? tags.stream()
|
||||
.filter(tag -> tag != null && !tag.trim().isEmpty())
|
||||
.map(String::trim)
|
||||
.collect(Collectors.toList()) : List.of();
|
||||
|
||||
long totalCount = 0;
|
||||
Optional<Story> randomStory = Optional.empty();
|
||||
|
||||
if (cleanSearchQuery != null && !cleanTags.isEmpty()) {
|
||||
// Both search query and tags
|
||||
String searchPattern = "%" + cleanSearchQuery + "%";
|
||||
List<String> upperCaseTags = cleanTags.stream()
|
||||
.map(String::toUpperCase)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
totalCount = storyRepository.countStoriesByTextSearchAndTags(searchPattern, upperCaseTags, cleanTags.size());
|
||||
if (totalCount > 0) {
|
||||
long randomOffset = (long) (Math.random() * totalCount);
|
||||
randomStory = storyRepository.findRandomStoryByTextSearchAndTags(searchPattern, upperCaseTags, cleanTags.size(), randomOffset);
|
||||
}
|
||||
|
||||
} else if (cleanSearchQuery != null) {
|
||||
// Only search query
|
||||
String searchPattern = "%" + cleanSearchQuery + "%";
|
||||
totalCount = storyRepository.countStoriesByTextSearch(searchPattern);
|
||||
if (totalCount > 0) {
|
||||
long randomOffset = (long) (Math.random() * totalCount);
|
||||
randomStory = storyRepository.findRandomStoryByTextSearch(searchPattern, randomOffset);
|
||||
}
|
||||
|
||||
} else if (!cleanTags.isEmpty()) {
|
||||
// Only tags
|
||||
if (cleanTags.size() == 1) {
|
||||
// Single tag - use optimized single tag query
|
||||
totalCount = storyRepository.countStoriesByTagName(cleanTags.get(0));
|
||||
if (totalCount > 0) {
|
||||
long randomOffset = (long) (Math.random() * totalCount);
|
||||
randomStory = storyRepository.findRandomStoryByTagName(cleanTags.get(0), randomOffset);
|
||||
}
|
||||
} else {
|
||||
// Multiple tags
|
||||
List<String> upperCaseTags = cleanTags.stream()
|
||||
.map(String::toUpperCase)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
totalCount = storyRepository.countStoriesByMultipleTags(upperCaseTags, cleanTags.size());
|
||||
if (totalCount > 0) {
|
||||
long randomOffset = (long) (Math.random() * totalCount);
|
||||
randomStory = storyRepository.findRandomStoryByMultipleTags(upperCaseTags, cleanTags.size(), randomOffset);
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
// No filters - get random from all stories
|
||||
totalCount = storyRepository.countAllStories();
|
||||
if (totalCount > 0) {
|
||||
long randomOffset = (long) (Math.random() * totalCount);
|
||||
randomStory = storyRepository.findRandomStory(randomOffset);
|
||||
}
|
||||
}
|
||||
|
||||
return randomStory;
|
||||
}
|
||||
|
||||
|
||||
}
|
||||
@@ -1,10 +1,15 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.entity.Tag;
|
||||
import com.storycove.entity.TagAlias;
|
||||
import com.storycove.repository.TagRepository;
|
||||
import com.storycove.repository.TagAliasRepository;
|
||||
import com.storycove.service.exception.DuplicateResourceException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.data.domain.Page;
|
||||
import org.springframework.data.domain.Pageable;
|
||||
@@ -12,20 +17,27 @@ import org.springframework.stereotype.Service;
|
||||
import org.springframework.transaction.annotation.Transactional;
|
||||
import org.springframework.validation.annotation.Validated;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Optional;
|
||||
import java.util.Set;
|
||||
import java.util.UUID;
|
||||
|
||||
@Service
|
||||
@Validated
|
||||
@Transactional
|
||||
public class TagService {
|
||||
|
||||
private static final Logger logger = LoggerFactory.getLogger(TagService.class);
|
||||
|
||||
private final TagRepository tagRepository;
|
||||
private final TagAliasRepository tagAliasRepository;
|
||||
|
||||
@Autowired
|
||||
public TagService(TagRepository tagRepository) {
|
||||
public TagService(TagRepository tagRepository, TagAliasRepository tagAliasRepository) {
|
||||
this.tagRepository = tagRepository;
|
||||
this.tagAliasRepository = tagAliasRepository;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
@@ -191,6 +203,11 @@ public class TagService {
|
||||
public long countUsedTags() {
|
||||
return tagRepository.countUsedTags();
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Tag> findTagsUsedByCollections() {
|
||||
return tagRepository.findTagsUsedByCollections();
|
||||
}
|
||||
|
||||
private void validateTagForCreate(Tag tag) {
|
||||
if (existsByName(tag.getName())) {
|
||||
@@ -202,5 +219,273 @@ public class TagService {
|
||||
if (updates.getName() != null) {
|
||||
existing.setName(updates.getName());
|
||||
}
|
||||
if (updates.getColor() != null) {
|
||||
existing.setColor(updates.getColor());
|
||||
}
|
||||
if (updates.getDescription() != null) {
|
||||
existing.setDescription(updates.getDescription());
|
||||
}
|
||||
}
|
||||
|
||||
// Tag alias management methods
|
||||
|
||||
public TagAlias addAlias(UUID tagId, String aliasName) {
|
||||
Tag canonicalTag = findById(tagId);
|
||||
|
||||
// Check if alias already exists (case-insensitive)
|
||||
if (tagAliasRepository.existsByAliasNameIgnoreCase(aliasName)) {
|
||||
throw new DuplicateResourceException("Tag alias", aliasName);
|
||||
}
|
||||
|
||||
// Check if alias name conflicts with existing tag names
|
||||
if (tagRepository.existsByNameIgnoreCase(aliasName)) {
|
||||
throw new DuplicateResourceException("Tag alias conflicts with existing tag name", aliasName);
|
||||
}
|
||||
|
||||
TagAlias alias = new TagAlias();
|
||||
alias.setAliasName(aliasName);
|
||||
alias.setCanonicalTag(canonicalTag);
|
||||
alias.setCreatedFromMerge(false);
|
||||
|
||||
return tagAliasRepository.save(alias);
|
||||
}
|
||||
|
||||
public void removeAlias(UUID tagId, UUID aliasId) {
|
||||
findById(tagId); // Validate tag exists
|
||||
TagAlias alias = tagAliasRepository.findById(aliasId)
|
||||
.orElseThrow(() -> new ResourceNotFoundException("Tag alias", aliasId.toString()));
|
||||
|
||||
// Verify the alias belongs to the specified tag
|
||||
if (!alias.getCanonicalTag().getId().equals(tagId)) {
|
||||
throw new IllegalArgumentException("Alias does not belong to the specified tag");
|
||||
}
|
||||
|
||||
tagAliasRepository.delete(alias);
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public Tag resolveTagByName(String name) {
|
||||
// First try to find exact tag match
|
||||
Optional<Tag> directMatch = tagRepository.findByNameIgnoreCase(name);
|
||||
if (directMatch.isPresent()) {
|
||||
return directMatch.get();
|
||||
}
|
||||
|
||||
// Then try to find by alias
|
||||
Optional<TagAlias> aliasMatch = tagAliasRepository.findByAliasNameIgnoreCase(name);
|
||||
if (aliasMatch.isPresent()) {
|
||||
return aliasMatch.get().getCanonicalTag();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Tag mergeTags(List<UUID> sourceTagIds, UUID targetTagId) {
|
||||
// Validate target tag exists
|
||||
Tag targetTag = findById(targetTagId);
|
||||
|
||||
// Validate source tags exist and are different from target
|
||||
List<Tag> sourceTags = sourceTagIds.stream()
|
||||
.filter(id -> !id.equals(targetTagId)) // Don't merge tag with itself
|
||||
.map(this::findById)
|
||||
.toList();
|
||||
|
||||
if (sourceTags.isEmpty()) {
|
||||
throw new IllegalArgumentException("No valid source tags to merge");
|
||||
}
|
||||
|
||||
// Perform the merge atomically
|
||||
for (Tag sourceTag : sourceTags) {
|
||||
// Move all stories from source tag to target tag
|
||||
// Create a copy to avoid ConcurrentModificationException
|
||||
List<Story> storiesToMove = new ArrayList<>(sourceTag.getStories());
|
||||
storiesToMove.forEach(story -> {
|
||||
story.removeTag(sourceTag);
|
||||
story.addTag(targetTag);
|
||||
});
|
||||
|
||||
// Create alias for the source tag name
|
||||
TagAlias alias = new TagAlias();
|
||||
alias.setAliasName(sourceTag.getName());
|
||||
alias.setCanonicalTag(targetTag);
|
||||
alias.setCreatedFromMerge(true);
|
||||
tagAliasRepository.save(alias);
|
||||
|
||||
// Delete the source tag
|
||||
tagRepository.delete(sourceTag);
|
||||
}
|
||||
|
||||
return tagRepository.save(targetTag);
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Tag> findByNameOrAliasStartingWith(String query, int limit) {
|
||||
// Find tags that start with the query
|
||||
List<Tag> directMatches = tagRepository.findByNameStartingWithIgnoreCase(query.toLowerCase());
|
||||
|
||||
// Find tags via aliases that start with the query
|
||||
List<TagAlias> aliasMatches = tagAliasRepository.findByAliasNameStartingWithIgnoreCase(query.toLowerCase());
|
||||
List<Tag> aliasTagMatches = aliasMatches.stream()
|
||||
.map(TagAlias::getCanonicalTag)
|
||||
.distinct()
|
||||
.toList();
|
||||
|
||||
// Combine and deduplicate
|
||||
Set<Tag> allMatches = new HashSet<>(directMatches);
|
||||
allMatches.addAll(aliasTagMatches);
|
||||
|
||||
// Convert to list and limit results
|
||||
return allMatches.stream()
|
||||
.sorted((a, b) -> a.getName().compareToIgnoreCase(b.getName()))
|
||||
.limit(limit)
|
||||
.toList();
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public com.storycove.controller.TagController.MergePreviewResponse previewMerge(List<UUID> sourceTagIds, UUID targetTagId) {
|
||||
// Validate target tag exists
|
||||
Tag targetTag = findById(targetTagId);
|
||||
|
||||
// Validate source tags exist and are different from target
|
||||
List<Tag> sourceTags = sourceTagIds.stream()
|
||||
.filter(id -> !id.equals(targetTagId))
|
||||
.map(this::findById)
|
||||
.toList();
|
||||
|
||||
if (sourceTags.isEmpty()) {
|
||||
throw new IllegalArgumentException("No valid source tags to merge");
|
||||
}
|
||||
|
||||
// Calculate preview data
|
||||
int targetStoryCount = targetTag.getStories().size();
|
||||
|
||||
// Collect all unique stories from all tags (including target) to handle overlaps correctly
|
||||
Set<Story> allUniqueStories = new HashSet<>(targetTag.getStories());
|
||||
for (Tag sourceTag : sourceTags) {
|
||||
allUniqueStories.addAll(sourceTag.getStories());
|
||||
}
|
||||
int totalStories = allUniqueStories.size();
|
||||
|
||||
List<String> aliasesToCreate = sourceTags.stream()
|
||||
.map(Tag::getName)
|
||||
.toList();
|
||||
|
||||
// Create response object using the controller's inner class
|
||||
var preview = new com.storycove.controller.TagController.MergePreviewResponse();
|
||||
preview.setTargetTagName(targetTag.getName());
|
||||
preview.setTargetStoryCount(targetStoryCount);
|
||||
preview.setTotalResultStoryCount(totalStories);
|
||||
preview.setAliasesToCreate(aliasesToCreate);
|
||||
|
||||
return preview;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<com.storycove.controller.TagController.TagSuggestion> suggestTags(String title, String content, String summary, int limit) {
|
||||
List<com.storycove.controller.TagController.TagSuggestion> suggestions = new ArrayList<>();
|
||||
|
||||
// Get all existing tags for matching
|
||||
List<Tag> existingTags = findAll();
|
||||
|
||||
// Combine all text for analysis
|
||||
String combinedText = (title != null ? title : "") + " " +
|
||||
(summary != null ? summary : "") + " " +
|
||||
(content != null ? stripHtml(content) : "");
|
||||
|
||||
if (combinedText.trim().isEmpty()) {
|
||||
return suggestions;
|
||||
}
|
||||
|
||||
String lowerText = combinedText.toLowerCase();
|
||||
|
||||
// Score each existing tag based on how well it matches the content
|
||||
for (Tag tag : existingTags) {
|
||||
double score = calculateTagRelevanceScore(tag, lowerText, title, summary);
|
||||
|
||||
if (score > 0.1) { // Only suggest tags with reasonable confidence
|
||||
String reason = generateReason(tag, lowerText, title, summary);
|
||||
suggestions.add(new com.storycove.controller.TagController.TagSuggestion(
|
||||
tag.getName(), score, reason
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Sort by confidence score (descending) and limit results
|
||||
return suggestions.stream()
|
||||
.sorted((a, b) -> Double.compare(b.getConfidence(), a.getConfidence()))
|
||||
.limit(limit)
|
||||
.collect(java.util.stream.Collectors.toList());
|
||||
}
|
||||
|
||||
private double calculateTagRelevanceScore(Tag tag, String lowerText, String title, String summary) {
|
||||
String tagName = tag.getName().toLowerCase();
|
||||
double score = 0.0;
|
||||
|
||||
// Exact matches get highest score
|
||||
if (lowerText.contains(" " + tagName + " ") || lowerText.startsWith(tagName + " ") || lowerText.endsWith(" " + tagName)) {
|
||||
score += 0.8;
|
||||
}
|
||||
|
||||
// Partial matches in title get high score
|
||||
if (title != null && title.toLowerCase().contains(tagName)) {
|
||||
score += 0.6;
|
||||
}
|
||||
|
||||
// Partial matches in summary get medium score
|
||||
if (summary != null && summary.toLowerCase().contains(tagName)) {
|
||||
score += 0.4;
|
||||
}
|
||||
|
||||
// Word-based matching (split tag name and look for individual words)
|
||||
String[] tagWords = tagName.split("[\\s-_]+");
|
||||
int matchedWords = 0;
|
||||
for (String word : tagWords) {
|
||||
if (word.length() > 2 && lowerText.contains(word)) {
|
||||
matchedWords++;
|
||||
}
|
||||
}
|
||||
if (tagWords.length > 0) {
|
||||
score += 0.3 * ((double) matchedWords / tagWords.length);
|
||||
}
|
||||
|
||||
// Boost score based on tag popularity (more used tags are more likely to be relevant)
|
||||
int storyCount = tag.getStories() != null ? tag.getStories().size() : 0;
|
||||
if (storyCount > 0) {
|
||||
score += Math.min(0.2, storyCount * 0.01); // Small boost, capped at 0.2
|
||||
}
|
||||
|
||||
return Math.min(1.0, score); // Cap at 1.0
|
||||
}
|
||||
|
||||
private String generateReason(Tag tag, String lowerText, String title, String summary) {
|
||||
String tagName = tag.getName().toLowerCase();
|
||||
|
||||
if (title != null && title.toLowerCase().contains(tagName)) {
|
||||
return "Found in title";
|
||||
}
|
||||
|
||||
if (summary != null && summary.toLowerCase().contains(tagName)) {
|
||||
return "Found in summary";
|
||||
}
|
||||
|
||||
if (lowerText.contains(" " + tagName + " ") || lowerText.startsWith(tagName + " ") || lowerText.endsWith(" " + tagName)) {
|
||||
return "Exact match in content";
|
||||
}
|
||||
|
||||
String[] tagWords = tagName.split("[\\s-_]+");
|
||||
for (String word : tagWords) {
|
||||
if (word.length() > 2 && lowerText.contains(word)) {
|
||||
return "Related keywords found";
|
||||
}
|
||||
}
|
||||
|
||||
return "Similar content";
|
||||
}
|
||||
|
||||
private String stripHtml(String html) {
|
||||
if (html == null) return "";
|
||||
// Simple HTML tag removal - replace with a proper HTML parser if needed
|
||||
return html.replaceAll("<[^>]+>", " ").replaceAll("\\s+", " ").trim();
|
||||
}
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,12 @@
|
||||
package com.storycove.service.exception;
|
||||
|
||||
public class InvalidFileException extends RuntimeException {
|
||||
|
||||
public InvalidFileException(String message) {
|
||||
super(message);
|
||||
}
|
||||
|
||||
public InvalidFileException(String message, Throwable cause) {
|
||||
super(message, cause);
|
||||
}
|
||||
}
|
||||
@@ -3,35 +3,64 @@ package com.storycove.util;
|
||||
import io.jsonwebtoken.Claims;
|
||||
import io.jsonwebtoken.Jwts;
|
||||
import io.jsonwebtoken.security.Keys;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Component;
|
||||
|
||||
import jakarta.annotation.PostConstruct;
|
||||
import javax.crypto.SecretKey;
|
||||
import java.security.SecureRandom;
|
||||
import java.util.Base64;
|
||||
import java.util.Date;
|
||||
|
||||
@Component
|
||||
public class JwtUtil {
|
||||
|
||||
@Value("${storycove.jwt.secret}")
|
||||
private static final Logger logger = LoggerFactory.getLogger(JwtUtil.class);
|
||||
|
||||
// Security: Generate new secret on each startup to invalidate all existing tokens
|
||||
private String secret;
|
||||
|
||||
@Value("${storycove.jwt.expiration:86400000}") // 24 hours default
|
||||
private Long expiration;
|
||||
|
||||
@PostConstruct
|
||||
public void initialize() {
|
||||
// Generate a new random secret on startup to invalidate all existing JWT tokens
|
||||
// This ensures users must re-authenticate after application restart
|
||||
SecureRandom random = new SecureRandom();
|
||||
byte[] secretBytes = new byte[64]; // 512 bits
|
||||
random.nextBytes(secretBytes);
|
||||
this.secret = Base64.getEncoder().encodeToString(secretBytes);
|
||||
|
||||
logger.info("JWT secret rotated on startup - all existing tokens invalidated");
|
||||
logger.info("Users will need to re-authenticate after application restart for security");
|
||||
}
|
||||
|
||||
private SecretKey getSigningKey() {
|
||||
return Keys.hmacShaKeyFor(secret.getBytes());
|
||||
}
|
||||
|
||||
public String generateToken() {
|
||||
return generateToken("user", null);
|
||||
}
|
||||
|
||||
public String generateToken(String subject, String libraryId) {
|
||||
Date now = new Date();
|
||||
Date expiryDate = new Date(now.getTime() + expiration);
|
||||
|
||||
return Jwts.builder()
|
||||
.subject("user")
|
||||
var builder = Jwts.builder()
|
||||
.subject(subject)
|
||||
.issuedAt(now)
|
||||
.expiration(expiryDate)
|
||||
.signWith(getSigningKey())
|
||||
.compact();
|
||||
.expiration(expiryDate);
|
||||
|
||||
// Add library context if provided
|
||||
if (libraryId != null) {
|
||||
builder.claim("libraryId", libraryId);
|
||||
}
|
||||
|
||||
return builder.signWith(getSigningKey()).compact();
|
||||
}
|
||||
|
||||
public boolean validateToken(String token) {
|
||||
@@ -62,4 +91,13 @@ public class JwtUtil {
|
||||
public String getSubjectFromToken(String token) {
|
||||
return getClaimsFromToken(token).getSubject();
|
||||
}
|
||||
|
||||
public String getLibraryIdFromToken(String token) {
|
||||
try {
|
||||
Claims claims = getClaimsFromToken(token);
|
||||
return claims.get("libraryId", String.class);
|
||||
} catch (Exception e) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -16,8 +16,14 @@ spring:
|
||||
|
||||
servlet:
|
||||
multipart:
|
||||
max-file-size: 5MB
|
||||
max-request-size: 10MB
|
||||
max-file-size: 256MB # Increased for backup restore
|
||||
max-request-size: 260MB # Slightly higher to account for form data
|
||||
|
||||
jackson:
|
||||
serialization:
|
||||
write-dates-as-timestamps: false
|
||||
deserialization:
|
||||
adjust-dates-to-context-time-zone: false
|
||||
|
||||
server:
|
||||
port: 8080
|
||||
@@ -28,20 +34,78 @@ storycove:
|
||||
cors:
|
||||
allowed-origins: ${STORYCOVE_CORS_ALLOWED_ORIGINS:http://localhost:3000,http://localhost:6925}
|
||||
jwt:
|
||||
secret: ${JWT_SECRET:default-secret-key}
|
||||
secret: ${JWT_SECRET} # REQUIRED: Must be at least 32 characters, no default for security
|
||||
expiration: 86400000 # 24 hours
|
||||
auth:
|
||||
password: ${APP_PASSWORD:admin}
|
||||
typesense:
|
||||
api-key: ${TYPESENSE_API_KEY:xyz}
|
||||
host: ${TYPESENSE_HOST:localhost}
|
||||
port: ${TYPESENSE_PORT:8108}
|
||||
enabled: ${TYPESENSE_ENABLED:true}
|
||||
reindex-interval: ${TYPESENSE_REINDEX_INTERVAL:3600000} # 1 hour in milliseconds
|
||||
password: ${APP_PASSWORD} # REQUIRED: No default password for security
|
||||
search:
|
||||
engine: opensearch # OpenSearch is the only search engine
|
||||
opensearch:
|
||||
# Connection settings
|
||||
host: ${OPENSEARCH_HOST:localhost}
|
||||
port: ${OPENSEARCH_PORT:9200}
|
||||
scheme: ${OPENSEARCH_SCHEME:http}
|
||||
username: ${OPENSEARCH_USERNAME:}
|
||||
password: ${OPENSEARCH_PASSWORD:} # Empty when security is disabled
|
||||
|
||||
# Environment-specific configuration
|
||||
profile: ${SPRING_PROFILES_ACTIVE:development} # development, staging, production
|
||||
|
||||
# Security settings
|
||||
security:
|
||||
ssl-verification: ${OPENSEARCH_SSL_VERIFICATION:false}
|
||||
trust-all-certificates: ${OPENSEARCH_TRUST_ALL_CERTS:true}
|
||||
keystore-path: ${OPENSEARCH_KEYSTORE_PATH:}
|
||||
keystore-password: ${OPENSEARCH_KEYSTORE_PASSWORD:}
|
||||
truststore-path: ${OPENSEARCH_TRUSTSTORE_PATH:}
|
||||
truststore-password: ${OPENSEARCH_TRUSTSTORE_PASSWORD:}
|
||||
|
||||
# Connection pool settings
|
||||
connection:
|
||||
timeout: ${OPENSEARCH_CONNECTION_TIMEOUT:30000} # 30 seconds
|
||||
socket-timeout: ${OPENSEARCH_SOCKET_TIMEOUT:60000} # 60 seconds
|
||||
max-connections-per-route: ${OPENSEARCH_MAX_CONN_PER_ROUTE:10}
|
||||
max-connections-total: ${OPENSEARCH_MAX_CONN_TOTAL:30}
|
||||
retry-on-failure: ${OPENSEARCH_RETRY_ON_FAILURE:true}
|
||||
max-retries: ${OPENSEARCH_MAX_RETRIES:3}
|
||||
|
||||
# Index settings
|
||||
indices:
|
||||
default-shards: ${OPENSEARCH_DEFAULT_SHARDS:1}
|
||||
default-replicas: ${OPENSEARCH_DEFAULT_REPLICAS:0}
|
||||
refresh-interval: ${OPENSEARCH_REFRESH_INTERVAL:1s}
|
||||
|
||||
# Bulk operations
|
||||
bulk:
|
||||
actions: ${OPENSEARCH_BULK_ACTIONS:1000}
|
||||
size: ${OPENSEARCH_BULK_SIZE:5242880} # 5MB
|
||||
timeout: ${OPENSEARCH_BULK_TIMEOUT:10000} # 10 seconds
|
||||
concurrent-requests: ${OPENSEARCH_BULK_CONCURRENT:1}
|
||||
|
||||
# Health and monitoring
|
||||
health:
|
||||
check-interval: ${OPENSEARCH_HEALTH_CHECK_INTERVAL:30000} # 30 seconds
|
||||
slow-query-threshold: ${OPENSEARCH_SLOW_QUERY_THRESHOLD:5000} # 5 seconds
|
||||
enable-metrics: ${OPENSEARCH_ENABLE_METRICS:true}
|
||||
images:
|
||||
storage-path: ${IMAGE_STORAGE_PATH:/app/images}
|
||||
|
||||
management:
|
||||
endpoints:
|
||||
web:
|
||||
exposure:
|
||||
include: health,info,prometheus
|
||||
endpoint:
|
||||
health:
|
||||
show-details: when-authorized
|
||||
show-components: always
|
||||
health:
|
||||
opensearch:
|
||||
enabled: ${OPENSEARCH_HEALTH_ENABLED:true}
|
||||
|
||||
logging:
|
||||
level:
|
||||
com.storycove: DEBUG
|
||||
org.springframework.security: DEBUG
|
||||
com.storycove: ${LOG_LEVEL:INFO} # Use INFO for production, DEBUG for development
|
||||
org.springframework.security: WARN # Reduce security logging
|
||||
org.springframework.web: WARN
|
||||
org.hibernate.SQL: ${SQL_LOG_LEVEL:WARN} # Control SQL logging separately
|
||||
@@ -4,7 +4,7 @@
|
||||
"b", "strong", "i", "em", "u", "s", "strike", "del", "ins",
|
||||
"sup", "sub", "small", "big", "mark", "pre", "code", "kbd", "samp", "var",
|
||||
"ul", "ol", "li", "dl", "dt", "dd",
|
||||
"a", "table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption", "colgroup", "col",
|
||||
"a", "img", "table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption", "colgroup", "col",
|
||||
"blockquote", "cite", "q", "hr", "details", "summary"
|
||||
],
|
||||
"allowedAttributes": {
|
||||
@@ -17,7 +17,8 @@
|
||||
"h4": ["class", "style"],
|
||||
"h5": ["class", "style"],
|
||||
"h6": ["class", "style"],
|
||||
"a": ["class"],
|
||||
"a": ["class", "href", "title"],
|
||||
"img": ["src", "alt", "width", "height", "class", "style"],
|
||||
"table": ["class", "style"],
|
||||
"th": ["class", "style", "colspan", "rowspan"],
|
||||
"td": ["class", "style", "colspan", "rowspan"],
|
||||
@@ -38,8 +39,13 @@
|
||||
"font-weight", "font-style", "text-align", "text-decoration", "margin",
|
||||
"padding", "text-indent", "line-height"
|
||||
],
|
||||
"removedAttributes": {
|
||||
"a": ["href", "target"]
|
||||
"allowedProtocols": {
|
||||
"a": {
|
||||
"href": ["http", "https", "#", "/"]
|
||||
},
|
||||
"img": {
|
||||
"src": ["http", "https", "data", "/", "cid"]
|
||||
}
|
||||
},
|
||||
"description": "HTML sanitization configuration for StoryCove story content. This configuration is shared between frontend (DOMPurify) and backend (Jsoup) to ensure consistency."
|
||||
}
|
||||
178
backend/src/main/resources/opensearch/README.md
Normal file
178
backend/src/main/resources/opensearch/README.md
Normal file
@@ -0,0 +1,178 @@
|
||||
# OpenSearch Configuration - Best Practices Implementation
|
||||
|
||||
## Overview
|
||||
|
||||
This directory contains a production-ready OpenSearch configuration following industry best practices for security, scalability, and maintainability.
|
||||
|
||||
## Architecture
|
||||
|
||||
### 📁 Directory Structure
|
||||
```
|
||||
opensearch/
|
||||
├── config/
|
||||
│ ├── opensearch-development.yml # Development-specific settings
|
||||
│ └── opensearch-production.yml # Production-specific settings
|
||||
├── mappings/
|
||||
│ ├── stories-mapping.json # Story index mapping
|
||||
│ ├── authors-mapping.json # Author index mapping
|
||||
│ └── collections-mapping.json # Collection index mapping
|
||||
├── templates/
|
||||
│ ├── stories-template.json # Index template for stories_*
|
||||
│ └── index-lifecycle-policy.json # ILM policy for index management
|
||||
└── README.md # This file
|
||||
```
|
||||
|
||||
## ✅ Best Practices Implemented
|
||||
|
||||
### 🔒 **Security**
|
||||
- **Environment-Aware SSL Configuration**
|
||||
- Production: Full certificate validation with custom truststore support
|
||||
- Development: Optional certificate validation for local development
|
||||
- **Proper Authentication**: Basic auth with secure credential management
|
||||
- **Connection Security**: TLS 1.3 support with hostname verification
|
||||
|
||||
### 🏗️ **Configuration Management**
|
||||
- **Externalized Configuration**: JSON/YAML files instead of hardcoded values
|
||||
- **Environment-Specific Settings**: Different configs for dev/staging/prod
|
||||
- **Type-Safe Properties**: Strongly-typed configuration classes
|
||||
- **Validation**: Configuration validation at startup
|
||||
|
||||
### 📈 **Scalability & Performance**
|
||||
- **Connection Pooling**: Configurable connection pool with timeout management
|
||||
- **Environment-Aware Sharding**:
|
||||
- Development: 1 shard, 0 replicas (single node)
|
||||
- Production: 3 shards, 1 replica (high availability)
|
||||
- **Bulk Operations**: Optimized bulk indexing with configurable batch sizes
|
||||
- **Index Templates**: Automatic application of settings to new indexes
|
||||
|
||||
### 🔄 **Index Lifecycle Management**
|
||||
- **Automated Index Rollover**: Based on size, document count, and age
|
||||
- **Hot-Warm-Cold Architecture**: Optimized storage costs
|
||||
- **Retention Policies**: Automatic cleanup of old data
|
||||
- **Force Merge**: Optimization in warm phase
|
||||
|
||||
### 📊 **Monitoring & Observability**
|
||||
- **Health Checks**: Automatic cluster health monitoring
|
||||
- **Spring Boot Actuator**: Health endpoints for monitoring systems
|
||||
- **Metrics Collection**: Configurable performance metrics
|
||||
- **Slow Query Detection**: Configurable thresholds for query performance
|
||||
|
||||
### 🛡️ **Error Handling & Resilience**
|
||||
- **Connection Retry Logic**: Automatic retry with backoff
|
||||
- **Circuit Breaker Pattern**: Fail-fast for unhealthy clusters
|
||||
- **Graceful Degradation**: Graceful handling when OpenSearch unavailable
|
||||
- **Detailed Error Logging**: Comprehensive error tracking
|
||||
|
||||
## 🚀 Usage
|
||||
|
||||
### Development Environment
|
||||
```yaml
|
||||
# application-development.yml
|
||||
storycove:
|
||||
opensearch:
|
||||
profile: development
|
||||
security:
|
||||
ssl-verification: false
|
||||
trust-all-certificates: true
|
||||
indices:
|
||||
default-shards: 1
|
||||
default-replicas: 0
|
||||
```
|
||||
|
||||
### Production Environment
|
||||
```yaml
|
||||
# application-production.yml
|
||||
storycove:
|
||||
opensearch:
|
||||
profile: production
|
||||
security:
|
||||
ssl-verification: true
|
||||
trust-all-certificates: false
|
||||
truststore-path: /etc/ssl/opensearch-truststore.jks
|
||||
indices:
|
||||
default-shards: 3
|
||||
default-replicas: 1
|
||||
```
|
||||
|
||||
## 📋 Environment Variables
|
||||
|
||||
### Required
|
||||
- `OPENSEARCH_PASSWORD`: Admin password for OpenSearch cluster
|
||||
|
||||
### Optional (with sensible defaults)
|
||||
- `OPENSEARCH_HOST`: Cluster hostname (default: localhost)
|
||||
- `OPENSEARCH_PORT`: Cluster port (default: 9200)
|
||||
- `OPENSEARCH_USERNAME`: Admin username (default: admin)
|
||||
- `OPENSEARCH_SSL_VERIFICATION`: Enable SSL verification (default: false for dev)
|
||||
- `OPENSEARCH_MAX_CONN_TOTAL`: Max connections (default: 30 for dev, 200 for prod)
|
||||
|
||||
## 🎯 Index Templates
|
||||
|
||||
Index templates automatically apply configuration to new indexes:
|
||||
|
||||
```json
|
||||
{
|
||||
"index_patterns": ["stories_*"],
|
||||
"template": {
|
||||
"settings": {
|
||||
"number_of_shards": "#{ENV_SPECIFIC}",
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"story_analyzer": {
|
||||
"type": "standard",
|
||||
"stopwords": "_english_"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
## 🔍 Health Monitoring
|
||||
|
||||
Access health information:
|
||||
- **Application Health**: `/actuator/health`
|
||||
- **OpenSearch Specific**: `/actuator/health/opensearch`
|
||||
- **Detailed Metrics**: Available when `enable-metrics: true`
|
||||
|
||||
## 🔄 Deployment Strategy
|
||||
|
||||
Recommended deployment approach:
|
||||
|
||||
1. **Development**: Test OpenSearch configuration locally
|
||||
2. **Staging**: Validate performance and accuracy in staging environment
|
||||
3. **Production**: Deploy with proper monitoring and backup procedures
|
||||
|
||||
## 🛠️ Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **SSL Certificate Errors**
|
||||
- Development: Set `trust-all-certificates: true`
|
||||
- Production: Provide valid truststore path
|
||||
|
||||
2. **Connection Timeouts**
|
||||
- Increase `connection.timeout` values
|
||||
- Check network connectivity and firewall rules
|
||||
|
||||
3. **Index Creation Failures**
|
||||
- Verify cluster health with `/actuator/health/opensearch`
|
||||
- Check OpenSearch logs for detailed error messages
|
||||
|
||||
4. **Performance Issues**
|
||||
- Monitor slow queries with configurable thresholds
|
||||
- Adjust bulk operation settings
|
||||
- Review shard allocation and replica settings
|
||||
|
||||
## 🔮 Future Enhancements
|
||||
|
||||
- **Multi-Cluster Support**: Connect to multiple OpenSearch clusters
|
||||
- **Advanced Security**: Integration with OpenSearch Security plugin
|
||||
- **Custom Analyzers**: Domain-specific text analysis
|
||||
- **Index Aliases**: Zero-downtime index updates
|
||||
- **Machine Learning**: Integration with OpenSearch ML features
|
||||
|
||||
---
|
||||
|
||||
This configuration provides a solid foundation that scales from development to enterprise production environments while maintaining security, performance, and operational excellence.
|
||||
@@ -0,0 +1,32 @@
|
||||
# OpenSearch Development Configuration
|
||||
opensearch:
|
||||
cluster:
|
||||
name: "storycove-dev"
|
||||
initial_master_nodes: ["opensearch-node"]
|
||||
|
||||
# Development settings - single node, minimal resources
|
||||
indices:
|
||||
default_settings:
|
||||
number_of_shards: 1
|
||||
number_of_replicas: 0
|
||||
refresh_interval: "1s"
|
||||
|
||||
# Security settings for development
|
||||
security:
|
||||
ssl_verification: false
|
||||
trust_all_certificates: true
|
||||
|
||||
# Connection settings
|
||||
connection:
|
||||
timeout: "30s"
|
||||
socket_timeout: "60s"
|
||||
max_connections_per_route: 10
|
||||
max_connections_total: 30
|
||||
|
||||
# Index management
|
||||
index_management:
|
||||
auto_create_templates: true
|
||||
template_patterns:
|
||||
stories: "stories_*"
|
||||
authors: "authors_*"
|
||||
collections: "collections_*"
|
||||
@@ -0,0 +1,60 @@
|
||||
# OpenSearch Production Configuration
|
||||
opensearch:
|
||||
cluster:
|
||||
name: "storycove-prod"
|
||||
|
||||
# Production settings - multi-shard, with replicas
|
||||
indices:
|
||||
default_settings:
|
||||
number_of_shards: 3
|
||||
number_of_replicas: 1
|
||||
refresh_interval: "30s"
|
||||
max_result_window: 50000
|
||||
|
||||
# Index lifecycle policies
|
||||
lifecycle:
|
||||
hot_phase_duration: "7d"
|
||||
warm_phase_duration: "30d"
|
||||
cold_phase_duration: "90d"
|
||||
delete_after: "1y"
|
||||
|
||||
# Security settings for production
|
||||
security:
|
||||
ssl_verification: true
|
||||
trust_all_certificates: false
|
||||
certificate_verification: true
|
||||
tls_version: "TLSv1.3"
|
||||
|
||||
# Connection settings
|
||||
connection:
|
||||
timeout: "10s"
|
||||
socket_timeout: "30s"
|
||||
max_connections_per_route: 50
|
||||
max_connections_total: 200
|
||||
retry_on_failure: true
|
||||
max_retries: 3
|
||||
retry_delay: "1s"
|
||||
|
||||
# Performance tuning
|
||||
performance:
|
||||
bulk_actions: 1000
|
||||
bulk_size: "5MB"
|
||||
bulk_timeout: "10s"
|
||||
concurrent_requests: 4
|
||||
|
||||
# Monitoring and observability
|
||||
monitoring:
|
||||
health_check_interval: "30s"
|
||||
slow_query_threshold: "5s"
|
||||
enable_metrics: true
|
||||
|
||||
# Index management
|
||||
index_management:
|
||||
auto_create_templates: true
|
||||
template_patterns:
|
||||
stories: "stories_*"
|
||||
authors: "authors_*"
|
||||
collections: "collections_*"
|
||||
retention_policy:
|
||||
enabled: true
|
||||
default_retention: "1y"
|
||||
@@ -0,0 +1,79 @@
|
||||
{
|
||||
"settings": {
|
||||
"number_of_shards": 1,
|
||||
"number_of_replicas": 0,
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"name_analyzer": {
|
||||
"type": "standard",
|
||||
"stopwords": "_english_"
|
||||
},
|
||||
"autocomplete_analyzer": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["lowercase", "edge_ngram"]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"edge_ngram": {
|
||||
"type": "edge_ngram",
|
||||
"min_gram": 2,
|
||||
"max_gram": 20
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"name": {
|
||||
"type": "text",
|
||||
"analyzer": "name_analyzer",
|
||||
"fields": {
|
||||
"autocomplete": {
|
||||
"type": "text",
|
||||
"analyzer": "autocomplete_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"bio": {
|
||||
"type": "text",
|
||||
"analyzer": "name_analyzer"
|
||||
},
|
||||
"urls": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"imageUrl": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"storyCount": {
|
||||
"type": "integer"
|
||||
},
|
||||
"averageRating": {
|
||||
"type": "float"
|
||||
},
|
||||
"totalWordCount": {
|
||||
"type": "long"
|
||||
},
|
||||
"totalReadingTime": {
|
||||
"type": "integer"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"libraryId": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
|
||||
{
|
||||
"settings": {
|
||||
"number_of_shards": 1,
|
||||
"number_of_replicas": 0,
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"collection_analyzer": {
|
||||
"type": "standard",
|
||||
"stopwords": "_english_"
|
||||
},
|
||||
"autocomplete_analyzer": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["lowercase", "edge_ngram"]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"edge_ngram": {
|
||||
"type": "edge_ngram",
|
||||
"min_gram": 2,
|
||||
"max_gram": 20
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"name": {
|
||||
"type": "text",
|
||||
"analyzer": "collection_analyzer",
|
||||
"fields": {
|
||||
"autocomplete": {
|
||||
"type": "text",
|
||||
"analyzer": "autocomplete_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"description": {
|
||||
"type": "text",
|
||||
"analyzer": "collection_analyzer"
|
||||
},
|
||||
"storyCount": {
|
||||
"type": "integer"
|
||||
},
|
||||
"totalWordCount": {
|
||||
"type": "long"
|
||||
},
|
||||
"averageRating": {
|
||||
"type": "float"
|
||||
},
|
||||
"isPublic": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"libraryId": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,120 @@
|
||||
{
|
||||
"settings": {
|
||||
"number_of_shards": 1,
|
||||
"number_of_replicas": 0,
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"story_analyzer": {
|
||||
"type": "standard",
|
||||
"stopwords": "_english_"
|
||||
},
|
||||
"autocomplete_analyzer": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["lowercase", "edge_ngram"]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"edge_ngram": {
|
||||
"type": "edge_ngram",
|
||||
"min_gram": 2,
|
||||
"max_gram": 20
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"title": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {
|
||||
"autocomplete": {
|
||||
"type": "text",
|
||||
"analyzer": "autocomplete_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"content": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer"
|
||||
},
|
||||
"summary": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer"
|
||||
},
|
||||
"authorNames": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"authorIds": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"tagNames": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"seriesTitle": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"seriesId": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"wordCount": {
|
||||
"type": "integer"
|
||||
},
|
||||
"rating": {
|
||||
"type": "float"
|
||||
},
|
||||
"readingTime": {
|
||||
"type": "integer"
|
||||
},
|
||||
"language": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"status": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"publishedAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"isRead": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"isFavorite": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"readingProgress": {
|
||||
"type": "float"
|
||||
},
|
||||
"libraryId": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,77 @@
|
||||
{
|
||||
"policy": {
|
||||
"description": "StoryCove index lifecycle policy",
|
||||
"default_state": "hot",
|
||||
"states": [
|
||||
{
|
||||
"name": "hot",
|
||||
"actions": [
|
||||
{
|
||||
"rollover": {
|
||||
"min_size": "50gb",
|
||||
"min_doc_count": 1000000,
|
||||
"min_age": "7d"
|
||||
}
|
||||
}
|
||||
],
|
||||
"transitions": [
|
||||
{
|
||||
"state_name": "warm",
|
||||
"conditions": {
|
||||
"min_age": "7d"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "warm",
|
||||
"actions": [
|
||||
{
|
||||
"replica_count": {
|
||||
"number_of_replicas": 0
|
||||
}
|
||||
},
|
||||
{
|
||||
"force_merge": {
|
||||
"max_num_segments": 1
|
||||
}
|
||||
}
|
||||
],
|
||||
"transitions": [
|
||||
{
|
||||
"state_name": "cold",
|
||||
"conditions": {
|
||||
"min_age": "30d"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "cold",
|
||||
"actions": [],
|
||||
"transitions": [
|
||||
{
|
||||
"state_name": "delete",
|
||||
"conditions": {
|
||||
"min_age": "365d"
|
||||
}
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"name": "delete",
|
||||
"actions": [
|
||||
{
|
||||
"delete": {}
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"ism_template": [
|
||||
{
|
||||
"index_patterns": ["stories_*", "authors_*", "collections_*"],
|
||||
"priority": 100
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,124 @@
|
||||
{
|
||||
"index_patterns": ["stories_*"],
|
||||
"priority": 1,
|
||||
"template": {
|
||||
"settings": {
|
||||
"number_of_shards": 1,
|
||||
"number_of_replicas": 0,
|
||||
"analysis": {
|
||||
"analyzer": {
|
||||
"story_analyzer": {
|
||||
"type": "standard",
|
||||
"stopwords": "_english_"
|
||||
},
|
||||
"autocomplete_analyzer": {
|
||||
"type": "custom",
|
||||
"tokenizer": "standard",
|
||||
"filter": ["lowercase", "edge_ngram"]
|
||||
}
|
||||
},
|
||||
"filter": {
|
||||
"edge_ngram": {
|
||||
"type": "edge_ngram",
|
||||
"min_gram": 2,
|
||||
"max_gram": 20
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
"mappings": {
|
||||
"properties": {
|
||||
"id": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"title": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {
|
||||
"autocomplete": {
|
||||
"type": "text",
|
||||
"analyzer": "autocomplete_analyzer"
|
||||
},
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"content": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer"
|
||||
},
|
||||
"summary": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer"
|
||||
},
|
||||
"authorNames": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"authorIds": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"tagNames": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"seriesTitle": {
|
||||
"type": "text",
|
||||
"analyzer": "story_analyzer",
|
||||
"fields": {
|
||||
"keyword": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
},
|
||||
"seriesId": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"wordCount": {
|
||||
"type": "integer"
|
||||
},
|
||||
"rating": {
|
||||
"type": "float"
|
||||
},
|
||||
"readingTime": {
|
||||
"type": "integer"
|
||||
},
|
||||
"language": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"status": {
|
||||
"type": "keyword"
|
||||
},
|
||||
"createdAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"updatedAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"publishedAt": {
|
||||
"type": "date",
|
||||
"format": "strict_date_optional_time||epoch_millis"
|
||||
},
|
||||
"isRead": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"isFavorite": {
|
||||
"type": "boolean"
|
||||
},
|
||||
"readingProgress": {
|
||||
"type": "float"
|
||||
},
|
||||
"libraryId": {
|
||||
"type": "keyword"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,12 +1,8 @@
|
||||
package com.storycove.config;
|
||||
|
||||
import com.storycove.service.TypesenseService;
|
||||
import org.springframework.boot.test.context.TestConfiguration;
|
||||
import org.springframework.boot.test.mock.mockito.MockBean;
|
||||
|
||||
@TestConfiguration
|
||||
public class TestConfig {
|
||||
|
||||
@MockBean
|
||||
public TypesenseService typesenseService;
|
||||
// Test configuration
|
||||
}
|
||||
@@ -15,10 +15,12 @@ public abstract class BaseRepositoryTest {
|
||||
private static final PostgreSQLContainer<?> postgres;
|
||||
|
||||
static {
|
||||
postgres = new PostgreSQLContainer<>("postgres:15-alpine")
|
||||
@SuppressWarnings("resource") // Container is managed by shutdown hook
|
||||
PostgreSQLContainer<?> container = new PostgreSQLContainer<>("postgres:15-alpine")
|
||||
.withDatabaseName("storycove_test")
|
||||
.withUsername("test")
|
||||
.withPassword("test");
|
||||
postgres = container;
|
||||
postgres.start();
|
||||
|
||||
// Add shutdown hook to properly close the container
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.entity.Author;
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.repository.AuthorRepository;
|
||||
import com.storycove.service.exception.DuplicateResourceException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
@@ -8,7 +9,6 @@ import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.InjectMocks;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
import org.springframework.data.domain.Page;
|
||||
@@ -22,8 +22,8 @@ import java.util.UUID;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.mockito.Mockito.times;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
@DisplayName("Author Service Unit Tests")
|
||||
@@ -32,7 +32,6 @@ class AuthorServiceTest {
|
||||
@Mock
|
||||
private AuthorRepository authorRepository;
|
||||
|
||||
@InjectMocks
|
||||
private AuthorService authorService;
|
||||
|
||||
private Author testAuthor;
|
||||
@@ -44,6 +43,10 @@ class AuthorServiceTest {
|
||||
testAuthor = new Author("Test Author");
|
||||
testAuthor.setId(testId);
|
||||
testAuthor.setNotes("Test notes");
|
||||
|
||||
// Initialize service with mock SearchServiceAdapter
|
||||
SearchServiceAdapter mockSearchServiceAdapter = mock(SearchServiceAdapter.class);
|
||||
authorService = new AuthorService(authorRepository, mockSearchServiceAdapter);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -172,7 +175,7 @@ class AuthorServiceTest {
|
||||
when(authorRepository.existsByName("Updated Author")).thenReturn(false);
|
||||
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
|
||||
|
||||
Author result = authorService.update(testId, updates);
|
||||
authorService.update(testId, updates);
|
||||
|
||||
assertEquals("Updated Author", testAuthor.getName());
|
||||
assertEquals("Updated notes", testAuthor.getNotes());
|
||||
@@ -307,4 +310,133 @@ class AuthorServiceTest {
|
||||
assertEquals(5L, count);
|
||||
verify(authorRepository).countRecentAuthors(any(java.time.LocalDateTime.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should set author rating with validation")
|
||||
void shouldSetAuthorRating() {
|
||||
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
|
||||
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
|
||||
|
||||
authorService.setRating(testId, 4);
|
||||
|
||||
assertEquals(4, testAuthor.getAuthorRating());
|
||||
verify(authorRepository, times(2)).findById(testId); // Called twice: once initially, once after flush
|
||||
verify(authorRepository).save(testAuthor);
|
||||
verify(authorRepository).flush();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception for invalid rating range")
|
||||
void shouldThrowExceptionForInvalidRating() {
|
||||
assertThrows(IllegalArgumentException.class, () -> authorService.setRating(testId, 0));
|
||||
assertThrows(IllegalArgumentException.class, () -> authorService.setRating(testId, 6));
|
||||
|
||||
verify(authorRepository, never()).findById(any());
|
||||
verify(authorRepository, never()).save(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle null rating")
|
||||
void shouldHandleNullRating() {
|
||||
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
|
||||
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
|
||||
|
||||
authorService.setRating(testId, null);
|
||||
|
||||
assertNull(testAuthor.getAuthorRating());
|
||||
verify(authorRepository, times(2)).findById(testId); // Called twice: once initially, once after flush
|
||||
verify(authorRepository).save(testAuthor);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find all authors with stories")
|
||||
void shouldFindAllAuthorsWithStories() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findAll()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findAllWithStories();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findAll();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should get author rating from database")
|
||||
void shouldGetAuthorRatingFromDb() {
|
||||
when(authorRepository.findAuthorRatingById(testId)).thenReturn(4);
|
||||
|
||||
Integer rating = authorService.getAuthorRatingFromDb(testId);
|
||||
|
||||
assertEquals(4, rating);
|
||||
verify(authorRepository).findAuthorRatingById(testId);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should calculate average story rating")
|
||||
void shouldCalculateAverageStoryRating() {
|
||||
// Setup test author with stories
|
||||
Story story1 = new Story("Story 1");
|
||||
story1.setRating(4);
|
||||
Story story2 = new Story("Story 2");
|
||||
story2.setRating(5);
|
||||
|
||||
testAuthor.getStories().add(story1);
|
||||
testAuthor.getStories().add(story2);
|
||||
|
||||
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
|
||||
|
||||
Double avgRating = authorService.calculateAverageStoryRating(testId);
|
||||
|
||||
assertEquals(4.5, avgRating);
|
||||
verify(authorRepository).findById(testId);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find authors with stories using repository method")
|
||||
void shouldFindAuthorsWithStoriesFromRepository() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findAuthorsWithStories()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findAuthorsWithStories();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findAuthorsWithStories();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find top rated authors")
|
||||
void shouldFindTopRatedAuthors() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findTopRatedAuthors()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findTopRatedAuthors();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findTopRatedAuthors();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find most prolific authors")
|
||||
void shouldFindMostProlificAuthors() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findMostProlificAuthors()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findMostProlificAuthors();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findMostProlificAuthors();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find authors by URL domain")
|
||||
void shouldFindAuthorsByUrlDomain() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findByUrlDomain("example.com")).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findByUrlDomain("example.com");
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findByUrlDomain("example.com");
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,224 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.repository.ReadingPositionRepository;
|
||||
import com.storycove.repository.StoryRepository;
|
||||
import com.storycove.repository.TagRepository;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
@DisplayName("Story Service Unit Tests - Reading Progress")
|
||||
class StoryServiceTest {
|
||||
|
||||
@Mock
|
||||
private StoryRepository storyRepository;
|
||||
|
||||
@Mock
|
||||
private TagRepository tagRepository;
|
||||
|
||||
@Mock
|
||||
private ReadingPositionRepository readingPositionRepository;
|
||||
|
||||
@Mock
|
||||
private SearchServiceAdapter searchServiceAdapter;
|
||||
|
||||
private StoryService storyService;
|
||||
private Story testStory;
|
||||
private UUID testId;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
testId = UUID.randomUUID();
|
||||
testStory = new Story("Test Story");
|
||||
testStory.setId(testId);
|
||||
testStory.setContentHtml("<p>Test content for reading progress tracking</p>");
|
||||
|
||||
// Create StoryService with mocked dependencies
|
||||
storyService = new StoryService(
|
||||
storyRepository,
|
||||
tagRepository,
|
||||
readingPositionRepository,
|
||||
null, // authorService - not needed for reading progress tests
|
||||
null, // tagService - not needed for reading progress tests
|
||||
null, // seriesService - not needed for reading progress tests
|
||||
null, // sanitizationService - not needed for reading progress tests
|
||||
searchServiceAdapter
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update reading progress successfully")
|
||||
void shouldUpdateReadingProgress() {
|
||||
Integer position = 150;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertEquals(position, result.getReadingPosition());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update reading progress with zero position")
|
||||
void shouldUpdateReadingProgressWithZeroPosition() {
|
||||
Integer position = 0;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertEquals(0, result.getReadingPosition());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception for negative reading position")
|
||||
void shouldThrowExceptionForNegativeReadingPosition() {
|
||||
Integer position = -1;
|
||||
|
||||
assertThrows(IllegalArgumentException.class,
|
||||
() -> storyService.updateReadingProgress(testId, position));
|
||||
|
||||
verify(storyRepository, never()).findById(any());
|
||||
verify(storyRepository, never()).save(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle null reading position")
|
||||
void shouldHandleNullReadingPosition() {
|
||||
Integer position = null;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertNull(result.getReadingPosition());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception when story not found for reading progress update")
|
||||
void shouldThrowExceptionWhenStoryNotFoundForReadingProgress() {
|
||||
Integer position = 100;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.empty());
|
||||
|
||||
assertThrows(ResourceNotFoundException.class,
|
||||
() -> storyService.updateReadingProgress(testId, position));
|
||||
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository, never()).save(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should mark story as read")
|
||||
void shouldMarkStoryAsRead() {
|
||||
Boolean isRead = true;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertTrue(result.getIsRead());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
// When marked as read, position should be set to content length
|
||||
assertTrue(result.getReadingPosition() > 0);
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should mark story as unread")
|
||||
void shouldMarkStoryAsUnread() {
|
||||
Boolean isRead = false;
|
||||
// First mark story as read to test transition
|
||||
testStory.markAsRead();
|
||||
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertFalse(result.getIsRead());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle null reading status")
|
||||
void shouldHandleNullReadingStatus() {
|
||||
Boolean isRead = null;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertFalse(result.getIsRead());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception when story not found for reading status update")
|
||||
void shouldThrowExceptionWhenStoryNotFoundForReadingStatus() {
|
||||
Boolean isRead = true;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.empty());
|
||||
|
||||
assertThrows(ResourceNotFoundException.class,
|
||||
() -> storyService.updateReadingStatus(testId, isRead));
|
||||
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository, never()).save(any());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update lastReadAt timestamp when updating progress")
|
||||
void shouldUpdateLastReadAtWhenUpdatingProgress() {
|
||||
Integer position = 50;
|
||||
LocalDateTime beforeUpdate = LocalDateTime.now().minusMinutes(1);
|
||||
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertNotNull(result.getLastReadAt());
|
||||
assertTrue(result.getLastReadAt().isAfter(beforeUpdate));
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update lastReadAt timestamp when updating status")
|
||||
void shouldUpdateLastReadAtWhenUpdatingStatus() {
|
||||
Boolean isRead = true;
|
||||
LocalDateTime beforeUpdate = LocalDateTime.now().minusMinutes(1);
|
||||
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertNotNull(result.getLastReadAt());
|
||||
assertTrue(result.getLastReadAt().isAfter(beforeUpdate));
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
}
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user