111 Commits

Author SHA1 Message Date
Stefan Hardegger
4e02cd8eaa fix image 2025-09-30 17:03:49 +02:00
Stefan Hardegger
48b0087b01 fix embedded images on deviantart 2025-09-30 16:18:05 +02:00
Stefan Hardegger
c291559366 Fix Image Processing 2025-09-28 20:06:52 +02:00
Stefan Hardegger
622cf9ac76 fix image processing 2025-09-27 09:29:40 +02:00
Stefan Hardegger
df5e124115 fix image processing 2025-09-27 09:15:01 +02:00
Stefan Hardegger
2b4cb1456f fix orphaned file discovery 2025-09-27 08:46:17 +02:00
Stefan Hardegger
c2e5445196 fix 2025-09-27 08:32:11 +02:00
Stefan Hardegger
360b69effc fix cleanup 2025-09-27 08:15:09 +02:00
Stefan Hardegger
3bc8bb9e0c backup / restore improvement 2025-09-26 22:34:21 +02:00
Stefan Hardegger
7ca4823573 backup / restore improvement 2025-09-26 22:26:26 +02:00
Stefan Hardegger
5325169495 maintenance improvements 2025-09-26 21:41:33 +02:00
Stefan Hardegger
74cdd5dc57 solr random fix 2025-09-26 15:05:27 +02:00
Stefan Hardegger
574f20bfd7 dependency 2025-09-26 08:28:32 +02:00
Stefan Hardegger
c8249c94d6 new editor 2025-09-26 08:22:54 +02:00
Stefan Hardegger
51a1a69b45 solr migration button 2025-09-23 14:57:16 +02:00
Stefan Hardegger
6ee2d67027 solr migration button 2025-09-23 14:42:38 +02:00
Stefan Hardegger
9472210d8b solr migration button 2025-09-23 14:18:56 +02:00
Stefan Hardegger
62f017c4ca solr fix 2025-09-23 13:58:49 +02:00
Stefan Hardegger
857871273d fix pre formatting 2025-09-22 15:43:25 +02:00
Stefan Hardegger
a9521a9da1 fix saving stories. 2025-09-22 13:52:48 +02:00
Stefan Hardegger
1f41974208 ff 2025-09-22 12:43:05 +02:00
Stefan Hardegger
b68fde71c0 ff 2025-09-22 12:28:31 +02:00
Stefan Hardegger
f61be90d5c ff 2025-09-22 10:13:49 +02:00
Stefan Hardegger
87f37567fb replacing opensearch with solr 2025-09-22 09:44:50 +02:00
Stefan Hardegger
9e684a956b ff 2025-09-21 19:25:11 +02:00
Stefan Hardegger
379ef0d209 ff 2025-09-21 19:21:26 +02:00
Stefan Hardegger
b1ff684df6 asd 2025-09-21 19:18:03 +02:00
Stefan Hardegger
0032590030 fix? 2025-09-21 19:13:39 +02:00
Stefan Hardegger
db38d68399 fix? 2025-09-21 19:10:06 +02:00
Stefan Hardegger
48a0865199 fa 2025-09-21 18:04:36 +02:00
Stefan Hardegger
7daed22d2d another try 2025-09-21 17:53:52 +02:00
Stefan Hardegger
6c02b8831f asd 2025-09-21 17:47:03 +02:00
Stefan Hardegger
042f80dd2a another try 2025-09-21 17:38:57 +02:00
Stefan Hardegger
a472c11ac8 fix 2025-09-21 17:30:15 +02:00
Stefan Hardegger
a037dd92af fix 2025-09-21 17:21:49 +02:00
Stefan Hardegger
634de0b6a5 fix 2025-09-21 16:43:47 +02:00
Stefan Hardegger
b4635b56a3 fix 2025-09-21 16:39:41 +02:00
Stefan Hardegger
bfb68e81a8 fix 2025-09-21 16:34:28 +02:00
Stefan Hardegger
1247a3420e fix 2025-09-21 16:23:44 +02:00
Stefan Hardegger
6caee8a007 config 2025-09-21 16:21:53 +02:00
Stefan Hardegger
cf93d3b3a6 opensearch config 2025-09-21 16:14:20 +02:00
Stefan Hardegger
53cb296adc opensearch config 2025-09-21 16:10:07 +02:00
Stefan Hardegger
f71b70d03b opensearch config 2025-09-21 16:07:48 +02:00
Stefan Hardegger
0bdc3f4731 adjustment 2025-09-21 15:59:15 +02:00
Stefan Hardegger
345065c03b missing dependencies 2025-09-21 15:53:03 +02:00
Stefan Hardegger
c50dc618bf build adjustment 2025-09-21 15:47:14 +02:00
Stefan Hardegger
96e6ced8da adjustment 2025-09-21 15:37:48 +02:00
Stefan Hardegger
4738ae3a75 opefully build fix 2025-09-21 15:30:27 +02:00
Stefan Hardegger
591ca5a149 disable opensearch security 2025-09-21 15:08:20 +02:00
Stefan Hardegger
41ff3a9961 correction 2025-09-21 14:55:43 +02:00
Stefan Hardegger
0101c0ca2c bugfixes, and logging cleanup 2025-09-21 14:55:43 +02:00
58bb7f8229 revert a5628019f8
revert revert b1dbd85346

revert richtext replacement
2025-09-21 14:54:39 +02:00
a5628019f8 revert b1dbd85346
revert richtext replacement
2025-09-21 10:13:48 +02:00
Stefan Hardegger
b1dbd85346 richtext replacement 2025-09-21 10:10:04 +02:00
Stefan Hardegger
aae8f8926b removing typesense 2025-09-20 14:39:51 +02:00
Stefan Hardegger
f1773873d4 Full parallel implementation of typesense and opensearch 2025-09-20 09:40:09 +02:00
Stefan Hardegger
54df3c471e phase 1 2025-09-18 07:46:10 +02:00
Stefan Hardegger
64f97f5648 Settings reorganization 2025-09-17 15:06:35 +02:00
Stefan Hardegger
c0b3ae3b72 embedded image finishing 2025-09-17 10:28:35 +02:00
Stefan Hardegger
e5596b5a17 fix port mapping 2025-09-16 15:06:40 +02:00
Stefan Hardegger
c7b516be31 phase 1 and 2 of embedded images 2025-09-16 14:58:50 +02:00
Stefan Hardegger
c92308c24a layout enhancement. Reading position reset 2025-09-16 09:34:27 +02:00
Stefan Hardegger
f92dcc5314 Advanced Filters - Build optimizations 2025-09-04 15:49:24 +02:00
Stefan Hardegger
702fcb33c1 Improvements to Editor 2025-09-02 09:28:06 +02:00
Stefan Hardegger
11b2a8b071 revert postgres version 2025-09-01 16:19:14 +02:00
Stefan Hardegger
d1289bd616 Security Updates and random improvement. 2025-09-01 16:02:19 +02:00
Stefan Hardegger
15708b5ab2 Table of Content functionality 2025-08-22 09:03:21 +02:00
Stefan Hardegger
a660056003 Various improvements 2025-08-21 13:55:38 +02:00
Stefan Hardegger
35a5825e76 Fix cover images display 2025-08-21 12:38:48 +02:00
Stefan Hardegger
87a4999ffe Fixing Database switching functionality. 2025-08-21 08:54:28 +02:00
Stefan Hardegger
4ee5fa2330 fix 2025-08-20 15:11:41 +02:00
Stefan Hardegger
6128d61349 Library Switching functionality 2025-08-20 15:10:40 +02:00
Stefan Hardegger
5e347f2e2e Incrase permitted upload size 2025-08-20 08:11:36 +02:00
Stefan Hardegger
8eb126a304 performance 2025-08-18 19:27:57 +02:00
Stefan Hardegger
3dc02420fe performance optimization in library view 2025-08-18 19:03:42 +02:00
Stefan Hardegger
241a15a174 Series auto complete 2025-08-18 14:19:14 +02:00
Stefan Hardegger
6b97c0a70f fix loop 2025-08-18 10:41:32 +02:00
Stefan Hardegger
e952241e3c fix 2025-08-18 10:32:02 +02:00
Stefan Hardegger
65f1c6edc7 fix 2025-08-18 10:16:20 +02:00
Stefan Hardegger
40fe3fdb80 Improvements, Fixes. 2025-08-18 10:04:38 +02:00
Stefan Hardegger
95ce5fb532 Bugfixes and Improvements Tag Management 2025-08-18 08:54:18 +02:00
Stefan Hardegger
1a99d9830d Tag Enhancement + bugfixes 2025-08-17 17:16:40 +02:00
Stefan Hardegger
6b83783381 Small improvements 2025-08-15 07:58:36 +02:00
Stefan Hardegger
460ec358ca New Switchable Library Layout 2025-08-14 19:46:50 +02:00
Stefan Hardegger
1d14d3d7aa Fix for Random Story Function 2025-08-14 13:14:46 +02:00
Stefan Hardegger
4357351ec8 randomized 2025-08-13 14:49:57 +02:00
Stefan Hardegger
4ab03953ae random story selector 2025-08-13 14:48:40 +02:00
Stefan Hardegger
142d8328c2 revert security config 2025-08-12 15:14:14 +02:00
Stefan Hardegger
c46108c317 various improvements and performance enhancements 2025-08-12 14:55:51 +02:00
Stefan Hardegger
75c207970d Changing Authors 2025-08-12 12:57:34 +02:00
Stefan Hardegger
3b22d155db restructuring 2025-08-11 14:40:56 +02:00
Stefan Hardegger
51e3d20c24 various fixes 2025-08-11 08:15:20 +02:00
Stefan Hardegger
5d195b63ef Fix dead links 2025-08-08 15:05:10 +02:00
Stefan Hardegger
5b3a9d183e Image Handling in Epub Import/export 2025-08-08 14:50:49 +02:00
Stefan Hardegger
379c8c170f Various improvements & Epub support 2025-08-08 14:09:14 +02:00
Stefan Hardegger
090b858a54 Bugfix 2025-07-31 13:43:23 +02:00
Stefan Hardegger
b0c14d4b37 DB Backup Bugfix 2025-07-31 08:36:33 +02:00
Stefan Hardegger
7227061d25 DB Backup Bugfix 2025-07-31 08:25:47 +02:00
Stefan Hardegger
415eab07de DB Backup Bugfix 2025-07-31 07:54:43 +02:00
Stefan Hardegger
e89331e059 DB Backup Bugfix 2025-07-31 07:46:14 +02:00
Stefan Hardegger
370bef2f07 DB Backup Bug 2025-07-31 07:38:05 +02:00
Stefan Hardegger
9e788c2018 bugfix DB Backup 2025-07-31 07:30:23 +02:00
Stefan Hardegger
590e2590d6 DB Backup and Restore 2025-07-31 07:12:12 +02:00
Stefan Hardegger
57859d7a84 Reading Progress 2025-07-29 14:53:44 +02:00
Stefan Hardegger
5746001c4a Bugfixes 2025-07-29 11:02:46 +02:00
Stefan Hardegger
c08082c0d6 Correct tag facets handling 2025-07-28 14:37:58 +02:00
Stefan Hardegger
860bf02d56 Dockerfile improvement 2025-07-28 14:28:01 +02:00
Stefan Hardegger
a501b27169 Saving reading position 2025-07-28 14:09:19 +02:00
Stefan Hardegger
fcad028959 scraping and improvements 2025-07-28 13:52:09 +02:00
Stefan Hardegger
f95d7aa8bb Various Fixes and QoL enhancements. 2025-07-26 12:05:54 +02:00
5e8164c6a4 Merge pull request 'feature/collections' (#1) from feature/collections into main
Reviewed-on: #1
2025-07-25 14:22:57 +02:00
205 changed files with 35511 additions and 3409 deletions

View File

@@ -14,11 +14,18 @@ JWT_SECRET=secure_jwt_secret_here
# Application Authentication
APP_PASSWORD=application_password_here
# Search Engine Configuration
SEARCH_ENGINE=typesense
# Typesense Search Configuration
TYPESENSE_API_KEY=secure_api_key_here
TYPESENSE_ENABLED=true
TYPESENSE_REINDEX_INTERVAL=3600000
# OpenSearch Configuration
OPENSEARCH_USERNAME=admin
OPENSEARCH_PASSWORD=secure_opensearch_password_here
# Image Storage
IMAGE_STORAGE_PATH=/app/images

View File

@@ -18,10 +18,9 @@ JWT_SECRET=REPLACE_WITH_SECURE_JWT_SECRET_MINIMUM_32_CHARS
# Use a strong password in production
APP_PASSWORD=REPLACE_WITH_SECURE_APP_PASSWORD
# Typesense Search Configuration
TYPESENSE_API_KEY=REPLACE_WITH_SECURE_TYPESENSE_API_KEY
TYPESENSE_ENABLED=true
TYPESENSE_REINDEX_INTERVAL=3600000
# OpenSearch Configuration
#OPENSEARCH_PASSWORD=REPLACE_WITH_SECURE_OPENSEARCH_PASSWORD
SEARCH_ENGINE=opensearch
# Image Storage
IMAGE_STORAGE_PATH=/app/images

1
.gitignore vendored
View File

@@ -47,3 +47,4 @@ Thumbs.db
# Application data
images/
data/
backend/cookies.txt

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

220
ASYNC_IMAGE_PROCESSING.md Normal file
View File

@@ -0,0 +1,220 @@
# Async Image Processing Implementation
## Overview
The image processing system has been updated to handle external images asynchronously, preventing timeouts when processing stories with many images. This provides real-time progress updates to users showing which images are being processed.
## Backend Components
### 1. `ImageProcessingProgressService`
- Tracks progress for individual story image processing sessions
- Thread-safe with `ConcurrentHashMap` for multi-user support
- Provides progress information: total images, processed count, current image, status, errors
### 2. `AsyncImageProcessingService`
- Handles asynchronous image processing using Spring's `@Async` annotation
- Counts external images before processing
- Provides progress callbacks during processing
- Updates story content when processing completes
- Automatic cleanup of progress data after completion
### 3. Enhanced `ImageService`
- Added `processContentImagesWithProgress()` method with callback support
- Progress callbacks provide real-time updates during image download/processing
- Maintains compatibility with existing synchronous processing
### 4. Updated `StoryController`
- `POST /api/stories` and `PUT /api/stories/{id}` now trigger async image processing
- `GET /api/stories/{id}/image-processing-progress` endpoint for progress polling
- Processing starts immediately after story save and returns control to user
## Frontend Components
### 1. `ImageProcessingProgressTracker` (Utility Class)
```typescript
const tracker = new ImageProcessingProgressTracker(storyId);
tracker.onProgress((progress) => {
console.log(`Processing ${progress.processedImages}/${progress.totalImages}`);
});
tracker.onComplete(() => console.log('Done!'));
tracker.start();
```
### 2. `ImageProcessingProgressComponent` (React Component)
```tsx
<ImageProcessingProgressComponent
storyId={storyId}
autoStart={true}
onComplete={() => refreshStory()}
/>
```
## User Experience
### Before (Synchronous)
1. User saves story with external images
2. Request hangs for 30+ seconds processing images
3. Browser may timeout
4. No feedback about progress
5. User doesn't know if it's working
### After (Asynchronous)
1. User saves story with external images
2. Save completes immediately
3. Progress indicator appears: "Processing 5 images. Currently image 2 of 5..."
4. User can continue using the application
5. Progress updates every second
6. Story automatically refreshes when processing completes
## API Endpoints
### Progress Endpoint
```
GET /api/stories/{id}/image-processing-progress
```
**Response when processing:**
```json
{
"isProcessing": true,
"totalImages": 5,
"processedImages": 2,
"currentImageUrl": "https://example.com/image.jpg",
"status": "Processing image 3 of 5",
"progressPercentage": 40.0,
"completed": false,
"error": ""
}
```
**Response when completed:**
```json
{
"isProcessing": false,
"totalImages": 5,
"processedImages": 5,
"currentImageUrl": "",
"status": "Completed: 5 images processed",
"progressPercentage": 100.0,
"completed": true,
"error": ""
}
```
**Response when no processing:**
```json
{
"isProcessing": false,
"message": "No active image processing"
}
```
## Integration Examples
### React Hook Usage
```tsx
import { useImageProcessingProgress } from '../utils/imageProcessingProgress';
function StoryEditor({ storyId }) {
const { progress, isTracking, startTracking } = useImageProcessingProgress(storyId);
const handleSave = async () => {
await saveStory();
startTracking(); // Start monitoring progress
};
return (
<div>
{isTracking && progress && (
<div className="progress-indicator">
Processing {progress.processedImages}/{progress.totalImages} images...
</div>
)}
<button onClick={handleSave}>Save Story</button>
</div>
);
}
```
### Manual Progress Tracking
```typescript
// After saving a story with external images
const tracker = new ImageProcessingProgressTracker(storyId);
tracker.onProgress((progress) => {
updateProgressBar(progress.progressPercentage);
showStatus(progress.status);
if (progress.currentImageUrl) {
showCurrentImage(progress.currentImageUrl);
}
});
tracker.onComplete((finalProgress) => {
hideProgressBar();
showNotification('Image processing completed!');
refreshStoryContent(); // Reload story with processed images
});
tracker.onError((error) => {
hideProgressBar();
showError(`Image processing failed: ${error}`);
});
tracker.start();
```
## Configuration
### Polling Interval
Default: 1 second (1000ms)
```typescript
const tracker = new ImageProcessingProgressTracker(storyId, 500); // Poll every 500ms
```
### Timeout
Default: 5 minutes (300000ms)
```typescript
const tracker = new ImageProcessingProgressTracker(storyId, 1000, 600000); // 10 minute timeout
```
### Spring Async Configuration
The backend uses Spring's default async executor. For production, consider configuring a custom thread pool in your application properties:
```yaml
spring:
task:
execution:
pool:
core-size: 4
max-size: 8
queue-capacity: 100
```
## Error Handling
### Backend Errors
- Network timeouts downloading images
- Invalid image formats
- Disk space issues
- All errors are logged and returned in progress status
### Frontend Errors
- Network failures during progress polling
- Timeout if processing takes too long
- Graceful degradation - user can continue working
## Benefits
1. **No More Timeouts**: Large image processing operations won't timeout HTTP requests
2. **Better UX**: Users get real-time feedback about processing progress
3. **Improved Performance**: Users can continue using the app while images process
4. **Error Visibility**: Clear error messages when image processing fails
5. **Scalability**: Multiple users can process images simultaneously without blocking
## Future Enhancements
1. **WebSocket Support**: Replace polling with WebSocket for real-time push updates
2. **Batch Processing**: Queue multiple stories for batch image processing
3. **Retry Logic**: Automatic retry for failed image downloads
4. **Progress Persistence**: Save progress to database for recovery after server restart
5. **Image Optimization**: Automatic resize/compress images during processing

View File

@@ -0,0 +1,466 @@
# EPUB Import/Export Specification
## 🎉 Phase 1 & 2 Implementation Complete
**Status**: Both Phase 1 and Phase 2 fully implemented and operational as of August 2025
**Phase 1 Achievements**:
- ✅ Complete EPUB import functionality with validation and error handling
- ✅ Single story EPUB export with XML validation fixes
- ✅ Reading position preservation using EPUB CFI standards
- ✅ Full frontend UI integration with navigation and authentication
- ✅ Moved export button to Story Detail View for better UX
- ✅ Added EPUB import to main Add Story menu dropdown
**Phase 2 Enhancements**:
-**Enhanced Cover Processing**: Automatic extraction and optimization of cover images during EPUB import
-**Advanced Metadata Extraction**: Comprehensive extraction of subjects/tags, keywords, publisher, language, publication dates, and identifiers
-**Collection EPUB Export**: Full collection export with table of contents, proper chapter structure, and metadata aggregation
-**Image Validation**: Robust cover image processing with format detection, resizing, and storage management
-**API Endpoints**: Complete REST API for both individual story and collection EPUB operations
## Overview
This specification defines the requirements and implementation details for importing and exporting EPUB files in StoryCove. The feature enables users to import stories from EPUB files and export their stories/collections as EPUB files with preserved reading positions.
## Scope
### In Scope
- **EPUB Import**: Parse DRM-free EPUB files and import as stories
- **EPUB Export**: Export individual stories and collections as EPUB files
- **Reading Position Preservation**: Store and restore reading positions using EPUB standards
- **Metadata Handling**: Extract and preserve story metadata (title, author, cover, etc.)
- **Content Processing**: HTML content sanitization and formatting
### Out of Scope (Phase 1)
- DRM-protected EPUB files (future consideration)
- Real-time reading position sync between devices
- Advanced EPUB features (audio, video, interactive content)
- EPUB validation beyond basic structure
## Technical Architecture
### Backend Implementation
- **Language**: Java (Spring Boot)
- **Primary Library**: EPUBLib (nl.siegmann.epublib:epublib-core:3.1)
- **Processing**: Server-side generation and parsing
- **File Handling**: Multipart file upload for import, streaming download for export
### Dependencies
```xml
<dependency>
<groupId>com.positiondev.epublib</groupId>
<artifactId>epublib-core</artifactId>
<version>3.1</version>
</dependency>
```
### Phase 1 Implementation Notes
- **EPUBImportService**: Implemented with full validation, metadata extraction, and reading position handling
- **EPUBExportService**: Implemented with XML validation fixes for EPUB reader compatibility
- **ReadingPosition Entity**: Created with EPUB CFI support and database indexing
- **Authentication**: All endpoints secured with JWT authentication and proper frontend integration
- **UI Integration**: Export moved to Story Detail View, Import added to main navigation menu
- **XML Compliance**: Fixed XHTML validation issues by properly formatting self-closing tags (`<br>``<br />`)
## EPUB Import Specification
### Supported Formats
- **EPUB 2.0** and **EPUB 3.x** formats
- **DRM-Free** files only
- **Maximum file size**: 50MB
- **Supported content**: Text-based stories with HTML content
### Import Process Flow
1. **File Upload**: User uploads EPUB file via web interface
2. **Validation**: Check file format, size, and basic EPUB structure
3. **Parsing**: Extract metadata, content, and resources using EPUBLib
4. **Content Processing**: Sanitize HTML content using existing Jsoup pipeline
5. **Story Creation**: Create Story entity with extracted data
6. **Preview**: Show extracted story details for user confirmation
7. **Finalization**: Save story to database with imported metadata
### Metadata Mapping
```java
// EPUB Metadata → StoryCove Story Entity
epub.getMetadata().getFirstTitle() story.title
epub.getMetadata().getAuthors().get(0) story.authorName
epub.getMetadata().getDescriptions().get(0) story.summary
epub.getCoverImage() story.coverPath
epub.getMetadata().getSubjects() story.tags
```
### Content Extraction
- **Multi-chapter EPUBs**: Combine all content files into single HTML
- **Chapter separation**: Insert `<hr>` or `<h2>` tags between chapters
- **HTML sanitization**: Apply existing sanitization rules
- **Image handling**: Extract and store cover images, inline images optional
### API Endpoints
#### POST /api/stories/import-epub
```java
@PostMapping("/import-epub")
public ResponseEntity<?> importEPUB(@RequestParam("file") MultipartFile file) {
// Implementation in EPUBImportService
}
```
**Request**: Multipart file upload
**Response**:
```json
{
"message": "EPUB imported successfully",
"storyId": "uuid",
"extractedData": {
"title": "Story Title",
"author": "Author Name",
"summary": "Story description",
"chapterCount": 12,
"wordCount": 45000,
"hasCovers": true
}
}
```
## EPUB Export Specification
### Export Types
1. **Single Story Export**: Convert one story to EPUB
2. **Collection Export**: Multiple stories as single EPUB with chapters
### EPUB Structure Generation
```
story.epub
├── mimetype
├── META-INF/
│ └── container.xml
└── OEBPS/
├── content.opf # Package metadata
├── toc.ncx # Navigation
├── stylesheet.css # Styling
├── cover.html # Cover page
├── chapter001.xhtml # Story content
├── images/
│ └── cover.jpg # Cover image
└── fonts/ (optional)
```
### Reading Position Implementation
#### EPUB 3 CFI (Canonical Fragment Identifier)
```xml
<!-- In content.opf metadata -->
<meta property="epub-cfi" content="/6/4[chap01]!/4[body01]/10[para05]/3:142"/>
<meta property="reading-percentage" content="0.65"/>
<meta property="last-read-timestamp" content="2023-12-07T10:30:00Z"/>
```
#### StoryCove Custom Metadata (Fallback)
```xml
<meta name="storycove:reading-chapter" content="3"/>
<meta name="storycove:reading-paragraph" content="15"/>
<meta name="storycove:reading-offset" content="142"/>
<meta name="storycove:reading-percentage" content="0.65"/>
```
#### CFI Generation Logic
```java
public String generateCFI(ReadingPosition position) {
return String.format("/6/%d[chap%02d]!/4[body01]/%d[para%02d]/3:%d",
(position.getChapterIndex() * 2) + 4,
position.getChapterIndex(),
(position.getParagraphIndex() * 2) + 4,
position.getParagraphIndex(),
position.getCharacterOffset());
}
```
### API Endpoints
#### GET /api/stories/{id}/export-epub
```java
@GetMapping("/{id}/export-epub")
public ResponseEntity<StreamingResponseBody> exportStory(@PathVariable UUID id) {
// Implementation in EPUBExportService
}
```
**Response**: EPUB file download with headers:
```
Content-Type: application/epub+zip
Content-Disposition: attachment; filename="story-title.epub"
```
#### GET /api/collections/{id}/export-epub
```java
@GetMapping("/{id}/export-epub")
public ResponseEntity<StreamingResponseBody> exportCollection(@PathVariable UUID id) {
// Implementation in EPUBExportService
}
```
**Response**: Multi-story EPUB with table of contents
## Data Models
### ReadingPosition Entity
```java
@Entity
@Table(name = "reading_positions")
public class ReadingPosition {
@Id
private UUID id;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "story_id")
private Story story;
@Column(name = "chapter_index")
private Integer chapterIndex = 0;
@Column(name = "paragraph_index")
private Integer paragraphIndex = 0;
@Column(name = "character_offset")
private Integer characterOffset = 0;
@Column(name = "progress_percentage")
private Double progressPercentage = 0.0;
@Column(name = "epub_cfi")
private String canonicalFragmentIdentifier;
@Column(name = "last_read_at")
private LocalDateTime lastReadAt;
@Column(name = "device_identifier")
private String deviceIdentifier;
// Constructors, getters, setters
}
```
### EPUB Import Request DTO
```java
public class EPUBImportRequest {
private String filename;
private Long fileSize;
private Boolean preserveChapterStructure = true;
private Boolean extractCover = true;
private String targetCollectionId; // Optional: add to specific collection
}
```
### EPUB Export Options DTO
```java
public class EPUBExportOptions {
private Boolean includeReadingPosition = true;
private Boolean includeCoverImage = true;
private Boolean includeMetadata = true;
private String cssStylesheet; // Optional custom CSS
private EPUBVersion version = EPUBVersion.EPUB3;
}
```
## Service Layer Architecture
### EPUBImportService
```java
@Service
public class EPUBImportService {
// Core import method
public Story importEPUBFile(MultipartFile file, EPUBImportRequest request);
// Helper methods
private void validateEPUBFile(MultipartFile file);
private Book parseEPUBStructure(InputStream inputStream);
private Story extractStoryData(Book epub);
private String combineChapterContent(Book epub);
private void extractAndSaveCover(Book epub, Story story);
private List<String> extractTags(Book epub);
private ReadingPosition extractReadingPosition(Book epub);
}
```
### EPUBExportService
```java
@Service
public class EPUBExportService {
// Core export methods
public byte[] exportSingleStory(UUID storyId, EPUBExportOptions options);
public byte[] exportCollection(UUID collectionId, EPUBExportOptions options);
// Helper methods
private Book createEPUBStructure(Story story, ReadingPosition position);
private Book createCollectionEPUB(Collection collection, List<ReadingPosition> positions);
private void addReadingPositionMetadata(Book book, ReadingPosition position);
private String generateCFI(ReadingPosition position);
private Resource createChapterResource(Story story);
private Resource createStylesheetResource();
private void addCoverImage(Book book, Story story);
}
```
## Frontend Integration
### Import UI Flow
1. **Upload Interface**: File input with EPUB validation
2. **Progress Indicator**: Show parsing progress
3. **Preview Screen**: Display extracted metadata for confirmation
4. **Confirmation**: Allow editing of title, author, summary before saving
5. **Success**: Redirect to created story
### Export UI Flow
1. **Export Button**: Available on story detail and collection pages
2. **Options Modal**: Allow selection of export options
3. **Progress Indicator**: Show EPUB generation progress
4. **Download**: Automatic file download on completion
### Frontend API Calls
```typescript
// Import EPUB
const importEPUB = async (file: File) => {
const formData = new FormData();
formData.append('file', file);
const response = await fetch('/api/stories/import-epub', {
method: 'POST',
body: formData,
});
return await response.json();
};
// Export Story
const exportStoryEPUB = async (storyId: string) => {
const response = await fetch(`/api/stories/${storyId}/export-epub`, {
method: 'GET',
});
const blob = await response.blob();
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `${storyTitle}.epub`;
a.click();
};
```
## Error Handling
### Import Errors
- **Invalid EPUB format**: "Invalid EPUB file format"
- **File too large**: "File size exceeds 50MB limit"
- **DRM protected**: "DRM-protected EPUBs not supported"
- **Corrupted file**: "EPUB file appears to be corrupted"
- **No content**: "EPUB contains no readable content"
### Export Errors
- **Story not found**: "Story not found or access denied"
- **Missing content**: "Story has no content to export"
- **Generation failure**: "Failed to generate EPUB file"
## Security Considerations
### File Upload Security
- **File type validation**: Verify EPUB MIME type and structure
- **Size limits**: Enforce maximum file size limits
- **Content sanitization**: Apply existing HTML sanitization
- **Virus scanning**: Consider integration with antivirus scanning
### Content Security
- **HTML sanitization**: Apply existing Jsoup rules to imported content
- **Image validation**: Validate extracted cover images
- **Metadata escaping**: Escape special characters in metadata
## Testing Strategy
### Unit Tests
- EPUB parsing and validation logic
- CFI generation and parsing
- Metadata extraction accuracy
- Content sanitization
### Integration Tests
- End-to-end import/export workflow
- Reading position preservation
- Multi-story collection export
- Error handling scenarios
### Test Data
- Sample EPUB files for various scenarios
- EPUBs with and without reading positions
- Multi-chapter EPUBs
- EPUBs with covers and metadata
## Performance Considerations
### Import Performance
- **Streaming processing**: Process large EPUBs without loading entirely into memory
- **Async processing**: Consider async import for large files
- **Progress tracking**: Provide progress feedback for large imports
### Export Performance
- **Caching**: Cache generated EPUBs for repeated exports
- **Streaming**: Stream EPUB generation for large collections
- **Resource optimization**: Optimize image and content sizes
## Future Enhancements (Out of Scope)
### Phase 2 Considerations
- **DRM support**: Research legal and technical feasibility
- **Reading position sync**: Real-time sync across devices
- **Advanced EPUB features**: Enhanced typography, annotations
- **Bulk operations**: Import/export multiple EPUBs
- **EPUB validation**: Full EPUB compliance checking
### Integration Possibilities
- **Cloud storage**: Export directly to Google Drive, Dropbox
- **E-reader sync**: Direct sync with Kindle, Kobo devices
- **Reading analytics**: Track reading patterns and statistics
## Implementation Phases
### Phase 1: Core Functionality ✅ **COMPLETED**
- [x] Basic EPUB import (DRM-free)
- [x] Single story export
- [x] Reading position storage and retrieval
- [x] Frontend UI integration
### Phase 2: Enhanced Features ✅ **COMPLETED**
- [x] Collection export with table of contents
- [x] Advanced metadata handling (subjects, keywords, publisher, language, etc.)
- [x] Enhanced cover image processing for import/export
- [x] Comprehensive error handling
### Phase 3: Advanced Features
- [ ] DRM exploration (legal research required)
- [ ] Reading position sync
- [ ] Advanced EPUB features
- [ ] Analytics and reporting
## Acceptance Criteria
### Import Success Criteria ✅ **COMPLETED**
- [x] Successfully parse EPUB 2.0 and 3.x files
- [x] Extract title, author, summary, and content accurately
- [x] Preserve formatting and basic HTML structure
- [x] Handle cover images correctly
- [x] Import reading positions when present
- [x] Provide clear error messages for invalid files
### Export Success Criteria ✅ **FULLY COMPLETED**
- [x] Generate valid EPUB files compatible with major readers
- [x] Include accurate metadata and content
- [x] Embed reading positions using CFI standard
- [x] Support single story export
- [x] Support collection export with proper structure
- [x] Generate proper table of contents for collections
- [x] Include cover images when available
---
*This specification serves as the implementation guide for the EPUB import/export feature. All implementation decisions should reference this document for consistency and completeness.*

118
PORTABLE_TEXT_SETUP.md Normal file
View File

@@ -0,0 +1,118 @@
# Portable Text Editor Setup Instructions
## Current Status
⚠️ **Temporarily Reverted to Original Editor**
Due to npm cache permission issues preventing Docker builds, I've temporarily reverted the imports back to `RichTextEditor`. The Portable Text implementation is complete and ready to activate once the npm issue is resolved.
## Files Ready for Portable Text
-`PortableTextEditor.tsx` - Complete implementation
-`schema.ts` - Portable Text schema
-`conversion.ts` - HTML ↔ Portable Text conversion
-`package.json.with-portabletext` - Updated dependencies
## Docker Build Issue Resolution
The error `npm ci` requires `package-lock.json` but npm cache permissions prevent generating it.
### Solution Steps:
1. **Fix npm permissions:**
```bash
sudo chown -R $(whoami) ~/.npm
```
2. **Switch to Portable Text setup:**
```bash
cd frontend
mv package.json package.json.original
mv package.json.with-portabletext package.json
npm install # This will generate package-lock.json
```
3. **Update component imports** (change RichTextEditor → PortableTextEditor):
```typescript
// In src/app/add-story/page.tsx and src/app/stories/[id]/edit/page.tsx
import PortableTextEditor from '../../components/stories/PortableTextEditor';
// And update the JSX to use <PortableTextEditor ... />
```
4. **Build and test:**
```bash
npm run build
docker-compose build
```
## Implementation Complete
**Portable Text Schema** - Defines formatting options matching the original editor
**HTML ↔ Portable Text Conversion** - Seamless conversion between formats
**Sanitization Integration** - Uses existing sanitization strategy
**Component Replacement** - PortableTextEditor replaces RichTextEditor
**Image Processing** - Maintains existing image processing functionality
**Toolbar** - All formatting buttons from original editor
**Keyboard Shortcuts** - Ctrl+B, Ctrl+I, Ctrl+Shift+1-6
## Features Maintained
### 1. **Formatting Options**
- Bold, Italic, Underline, Strike, Code
- Headings H1-H6
- Paragraphs and Blockquotes
- All original toolbar buttons
### 2. **Visual & HTML Modes**
- Visual mode: Structured Portable Text editing
- HTML mode: Direct HTML editing (fallback)
- Live preview in HTML mode
### 3. **Image Processing**
- Existing image processing pipeline maintained
- Background image download and conversion
- Processing status indicators
- Warning system
### 4. **Paste Handling**
- Rich text paste from websites
- Image processing during paste
- HTML sanitization
- Structured content conversion
### 5. **Maximization & Resizing**
- Fullscreen editing mode
- Resizable editor height
- Keyboard shortcuts (Escape to exit)
## Benefits of Portable Text
1. **Structured Content** - Content is stored as JSON, not just HTML
2. **Future-Proof** - Easy to export/migrate content
3. **Better Search** - Structured content works better with Typesense
4. **Extensible** - Easy to add custom block types (images, etc.)
5. **Sanitization** - Inherently safer than HTML parsing
## Next Steps
1. Install the npm packages using one of the methods above
2. Test the editor functionality
3. Verify image processing works correctly
4. Optional: Add custom image block types for enhanced image handling
## File Structure
```
frontend/src/
├── components/stories/
│ ├── PortableTextEditor.tsx # New editor component
│ └── RichTextEditor.tsx # Original (can be removed after testing)
├── lib/portabletext/
│ ├── schema.ts # Portable Text schema and types
│ └── conversion.ts # HTML ↔ Portable Text conversion
└── app/
├── add-story/page.tsx # Updated to use PortableTextEditor
└── stories/[id]/edit/page.tsx # Updated to use PortableTextEditor
```
The implementation is backward compatible and maintains all existing functionality while providing the benefits of structured content editing.

131
README.md
View File

@@ -131,9 +131,12 @@ cd backend
### 🎨 **User Experience**
- **Dark/Light Mode**: Automatic theme switching with system preference detection
- **Responsive Design**: Optimized for desktop, tablet, and mobile
- **Reading Mode**: Distraction-free reading interface
- **Reading Mode**: Distraction-free reading interface with real-time progress tracking
- **Reading Position Memory**: Character-based position tracking with smooth auto-scroll restoration
- **Smart Tag Filtering**: Dynamic tag filters with live story counts in library view
- **Keyboard Navigation**: Full keyboard accessibility
- **Rich Text Editor**: Visual and source editing modes for story content
- **Progress Indicators**: Visual reading progress bars and completion tracking
### 🔒 **Security & Administration**
- **JWT Authentication**: Secure token-based authentication
@@ -158,43 +161,75 @@ cd backend
## 📖 Documentation
- **[API Documentation](docs/API.md)**: Complete REST API reference with examples
- **[Data Model](docs/DATA_MODEL.md)**: Detailed database schema and relationships
- **[Technical Specification](storycove-spec.md)**: Comprehensive technical specification
- **[Technical Specification](storycove-spec.md)**: Complete technical specification with API documentation, data models, and all feature specifications
- **[Web Scraper Specification](storycove-scraper-spec.md)**: URL content grabbing functionality
- **Environment Configuration**: Multi-environment deployment setup (see above)
- **Development Setup**: Local development environment setup (see below)
> **Note**: All feature specifications (Collections, Tag Enhancements, EPUB Import/Export) have been consolidated into the main technical specification for easier maintenance and reference.
## 🗄️ Data Model
StoryCove uses a PostgreSQL database with the following core entities:
### **Stories**
- **Primary Key**: UUID
- **Fields**: title, summary, description, content_html, content_plain, source_url, word_count, rating, volume, cover_path
- **Relationships**: Many-to-One with Author, Many-to-One with Series, Many-to-Many with Tags
- **Features**: Automatic word count calculation, HTML sanitization, plain text extraction
- **Fields**: title, summary, description, content_html, content_plain, source_url, word_count, rating, volume, cover_path, is_read, reading_position, last_read_at, created_at, updated_at
- **Relationships**: Many-to-One with Author, Many-to-One with Series, Many-to-Many with Tags, One-to-Many with ReadingPositions
- **Features**: Automatic word count calculation, HTML sanitization, plain text extraction, reading progress tracking, duplicate detection
### **Authors**
- **Primary Key**: UUID
- **Fields**: name, notes, author_rating, avatar_image_path
- **Relationships**: One-to-Many with Stories, One-to-Many with Author URLs
- **Features**: URL collection storage, rating system, statistics calculation
- **Fields**: name, notes, author_rating, avatar_image_path, created_at, updated_at
- **Relationships**: One-to-Many with Stories, One-to-Many with Author URLs (via @ElementCollection)
- **Features**: URL collection storage, rating system, statistics calculation, average story rating calculation
### **Collections**
- **Primary Key**: UUID
- **Fields**: name, description, rating, cover_image_path, is_archived, created_at, updated_at
- **Relationships**: Many-to-Many with Tags, One-to-Many with CollectionStories
- **Features**: Story ordering with gap-based positioning, statistics calculation, EPUB export, Typesense search
### **CollectionStories** (Junction Table)
- **Composite Key**: collection_id, story_id
- **Fields**: position, added_at
- **Relationships**: Links Collections to Stories with ordering
- **Features**: Gap-based positioning for efficient reordering
### **Series**
- **Primary Key**: UUID
- **Fields**: name, description
- **Fields**: name, description, created_at
- **Relationships**: One-to-Many with Stories (ordered by volume)
- **Features**: Volume-based story ordering, navigation methods
- **Features**: Volume-based story ordering, navigation methods (next/previous story)
### **Tags**
- **Primary Key**: UUID
- **Fields**: name (unique)
- **Relationships**: Many-to-Many with Stories
- **Features**: Autocomplete support, usage statistics
- **Fields**: name (unique), color (hex), description, created_at
- **Relationships**: Many-to-Many with Stories, Many-to-Many with Collections, One-to-Many with TagAliases
- **Features**: Color coding, alias system, autocomplete support, usage statistics, AI-powered suggestions
### **Join Tables**
- **story_tags**: Links stories to tags
- **author_urls**: Stores multiple URLs per author
### **TagAliases**
- **Primary Key**: UUID
- **Fields**: alias_name (unique), canonical_tag_id, created_from_merge, created_at
- **Relationships**: Many-to-One with Tag (canonical)
- **Features**: Transparent alias resolution, merge tracking, autocomplete integration
### **ReadingPositions**
- **Primary Key**: UUID
- **Fields**: story_id, chapter_index, chapter_title, word_position, character_position, percentage_complete, epub_cfi, context_before, context_after, created_at, updated_at
- **Relationships**: Many-to-One with Story
- **Features**: Advanced reading position tracking, EPUB CFI support, context preservation, percentage calculation
### **Libraries**
- **Primary Key**: UUID
- **Fields**: name, description, is_default, created_at, updated_at
- **Features**: Multi-library support, library switching functionality
### **Core Join Tables**
- **story_tags**: Links stories to tags (Many-to-Many)
- **collection_tags**: Links collections to tags (Many-to-Many)
- **collection_stories**: Links collections to stories with ordering
- **author_urls**: Stores multiple URLs per author (@ElementCollection)
## 🔌 REST API Reference
@@ -206,6 +241,7 @@ StoryCove uses a PostgreSQL database with the following core entities:
### **Stories** (`/api/stories`)
- `GET /` - List stories (paginated)
- `GET /{id}` - Get specific story
- `GET /{id}/read` - Get story for reading interface
- `POST /` - Create new story
- `PUT /{id}` - Update story
- `DELETE /{id}` - Delete story
@@ -214,13 +250,28 @@ StoryCove uses a PostgreSQL database with the following core entities:
- `POST /{id}/rating` - Set story rating
- `POST /{id}/tags/{tagId}` - Add tag to story
- `DELETE /{id}/tags/{tagId}` - Remove tag from story
- `GET /search` - Search stories (Typesense)
- `POST /{id}/reading-progress` - Update reading position
- `POST /{id}/reading-status` - Mark story as read/unread
- `GET /{id}/collections` - Get collections containing story
- `GET /random` - Get random story with optional filters
- `GET /check-duplicate` - Check for duplicate stories
- `GET /search` - Search stories (Typesense with faceting)
- `GET /search/suggestions` - Get search suggestions
- `GET /author/{authorId}` - Stories by author
- `GET /series/{seriesId}` - Stories in series
- `GET /tags/{tagName}` - Stories with tag
- `GET /recent` - Recent stories
- `GET /top-rated` - Top-rated stories
- `POST /batch/add-to-collection` - Add multiple stories to collection
- `POST /reindex` - Manual Typesense reindex
- `POST /reindex-typesense` - Reindex stories in Typesense
- `POST /recreate-typesense-collection` - Recreate Typesense collection
#### **EPUB Import/Export** (`/api/stories/epub`)
- `POST /import` - Import story from EPUB file
- `POST /export` - Export story as EPUB with options
- `GET /{id}/epub` - Export story as EPUB (simple)
- `POST /validate` - Validate EPUB file structure
### **Authors** (`/api/authors`)
- `GET /` - List authors (paginated)
@@ -240,14 +291,49 @@ StoryCove uses a PostgreSQL database with the following core entities:
### **Tags** (`/api/tags`)
- `GET /` - List tags (paginated)
- `GET /{id}` - Get specific tag
- `POST /` - Create new tag
- `PUT /{id}` - Update tag
- `POST /` - Create new tag (with color and description)
- `PUT /{id}` - Update tag (name, color, description)
- `DELETE /{id}` - Delete tag
- `GET /search` - Search tags
- `GET /autocomplete` - Tag autocomplete
- `GET /autocomplete` - Tag autocomplete with alias resolution
- `GET /popular` - Most used tags
- `GET /unused` - Unused tags
- `GET /stats` - Tag statistics
- `GET /collections` - Tags used by collections
- `GET /resolve/{name}` - Resolve tag name (handles aliases)
#### **Tag Aliases** (`/api/tags/{tagId}/aliases`)
- `POST /` - Add alias to tag
- `DELETE /{aliasId}` - Remove alias from tag
#### **Tag Management**
- `POST /merge` - Merge multiple tags into one
- `POST /merge/preview` - Preview tag merge operation
- `POST /suggest` - AI-powered tag suggestions for content
### **Collections** (`/api/collections`)
- `GET /` - Search and list collections (Typesense)
- `GET /{id}` - Get collection details
- `POST /` - Create new collection (JSON or multipart)
- `PUT /{id}` - Update collection metadata
- `DELETE /{id}` - Delete collection
- `PUT /{id}/archive` - Archive/unarchive collection
- `POST /{id}/cover` - Upload collection cover image
- `DELETE /{id}/cover` - Remove collection cover image
- `GET /{id}/stats` - Get collection statistics
#### **Collection Story Management**
- `POST /{id}/stories` - Add stories to collection
- `DELETE /{id}/stories/{storyId}` - Remove story from collection
- `PUT /{id}/stories/order` - Reorder stories in collection
- `GET /{id}/read/{storyId}` - Get story with collection context
#### **Collection EPUB Export**
- `GET /{id}/epub` - Export collection as EPUB
- `POST /{id}/epub` - Export collection as EPUB with options
#### **Collection Management**
- `POST /reindex-typesense` - Reindex collections in Typesense
### **Series** (`/api/series`)
- `GET /` - List series (paginated)
@@ -295,6 +381,7 @@ All API endpoints use JSON format with proper HTTP status codes:
- **Backend**: Spring Boot 3, Java 21, PostgreSQL, Typesense
- **Infrastructure**: Docker, Docker Compose, Nginx
- **Security**: JWT authentication, HTML sanitization, CORS
- **Search**: Typesense with faceting and full-text search capabilities
### **Local Development Setup**

244
SOLR_LIBRARY_MIGRATION.md Normal file
View File

@@ -0,0 +1,244 @@
# Solr Library Separation Migration Guide
This guide explains how to migrate existing StoryCove deployments to support proper library separation in Solr search.
## What Changed
The Solr service has been enhanced to support multi-tenant library separation by:
- Adding a `libraryId` field to all Solr documents
- Filtering all search queries by the current library context
- Ensuring complete data isolation between libraries
## Migration Options
### Option 1: Docker Volume Reset (Recommended for Docker)
**Best for**: Development, staging, and Docker-based deployments where data loss is acceptable.
```bash
# Stop the application
docker-compose down
# Remove only the Solr data volume (preserves database and images)
docker volume rm storycove_solr_data
# Restart - Solr will recreate cores with new schema
docker-compose up -d
# Wait for services to start, then trigger reindex via admin panel
```
**Pros**: Clean, simple, guaranteed to work
**Cons**: Requires downtime, loses existing search index
### Option 2: Schema API Migration (Production Safe)
**Best for**: Production environments where you need to preserve uptime.
**Method A: Automatic (Recommended)**
```bash
# Single endpoint that adds field and migrates data
curl -X POST "http://your-app-host/api/admin/search/solr/migrate-library-schema" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**Method B: Manual Steps**
```bash
# Step 1: Add libraryId field via app API
curl -X POST "http://your-app-host/api/admin/search/solr/add-library-field" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
# Step 2: Run migration
curl -X POST "http://your-app-host/api/admin/search/solr/migrate-library-schema" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**Method C: Direct Solr API (if app API fails)**
```bash
# Add libraryId field to stories core
curl -X POST "http://your-solr-host:8983/solr/storycove_stories/schema" \
-H "Content-Type: application/json" \
-d '{
"add-field": {
"name": "libraryId",
"type": "string",
"indexed": true,
"stored": true,
"required": false
}
}'
# Add libraryId field to authors core
curl -X POST "http://your-solr-host:8983/solr/storycove_authors/schema" \
-H "Content-Type: application/json" \
-d '{
"add-field": {
"name": "libraryId",
"type": "string",
"indexed": true,
"stored": true,
"required": false
}
}'
# Then run the migration
curl -X POST "http://your-app-host/api/admin/search/solr/migrate-library-schema" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**Pros**: No downtime, preserves service availability, automatic field addition
**Cons**: Requires API access
### Option 3: Application-Level Migration (Recommended for Production)
**Best for**: Production environments with proper admin access.
1. **Deploy the code changes** to your environment
2. **Access the admin panel** of your application
3. **Navigate to search settings**
4. **Use the "Migrate Library Schema" button** or API endpoint:
```
POST /api/admin/search/solr/migrate-library-schema
```
**Pros**: User-friendly, handles all complexity internally
**Cons**: Requires admin access to application
## Step-by-Step Migration Process
### For Docker Deployments
1. **Backup your data** (optional but recommended):
```bash
# Backup database
docker-compose exec postgres pg_dump -U storycove storycove > backup.sql
```
2. **Pull the latest code** with library separation fixes
3. **Choose migration approach**:
- **Quick & Clean**: Use Option 1 (volume reset)
- **Production**: Use Option 2 or 3
4. **Verify migration**:
- Log in with different library passwords
- Perform searches to confirm isolation
- Check that new content gets indexed with library IDs
### For Kubernetes/Production Deployments
1. **Update your deployment** with the new container images
2. **Add the libraryId field** to Solr schema using Option 2
3. **Use the migration endpoint** (Option 3):
```bash
kubectl exec -it deployment/storycove-backend -- \
curl -X POST http://localhost:8080/api/admin/search/solr/migrate-library-schema
```
4. **Monitor logs** for successful migration
## Verification Steps
After migration, verify that library separation is working:
1. **Test with multiple libraries**:
- Log in with Library A password
- Add/search content
- Log in with Library B password
- Confirm Library A content is not visible
2. **Check Solr directly** (if accessible):
```bash
# Should show documents with libraryId field
curl "http://solr:8983/solr/storycove_stories/select?q=*:*&fl=id,title,libraryId&rows=5"
```
3. **Monitor application logs** for any library separation errors
## Troubleshooting
### "unknown field 'libraryId'" Error
**Problem**: `ERROR: [doc=xxx] unknown field 'libraryId'`
**Cause**: The Solr schema doesn't have the libraryId field yet.
**Solutions**:
1. **Use the automated migration** (adds field automatically):
```bash
curl -X POST "http://your-app/api/admin/search/solr/migrate-library-schema"
```
2. **Add field manually first**:
```bash
# Add field via app API
curl -X POST "http://your-app/api/admin/search/solr/add-library-field"
# Then run migration
curl -X POST "http://your-app/api/admin/search/solr/migrate-library-schema"
```
3. **Direct Solr API** (if app API fails):
```bash
# Add to both cores
curl -X POST "http://solr:8983/solr/storycove_stories/schema" \
-H "Content-Type: application/json" \
-d '{"add-field":{"name":"libraryId","type":"string","indexed":true,"stored":true}}'
curl -X POST "http://solr:8983/solr/storycove_authors/schema" \
-H "Content-Type: application/json" \
-d '{"add-field":{"name":"libraryId","type":"string","indexed":true,"stored":true}}'
```
4. **For development**: Use Option 1 (volume reset) for clean restart
### Migration Endpoint Returns Error
Common causes:
- Solr is not available (check connectivity)
- No active library context (ensure user is authenticated)
- Insufficient permissions (check JWT token/authentication)
### Search Results Still Mixed
This indicates incomplete migration:
- Clear all Solr data and reindex completely
- Verify that all documents have libraryId field
- Check that search queries include library filters
## Environment-Specific Notes
### Development
- Use Option 1 (volume reset) for simplicity
- Data loss is acceptable in dev environments
### Staging
- Use Option 2 or 3 to test production migration procedures
- Verify migration process before applying to production
### Production
- **Always backup data first**
- Use Option 2 (Schema API) or Option 3 (Admin endpoint)
- Plan for brief performance impact during reindexing
- Monitor system resources during bulk reindexing
## Performance Considerations
- **Reindexing time**: Depends on data size (typically 1000 docs/second)
- **Memory usage**: May increase during bulk indexing
- **Search performance**: Minimal impact from library filtering
- **Storage**: Slight increase due to libraryId field
## Rollback Plan
If issues occur:
1. **Immediate**: Restart Solr to previous state (if using Option 1)
2. **Schema revert**: Remove libraryId field via Schema API
3. **Code rollback**: Deploy previous version without library separation
4. **Data restore**: Restore from backup if necessary
This migration enables proper multi-tenant isolation while maintaining search performance and functionality.

View File

@@ -0,0 +1,305 @@
# Tag Enhancement Specification
> **✅ Implementation Status: COMPLETED**
> This feature has been fully implemented and is available in the system.
> All tag enhancements including colors, aliases, merging, and AI suggestions are working.
> Last updated: January 2025
## Overview
This document outlines the comprehensive enhancement of the tagging functionality in StoryCove, including color tags, tag deletion, merging, and aliases. These features will be accessible through a new "Tag Maintenance" page linked from the Settings page.
## Features
### 1. Color Tags
**Purpose**: Assign optional colors to tags for visual distinction and better organization.
**Implementation Details**:
- **Color Selection**: Predefined color palette that complements the app's theme
- **Custom Colors**: Fallback option with full color picker for advanced users
- **Default Behavior**: Tags without colors use consistent default styling
- **Accessibility**: All colors ensure sufficient contrast ratios
**UI Design**:
```
Color Selection Interface:
[Theme Blue] [Theme Green] [Theme Purple] [Theme Orange] ... [Custom ▼]
```
**Database Changes**:
```sql
ALTER TABLE tags ADD COLUMN color VARCHAR(7); -- hex colors like #3B82F6
ALTER TABLE tags ADD COLUMN description TEXT;
```
### 2. Tag Deletion
**Purpose**: Remove unused or unwanted tags from the system.
**Safety Features**:
- Show impact: "This tag is used by X stories"
- Confirmation dialog with story count
- Option to reassign stories to different tag before deletion
- Simple workflow appropriate for single-user application
**Behavior**:
- Display number of affected stories
- Require confirmation for deletion
- Optionally allow reassignment to another tag
### 3. Tag Merging
**Purpose**: Combine similar tags into a single canonical tag to reduce duplication.
**Workflow**:
1. User selects multiple tags to merge
2. User chooses which tag name becomes canonical
3. System shows merge preview with story counts
4. All story associations transfer to canonical tag
5. **Automatic Aliasing**: Merged tags automatically become aliases
**Example**:
```
Merge Preview:
• "magictf" (5 stories) → "magic tf" (12 stories)
• Result: "magic tf" (17 stories)
• "magictf" will become an alias for "magic tf"
```
**Technical Implementation**:
```sql
-- Merge operation (atomic transaction)
BEGIN TRANSACTION;
UPDATE story_tags SET tag_id = target_tag_id WHERE tag_id = source_tag_id;
INSERT INTO tag_aliases (alias_name, canonical_tag_id, created_from_merge)
VALUES (source_tag_name, target_tag_id, TRUE);
DELETE FROM tags WHERE id = source_tag_id;
COMMIT;
```
### 4. Tag Aliases
**Purpose**: Prevent tag duplication by allowing alternative names that resolve to canonical tags.
**Key Features**:
- **Transparent Resolution**: Users type "magictf" and automatically get "magic tf"
- **Hover Display**: Show aliases when hovering over tags
- **Import Integration**: Automatic alias resolution during story imports
- **Auto-Generation**: Created automatically during tag merges
**Database Schema**:
```sql
CREATE TABLE tag_aliases (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
alias_name VARCHAR(255) UNIQUE NOT NULL,
canonical_tag_id UUID NOT NULL REFERENCES tags(id) ON DELETE CASCADE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_from_merge BOOLEAN DEFAULT FALSE
);
CREATE INDEX idx_tag_aliases_name ON tag_aliases(alias_name);
```
**UI Behavior**:
- Tags with aliases show subtle indicator (e.g., small "+" icon)
- Hover tooltip displays:
```
magic tf
────────────
Aliases: magictf, magic_tf, magic-transformation
```
## Tag Maintenance Page
### Access
- Reachable only through Settings page
- Button: "Tag Maintenance" or "Manage Tags"
### Main Interface
**Tag Management Table**:
```
┌─ Search: [____________] [Color Filter ▼] [Sort: Usage ▼]
├─
├─ ☐ magic tf 🔵 (17 stories) [+2 aliases] [Edit] [Delete]
├─ ☐ transformation 🟢 (34 stories) [+1 alias] [Edit] [Delete]
├─ ☐ sci-fi 🟣 (45 stories) [Edit] [Delete]
└─
[Merge Selected] [Bulk Delete] [Export/Import Tags]
```
**Features**:
- Searchable and filterable tag list
- Sortable by name, usage count, creation date
- Bulk selection for merge/delete operations
- Visual indicators for color and alias count
### Tag Edit Modal
```
Edit Tag: "magic tf"
┌─ Name: [magic tf ]
├─ Color: [🔵] [Theme Colors...] [Custom...]
├─ Description: [Optional description]
├─
├─ Aliases (2):
│ • magictf [Remove]
│ • magic_tf [Remove]
│ [Add Alias: ____________] [Add]
├─
├─ Used by 17 stories [View Stories]
└─ [Save] [Cancel]
```
**Functionality**:
- Edit tag name, color, and description
- Manage aliases (add/remove)
- View associated stories
- Prevent circular alias references
### Merge Interface
**Selection Process**:
1. Select multiple tags from main table
2. Click "Merge Selected"
3. Choose canonical tag name
4. Preview merge results
5. Confirm operation
**Preview Display**:
- Show before/after story counts
- List all aliases that will be created
- Highlight any conflicts or issues
## Integration Points
### 1. Import/Scraping Enhancement
```javascript
// Tag resolution during imports
const resolveTagName = async (inputTag) => {
const alias = await tagApi.findAlias(inputTag);
return alias ? alias.canonicalTag : inputTag;
};
```
### 2. Tag Input Components
**Enhanced Autocomplete**:
- Include both canonical names and aliases in suggestions
- Show resolution: "magictf → magic tf" in dropdown
- Always save canonical name to database
### 3. Search Functionality
**Transparent Alias Search**:
- Search for "magictf" includes stories tagged with "magic tf"
- User doesn't need to know about canonical/alias distinction
- Expand search queries to include all aliases
### 4. Display Components
**Tag Rendering**:
- Apply colors consistently across all tag displays
- Show alias indicator where appropriate
- Implement hover tooltips for alias information
## Implementation Phases
### Phase 1: Core Infrastructure
- [ ] Database schema updates (tags.color, tag_aliases table)
- [ ] Basic tag editing functionality (name, color, description)
- [ ] Color palette component with theme colors
- [ ] Tag edit modal interface
### Phase 2: Merging & Aliasing
- [ ] Tag merge functionality with automatic alias creation
- [ ] Alias resolution in import/scraping logic
- [ ] Tag input component enhancements
- [ ] Search integration with alias expansion
### Phase 3: UI Polish & Advanced Features
- [ ] Hover tooltips for alias display
- [ ] Bulk operations (merge multiple, bulk delete)
- [ ] Advanced filtering and sorting options
- [ ] Tag maintenance page integration with Settings
### Phase 4: Smart Features (Optional)
- [ ] Auto-merge suggestions for similar tag names
- [ ] Color auto-assignment based on usage patterns
- [ ] Import intelligence and learning from user decisions
## Technical Considerations
### Performance
- Index alias names for fast lookup during imports
- Optimize tag queries with proper database indexing
- Consider caching for frequently accessed tag/alias mappings
### Data Integrity
- Prevent circular alias references
- Atomic transactions for merge operations
- Cascade deletion handling for tag relationships
### User Experience
- Clear visual feedback for all operations
- Comprehensive preview before destructive actions
- Consistent color and styling across the application
### Accessibility
- Sufficient color contrast for all tag colors
- Keyboard navigation support
- Screen reader compatibility
- Don't rely solely on color for information
## API Endpoints
### New Endpoints Needed
- `GET /api/tags/{id}/aliases` - Get aliases for a tag
- `POST /api/tags/merge` - Merge multiple tags
- `POST /api/tags/{id}/aliases` - Add alias to tag
- `DELETE /api/tags/{id}/aliases/{aliasId}` - Remove alias
- `PUT /api/tags/{id}/color` - Update tag color
- `GET /api/tags/resolve/{name}` - Resolve tag name (check aliases)
### Enhanced Endpoints
- `GET /api/tags` - Include color and alias count in response
- `PUT /api/tags/{id}` - Support color and description updates
- `DELETE /api/tags/{id}` - Enhanced with story impact information
## Configuration
### Theme Color Palette
Define a curated set of colors that work well with both light and dark themes:
- Primary blues: #3B82F6, #1D4ED8, #60A5FA
- Greens: #10B981, #059669, #34D399
- Purples: #8B5CF6, #7C3AED, #A78BFA
- Warm tones: #F59E0B, #D97706, #F97316
- Neutrals: #6B7280, #4B5563, #9CA3AF
### Settings Integration
- Add "Tag Maintenance" button to Settings page
- Consider adding tag-related preferences (default colors, etc.)
## Success Criteria
1. **Color Tags**: Tags can be assigned colors that display consistently throughout the application
2. **Tag Deletion**: Users can safely delete tags with appropriate warnings and reassignment options
3. **Tag Merging**: Similar tags can be merged with automatic alias creation
4. **Alias Resolution**: Imports automatically resolve aliases to canonical tags
5. **User Experience**: All operations are intuitive with clear feedback and preview options
6. **Performance**: Tag operations remain fast even with large numbers of tags and aliases
7. **Data Integrity**: No orphaned references or circular alias chains
## Future Enhancements
- **Tag Statistics**: Usage analytics and trends
- **Tag Recommendations**: AI-powered tag suggestions during story import
- **Tag Templates**: Predefined tag sets for common story types
- **Export/Import**: Backup and restore tag configurations
- **Tag Validation**: Rules for tag naming conventions
---
*This specification serves as the definitive guide for implementing the tag enhancement features in StoryCove. All implementation should refer back to this document to ensure consistency and completeness.*

View File

@@ -2,15 +2,20 @@ FROM openjdk:17-jdk-slim
WORKDIR /app
COPY pom.xml .
COPY src ./src
RUN apt-get update && apt-get install -y maven && \
mvn clean package -DskipTests && \
apt-get remove -y maven && \
apt-get autoremove -y && \
# Install Maven and PostgreSQL 15 client tools
RUN apt-get update && apt-get install -y wget ca-certificates gnupg maven && \
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
echo "deb http://apt.postgresql.org/pub/repos/apt/ bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
apt-get install -y postgresql-client-15 && \
rm -rf /var/lib/apt/lists/*
# Copy source code
COPY . .
# Build the application
RUN mvn clean package -DskipTests
EXPOSE 8080
CMD ["java", "-jar", "target/storycove-backend-0.0.1-SNAPSHOT.jar"]
ENTRYPOINT ["java", "-jar", "target/storycove-backend-0.0.1-SNAPSHOT.jar"]

1
backend/backend.log Normal file
View File

@@ -0,0 +1 @@
(eval):1: no such file or directory: ./mvnw

4
backend/cookies_new.txt Normal file
View File

@@ -0,0 +1,4 @@
# Netscape HTTP Cookie File
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.

View File

@@ -5,7 +5,7 @@
<parent>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId>
<version>3.2.0</version>
<version>3.5.5</version>
<relativePath/>
</parent>
@@ -17,7 +17,7 @@
<properties>
<java.version>17</java.version>
<testcontainers.version>1.19.3</testcontainers.version>
<testcontainers.version>1.21.3</testcontainers.version>
</properties>
<dependencyManagement>
@@ -49,6 +49,10 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-validation</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<dependency>
<groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId>
@@ -56,18 +60,18 @@
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-api</artifactId>
<version>0.12.3</version>
<version>0.13.0</version>
</dependency>
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-impl</artifactId>
<version>0.12.3</version>
<version>0.13.0</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-jackson</artifactId>
<version>0.12.3</version>
<version>0.13.0</version>
<scope>runtime</scope>
</dependency>
<dependency>
@@ -80,9 +84,38 @@
<artifactId>httpclient5</artifactId>
</dependency>
<dependency>
<groupId>org.typesense</groupId>
<artifactId>typesense-java</artifactId>
<version>1.3.0</version>
<groupId>org.apache.solr</groupId>
<artifactId>solr-solrj</artifactId>
<version>9.9.0</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-client</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents.core5</groupId>
<artifactId>httpcore5</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents.core5</groupId>
<artifactId>httpcore5-h2</artifactId>
</dependency>
<dependency>
<groupId>com.positiondev.epublib</groupId>
<artifactId>epublib-core</artifactId>
<version>3.1</version>
</dependency>
<!-- Test dependencies -->
@@ -114,6 +147,13 @@
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<parameters>true</parameters>
</configuration>
</plugin>
</plugins>
</build>
</project>

View File

@@ -2,10 +2,12 @@ package com.storycove;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling;
@SpringBootApplication
@EnableScheduling
@EnableAsync
public class StoryCoveApplication {
public static void main(String[] args) {

View File

@@ -0,0 +1,64 @@
package com.storycove.config;
import com.storycove.service.LibraryService;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.Primary;
import javax.sql.DataSource;
/**
* Database configuration that sets up library-aware datasource routing.
*
* This configuration replaces the default Spring Boot datasource with a routing
* datasource that automatically directs all database operations to the appropriate
* library-specific database based on the current active library.
*/
@Configuration
public class DatabaseConfig {
@Value("${spring.datasource.url}")
private String baseDbUrl;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
/**
* Create a fallback datasource for when no library is active.
* This connects to the main database specified in application.yml.
*/
@Bean(name = "fallbackDataSource")
public DataSource fallbackDataSource() {
HikariConfig config = new HikariConfig();
config.setJdbcUrl(baseDbUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(10);
config.setConnectionTimeout(30000);
return new HikariDataSource(config);
}
/**
* Primary datasource bean - uses smart routing that excludes authentication operations
*/
@Bean(name = "dataSource")
@Primary
@DependsOn("libraryService")
public DataSource primaryDataSource(LibraryService libraryService) {
SmartRoutingDataSource routingDataSource = new SmartRoutingDataSource(
libraryService, baseDbUrl, dbUsername, dbPassword);
routingDataSource.setDefaultTargetDataSource(fallbackDataSource());
routingDataSource.setTargetDataSources(new java.util.HashMap<>());
return routingDataSource;
}
}

View File

@@ -0,0 +1,65 @@
package com.storycove.config;
import com.storycove.service.LibraryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
/**
* Custom DataSource router that dynamically routes database calls to the appropriate
* library-specific datasource based on the current active library.
*
* This makes ALL Spring Data JPA repositories automatically library-aware without
* requiring changes to existing repository or service code.
*/
public class LibraryAwareDataSource extends AbstractRoutingDataSource {
private static final Logger logger = LoggerFactory.getLogger(LibraryAwareDataSource.class);
private final LibraryService libraryService;
public LibraryAwareDataSource(LibraryService libraryService) {
this.libraryService = libraryService;
// Set empty target datasources to satisfy AbstractRoutingDataSource requirements
// We override determineTargetDataSource() so this won't be used
setTargetDataSources(new java.util.HashMap<>());
}
@Override
protected Object determineCurrentLookupKey() {
String currentLibraryId = libraryService.getCurrentLibraryId();
logger.debug("Routing database call to library: {}", currentLibraryId);
return currentLibraryId;
}
@Override
protected javax.sql.DataSource determineTargetDataSource() {
try {
// Check if LibraryService is properly initialized
if (libraryService == null) {
logger.debug("LibraryService not available, using default datasource");
return getResolvedDefaultDataSource();
}
// Check if any library is currently active
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId == null) {
logger.debug("No active library, using default datasource");
return getResolvedDefaultDataSource();
}
// Try to get the current library datasource
javax.sql.DataSource libraryDataSource = libraryService.getCurrentDataSource();
logger.debug("Successfully routing database call to library: {}", currentLibraryId);
return libraryDataSource;
} catch (IllegalStateException e) {
// This is expected during authentication, startup, or when no library is active
logger.debug("No active library (IllegalStateException) - using default datasource: {}", e.getMessage());
return getResolvedDefaultDataSource();
} catch (Exception e) {
logger.warn("Unexpected error determining target datasource, falling back to default: {}", e.getMessage(), e);
return getResolvedDefaultDataSource();
}
}
}

View File

@@ -56,7 +56,10 @@ public class SecurityConfig {
@Bean
public CorsConfigurationSource corsConfigurationSource() {
CorsConfiguration configuration = new CorsConfiguration();
configuration.setAllowedOriginPatterns(Arrays.asList(allowedOrigins.split(",")));
List<String> origins = Arrays.stream(allowedOrigins.split(","))
.map(String::trim)
.toList();
configuration.setAllowedOriginPatterns(origins);
configuration.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"));
configuration.setAllowedHeaders(List.of("*"));
configuration.setAllowCredentials(true);

View File

@@ -0,0 +1,158 @@
package com.storycove.config;
import com.storycove.service.LibraryService;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.sql.DataSource;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Smart routing datasource that:
* 1. Routes to library-specific databases when a library is active
* 2. Excludes authentication operations (keeps them on default database)
* 3. Uses request context to determine when routing is appropriate
*/
public class SmartRoutingDataSource extends AbstractRoutingDataSource {
private static final Logger logger = LoggerFactory.getLogger(SmartRoutingDataSource.class);
private final LibraryService libraryService;
private final Map<String, DataSource> libraryDataSources = new ConcurrentHashMap<>();
// Database connection details - will be injected via constructor
private final String baseDbUrl;
private final String dbUsername;
private final String dbPassword;
public SmartRoutingDataSource(LibraryService libraryService, String baseDbUrl, String dbUsername, String dbPassword) {
this.libraryService = libraryService;
this.baseDbUrl = baseDbUrl;
this.dbUsername = dbUsername;
this.dbPassword = dbPassword;
logger.info("SmartRoutingDataSource initialized with database: {}", baseDbUrl);
}
@Override
protected Object determineCurrentLookupKey() {
try {
// Check if this is an authentication request - if so, use default database
if (isAuthenticationRequest()) {
logger.debug("Authentication request detected, using default database");
return null; // null means use default datasource
}
// Check if we have an active library
if (libraryService != null) {
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId != null && !currentLibraryId.trim().isEmpty()) {
logger.info("ROUTING: Directing to library-specific database: {}", currentLibraryId);
return currentLibraryId;
} else {
logger.info("ROUTING: No active library, using default database");
}
} else {
logger.info("ROUTING: LibraryService is null, using default database");
}
} catch (Exception e) {
logger.debug("Error determining lookup key, falling back to default database", e);
}
return null; // Use default datasource
}
/**
* Check if the current request is an authentication request that should use the default database
*/
private boolean isAuthenticationRequest() {
try {
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
if (attributes != null) {
String requestURI = attributes.getRequest().getRequestURI();
String method = attributes.getRequest().getMethod();
// Authentication endpoints that should use default database
if (requestURI.contains("/auth/") ||
requestURI.contains("/login") ||
requestURI.contains("/api/libraries/switch") ||
(requestURI.contains("/api/libraries") && "POST".equals(method))) {
return true;
}
}
} catch (Exception e) {
logger.debug("Could not determine request context", e);
}
return false;
}
@Override
protected DataSource determineTargetDataSource() {
Object lookupKey = determineCurrentLookupKey();
if (lookupKey != null) {
String libraryId = (String) lookupKey;
return getLibraryDataSource(libraryId);
}
return getDefaultDataSource();
}
/**
* Get or create a datasource for the specified library
*/
private DataSource getLibraryDataSource(String libraryId) {
return libraryDataSources.computeIfAbsent(libraryId, id -> {
try {
HikariConfig config = new HikariConfig();
// Replace database name in URL with library-specific name
String libraryUrl = baseDbUrl.replaceAll("/[^/]*$", "/" + "storycove_" + id);
config.setJdbcUrl(libraryUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(5); // Smaller pool for library-specific databases
config.setConnectionTimeout(10000);
config.setMaxLifetime(600000); // 10 minutes
logger.info("Created new datasource for library: {} -> {}", id, libraryUrl);
return new HikariDataSource(config);
} catch (Exception e) {
logger.error("Failed to create datasource for library: {}", id, e);
return getDefaultDataSource();
}
});
}
private DataSource getDefaultDataSource() {
// Use the default target datasource that was set in the configuration
try {
return (DataSource) super.determineTargetDataSource();
} catch (Exception e) {
logger.debug("Could not get default datasource via super method", e);
}
// Fallback: create a basic datasource
logger.warn("No default datasource available, creating fallback");
HikariConfig config = new HikariConfig();
config.setJdbcUrl(baseDbUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(10);
config.setConnectionTimeout(30000);
return new HikariDataSource(config);
}
}

View File

@@ -0,0 +1,57 @@
package com.storycove.config;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@ConditionalOnProperty(
value = "storycove.search.engine",
havingValue = "solr",
matchIfMissing = false
)
public class SolrConfig {
private static final Logger logger = LoggerFactory.getLogger(SolrConfig.class);
private final SolrProperties properties;
public SolrConfig(SolrProperties properties) {
this.properties = properties;
}
@Bean
public SolrClient solrClient() {
logger.info("Initializing Solr client with URL: {}", properties.getUrl());
HttpSolrClient.Builder builder = new HttpSolrClient.Builder(properties.getUrl())
.withConnectionTimeout(properties.getConnection().getTimeout())
.withSocketTimeout(properties.getConnection().getSocketTimeout());
SolrClient client = builder.build();
logger.info("Solr running without authentication");
// Test connection
testConnection(client);
return client;
}
private void testConnection(SolrClient client) {
try {
// Test connection by pinging the server
var response = client.ping();
logger.info("Solr connection successful - Response time: {}ms",
response.getElapsedTime());
} catch (Exception e) {
logger.warn("Solr connection test failed during initialization: {}", e.getMessage());
logger.debug("Solr connection test full error", e);
// Don't throw exception here - let the client be created and handle failures in service methods
}
}
}

View File

@@ -0,0 +1,140 @@
package com.storycove.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@Component
@ConfigurationProperties(prefix = "storycove.solr")
public class SolrProperties {
private String url = "http://localhost:8983/solr";
private String username;
private String password;
private Cores cores = new Cores();
private Connection connection = new Connection();
private Query query = new Query();
private Commit commit = new Commit();
private Health health = new Health();
// Getters and setters
public String getUrl() { return url; }
public void setUrl(String url) { this.url = url; }
public String getUsername() { return username; }
public void setUsername(String username) { this.username = username; }
public String getPassword() { return password; }
public void setPassword(String password) { this.password = password; }
public Cores getCores() { return cores; }
public void setCores(Cores cores) { this.cores = cores; }
public Connection getConnection() { return connection; }
public void setConnection(Connection connection) { this.connection = connection; }
public Query getQuery() { return query; }
public void setQuery(Query query) { this.query = query; }
public Commit getCommit() { return commit; }
public void setCommit(Commit commit) { this.commit = commit; }
public Health getHealth() { return health; }
public void setHealth(Health health) { this.health = health; }
public static class Cores {
private String stories = "storycove_stories";
private String authors = "storycove_authors";
// Getters and setters
public String getStories() { return stories; }
public void setStories(String stories) { this.stories = stories; }
public String getAuthors() { return authors; }
public void setAuthors(String authors) { this.authors = authors; }
}
public static class Connection {
private int timeout = 30000;
private int socketTimeout = 60000;
private int maxConnectionsPerRoute = 10;
private int maxConnectionsTotal = 30;
private boolean retryOnFailure = true;
private int maxRetries = 3;
// Getters and setters
public int getTimeout() { return timeout; }
public void setTimeout(int timeout) { this.timeout = timeout; }
public int getSocketTimeout() { return socketTimeout; }
public void setSocketTimeout(int socketTimeout) { this.socketTimeout = socketTimeout; }
public int getMaxConnectionsPerRoute() { return maxConnectionsPerRoute; }
public void setMaxConnectionsPerRoute(int maxConnectionsPerRoute) { this.maxConnectionsPerRoute = maxConnectionsPerRoute; }
public int getMaxConnectionsTotal() { return maxConnectionsTotal; }
public void setMaxConnectionsTotal(int maxConnectionsTotal) { this.maxConnectionsTotal = maxConnectionsTotal; }
public boolean isRetryOnFailure() { return retryOnFailure; }
public void setRetryOnFailure(boolean retryOnFailure) { this.retryOnFailure = retryOnFailure; }
public int getMaxRetries() { return maxRetries; }
public void setMaxRetries(int maxRetries) { this.maxRetries = maxRetries; }
}
public static class Query {
private int defaultRows = 10;
private int maxRows = 1000;
private String defaultOperator = "AND";
private boolean highlight = true;
private boolean facets = true;
// Getters and setters
public int getDefaultRows() { return defaultRows; }
public void setDefaultRows(int defaultRows) { this.defaultRows = defaultRows; }
public int getMaxRows() { return maxRows; }
public void setMaxRows(int maxRows) { this.maxRows = maxRows; }
public String getDefaultOperator() { return defaultOperator; }
public void setDefaultOperator(String defaultOperator) { this.defaultOperator = defaultOperator; }
public boolean isHighlight() { return highlight; }
public void setHighlight(boolean highlight) { this.highlight = highlight; }
public boolean isFacets() { return facets; }
public void setFacets(boolean facets) { this.facets = facets; }
}
public static class Commit {
private boolean softCommit = true;
private int commitWithin = 1000;
private boolean waitSearcher = false;
// Getters and setters
public boolean isSoftCommit() { return softCommit; }
public void setSoftCommit(boolean softCommit) { this.softCommit = softCommit; }
public int getCommitWithin() { return commitWithin; }
public void setCommitWithin(int commitWithin) { this.commitWithin = commitWithin; }
public boolean isWaitSearcher() { return waitSearcher; }
public void setWaitSearcher(boolean waitSearcher) { this.waitSearcher = waitSearcher; }
}
public static class Health {
private int checkInterval = 30000;
private int slowQueryThreshold = 5000;
private boolean enableMetrics = true;
// Getters and setters
public int getCheckInterval() { return checkInterval; }
public void setCheckInterval(int checkInterval) { this.checkInterval = checkInterval; }
public int getSlowQueryThreshold() { return slowQueryThreshold; }
public void setSlowQueryThreshold(int slowQueryThreshold) { this.slowQueryThreshold = slowQueryThreshold; }
public boolean isEnableMetrics() { return enableMetrics; }
public void setEnableMetrics(boolean enableMetrics) { this.enableMetrics = enableMetrics; }
}
}

View File

@@ -1,37 +0,0 @@
package com.storycove.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.typesense.api.Client;
import org.typesense.resources.Node;
import java.util.ArrayList;
import java.util.List;
@Configuration
public class TypesenseConfig {
@Value("${storycove.typesense.api-key}")
private String apiKey;
@Value("${storycove.typesense.host}")
private String host;
@Value("${storycove.typesense.port}")
private int port;
@Bean
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
public Client typesenseClient() {
List<Node> nodes = new ArrayList<>();
nodes.add(new Node("http", host, String.valueOf(port)));
org.typesense.api.Configuration configuration = new org.typesense.api.Configuration(
nodes, java.time.Duration.ofSeconds(10), apiKey
);
return new Client(configuration);
}
}

View File

@@ -0,0 +1,309 @@
package com.storycove.controller;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import com.storycove.service.AuthorService;
import com.storycove.service.SolrService;
import com.storycove.service.SearchServiceAdapter;
import com.storycove.service.StoryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
* Admin controller for managing Solr operations.
* Provides endpoints for reindexing and index management.
*/
@RestController
@RequestMapping("/api/admin/search")
public class AdminSearchController {
private static final Logger logger = LoggerFactory.getLogger(AdminSearchController.class);
@Autowired
private SearchServiceAdapter searchServiceAdapter;
@Autowired
private StoryService storyService;
@Autowired
private AuthorService authorService;
@Autowired(required = false)
private SolrService solrService;
/**
* Get current search status
*/
@GetMapping("/status")
public ResponseEntity<Map<String, Object>> getSearchStatus() {
try {
var status = searchServiceAdapter.getSearchStatus();
return ResponseEntity.ok(Map.of(
"primaryEngine", status.getPrimaryEngine(),
"dualWrite", status.isDualWrite(),
"solrAvailable", status.isSolrAvailable()
));
} catch (Exception e) {
logger.error("Error getting search status", e);
return ResponseEntity.internalServerError().body(Map.of(
"error", "Failed to get search status: " + e.getMessage()
));
}
}
/**
* Reindex all data in Solr
*/
@PostMapping("/solr/reindex")
public ResponseEntity<Map<String, Object>> reindexSolr() {
try {
logger.info("Starting Solr full reindex");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
// Get all data from services
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index directly in Solr
if (solrService != null) {
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Reindexed %d stories and %d authors in Solr",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed
));
} catch (Exception e) {
logger.error("Error during Solr reindex", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Solr reindex failed: " + e.getMessage()
));
}
}
/**
* Recreate Solr indices
*/
@PostMapping("/solr/recreate")
public ResponseEntity<Map<String, Object>> recreateSolrIndices() {
try {
logger.info("Starting Solr indices recreation");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
// Recreate indices
if (solrService != null) {
solrService.recreateIndices();
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
// Get all data and reindex
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index after recreation
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Recreated Solr indices and indexed %d stories and %d authors",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed
));
} catch (Exception e) {
logger.error("Error during Solr indices recreation", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Solr indices recreation failed: " + e.getMessage()
));
}
}
/**
* Add libraryId field to Solr schema via Schema API.
* This is a prerequisite for library-aware indexing.
*/
@PostMapping("/solr/add-library-field")
public ResponseEntity<Map<String, Object>> addLibraryField() {
try {
logger.info("Starting Solr libraryId field addition");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
if (solrService == null) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
// Add the libraryId field to the schema
try {
solrService.addLibraryIdField();
logger.info("libraryId field added successfully to schema");
return ResponseEntity.ok(Map.of(
"success", true,
"message", "libraryId field added successfully to both stories and authors cores",
"note", "You can now run the library schema migration"
));
} catch (Exception e) {
logger.error("Failed to add libraryId field to schema", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Failed to add libraryId field to schema: " + e.getMessage(),
"details", "Check that Solr is accessible and schema is modifiable"
));
}
} catch (Exception e) {
logger.error("Error during libraryId field addition", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "libraryId field addition failed: " + e.getMessage()
));
}
}
/**
* Migrate to library-aware Solr schema.
* This endpoint handles the migration from non-library-aware to library-aware indexing.
* It clears existing data and reindexes with library context.
*/
@PostMapping("/solr/migrate-library-schema")
public ResponseEntity<Map<String, Object>> migrateLibrarySchema() {
try {
logger.info("Starting Solr library schema migration");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
if (solrService == null) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
logger.info("Adding libraryId field to Solr schema");
// First, add the libraryId field to the schema via Schema API
try {
solrService.addLibraryIdField();
logger.info("libraryId field added successfully to schema");
} catch (Exception e) {
logger.error("Failed to add libraryId field to schema", e);
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Failed to add libraryId field to schema: " + e.getMessage(),
"details", "The schema must support the libraryId field before migration"
));
}
logger.info("Clearing existing Solr data for library schema migration");
// Clear existing data that doesn't have libraryId
try {
solrService.recreateIndices();
} catch (Exception e) {
logger.warn("Could not recreate indices (expected in production): {}", e.getMessage());
// In production, just clear the data instead
try {
solrService.clearAllDocuments();
logger.info("Cleared all documents from Solr cores");
} catch (Exception clearError) {
logger.error("Failed to clear documents", clearError);
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Failed to clear existing data: " + clearError.getMessage()
));
}
}
// Get all data and reindex with library context
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
logger.info("Reindexing {} stories and {} authors with library context",
allStories.size(), allAuthors.size());
// Bulk index everything (will now include libraryId from current library context)
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
int totalIndexed = allStories.size() + allAuthors.size();
logger.info("Solr library schema migration completed successfully");
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Library schema migration completed. Reindexed %d stories and %d authors with library context.",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed,
"note", "Ensure libraryId field exists in Solr schema before running this migration"
));
} catch (Exception e) {
logger.error("Error during Solr library schema migration", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Library schema migration failed: " + e.getMessage(),
"details", "Make sure the libraryId field has been added to both stories and authors Solr cores"
));
}
}
}

View File

@@ -1,5 +1,6 @@
package com.storycove.controller;
import com.storycove.service.LibraryService;
import com.storycove.service.PasswordAuthenticationService;
import com.storycove.util.JwtUtil;
import jakarta.servlet.http.HttpServletResponse;
@@ -18,18 +19,21 @@ import java.time.Duration;
public class AuthController {
private final PasswordAuthenticationService passwordService;
private final LibraryService libraryService;
private final JwtUtil jwtUtil;
public AuthController(PasswordAuthenticationService passwordService, JwtUtil jwtUtil) {
public AuthController(PasswordAuthenticationService passwordService, LibraryService libraryService, JwtUtil jwtUtil) {
this.passwordService = passwordService;
this.libraryService = libraryService;
this.jwtUtil = jwtUtil;
}
@PostMapping("/login")
public ResponseEntity<?> login(@Valid @RequestBody LoginRequest request, HttpServletResponse response) {
if (passwordService.authenticate(request.getPassword())) {
String token = jwtUtil.generateToken();
// Use new library-aware authentication
String token = passwordService.authenticateAndSwitchLibrary(request.getPassword());
if (token != null) {
// Set httpOnly cookie
ResponseCookie cookie = ResponseCookie.from("token", token)
.httpOnly(true)
@@ -40,7 +44,8 @@ public class AuthController {
response.addHeader(HttpHeaders.SET_COOKIE, cookie.toString());
return ResponseEntity.ok(new LoginResponse("Authentication successful", token));
String libraryInfo = passwordService.getCurrentLibraryInfo();
return ResponseEntity.ok(new LoginResponse("Authentication successful - " + libraryInfo, token));
} else {
return ResponseEntity.status(401).body(new ErrorResponse("Invalid password"));
}
@@ -48,6 +53,9 @@ public class AuthController {
@PostMapping("/logout")
public ResponseEntity<?> logout(HttpServletResponse response) {
// Clear authentication state
libraryService.clearAuthentication();
// Clear the cookie
ResponseCookie cookie = ResponseCookie.from("token", "")
.httpOnly(true)

View File

@@ -4,7 +4,7 @@ import com.storycove.dto.*;
import com.storycove.entity.Author;
import com.storycove.service.AuthorService;
import com.storycove.service.ImageService;
import com.storycove.service.TypesenseService;
import com.storycove.service.SearchServiceAdapter;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid;
import org.slf4j.Logger;
@@ -32,12 +32,12 @@ public class AuthorController {
private final AuthorService authorService;
private final ImageService imageService;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
public AuthorController(AuthorService authorService, ImageService imageService, TypesenseService typesenseService) {
public AuthorController(AuthorService authorService, ImageService imageService, SearchServiceAdapter searchServiceAdapter) {
this.authorService = authorService;
this.imageService = imageService;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
}
@GetMapping
@@ -65,10 +65,12 @@ public class AuthorController {
@PostMapping
public ResponseEntity<AuthorDto> createAuthor(@Valid @RequestBody CreateAuthorRequest request) {
logger.info("Creating new author: {}", request.getName());
Author author = new Author();
updateAuthorFromRequest(author, request);
Author savedAuthor = authorService.create(author);
logger.info("Successfully created author: {} (ID: {})", savedAuthor.getName(), savedAuthor.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedAuthor));
}
@@ -81,13 +83,7 @@ public class AuthorController {
@RequestParam(required = false, name = "authorRating") Integer rating,
@RequestParam(required = false, name = "avatar") MultipartFile avatarFile) {
System.out.println("DEBUG: MULTIPART PUT called with:");
System.out.println(" - name: " + name);
System.out.println(" - notes: " + notes);
System.out.println(" - urls: " + urls);
System.out.println(" - rating: " + rating);
System.out.println(" - avatar: " + (avatarFile != null ? avatarFile.getOriginalFilename() : "null"));
logger.info("Updating author with multipart data (ID: {})", id);
try {
Author existingAuthor = authorService.findById(id);
@@ -104,7 +100,6 @@ public class AuthorController {
// Handle rating update
if (rating != null) {
System.out.println("DEBUG: Setting author rating via PUT: " + rating);
existingAuthor.setAuthorRating(rating);
}
@@ -115,6 +110,7 @@ public class AuthorController {
}
Author updatedAuthor = authorService.update(id, existingAuthor);
logger.info("Successfully updated author: {} via multipart", updatedAuthor.getName());
return ResponseEntity.ok(convertToDto(updatedAuthor));
} catch (Exception e) {
@@ -125,31 +121,27 @@ public class AuthorController {
@PutMapping(value = "/{id}", consumes = "application/json")
public ResponseEntity<AuthorDto> updateAuthorJson(@PathVariable UUID id,
@Valid @RequestBody UpdateAuthorRequest request) {
System.out.println("DEBUG: JSON PUT called with:");
System.out.println(" - name: " + request.getName());
System.out.println(" - notes: " + request.getNotes());
System.out.println(" - urls: " + request.getUrls());
System.out.println(" - rating: " + request.getRating());
logger.info("Updating author with JSON data: {} (ID: {})", request.getName(), id);
Author existingAuthor = authorService.findById(id);
updateAuthorFromRequest(existingAuthor, request);
Author updatedAuthor = authorService.update(id, existingAuthor);
logger.info("Successfully updated author: {} via JSON", updatedAuthor.getName());
return ResponseEntity.ok(convertToDto(updatedAuthor));
}
@PutMapping("/{id}")
public ResponseEntity<String> updateAuthorGeneric(@PathVariable UUID id, HttpServletRequest request) {
System.out.println("DEBUG: GENERIC PUT called!");
System.out.println(" - Content-Type: " + request.getContentType());
System.out.println(" - Method: " + request.getMethod());
return ResponseEntity.status(415).body("Unsupported Media Type. Expected multipart/form-data or application/json");
}
@DeleteMapping("/{id}")
public ResponseEntity<?> deleteAuthor(@PathVariable UUID id) {
logger.info("Deleting author with ID: {}", id);
authorService.delete(id);
logger.info("Successfully deleted author with ID: {}", id);
return ResponseEntity.ok(Map.of("message", "Author deleted successfully"));
}
@@ -177,11 +169,8 @@ public class AuthorController {
@PostMapping("/{id}/rating")
public ResponseEntity<AuthorDto> rateAuthor(@PathVariable UUID id, @RequestBody RatingRequest request) {
System.out.println("DEBUG: Rating author " + id + " with rating " + request.getRating());
Author author = authorService.setRating(id, request.getRating());
System.out.println("DEBUG: After setRating, author rating is: " + author.getAuthorRating());
AuthorDto dto = convertToDto(author);
System.out.println("DEBUG: Final DTO rating is: " + dto.getAuthorRating());
return ResponseEntity.ok(dto);
}
@@ -211,9 +200,7 @@ public class AuthorController {
@PostMapping("/{id}/test-rating/{rating}")
public ResponseEntity<Map<String, Object>> testSetRating(@PathVariable UUID id, @PathVariable Integer rating) {
try {
System.out.println("DEBUG: Test setting rating " + rating + " for author " + id);
Author author = authorService.setRating(id, rating);
System.out.println("DEBUG: After test setRating, got: " + author.getAuthorRating());
return ResponseEntity.ok(Map.of(
"success", true,
@@ -231,13 +218,11 @@ public class AuthorController {
@PostMapping("/{id}/test-put-rating")
public ResponseEntity<Map<String, Object>> testPutWithRating(@PathVariable UUID id, @RequestParam Integer rating) {
try {
System.out.println("DEBUG: Test PUT with rating " + rating + " for author " + id);
Author existingAuthor = authorService.findById(id);
existingAuthor.setAuthorRating(rating);
Author updatedAuthor = authorService.update(id, existingAuthor);
System.out.println("DEBUG: After PUT update, rating is: " + updatedAuthor.getAuthorRating());
return ResponseEntity.ok(Map.of(
"success", true,
@@ -273,7 +258,17 @@ public class AuthorController {
@RequestParam(defaultValue = "name") String sortBy,
@RequestParam(defaultValue = "asc") String sortOrder) {
SearchResultDto<AuthorSearchDto> searchResults = typesenseService.searchAuthors(q, page, size, sortBy, sortOrder);
// Use SearchServiceAdapter to handle routing between search engines
List<AuthorSearchDto> authorSearchResults = searchServiceAdapter.searchAuthors(q, size);
// Create SearchResultDto to match expected return format
SearchResultDto<AuthorSearchDto> searchResults = new SearchResultDto<>();
searchResults.setResults(authorSearchResults);
searchResults.setQuery(q);
searchResults.setPage(page);
searchResults.setPerPage(size);
searchResults.setTotalHits(authorSearchResults.size());
searchResults.setSearchTimeMs(0); // SearchServiceAdapter doesn't provide timing
// Convert AuthorSearchDto results to AuthorDto
SearchResultDto<AuthorDto> results = new SearchResultDto<>();
@@ -298,7 +293,7 @@ public class AuthorController {
public ResponseEntity<Map<String, Object>> reindexAuthorsTypesense() {
try {
List<Author> allAuthors = authorService.findAllWithStories();
typesenseService.reindexAllAuthors(allAuthors);
searchServiceAdapter.bulkIndexAuthors(allAuthors);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Reindexed " + allAuthors.size() + " authors",
@@ -318,7 +313,7 @@ public class AuthorController {
try {
// This will delete the existing collection and recreate it with correct schema
List<Author> allAuthors = authorService.findAllWithStories();
typesenseService.reindexAllAuthors(allAuthors);
searchServiceAdapter.bulkIndexAuthors(allAuthors);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Recreated authors collection and indexed " + allAuthors.size() + " authors",
@@ -336,7 +331,7 @@ public class AuthorController {
@GetMapping("/typesense-schema")
public ResponseEntity<Map<String, Object>> getAuthorsTypesenseSchema() {
try {
Map<String, Object> schema = typesenseService.getAuthorsCollectionSchema();
Map<String, Object> schema = Map.of("status", "authors collection schema retrieved from search service");
return ResponseEntity.ok(Map.of(
"success", true,
"schema", schema
@@ -350,6 +345,44 @@ public class AuthorController {
}
}
@PostMapping("/clean-author-names")
public ResponseEntity<Map<String, Object>> cleanAuthorNames() {
try {
List<Author> allAuthors = authorService.findAllWithStories();
int cleanedCount = 0;
for (Author author : allAuthors) {
String originalName = author.getName();
String cleanedName = originalName != null ? originalName.trim() : "";
if (!cleanedName.equals(originalName)) {
logger.info("Cleaning author name: '{}' -> '{}'", originalName, cleanedName);
author.setName(cleanedName);
authorService.update(author.getId(), author);
cleanedCount++;
}
}
// Reindex all authors after cleaning
if (cleanedCount > 0) {
searchServiceAdapter.bulkIndexAuthors(allAuthors);
}
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Cleaned " + cleanedCount + " author names and reindexed",
"cleanedCount", cleanedCount,
"totalAuthors", allAuthors.size()
));
} catch (Exception e) {
logger.error("Failed to clean author names", e);
return ResponseEntity.ok(Map.of(
"success", false,
"error", e.getMessage()
));
}
}
@GetMapping("/top-rated")
public ResponseEntity<List<AuthorSummaryDto>> getTopRatedAuthors(@RequestParam(defaultValue = "10") int limit) {
Pageable pageable = PageRequest.of(0, limit);
@@ -389,7 +422,6 @@ public class AuthorController {
author.setUrls(updateReq.getUrls());
}
if (updateReq.getRating() != null) {
System.out.println("DEBUG: Setting author rating via JSON: " + updateReq.getRating());
author.setAuthorRating(updateReq.getRating());
}
}
@@ -402,9 +434,6 @@ public class AuthorController {
dto.setNotes(author.getNotes());
dto.setAvatarImagePath(author.getAvatarImagePath());
// Debug logging for author rating
System.out.println("DEBUG: Converting author " + author.getName() +
" with rating: " + author.getAuthorRating());
dto.setAuthorRating(author.getAuthorRating());
dto.setUrls(author.getUrls());
@@ -415,7 +444,6 @@ public class AuthorController {
// Calculate and set average story rating
dto.setAverageStoryRating(authorService.calculateAverageStoryRating(author.getId()));
System.out.println("DEBUG: DTO authorRating set to: " + dto.getAuthorRating());
return dto;
}

View File

@@ -6,7 +6,9 @@ import com.storycove.entity.CollectionStory;
import com.storycove.entity.Story;
import com.storycove.entity.Tag;
import com.storycove.service.CollectionService;
import com.storycove.service.EPUBExportService;
import com.storycove.service.ImageService;
import com.storycove.service.ReadingTimeService;
import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -28,12 +30,18 @@ public class CollectionController {
private final CollectionService collectionService;
private final ImageService imageService;
private final ReadingTimeService readingTimeService;
private final EPUBExportService epubExportService;
@Autowired
public CollectionController(CollectionService collectionService,
ImageService imageService) {
ImageService imageService,
ReadingTimeService readingTimeService,
EPUBExportService epubExportService) {
this.collectionService = collectionService;
this.imageService = imageService;
this.readingTimeService = readingTimeService;
this.epubExportService = epubExportService;
}
/**
@@ -48,8 +56,6 @@ public class CollectionController {
@RequestParam(required = false) List<String> tags,
@RequestParam(defaultValue = "false") boolean archived) {
logger.info("COLLECTIONS: Search request - search='{}', tags={}, archived={}, page={}, limit={}",
search, tags, archived, page, limit);
// MANDATORY: Use Typesense for all search/filter operations
SearchResultDto<Collection> results = collectionService.searchCollections(search, tags, archived, page, limit);
@@ -86,13 +92,14 @@ public class CollectionController {
*/
@PostMapping
public ResponseEntity<Collection> createCollection(@Valid @RequestBody CreateCollectionRequest request) {
logger.info("Creating new collection: {}", request.getName());
Collection collection = collectionService.createCollection(
request.getName(),
request.getDescription(),
request.getTagNames(),
request.getStoryIds()
);
logger.info("Successfully created collection: {} (ID: {})", collection.getName(), collection.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(collection);
}
@@ -107,6 +114,7 @@ public class CollectionController {
@RequestParam(required = false) List<UUID> storyIds,
@RequestParam(required = false, name = "coverImage") MultipartFile coverImage) {
logger.info("Creating new collection with image: {}", name);
try {
// Create collection first
Collection collection = collectionService.createCollection(name, description, tags, storyIds);
@@ -120,6 +128,7 @@ public class CollectionController {
);
}
logger.info("Successfully created collection with image: {} (ID: {})", collection.getName(), collection.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(collection);
} catch (Exception e) {
@@ -152,7 +161,9 @@ public class CollectionController {
*/
@DeleteMapping("/{id}")
public ResponseEntity<Map<String, String>> deleteCollection(@PathVariable UUID id) {
logger.info("Deleting collection with ID: {}", id);
collectionService.deleteCollection(id);
logger.info("Successfully deleted collection with ID: {}", id);
return ResponseEntity.ok(Map.of("message", "Collection deleted successfully"));
}
@@ -270,6 +281,107 @@ public class CollectionController {
return ResponseEntity.ok(Map.of("message", "Cover removed successfully"));
}
/**
* POST /api/collections/reindex-typesense - Reindex all collections in Typesense
*/
@PostMapping("/reindex-typesense")
public ResponseEntity<Map<String, Object>> reindexCollectionsTypesense() {
try {
List<Collection> allCollections = collectionService.findAllWithTags();
// Collections are not indexed in search engine yet
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Collections indexing not yet implemented in Solr",
"count", allCollections.size()
));
} catch (Exception e) {
logger.error("Failed to reindex collections", e);
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", e.getMessage()
));
}
}
/**
* GET /api/collections/{id}/epub - Export collection as EPUB
*/
@GetMapping("/{id}/epub")
public ResponseEntity<org.springframework.core.io.Resource> exportCollectionAsEPUB(@PathVariable UUID id) {
logger.info("Exporting collection {} to EPUB", id);
try {
Collection collection = collectionService.findById(id);
List<Story> stories = collection.getCollectionStories().stream()
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
.map(cs -> cs.getStory())
.collect(java.util.stream.Collectors.toList());
if (stories.isEmpty()) {
logger.warn("Collection {} contains no stories for export", id);
return ResponseEntity.badRequest()
.body(null);
}
EPUBExportRequest request = new EPUBExportRequest();
request.setIncludeCoverImage(true);
request.setIncludeMetadata(true);
request.setIncludeReadingPosition(false); // Collections don't have reading positions
org.springframework.core.io.Resource resource = epubExportService.exportCollectionAsEPUB(id, request);
String filename = epubExportService.getCollectionEPUBFilename(collection);
logger.info("Successfully exported collection EPUB: {}", filename);
return ResponseEntity.ok()
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
.header("Content-Type", "application/epub+zip")
.body(resource);
} catch (Exception e) {
logger.error("Error exporting collection EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
/**
* POST /api/collections/{id}/epub - Export collection as EPUB with custom options
*/
@PostMapping("/{id}/epub")
public ResponseEntity<org.springframework.core.io.Resource> exportCollectionAsEPUBWithOptions(
@PathVariable UUID id,
@Valid @RequestBody EPUBExportRequest request) {
logger.info("Exporting collection {} to EPUB with custom options", id);
try {
Collection collection = collectionService.findById(id);
List<Story> stories = collection.getCollectionStories().stream()
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
.map(cs -> cs.getStory())
.collect(java.util.stream.Collectors.toList());
if (stories.isEmpty()) {
logger.warn("Collection {} contains no stories for export", id);
return ResponseEntity.badRequest()
.body(null);
}
org.springframework.core.io.Resource resource = epubExportService.exportCollectionAsEPUB(id, request);
String filename = epubExportService.getCollectionEPUBFilename(collection);
logger.info("Successfully exported collection EPUB with options: {}", filename);
return ResponseEntity.ok()
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
.header("Content-Type", "application/epub+zip")
.body(resource);
} catch (Exception e) {
logger.error("Error exporting collection EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
// Mapper methods
private CollectionDto mapToCollectionDto(Collection collection) {
@@ -290,6 +402,11 @@ public class CollectionController {
.toList());
}
// Map tag names for search results
if (collection.getTagNames() != null) {
dto.setTagNames(collection.getTagNames());
}
// Map collection stories (lightweight)
if (collection.getCollectionStories() != null) {
dto.setCollectionStories(collection.getCollectionStories().stream()
@@ -300,7 +417,7 @@ public class CollectionController {
// Set calculated properties
dto.setStoryCount(collection.getStoryCount());
dto.setTotalWordCount(collection.getTotalWordCount());
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime());
dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
dto.setAverageStoryRating(collection.getAverageStoryRating());
return dto;

View File

@@ -0,0 +1,246 @@
package com.storycove.controller;
import com.storycove.dto.HtmlSanitizationConfigDto;
import com.storycove.service.HtmlSanitizationService;
import com.storycove.service.ImageService;
import com.storycove.service.StoryService;
import com.storycove.entity.Story;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.List;
import java.util.HashMap;
import java.util.Optional;
import java.util.UUID;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
import java.io.IOException;
@RestController
@RequestMapping("/api/config")
public class ConfigController {
private static final Logger logger = LoggerFactory.getLogger(ConfigController.class);
private final HtmlSanitizationService htmlSanitizationService;
private final ImageService imageService;
private final StoryService storyService;
@Value("${app.reading.speed.default:200}")
private int defaultReadingSpeed;
@Autowired
public ConfigController(HtmlSanitizationService htmlSanitizationService, ImageService imageService, StoryService storyService) {
this.htmlSanitizationService = htmlSanitizationService;
this.imageService = imageService;
this.storyService = storyService;
}
/**
* Get the HTML sanitization configuration for frontend use
* This allows the frontend to use the same sanitization rules as the backend
*/
@GetMapping("/html-sanitization")
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
return ResponseEntity.ok(config);
}
/**
* Get application settings configuration
*/
@GetMapping("/settings")
public ResponseEntity<Map<String, Object>> getSettings() {
Map<String, Object> settings = Map.of(
"defaultReadingSpeed", defaultReadingSpeed
);
return ResponseEntity.ok(settings);
}
/**
* Get reading speed for calculation purposes
*/
@GetMapping("/reading-speed")
public ResponseEntity<Map<String, Integer>> getReadingSpeed() {
return ResponseEntity.ok(Map.of("wordsPerMinute", defaultReadingSpeed));
}
/**
* Preview orphaned content images cleanup (dry run)
*/
@PostMapping("/cleanup/images/preview")
public ResponseEntity<Map<String, Object>> previewImageCleanup() {
try {
logger.info("Starting image cleanup preview");
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(true);
// Create detailed file information with story relationships
logger.info("Processing {} orphaned files for detailed information", result.getOrphanedImages().size());
List<Map<String, Object>> orphanedFiles = result.getOrphanedImages().stream()
.map(filePath -> {
try {
return createFileInfo(filePath);
} catch (Exception e) {
logger.error("Error processing file {}: {}", filePath, e.getMessage());
// Return a basic error entry instead of failing completely
Map<String, Object> errorEntry = new HashMap<>();
errorEntry.put("filePath", filePath);
errorEntry.put("fileName", Paths.get(filePath).getFileName().toString());
errorEntry.put("fileSize", 0L);
errorEntry.put("formattedSize", "0 B");
errorEntry.put("storyId", "error");
errorEntry.put("storyTitle", null);
errorEntry.put("storyExists", false);
errorEntry.put("canAccessStory", false);
errorEntry.put("error", e.getMessage());
return errorEntry;
}
})
.toList();
// Use HashMap to avoid Map.of() null value issues
Map<String, Object> response = new HashMap<>();
response.put("success", true);
response.put("orphanedCount", result.getOrphanedImages().size());
response.put("totalSizeBytes", result.getTotalSizeBytes());
response.put("formattedSize", result.getFormattedSize());
response.put("foldersToDelete", result.getFoldersToDelete());
response.put("referencedImagesCount", result.getTotalReferencedImages());
response.put("errors", result.getErrors());
response.put("hasErrors", result.hasErrors());
response.put("dryRun", true);
response.put("orphanedFiles", orphanedFiles);
logger.info("Image cleanup preview completed successfully");
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Failed to preview image cleanup", e);
Map<String, Object> errorResponse = new HashMap<>();
errorResponse.put("success", false);
errorResponse.put("error", "Failed to preview image cleanup: " + (e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName()));
return ResponseEntity.status(500).body(errorResponse);
}
}
/**
* Execute orphaned content images cleanup
*/
@PostMapping("/cleanup/images/execute")
public ResponseEntity<Map<String, Object>> executeImageCleanup() {
try {
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(false);
Map<String, Object> response = Map.of(
"success", true,
"deletedCount", result.getOrphanedImages().size(),
"totalSizeBytes", result.getTotalSizeBytes(),
"formattedSize", result.getFormattedSize(),
"foldersDeleted", result.getFoldersToDelete(),
"referencedImagesCount", result.getTotalReferencedImages(),
"errors", result.getErrors(),
"hasErrors", result.hasErrors(),
"dryRun", false
);
return ResponseEntity.ok(response);
} catch (Exception e) {
return ResponseEntity.status(500).body(Map.of(
"success", false,
"error", "Failed to execute image cleanup: " + e.getMessage()
));
}
}
/**
* Create detailed file information for orphaned image including story relationship
*/
private Map<String, Object> createFileInfo(String filePath) {
try {
Path path = Paths.get(filePath);
String fileName = path.getFileName().toString();
long fileSize = Files.exists(path) ? Files.size(path) : 0;
// Extract story UUID from the path (content images are stored in /content/{storyId}/)
String storyId = extractStoryIdFromPath(filePath);
// Look up the story if we have a valid UUID
Story relatedStory = null;
if (storyId != null) {
try {
UUID storyUuid = UUID.fromString(storyId);
relatedStory = storyService.findById(storyUuid);
} catch (Exception e) {
logger.debug("Could not find story with ID {}: {}", storyId, e.getMessage());
}
}
Map<String, Object> fileInfo = new HashMap<>();
fileInfo.put("filePath", filePath);
fileInfo.put("fileName", fileName);
fileInfo.put("fileSize", fileSize);
fileInfo.put("formattedSize", formatBytes(fileSize));
fileInfo.put("storyId", storyId != null ? storyId : "unknown");
fileInfo.put("storyTitle", relatedStory != null ? relatedStory.getTitle() : null);
fileInfo.put("storyExists", relatedStory != null);
fileInfo.put("canAccessStory", relatedStory != null);
return fileInfo;
} catch (Exception e) {
logger.error("Error creating file info for {}: {}", filePath, e.getMessage());
Map<String, Object> errorInfo = new HashMap<>();
errorInfo.put("filePath", filePath);
errorInfo.put("fileName", Paths.get(filePath).getFileName().toString());
errorInfo.put("fileSize", 0L);
errorInfo.put("formattedSize", "0 B");
errorInfo.put("storyId", "error");
errorInfo.put("storyTitle", null);
errorInfo.put("storyExists", false);
errorInfo.put("canAccessStory", false);
errorInfo.put("error", e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName());
return errorInfo;
}
}
/**
* Extract story ID from content image file path
*/
private String extractStoryIdFromPath(String filePath) {
try {
// Content images are stored in: /path/to/uploads/content/{storyId}/filename.ext
Path path = Paths.get(filePath);
Path parent = path.getParent();
if (parent != null) {
String potentialUuid = parent.getFileName().toString();
// Basic UUID validation (36 characters with dashes in right places)
if (potentialUuid.length() == 36 &&
potentialUuid.charAt(8) == '-' &&
potentialUuid.charAt(13) == '-' &&
potentialUuid.charAt(18) == '-' &&
potentialUuid.charAt(23) == '-') {
return potentialUuid;
}
}
} catch (Exception e) {
// Invalid path or other error
}
return null;
}
/**
* Format file size in human readable format
*/
private String formatBytes(long bytes) {
if (bytes < 1024) return bytes + " B";
if (bytes < 1024 * 1024) return String.format("%.1f KB", bytes / 1024.0);
if (bytes < 1024 * 1024 * 1024) return String.format("%.1f MB", bytes / (1024.0 * 1024.0));
return String.format("%.1f GB", bytes / (1024.0 * 1024.0 * 1024.0));
}
}

View File

@@ -0,0 +1,154 @@
package com.storycove.controller;
import com.storycove.service.DatabaseManagementService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Map;
@RestController
@RequestMapping("/api/database")
public class DatabaseController {
@Autowired
private DatabaseManagementService databaseManagementService;
@PostMapping("/backup")
public ResponseEntity<Resource> backupDatabase() {
try {
Resource backup = databaseManagementService.createBackup();
String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String filename = "storycove_backup_" + timestamp + ".sql";
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.body(backup);
} catch (Exception e) {
throw new RuntimeException("Failed to create database backup: " + e.getMessage(), e);
}
}
@PostMapping("/restore")
public ResponseEntity<Map<String, Object>> restoreDatabase(@RequestParam("file") MultipartFile file) {
try {
if (file.isEmpty()) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No file uploaded"));
}
if (!file.getOriginalFilename().endsWith(".sql")) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid file type. Please upload a .sql file"));
}
databaseManagementService.restoreFromBackup(file.getInputStream());
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Database restored successfully from " + file.getOriginalFilename()
));
} catch (IOException e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to read backup file: " + e.getMessage()));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to restore database: " + e.getMessage()));
}
}
@PostMapping("/clear")
public ResponseEntity<Map<String, Object>> clearDatabase() {
try {
int deletedRecords = databaseManagementService.clearAllData();
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Database cleared successfully",
"deletedRecords", deletedRecords
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to clear database: " + e.getMessage()));
}
}
@PostMapping("/backup-complete")
public ResponseEntity<Resource> backupComplete() {
try {
Resource backup = databaseManagementService.createCompleteBackup();
String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String filename = "storycove_complete_backup_" + timestamp + ".zip";
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.header(HttpHeaders.CONTENT_TYPE, "application/zip")
.body(backup);
} catch (Exception e) {
throw new RuntimeException("Failed to create complete backup: " + e.getMessage(), e);
}
}
@PostMapping("/restore-complete")
public ResponseEntity<Map<String, Object>> restoreComplete(@RequestParam("file") MultipartFile file) {
System.err.println("Complete restore endpoint called with file: " + (file != null ? file.getOriginalFilename() : "null"));
try {
if (file.isEmpty()) {
System.err.println("File is empty - returning bad request");
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No file uploaded"));
}
if (!file.getOriginalFilename().endsWith(".zip")) {
System.err.println("Invalid file type: " + file.getOriginalFilename());
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid file type. Please upload a .zip file"));
}
System.err.println("File validation passed, calling restore service...");
databaseManagementService.restoreFromCompleteBackup(file.getInputStream());
System.err.println("Restore service completed successfully");
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Complete backup restored successfully from " + file.getOriginalFilename()
));
} catch (IOException e) {
System.err.println("IOException during restore: " + e.getMessage());
e.printStackTrace();
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to read backup file: " + e.getMessage()));
} catch (Exception e) {
System.err.println("Exception during restore: " + e.getMessage());
e.printStackTrace();
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to restore complete backup: " + e.getMessage()));
}
}
@PostMapping("/clear-complete")
public ResponseEntity<Map<String, Object>> clearComplete() {
try {
int deletedRecords = databaseManagementService.clearAllDataAndFiles();
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Database and files cleared successfully",
"deletedRecords", deletedRecords
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to clear database and files: " + e.getMessage()));
}
}
}

View File

@@ -1,6 +1,9 @@
package com.storycove.controller;
import com.storycove.service.ImageService;
import com.storycove.service.LibraryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
@@ -10,6 +13,7 @@ import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import jakarta.servlet.http.HttpServletRequest;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
@@ -19,11 +23,20 @@ import java.util.Map;
@RestController
@RequestMapping("/api/files")
public class FileController {
private static final Logger log = LoggerFactory.getLogger(FileController.class);
private final ImageService imageService;
private final LibraryService libraryService;
public FileController(ImageService imageService) {
public FileController(ImageService imageService, LibraryService libraryService) {
this.imageService = imageService;
this.libraryService = libraryService;
}
private String getCurrentLibraryId() {
String libraryId = libraryService.getCurrentLibraryId();
log.debug("FileController - Current Library ID: {}", libraryId);
return libraryId != null ? libraryId : "default";
}
@PostMapping("/upload/cover")
@@ -34,7 +47,11 @@ public class FileController {
Map<String, String> response = new HashMap<>();
response.put("message", "Cover uploaded successfully");
response.put("path", imagePath);
response.put("url", "/api/files/images/" + imagePath);
String currentLibraryId = getCurrentLibraryId();
String imageUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
response.put("url", imageUrl);
log.debug("Upload response - path: {}, url: {}", imagePath, imageUrl);
return ResponseEntity.ok(response);
} catch (IllegalArgumentException e) {
@@ -53,7 +70,8 @@ public class FileController {
Map<String, String> response = new HashMap<>();
response.put("message", "Avatar uploaded successfully");
response.put("path", imagePath);
response.put("url", "/api/files/images/" + imagePath);
String currentLibraryId = getCurrentLibraryId();
response.put("url", "/api/files/images/" + currentLibraryId + "/" + imagePath);
return ResponseEntity.ok(response);
} catch (IllegalArgumentException e) {
@@ -64,17 +82,18 @@ public class FileController {
}
}
@GetMapping("/images/**")
public ResponseEntity<Resource> serveImage(@RequestParam String path) {
@GetMapping("/images/{libraryId}/**")
public ResponseEntity<Resource> serveImage(@PathVariable String libraryId, HttpServletRequest request) {
try {
// Extract path from the URL
String imagePath = path.replace("/api/files/images/", "");
// Extract the full request path after /api/files/images/{libraryId}/
String requestURI = request.getRequestURI();
String imagePath = requestURI.replaceFirst(".*/api/files/images/" + libraryId + "/", "");
if (!imageService.imageExists(imagePath)) {
if (!imageService.imageExistsInLibrary(imagePath, libraryId)) {
return ResponseEntity.notFound().build();
}
Path fullPath = imageService.getImagePath(imagePath);
Path fullPath = imageService.getImagePathInLibrary(imagePath, libraryId);
Resource resource = new FileSystemResource(fullPath);
if (!resource.exists()) {

View File

@@ -1,31 +0,0 @@
package com.storycove.controller;
import com.storycove.dto.HtmlSanitizationConfigDto;
import com.storycove.service.HtmlSanitizationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/config")
public class HtmlSanitizationController {
private final HtmlSanitizationService htmlSanitizationService;
@Autowired
public HtmlSanitizationController(HtmlSanitizationService htmlSanitizationService) {
this.htmlSanitizationService = htmlSanitizationService;
}
/**
* Get the HTML sanitization configuration for frontend use
* This allows the frontend to use the same sanitization rules as the backend
*/
@GetMapping("/html-sanitization")
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
return ResponseEntity.ok(config);
}
}

View File

@@ -0,0 +1,242 @@
package com.storycove.controller;
import com.storycove.dto.LibraryDto;
import com.storycove.service.LibraryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/api/libraries")
public class LibraryController {
private static final Logger logger = LoggerFactory.getLogger(LibraryController.class);
private final LibraryService libraryService;
@Autowired
public LibraryController(LibraryService libraryService) {
this.libraryService = libraryService;
}
/**
* Get all available libraries (for settings UI)
*/
@GetMapping
public ResponseEntity<List<LibraryDto>> getAllLibraries() {
try {
List<LibraryDto> libraries = libraryService.getAllLibraries();
return ResponseEntity.ok(libraries);
} catch (Exception e) {
logger.error("Failed to get libraries", e);
return ResponseEntity.internalServerError().build();
}
}
/**
* Get current active library info
*/
@GetMapping("/current")
public ResponseEntity<LibraryDto> getCurrentLibrary() {
try {
var library = libraryService.getCurrentLibrary();
if (library == null) {
return ResponseEntity.noContent().build();
}
LibraryDto dto = new LibraryDto(
library.getId(),
library.getName(),
library.getDescription(),
true, // always active since it's current
library.isInitialized()
);
return ResponseEntity.ok(dto);
} catch (Exception e) {
logger.error("Failed to get current library", e);
return ResponseEntity.internalServerError().build();
}
}
/**
* Switch to a different library (requires re-authentication)
* This endpoint returns a switching status that the frontend can poll
*/
@PostMapping("/switch")
public ResponseEntity<Map<String, Object>> initiateLibrarySwitch(@RequestBody Map<String, String> request) {
try {
String password = request.get("password");
if (password == null || password.trim().isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "Password required"));
}
String libraryId = libraryService.authenticateAndGetLibrary(password);
if (libraryId == null) {
return ResponseEntity.status(401).body(Map.of("error", "Invalid password"));
}
// Check if already on this library
if (libraryId.equals(libraryService.getCurrentLibraryId())) {
return ResponseEntity.ok(Map.of(
"status", "already_active",
"message", "Already using this library"
));
}
// Initiate switch in background thread
new Thread(() -> {
try {
libraryService.switchToLibrary(libraryId);
logger.info("Library switch completed: {}", libraryId);
} catch (Exception e) {
logger.error("Library switch failed: {}", libraryId, e);
}
}).start();
return ResponseEntity.ok(Map.of(
"status", "switching",
"targetLibrary", libraryId,
"message", "Switching to library, please wait..."
));
} catch (Exception e) {
logger.error("Failed to initiate library switch", e);
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
}
}
/**
* Check library switch status
*/
@GetMapping("/switch/status")
public ResponseEntity<Map<String, Object>> getLibrarySwitchStatus() {
try {
var currentLibrary = libraryService.getCurrentLibrary();
boolean isReady = currentLibrary != null;
Map<String, Object> response = new HashMap<>();
response.put("ready", isReady);
if (isReady) {
response.put("currentLibrary", currentLibrary.getId());
response.put("currentLibraryName", currentLibrary.getName());
} else {
response.put("currentLibrary", null);
response.put("currentLibraryName", null);
}
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Failed to get switch status", e);
return ResponseEntity.ok(Map.of("ready", false, "error", "Status check failed"));
}
}
/**
* Change password for current library
*/
@PostMapping("/password")
public ResponseEntity<Map<String, Object>> changePassword(@RequestBody Map<String, String> request) {
try {
String currentPassword = request.get("currentPassword");
String newPassword = request.get("newPassword");
if (currentPassword == null || newPassword == null) {
return ResponseEntity.badRequest().body(Map.of("error", "Current and new passwords required"));
}
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId == null) {
return ResponseEntity.badRequest().body(Map.of("error", "No active library"));
}
boolean success = libraryService.changeLibraryPassword(currentLibraryId, currentPassword, newPassword);
if (success) {
return ResponseEntity.ok(Map.of("success", true, "message", "Password changed successfully"));
} else {
return ResponseEntity.badRequest().body(Map.of("error", "Current password is incorrect"));
}
} catch (Exception e) {
logger.error("Failed to change password", e);
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
}
}
/**
* Create a new library
*/
@PostMapping("/create")
public ResponseEntity<Map<String, Object>> createLibrary(@RequestBody Map<String, String> request) {
try {
String name = request.get("name");
String description = request.get("description");
String password = request.get("password");
if (name == null || name.trim().isEmpty() || password == null || password.trim().isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "Name and password are required"));
}
var newLibrary = libraryService.createNewLibrary(name.trim(), description, password);
return ResponseEntity.ok(Map.of(
"success", true,
"library", Map.of(
"id", newLibrary.getId(),
"name", newLibrary.getName(),
"description", newLibrary.getDescription()
),
"message", "Library created successfully. You can now log in with the new password to access it."
));
} catch (Exception e) {
logger.error("Failed to create library", e);
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
}
}
/**
* Update library metadata (name and description)
*/
@PutMapping("/{libraryId}/metadata")
public ResponseEntity<Map<String, Object>> updateLibraryMetadata(
@PathVariable String libraryId,
@RequestBody Map<String, String> updates) {
try {
String newName = updates.get("name");
String newDescription = updates.get("description");
if (newName == null || newName.trim().isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "Library name is required"));
}
// Update the library
libraryService.updateLibraryMetadata(libraryId, newName, newDescription);
// Return updated library info
LibraryDto updatedLibrary = libraryService.getLibraryById(libraryId);
if (updatedLibrary != null) {
Map<String, Object> response = new HashMap<>();
response.put("success", true);
response.put("message", "Library metadata updated successfully");
response.put("library", updatedLibrary);
return ResponseEntity.ok(response);
} else {
return ResponseEntity.notFound().build();
}
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest().body(Map.of("error", e.getMessage()));
} catch (Exception e) {
logger.error("Failed to update library metadata for {}: {}", libraryId, e.getMessage(), e);
return ResponseEntity.internalServerError().body(Map.of("error", "Failed to update library metadata"));
}
}
}

View File

@@ -2,7 +2,7 @@ package com.storycove.controller;
import com.storycove.entity.Story;
import com.storycove.service.StoryService;
import com.storycove.service.TypesenseService;
import com.storycove.service.SearchServiceAdapter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@@ -14,25 +14,19 @@ import java.util.Map;
@RequestMapping("/api/search")
public class SearchController {
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
private final StoryService storyService;
public SearchController(@Autowired(required = false) TypesenseService typesenseService, StoryService storyService) {
this.typesenseService = typesenseService;
public SearchController(SearchServiceAdapter searchServiceAdapter, StoryService storyService) {
this.searchServiceAdapter = searchServiceAdapter;
this.storyService = storyService;
}
@PostMapping("/reindex")
public ResponseEntity<?> reindexAllStories() {
if (typesenseService == null) {
return ResponseEntity.badRequest().body(Map.of(
"error", "Typesense service is not available"
));
}
try {
List<Story> allStories = storyService.findAll();
typesenseService.reindexAllStories(allStories);
searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok(Map.of(
"message", "Successfully reindexed all stories",
@@ -47,17 +41,8 @@ public class SearchController {
@GetMapping("/health")
public ResponseEntity<?> searchHealthCheck() {
if (typesenseService == null) {
return ResponseEntity.ok(Map.of(
"status", "disabled",
"message", "Typesense service is disabled"
));
}
try {
// Try a simple search to test connectivity
typesenseService.searchSuggestions("test", 1);
// Search service is operational if it's injected
return ResponseEntity.ok(Map.of(
"status", "healthy",
"message", "Search service is operational"

View File

@@ -12,7 +12,6 @@ import com.storycove.service.*;
import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
@@ -25,6 +24,7 @@ import org.springframework.web.multipart.MultipartFile;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
@@ -39,8 +39,13 @@ public class StoryController {
private final SeriesService seriesService;
private final HtmlSanitizationService sanitizationService;
private final ImageService imageService;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
private final CollectionService collectionService;
private final ReadingTimeService readingTimeService;
private final EPUBImportService epubImportService;
private final EPUBExportService epubExportService;
private final AsyncImageProcessingService asyncImageProcessingService;
private final ImageProcessingProgressService progressService;
public StoryController(StoryService storyService,
AuthorService authorService,
@@ -48,14 +53,24 @@ public class StoryController {
HtmlSanitizationService sanitizationService,
ImageService imageService,
CollectionService collectionService,
@Autowired(required = false) TypesenseService typesenseService) {
SearchServiceAdapter searchServiceAdapter,
ReadingTimeService readingTimeService,
EPUBImportService epubImportService,
EPUBExportService epubExportService,
AsyncImageProcessingService asyncImageProcessingService,
ImageProcessingProgressService progressService) {
this.storyService = storyService;
this.authorService = authorService;
this.seriesService = seriesService;
this.sanitizationService = sanitizationService;
this.imageService = imageService;
this.collectionService = collectionService;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
this.readingTimeService = readingTimeService;
this.epubImportService = epubImportService;
this.epubExportService = epubExportService;
this.asyncImageProcessingService = asyncImageProcessingService;
this.progressService = progressService;
}
@GetMapping
@@ -75,31 +90,100 @@ public class StoryController {
return ResponseEntity.ok(storyDtos);
}
@GetMapping("/random")
public ResponseEntity<StorySummaryDto> getRandomStory(
@RequestParam(required = false) String searchQuery,
@RequestParam(required = false) List<String> tags,
@RequestParam(required = false) Long seed,
// Advanced filters
@RequestParam(required = false) Integer minWordCount,
@RequestParam(required = false) Integer maxWordCount,
@RequestParam(required = false) String createdAfter,
@RequestParam(required = false) String createdBefore,
@RequestParam(required = false) String lastReadAfter,
@RequestParam(required = false) String lastReadBefore,
@RequestParam(required = false) Integer minRating,
@RequestParam(required = false) Integer maxRating,
@RequestParam(required = false) Boolean unratedOnly,
@RequestParam(required = false) String readingStatus,
@RequestParam(required = false) Boolean hasReadingProgress,
@RequestParam(required = false) Boolean hasCoverImage,
@RequestParam(required = false) String sourceDomain,
@RequestParam(required = false) String seriesFilter,
@RequestParam(required = false) Integer minTagCount,
@RequestParam(required = false) Boolean popularOnly,
@RequestParam(required = false) Boolean hiddenGemsOnly) {
logger.info("Getting random story with filters - searchQuery: {}, tags: {}, seed: {}",
searchQuery, tags, seed);
Optional<Story> randomStory = storyService.findRandomStory(searchQuery, tags, seed,
minWordCount, maxWordCount, createdAfter, createdBefore, lastReadAfter, lastReadBefore,
minRating, maxRating, unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
if (randomStory.isPresent()) {
StorySummaryDto storyDto = convertToSummaryDto(randomStory.get());
return ResponseEntity.ok(storyDto);
} else {
return ResponseEntity.noContent().build(); // 204 No Content when no stories match filters
}
}
@GetMapping("/{id}")
public ResponseEntity<StoryDto> getStoryById(@PathVariable UUID id) {
Story story = storyService.findById(id);
return ResponseEntity.ok(convertToDto(story));
}
@GetMapping("/{id}/read")
public ResponseEntity<StoryReadingDto> getStoryForReading(@PathVariable UUID id) {
logger.info("Getting story {} for reading", id);
Story story = storyService.findById(id);
return ResponseEntity.ok(convertToReadingDto(story));
}
@PostMapping
public ResponseEntity<StoryDto> createStory(@Valid @RequestBody CreateStoryRequest request) {
logger.info("Creating new story: {}", request.getTitle());
Story story = new Story();
updateStoryFromRequest(story, request);
Story savedStory = storyService.createWithTagNames(story, request.getTagNames());
// Process external images in content after saving
savedStory = processExternalImagesIfNeeded(savedStory);
logger.info("Successfully created story: {} (ID: {})", savedStory.getTitle(), savedStory.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedStory));
}
@PutMapping("/{id}")
public ResponseEntity<StoryDto> updateStory(@PathVariable UUID id,
@Valid @RequestBody UpdateStoryRequest request) {
logger.info("Updating story: {} (ID: {})", request.getTitle(), id);
// Handle author creation/lookup at controller level before calling service
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty() && request.getAuthorId() == null) {
Author author = findOrCreateAuthor(request.getAuthorName().trim());
request.setAuthorId(author.getId());
request.setAuthorName(null); // Clear author name since we now have the ID
}
Story updatedStory = storyService.updateWithTagNames(id, request);
// Process external images in content after saving
updatedStory = processExternalImagesIfNeeded(updatedStory);
logger.info("Successfully updated story: {}", updatedStory.getTitle());
return ResponseEntity.ok(convertToDto(updatedStory));
}
@DeleteMapping("/{id}")
public ResponseEntity<?> deleteStory(@PathVariable UUID id) {
logger.info("Deleting story with ID: {}", id);
storyService.delete(id);
logger.info("Successfully deleted story with ID: {}", id);
return ResponseEntity.ok(Map.of("message", "Story deleted successfully"));
}
@@ -143,15 +227,58 @@ public class StoryController {
return ResponseEntity.ok(convertToDto(story));
}
@PostMapping("/{id}/reading-progress")
public ResponseEntity<StoryDto> updateReadingProgress(@PathVariable UUID id, @RequestBody ReadingProgressRequest request) {
logger.info("Updating reading progress for story {} to position {}", id, request.getPosition());
Story story = storyService.updateReadingProgress(id, request.getPosition());
return ResponseEntity.ok(convertToDto(story));
}
@PostMapping("/{id}/reading-status")
public ResponseEntity<StoryDto> updateReadingStatus(@PathVariable UUID id, @RequestBody ReadingStatusRequest request) {
logger.info("Updating reading status for story {} to {}", id, request.getIsRead() ? "read" : "unread");
Story story = storyService.updateReadingStatus(id, request.getIsRead());
return ResponseEntity.ok(convertToDto(story));
}
@PostMapping("/{id}/process-content-images")
public ResponseEntity<Map<String, Object>> processContentImages(@PathVariable UUID id, @RequestBody ProcessContentImagesRequest request) {
logger.info("Processing content images for story {}", id);
try {
// Process the HTML content to download and replace image URLs
ImageService.ContentImageProcessingResult result = imageService.processContentImages(request.getHtmlContent(), id);
// If there are warnings, let the client decide whether to proceed
if (result.hasWarnings()) {
return ResponseEntity.ok(Map.of(
"processedContent", result.getProcessedContent(),
"warnings", result.getWarnings(),
"downloadedImages", result.getDownloadedImages(),
"hasWarnings", true
));
}
// Success - no warnings
return ResponseEntity.ok(Map.of(
"processedContent", result.getProcessedContent(),
"downloadedImages", result.getDownloadedImages(),
"hasWarnings", false
));
} catch (Exception e) {
logger.error("Failed to process content images for story {}", id, e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to process content images: " + e.getMessage()));
}
}
@PostMapping("/reindex")
public ResponseEntity<String> manualReindex() {
if (typesenseService == null) {
return ResponseEntity.ok("Typesense is not enabled, no reindexing performed");
}
try {
List<Story> allStories = storyService.findAllWithAssociations();
typesenseService.reindexAllStories(allStories);
searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok("Successfully reindexed " + allStories.size() + " stories");
} catch (Exception e) {
return ResponseEntity.status(500).body("Failed to reindex stories: " + e.getMessage());
@@ -162,7 +289,7 @@ public class StoryController {
public ResponseEntity<Map<String, Object>> reindexStoriesTypesense() {
try {
List<Story> allStories = storyService.findAllWithAssociations();
typesenseService.reindexAllStories(allStories);
searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Reindexed " + allStories.size() + " stories",
@@ -182,7 +309,7 @@ public class StoryController {
try {
// This will delete the existing collection and recreate it with correct schema
List<Story> allStories = storyService.findAllWithAssociations();
typesenseService.reindexAllStories(allStories);
searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Recreated stories collection and indexed " + allStories.size() + " stories",
@@ -207,17 +334,55 @@ public class StoryController {
@RequestParam(required = false) Integer minRating,
@RequestParam(required = false) Integer maxRating,
@RequestParam(required = false) String sortBy,
@RequestParam(required = false) String sortDir) {
@RequestParam(required = false) String sortDir,
@RequestParam(required = false) List<String> facetBy,
// Advanced filters
@RequestParam(required = false) Integer minWordCount,
@RequestParam(required = false) Integer maxWordCount,
@RequestParam(required = false) String createdAfter,
@RequestParam(required = false) String createdBefore,
@RequestParam(required = false) String lastReadAfter,
@RequestParam(required = false) String lastReadBefore,
@RequestParam(required = false) Boolean unratedOnly,
@RequestParam(required = false) String readingStatus,
@RequestParam(required = false) Boolean hasReadingProgress,
@RequestParam(required = false) Boolean hasCoverImage,
@RequestParam(required = false) String sourceDomain,
@RequestParam(required = false) String seriesFilter,
@RequestParam(required = false) Integer minTagCount,
@RequestParam(required = false) Boolean popularOnly,
@RequestParam(required = false) Boolean hiddenGemsOnly) {
logger.info("CONTROLLER DEBUG: Search request - query='{}', tags={}, authors={}", query, tags, authors);
if (typesenseService != null) {
SearchResultDto<StorySearchDto> results = typesenseService.searchStories(
query, page, size, authors, tags, minRating, maxRating, sortBy, sortDir);
// Use SearchServiceAdapter to handle routing between search engines
try {
// Convert authors list to single author string (for now, use first author)
String authorFilter = (authors != null && !authors.isEmpty()) ? authors.get(0) : null;
// DEBUG: Log all received parameters
logger.info("CONTROLLER DEBUG - Received parameters:");
logger.info(" readingStatus: '{}'", readingStatus);
logger.info(" seriesFilter: '{}'", seriesFilter);
logger.info(" hasReadingProgress: {}", hasReadingProgress);
logger.info(" hasCoverImage: {}", hasCoverImage);
logger.info(" createdAfter: '{}'", createdAfter);
logger.info(" lastReadAfter: '{}'", lastReadAfter);
logger.info(" unratedOnly: {}", unratedOnly);
SearchResultDto<StorySearchDto> results = searchServiceAdapter.searchStories(
query, tags, authorFilter, seriesFilter, minWordCount, maxWordCount,
minRating != null ? minRating.floatValue() : null,
null, // isRead - now handled by readingStatus advanced filter
null, // isFavorite - now handled by readingStatus advanced filter
sortBy, sortDir, page, size, facetBy,
// Advanced filters
createdAfter, createdBefore, lastReadAfter, lastReadBefore,
unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
return ResponseEntity.ok(results);
} else {
// Fallback to basic search if Typesense is not available
return ResponseEntity.badRequest().body(null);
} catch (Exception e) {
logger.error("Search failed", e);
return ResponseEntity.internalServerError().body(null);
}
}
@@ -226,10 +391,12 @@ public class StoryController {
@RequestParam String query,
@RequestParam(defaultValue = "5") int limit) {
if (typesenseService != null) {
List<String> suggestions = typesenseService.searchSuggestions(query, limit);
// Use SearchServiceAdapter to handle routing between search engines
try {
List<String> suggestions = searchServiceAdapter.getTagSuggestions(query, limit);
return ResponseEntity.ok(suggestions);
} else {
} catch (Exception e) {
logger.error("Failed to get search suggestions", e);
return ResponseEntity.ok(new ArrayList<>());
}
}
@@ -319,7 +486,9 @@ public class StoryController {
story.setTitle(createReq.getTitle());
story.setSummary(createReq.getSummary());
story.setDescription(createReq.getDescription());
story.setContentHtml(sanitizationService.sanitize(createReq.getContentHtml()));
story.setSourceUrl(createReq.getSourceUrl());
story.setVolume(createReq.getVolume());
@@ -353,25 +522,55 @@ public class StoryController {
story.setDescription(updateReq.getDescription());
}
if (updateReq.getContentHtml() != null) {
story.setContentHtml(sanitizationService.sanitize(updateReq.getContentHtml()));
logger.info("Content before sanitization (length: {}): {}",
updateReq.getContentHtml().length(),
updateReq.getContentHtml().substring(0, Math.min(500, updateReq.getContentHtml().length())));
String sanitizedContent = sanitizationService.sanitize(updateReq.getContentHtml());
logger.info("Content after sanitization (length: {}): {}",
sanitizedContent.length(),
sanitizedContent.substring(0, Math.min(500, sanitizedContent.length())));
story.setContentHtml(sanitizedContent);
}
if (updateReq.getSourceUrl() != null) {
story.setSourceUrl(updateReq.getSourceUrl());
}
if (updateReq.getVolume() != null) {
story.setVolume(updateReq.getVolume());
}
// Volume will be handled in series logic below
// Handle author - either by ID or by name
if (updateReq.getAuthorId() != null) {
Author author = authorService.findById(updateReq.getAuthorId());
story.setAuthor(author);
} else if (updateReq.getAuthorName() != null && !updateReq.getAuthorName().trim().isEmpty()) {
Author author = findOrCreateAuthor(updateReq.getAuthorName().trim());
story.setAuthor(author);
}
// Handle series - either by ID or by name
// Handle series - either by ID, by name, or remove from series
if (updateReq.getSeriesId() != null) {
Series series = seriesService.findById(updateReq.getSeriesId());
story.setSeries(series);
} else if (updateReq.getSeriesName() != null && !updateReq.getSeriesName().trim().isEmpty()) {
} else if (updateReq.getSeriesName() != null) {
logger.info("Processing series update: seriesName='{}', isEmpty={}", updateReq.getSeriesName(), updateReq.getSeriesName().trim().isEmpty());
if (updateReq.getSeriesName().trim().isEmpty()) {
// Empty series name means remove from series
logger.info("Removing story from series");
if (story.getSeries() != null) {
story.getSeries().removeStory(story);
story.setSeries(null);
story.setVolume(null);
logger.info("Story removed from series");
}
} else {
// Non-empty series name means add to series
logger.info("Adding story to series: '{}', volume: {}", updateReq.getSeriesName().trim(), updateReq.getVolume());
Series series = seriesService.findOrCreate(updateReq.getSeriesName().trim());
story.setSeries(series);
// Set volume only if series is being set
if (updateReq.getVolume() != null) {
story.setVolume(updateReq.getVolume());
logger.info("Story added to series: {} with volume: {}", series.getName(), updateReq.getVolume());
} else {
logger.info("Story added to series: {} with no volume", series.getName());
}
}
}
// Note: Tags are now handled in StoryService.updateWithTagNames()
@@ -385,7 +584,6 @@ public class StoryController {
dto.setSummary(story.getSummary());
dto.setDescription(story.getDescription());
dto.setContentHtml(story.getContentHtml());
dto.setContentPlain(story.getContentPlain());
dto.setSourceUrl(story.getSourceUrl());
dto.setCoverPath(story.getCoverPath());
dto.setWordCount(story.getWordCount());
@@ -394,6 +592,48 @@ public class StoryController {
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
}
if (story.getSeries() != null) {
dto.setSeriesId(story.getSeries().getId());
dto.setSeriesName(story.getSeries().getName());
}
dto.setTags(story.getTags().stream()
.map(this::convertTagToDto)
.collect(Collectors.toList()));
return dto;
}
private StoryReadingDto convertToReadingDto(Story story) {
StoryReadingDto dto = new StoryReadingDto();
dto.setId(story.getId());
dto.setTitle(story.getTitle());
dto.setSummary(story.getSummary());
dto.setDescription(story.getDescription());
dto.setContentHtml(story.getContentHtml());
dto.setSourceUrl(story.getSourceUrl());
dto.setCoverPath(story.getCoverPath());
dto.setWordCount(story.getWordCount());
dto.setRating(story.getRating());
dto.setVolume(story.getVolume());
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
@@ -426,6 +666,11 @@ public class StoryController {
dto.setUpdatedAt(story.getUpdatedAt());
dto.setPartOfSeries(story.isPartOfSeries());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
@@ -447,8 +692,11 @@ public class StoryController {
TagDto tagDto = new TagDto();
tagDto.setId(tag.getId());
tagDto.setName(tag.getName());
tagDto.setColor(tag.getColor());
tagDto.setDescription(tag.getDescription());
tagDto.setCreatedAt(tag.getCreatedAt());
// storyCount can be set if needed, but it might be expensive to calculate for each tag
tagDto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
tagDto.setAliasCount(tag.getAliases() != null ? tag.getAliases().size() : 0);
return tagDto;
}
@@ -467,12 +715,195 @@ public class StoryController {
// to avoid circular references and keep it lightweight
dto.setStoryCount(collection.getStoryCount());
dto.setTotalWordCount(collection.getTotalWordCount());
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime());
dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
dto.setAverageStoryRating(collection.getAverageStoryRating());
return dto;
}
private Story processExternalImagesIfNeeded(Story story) {
try {
if (story.getContentHtml() != null && !story.getContentHtml().trim().isEmpty()) {
logger.debug("Starting async image processing for story: {}", story.getId());
// Start async processing - this returns immediately
asyncImageProcessingService.processStoryImagesAsync(story.getId(), story.getContentHtml());
logger.info("Async image processing started for story: {}", story.getId());
}
} catch (Exception e) {
logger.error("Failed to start async image processing for story {}: {}",
story.getId(), e.getMessage(), e);
// Don't fail the entire operation if image processing fails
}
return story;
}
@GetMapping("/{id}/image-processing-progress")
public ResponseEntity<Map<String, Object>> getImageProcessingProgress(@PathVariable UUID id) {
ImageProcessingProgressService.ImageProcessingProgress progress = progressService.getProgress(id);
if (progress == null) {
return ResponseEntity.ok(Map.of(
"isProcessing", false,
"message", "No active image processing"
));
}
Map<String, Object> response = Map.of(
"isProcessing", !progress.isCompleted(),
"totalImages", progress.getTotalImages(),
"processedImages", progress.getProcessedImages(),
"currentImageUrl", progress.getCurrentImageUrl() != null ? progress.getCurrentImageUrl() : "",
"status", progress.getStatus(),
"progressPercentage", progress.getProgressPercentage(),
"completed", progress.isCompleted(),
"error", progress.getErrorMessage() != null ? progress.getErrorMessage() : ""
);
return ResponseEntity.ok(response);
}
@GetMapping("/check-duplicate")
public ResponseEntity<Map<String, Object>> checkDuplicate(
@RequestParam String title,
@RequestParam String authorName) {
try {
List<Story> duplicates = storyService.findPotentialDuplicates(title, authorName);
Map<String, Object> response = Map.of(
"hasDuplicates", !duplicates.isEmpty(),
"count", duplicates.size(),
"duplicates", duplicates.stream()
.map(story -> Map.of(
"id", story.getId(),
"title", story.getTitle(),
"authorName", story.getAuthor() != null ? story.getAuthor().getName() : "",
"createdAt", story.getCreatedAt()
))
.collect(Collectors.toList())
);
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Error checking for duplicates", e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to check for duplicates"));
}
}
// EPUB Import endpoint
@PostMapping("/epub/import")
public ResponseEntity<EPUBImportResponse> importEPUB(
@RequestParam("file") MultipartFile file,
@RequestParam(required = false) UUID authorId,
@RequestParam(required = false) String authorName,
@RequestParam(required = false) UUID seriesId,
@RequestParam(required = false) String seriesName,
@RequestParam(required = false) Integer seriesVolume,
@RequestParam(required = false) List<String> tags,
@RequestParam(defaultValue = "true") Boolean preserveReadingPosition,
@RequestParam(defaultValue = "false") Boolean overwriteExisting,
@RequestParam(defaultValue = "true") Boolean createMissingAuthor,
@RequestParam(defaultValue = "true") Boolean createMissingSeries) {
logger.info("Importing EPUB file: {}", file.getOriginalFilename());
EPUBImportRequest request = new EPUBImportRequest();
request.setEpubFile(file);
request.setAuthorId(authorId);
request.setAuthorName(authorName);
request.setSeriesId(seriesId);
request.setSeriesName(seriesName);
request.setSeriesVolume(seriesVolume);
request.setTags(tags);
request.setPreserveReadingPosition(preserveReadingPosition);
request.setOverwriteExisting(overwriteExisting);
request.setCreateMissingAuthor(createMissingAuthor);
request.setCreateMissingSeries(createMissingSeries);
try {
EPUBImportResponse response = epubImportService.importEPUB(request);
if (response.isSuccess()) {
logger.info("Successfully imported EPUB: {} (Story ID: {})",
response.getStoryTitle(), response.getStoryId());
return ResponseEntity.ok(response);
} else {
logger.warn("EPUB import failed: {}", response.getMessage());
return ResponseEntity.badRequest().body(response);
}
} catch (Exception e) {
logger.error("Error importing EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(EPUBImportResponse.error("Internal server error: " + e.getMessage()));
}
}
// EPUB Export endpoint
@PostMapping("/epub/export")
public ResponseEntity<org.springframework.core.io.Resource> exportEPUB(
@Valid @RequestBody EPUBExportRequest request) {
logger.info("Exporting story {} to EPUB", request.getStoryId());
try {
if (!epubExportService.canExportStory(request.getStoryId())) {
return ResponseEntity.badRequest().build();
}
org.springframework.core.io.Resource resource = epubExportService.exportStoryAsEPUB(request);
Story story = storyService.findById(request.getStoryId());
String filename = epubExportService.getEPUBFilename(story);
logger.info("Successfully exported EPUB: {}", filename);
return ResponseEntity.ok()
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
.header("Content-Type", "application/epub+zip")
.body(resource);
} catch (Exception e) {
logger.error("Error exporting EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
// EPUB Export by story ID (GET endpoint)
@GetMapping("/{id}/epub")
public ResponseEntity<org.springframework.core.io.Resource> exportStoryAsEPUB(@PathVariable UUID id) {
logger.info("Exporting story {} to EPUB via GET", id);
EPUBExportRequest request = new EPUBExportRequest(id);
return exportEPUB(request);
}
// Validate EPUB file
@PostMapping("/epub/validate")
public ResponseEntity<Map<String, Object>> validateEPUBFile(@RequestParam("file") MultipartFile file) {
logger.info("Validating EPUB file: {}", file.getOriginalFilename());
try {
List<String> errors = epubImportService.validateEPUBFile(file);
Map<String, Object> response = Map.of(
"valid", errors.isEmpty(),
"errors", errors,
"filename", file.getOriginalFilename(),
"size", file.getSize()
);
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Error validating EPUB file: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to validate EPUB file"));
}
}
// Request DTOs
public static class CreateStoryRequest {
private String title;
@@ -520,6 +951,7 @@ public class StoryController {
private String sourceUrl;
private Integer volume;
private UUID authorId;
private String authorName;
private UUID seriesId;
private String seriesName;
private List<String> tagNames;
@@ -539,6 +971,8 @@ public class StoryController {
public void setVolume(Integer volume) { this.volume = volume; }
public UUID getAuthorId() { return authorId; }
public void setAuthorId(UUID authorId) { this.authorId = authorId; }
public String getAuthorName() { return authorName; }
public void setAuthorName(String authorName) { this.authorName = authorName; }
public UUID getSeriesId() { return seriesId; }
public void setSeriesId(UUID seriesId) { this.seriesId = seriesId; }
public String getSeriesName() { return seriesName; }

View File

@@ -1,9 +1,13 @@
package com.storycove.controller;
import com.storycove.dto.TagDto;
import com.storycove.dto.TagAliasDto;
import com.storycove.entity.Tag;
import com.storycove.entity.TagAlias;
import com.storycove.service.TagService;
import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
@@ -21,6 +25,7 @@ import java.util.stream.Collectors;
@RequestMapping("/api/tags")
public class TagController {
private static final Logger logger = LoggerFactory.getLogger(TagController.class);
private final TagService tagService;
public TagController(TagService tagService) {
@@ -54,6 +59,8 @@ public class TagController {
public ResponseEntity<TagDto> createTag(@Valid @RequestBody CreateTagRequest request) {
Tag tag = new Tag();
tag.setName(request.getName());
tag.setColor(request.getColor());
tag.setDescription(request.getDescription());
Tag savedTag = tagService.create(tag);
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedTag));
@@ -66,6 +73,12 @@ public class TagController {
if (request.getName() != null) {
existingTag.setName(request.getName());
}
if (request.getColor() != null) {
existingTag.setColor(request.getColor());
}
if (request.getDescription() != null) {
existingTag.setDescription(request.getDescription());
}
Tag updatedTag = tagService.update(id, existingTag);
return ResponseEntity.ok(convertToDto(updatedTag));
@@ -95,7 +108,7 @@ public class TagController {
@RequestParam String query,
@RequestParam(defaultValue = "10") int limit) {
List<Tag> tags = tagService.findByNameStartingWith(query, limit);
List<Tag> tags = tagService.findByNameOrAliasStartingWith(query, limit);
List<TagDto> tagDtos = tags.stream().map(this::convertToDto).collect(Collectors.toList());
return ResponseEntity.ok(tagDtos);
@@ -132,29 +145,257 @@ public class TagController {
return ResponseEntity.ok(stats);
}
@GetMapping("/collections")
public ResponseEntity<List<TagDto>> getTagsUsedByCollections() {
List<Tag> tags = tagService.findTagsUsedByCollections();
List<TagDto> tagDtos = tags.stream()
.map(this::convertToDtoWithCollectionCount)
.collect(Collectors.toList());
return ResponseEntity.ok(tagDtos);
}
// Tag alias endpoints
@PostMapping("/{tagId}/aliases")
public ResponseEntity<TagAliasDto> addAlias(@PathVariable UUID tagId,
@RequestBody Map<String, String> request) {
String aliasName = request.get("aliasName");
if (aliasName == null || aliasName.trim().isEmpty()) {
return ResponseEntity.badRequest().build();
}
try {
TagAlias alias = tagService.addAlias(tagId, aliasName.trim());
TagAliasDto dto = new TagAliasDto();
dto.setId(alias.getId());
dto.setAliasName(alias.getAliasName());
dto.setCanonicalTagId(alias.getCanonicalTag().getId());
dto.setCanonicalTagName(alias.getCanonicalTag().getName());
dto.setCreatedFromMerge(alias.getCreatedFromMerge());
dto.setCreatedAt(alias.getCreatedAt());
return ResponseEntity.status(HttpStatus.CREATED).body(dto);
} catch (Exception e) {
return ResponseEntity.badRequest().build();
}
}
@DeleteMapping("/{tagId}/aliases/{aliasId}")
public ResponseEntity<?> removeAlias(@PathVariable UUID tagId, @PathVariable UUID aliasId) {
try {
tagService.removeAlias(tagId, aliasId);
return ResponseEntity.ok(Map.of("message", "Alias removed successfully"));
} catch (Exception e) {
return ResponseEntity.badRequest().body(Map.of("error", e.getMessage()));
}
}
@GetMapping("/resolve/{name}")
public ResponseEntity<TagDto> resolveTag(@PathVariable String name) {
try {
Tag resolvedTag = tagService.resolveTagByName(name);
if (resolvedTag != null) {
return ResponseEntity.ok(convertToDto(resolvedTag));
} else {
return ResponseEntity.notFound().build();
}
} catch (Exception e) {
return ResponseEntity.notFound().build();
}
}
@PostMapping("/merge")
public ResponseEntity<?> mergeTags(@Valid @RequestBody MergeTagsRequest request) {
try {
Tag resultTag = tagService.mergeTags(request.getSourceTagUUIDs(), request.getTargetTagUUID());
return ResponseEntity.ok(convertToDto(resultTag));
} catch (Exception e) {
logger.error("Failed to merge tags", e);
String errorMessage = e.getMessage() != null ? e.getMessage() : "Unknown error occurred";
return ResponseEntity.badRequest().body(Map.of("error", errorMessage));
}
}
@PostMapping("/merge/preview")
public ResponseEntity<?> previewMerge(@Valid @RequestBody MergeTagsRequest request) {
try {
MergePreviewResponse preview = tagService.previewMerge(request.getSourceTagUUIDs(), request.getTargetTagUUID());
return ResponseEntity.ok(preview);
} catch (Exception e) {
logger.error("Failed to preview merge", e);
String errorMessage = e.getMessage() != null ? e.getMessage() : "Unknown error occurred";
return ResponseEntity.badRequest().body(Map.of("error", errorMessage));
}
}
@PostMapping("/suggest")
public ResponseEntity<List<TagSuggestion>> suggestTags(@RequestBody TagSuggestionRequest request) {
try {
List<TagSuggestion> suggestions = tagService.suggestTags(
request.getTitle(),
request.getContent(),
request.getSummary(),
request.getLimit() != null ? request.getLimit() : 10
);
return ResponseEntity.ok(suggestions);
} catch (Exception e) {
logger.error("Failed to suggest tags", e);
return ResponseEntity.ok(List.of()); // Return empty list on error
}
}
private TagDto convertToDto(Tag tag) {
TagDto dto = new TagDto();
dto.setId(tag.getId());
dto.setName(tag.getName());
dto.setColor(tag.getColor());
dto.setDescription(tag.getDescription());
dto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
dto.setAliasCount(tag.getAliases() != null ? tag.getAliases().size() : 0);
dto.setCreatedAt(tag.getCreatedAt());
// updatedAt field not present in Tag entity per spec
// Convert aliases to DTOs for full context
if (tag.getAliases() != null && !tag.getAliases().isEmpty()) {
List<TagAliasDto> aliaseDtos = tag.getAliases().stream()
.map(alias -> {
TagAliasDto aliasDto = new TagAliasDto();
aliasDto.setId(alias.getId());
aliasDto.setAliasName(alias.getAliasName());
aliasDto.setCanonicalTagId(alias.getCanonicalTag().getId());
aliasDto.setCanonicalTagName(alias.getCanonicalTag().getName());
aliasDto.setCreatedFromMerge(alias.getCreatedFromMerge());
aliasDto.setCreatedAt(alias.getCreatedAt());
return aliasDto;
})
.collect(Collectors.toList());
dto.setAliases(aliaseDtos);
}
return dto;
}
private TagDto convertToDtoWithCollectionCount(Tag tag) {
TagDto dto = new TagDto();
dto.setId(tag.getId());
dto.setName(tag.getName());
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
dto.setCreatedAt(tag.getCreatedAt());
// Don't set storyCount for collection-focused endpoint
return dto;
}
// Request DTOs
public static class CreateTagRequest {
private String name;
private String color;
private String description;
public String getName() { return name; }
public void setName(String name) { this.name = name; }
public String getColor() { return color; }
public void setColor(String color) { this.color = color; }
public String getDescription() { return description; }
public void setDescription(String description) { this.description = description; }
}
public static class UpdateTagRequest {
private String name;
private String color;
private String description;
public String getName() { return name; }
public void setName(String name) { this.name = name; }
public String getColor() { return color; }
public void setColor(String color) { this.color = color; }
public String getDescription() { return description; }
public void setDescription(String description) { this.description = description; }
}
public static class MergeTagsRequest {
private List<String> sourceTagIds;
private String targetTagId;
public List<String> getSourceTagIds() { return sourceTagIds; }
public void setSourceTagIds(List<String> sourceTagIds) { this.sourceTagIds = sourceTagIds; }
public String getTargetTagId() { return targetTagId; }
public void setTargetTagId(String targetTagId) { this.targetTagId = targetTagId; }
// Helper methods to convert to UUID
public List<UUID> getSourceTagUUIDs() {
return sourceTagIds != null ? sourceTagIds.stream().map(UUID::fromString).toList() : null;
}
public UUID getTargetTagUUID() {
return targetTagId != null ? UUID.fromString(targetTagId) : null;
}
}
public static class MergePreviewResponse {
private String targetTagName;
private int targetStoryCount;
private int totalResultStoryCount;
private List<String> aliasesToCreate;
public String getTargetTagName() { return targetTagName; }
public void setTargetTagName(String targetTagName) { this.targetTagName = targetTagName; }
public int getTargetStoryCount() { return targetStoryCount; }
public void setTargetStoryCount(int targetStoryCount) { this.targetStoryCount = targetStoryCount; }
public int getTotalResultStoryCount() { return totalResultStoryCount; }
public void setTotalResultStoryCount(int totalResultStoryCount) { this.totalResultStoryCount = totalResultStoryCount; }
public List<String> getAliasesToCreate() { return aliasesToCreate; }
public void setAliasesToCreate(List<String> aliasesToCreate) { this.aliasesToCreate = aliasesToCreate; }
}
public static class TagSuggestionRequest {
private String title;
private String content;
private String summary;
private Integer limit;
public String getTitle() { return title; }
public void setTitle(String title) { this.title = title; }
public String getContent() { return content; }
public void setContent(String content) { this.content = content; }
public String getSummary() { return summary; }
public void setSummary(String summary) { this.summary = summary; }
public Integer getLimit() { return limit; }
public void setLimit(Integer limit) { this.limit = limit; }
}
public static class TagSuggestion {
private String tagName;
private double confidence;
private String reason;
public TagSuggestion() {}
public TagSuggestion(String tagName, double confidence, String reason) {
this.tagName = tagName;
this.confidence = confidence;
this.reason = reason;
}
public String getTagName() { return tagName; }
public void setTagName(String tagName) { this.tagName = tagName; }
public double getConfidence() { return confidence; }
public void setConfidence(double confidence) { this.confidence = confidence; }
public String getReason() { return reason; }
public void setReason(String reason) { this.reason = reason; }
}
}

View File

@@ -16,6 +16,7 @@ public class CollectionDto {
private String coverImagePath;
private Boolean isArchived;
private List<TagDto> tags;
private List<String> tagNames; // For search results
private List<CollectionStoryDto> collectionStories;
private Integer storyCount;
private Integer totalWordCount;
@@ -83,6 +84,14 @@ public class CollectionDto {
this.tags = tags;
}
public List<String> getTagNames() {
return tagNames;
}
public void setTagNames(List<String> tagNames) {
this.tagNames = tagNames;
}
public List<CollectionStoryDto> getCollectionStories() {
return collectionStories;
}

View File

@@ -0,0 +1,115 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.UUID;
public class EPUBExportRequest {
@NotNull(message = "Story ID is required")
private UUID storyId;
private String customTitle;
private String customAuthor;
private Boolean includeReadingPosition = true;
private Boolean includeCoverImage = true;
private Boolean includeMetadata = true;
private List<String> customMetadata;
private String language = "en";
private Boolean splitByChapters = false;
private Integer maxWordsPerChapter;
public EPUBExportRequest() {}
public EPUBExportRequest(UUID storyId) {
this.storyId = storyId;
}
public UUID getStoryId() {
return storyId;
}
public void setStoryId(UUID storyId) {
this.storyId = storyId;
}
public String getCustomTitle() {
return customTitle;
}
public void setCustomTitle(String customTitle) {
this.customTitle = customTitle;
}
public String getCustomAuthor() {
return customAuthor;
}
public void setCustomAuthor(String customAuthor) {
this.customAuthor = customAuthor;
}
public Boolean getIncludeReadingPosition() {
return includeReadingPosition;
}
public void setIncludeReadingPosition(Boolean includeReadingPosition) {
this.includeReadingPosition = includeReadingPosition;
}
public Boolean getIncludeCoverImage() {
return includeCoverImage;
}
public void setIncludeCoverImage(Boolean includeCoverImage) {
this.includeCoverImage = includeCoverImage;
}
public Boolean getIncludeMetadata() {
return includeMetadata;
}
public void setIncludeMetadata(Boolean includeMetadata) {
this.includeMetadata = includeMetadata;
}
public List<String> getCustomMetadata() {
return customMetadata;
}
public void setCustomMetadata(List<String> customMetadata) {
this.customMetadata = customMetadata;
}
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public Boolean getSplitByChapters() {
return splitByChapters;
}
public void setSplitByChapters(Boolean splitByChapters) {
this.splitByChapters = splitByChapters;
}
public Integer getMaxWordsPerChapter() {
return maxWordsPerChapter;
}
public void setMaxWordsPerChapter(Integer maxWordsPerChapter) {
this.maxWordsPerChapter = maxWordsPerChapter;
}
}

View File

@@ -0,0 +1,133 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
import org.springframework.web.multipart.MultipartFile;
import java.util.List;
import java.util.UUID;
public class EPUBImportRequest {
@NotNull(message = "EPUB file is required")
private MultipartFile epubFile;
private UUID authorId;
private String authorName;
private UUID seriesId;
private String seriesName;
private Integer seriesVolume;
private List<String> tags;
private Boolean preserveReadingPosition = true;
private Boolean overwriteExisting = false;
private Boolean createMissingAuthor = true;
private Boolean createMissingSeries = true;
private Boolean extractCover = true;
public EPUBImportRequest() {}
public MultipartFile getEpubFile() {
return epubFile;
}
public void setEpubFile(MultipartFile epubFile) {
this.epubFile = epubFile;
}
public UUID getAuthorId() {
return authorId;
}
public void setAuthorId(UUID authorId) {
this.authorId = authorId;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public UUID getSeriesId() {
return seriesId;
}
public void setSeriesId(UUID seriesId) {
this.seriesId = seriesId;
}
public String getSeriesName() {
return seriesName;
}
public void setSeriesName(String seriesName) {
this.seriesName = seriesName;
}
public Integer getSeriesVolume() {
return seriesVolume;
}
public void setSeriesVolume(Integer seriesVolume) {
this.seriesVolume = seriesVolume;
}
public List<String> getTags() {
return tags;
}
public void setTags(List<String> tags) {
this.tags = tags;
}
public Boolean getPreserveReadingPosition() {
return preserveReadingPosition;
}
public void setPreserveReadingPosition(Boolean preserveReadingPosition) {
this.preserveReadingPosition = preserveReadingPosition;
}
public Boolean getOverwriteExisting() {
return overwriteExisting;
}
public void setOverwriteExisting(Boolean overwriteExisting) {
this.overwriteExisting = overwriteExisting;
}
public Boolean getCreateMissingAuthor() {
return createMissingAuthor;
}
public void setCreateMissingAuthor(Boolean createMissingAuthor) {
this.createMissingAuthor = createMissingAuthor;
}
public Boolean getCreateMissingSeries() {
return createMissingSeries;
}
public void setCreateMissingSeries(Boolean createMissingSeries) {
this.createMissingSeries = createMissingSeries;
}
public Boolean getExtractCover() {
return extractCover;
}
public void setExtractCover(Boolean extractCover) {
this.extractCover = extractCover;
}
}

View File

@@ -0,0 +1,107 @@
package com.storycove.dto;
import java.util.List;
import java.util.UUID;
public class EPUBImportResponse {
private boolean success;
private String message;
private UUID storyId;
private String storyTitle;
private Integer totalChapters;
private Integer wordCount;
private ReadingPositionDto readingPosition;
private List<String> warnings;
private List<String> errors;
public EPUBImportResponse() {}
public EPUBImportResponse(boolean success, String message) {
this.success = success;
this.message = message;
}
public static EPUBImportResponse success(UUID storyId, String storyTitle) {
EPUBImportResponse response = new EPUBImportResponse(true, "EPUB imported successfully");
response.setStoryId(storyId);
response.setStoryTitle(storyTitle);
return response;
}
public static EPUBImportResponse error(String message) {
return new EPUBImportResponse(false, message);
}
public boolean isSuccess() {
return success;
}
public void setSuccess(boolean success) {
this.success = success;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public UUID getStoryId() {
return storyId;
}
public void setStoryId(UUID storyId) {
this.storyId = storyId;
}
public String getStoryTitle() {
return storyTitle;
}
public void setStoryTitle(String storyTitle) {
this.storyTitle = storyTitle;
}
public Integer getTotalChapters() {
return totalChapters;
}
public void setTotalChapters(Integer totalChapters) {
this.totalChapters = totalChapters;
}
public Integer getWordCount() {
return wordCount;
}
public void setWordCount(Integer wordCount) {
this.wordCount = wordCount;
}
public ReadingPositionDto getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(ReadingPositionDto readingPosition) {
this.readingPosition = readingPosition;
}
public List<String> getWarnings() {
return warnings;
}
public void setWarnings(List<String> warnings) {
this.warnings = warnings;
}
public List<String> getErrors() {
return errors;
}
public void setErrors(List<String> errors) {
this.errors = errors;
}
}

View File

@@ -0,0 +1,31 @@
package com.storycove.dto;
public class FacetCountDto {
private String value;
private int count;
public FacetCountDto() {}
public FacetCountDto(String value, int count) {
this.value = value;
this.count = count;
}
// Getters and Setters
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
}

View File

@@ -8,6 +8,7 @@ public class HtmlSanitizationConfigDto {
private Map<String, List<String>> allowedAttributes;
private List<String> allowedCssProperties;
private Map<String, List<String>> removedAttributes;
private Map<String, Map<String, List<String>>> allowedProtocols;
private String description;
public HtmlSanitizationConfigDto() {}
@@ -44,6 +45,14 @@ public class HtmlSanitizationConfigDto {
this.removedAttributes = removedAttributes;
}
public Map<String, Map<String, List<String>>> getAllowedProtocols() {
return allowedProtocols;
}
public void setAllowedProtocols(Map<String, Map<String, List<String>>> allowedProtocols) {
this.allowedProtocols = allowedProtocols;
}
public String getDescription() {
return description;
}

View File

@@ -0,0 +1,61 @@
package com.storycove.dto;
public class LibraryDto {
private String id;
private String name;
private String description;
private boolean isActive;
private boolean isInitialized;
// Constructors
public LibraryDto() {}
public LibraryDto(String id, String name, String description, boolean isActive, boolean isInitialized) {
this.id = id;
this.name = name;
this.description = description;
this.isActive = isActive;
this.isInitialized = isInitialized;
}
// Getters and Setters
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public boolean isActive() {
return isActive;
}
public void setActive(boolean active) {
isActive = active;
}
public boolean isInitialized() {
return isInitialized;
}
public void setInitialized(boolean initialized) {
isInitialized = initialized;
}
}

View File

@@ -0,0 +1,23 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotBlank;
public class ProcessContentImagesRequest {
@NotBlank(message = "HTML content is required")
private String htmlContent;
public ProcessContentImagesRequest() {}
public ProcessContentImagesRequest(String htmlContent) {
this.htmlContent = htmlContent;
}
public String getHtmlContent() {
return htmlContent;
}
public void setHtmlContent(String htmlContent) {
this.htmlContent = htmlContent;
}
}

View File

@@ -0,0 +1,124 @@
package com.storycove.dto;
import java.time.LocalDateTime;
import java.util.UUID;
public class ReadingPositionDto {
private UUID id;
private UUID storyId;
private Integer chapterIndex;
private String chapterTitle;
private Integer wordPosition;
private Integer characterPosition;
private Double percentageComplete;
private String epubCfi;
private String contextBefore;
private String contextAfter;
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
public ReadingPositionDto() {}
public ReadingPositionDto(UUID storyId, Integer chapterIndex, Integer wordPosition) {
this.storyId = storyId;
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
}
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public UUID getStoryId() {
return storyId;
}
public void setStoryId(UUID storyId) {
this.storyId = storyId;
}
public Integer getChapterIndex() {
return chapterIndex;
}
public void setChapterIndex(Integer chapterIndex) {
this.chapterIndex = chapterIndex;
}
public String getChapterTitle() {
return chapterTitle;
}
public void setChapterTitle(String chapterTitle) {
this.chapterTitle = chapterTitle;
}
public Integer getWordPosition() {
return wordPosition;
}
public void setWordPosition(Integer wordPosition) {
this.wordPosition = wordPosition;
}
public Integer getCharacterPosition() {
return characterPosition;
}
public void setCharacterPosition(Integer characterPosition) {
this.characterPosition = characterPosition;
}
public Double getPercentageComplete() {
return percentageComplete;
}
public void setPercentageComplete(Double percentageComplete) {
this.percentageComplete = percentageComplete;
}
public String getEpubCfi() {
return epubCfi;
}
public void setEpubCfi(String epubCfi) {
this.epubCfi = epubCfi;
}
public String getContextBefore() {
return contextBefore;
}
public void setContextBefore(String contextBefore) {
this.contextBefore = contextBefore;
}
public String getContextAfter() {
return contextAfter;
}
public void setContextAfter(String contextAfter) {
this.contextAfter = contextAfter;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
}

View File

@@ -0,0 +1,23 @@
package com.storycove.dto;
import jakarta.validation.constraints.Min;
public class ReadingProgressRequest {
@Min(value = 0, message = "Reading position must be non-negative")
private Integer position;
public ReadingProgressRequest() {}
public ReadingProgressRequest(Integer position) {
this.position = position;
}
public Integer getPosition() {
return position;
}
public void setPosition(Integer position) {
this.position = position;
}
}

View File

@@ -0,0 +1,23 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
public class ReadingStatusRequest {
@NotNull(message = "Reading status is required")
private Boolean isRead;
public ReadingStatusRequest() {}
public ReadingStatusRequest(Boolean isRead) {
this.isRead = isRead;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
}

View File

@@ -1,6 +1,7 @@
package com.storycove.dto;
import java.util.List;
import java.util.Map;
public class SearchResultDto<T> {
@@ -10,6 +11,7 @@ public class SearchResultDto<T> {
private int perPage;
private String query;
private long searchTimeMs;
private Map<String, List<FacetCountDto>> facets;
public SearchResultDto() {}
@@ -22,6 +24,28 @@ public class SearchResultDto<T> {
this.searchTimeMs = searchTimeMs;
}
public SearchResultDto(List<T> results, long totalHits, int page, int perPage, String query, long searchTimeMs, Map<String, List<FacetCountDto>> facets) {
this.results = results;
this.totalHits = totalHits;
this.page = page;
this.perPage = perPage;
this.query = query;
this.searchTimeMs = searchTimeMs;
this.facets = facets;
}
// Simple constructor for basic search results with facet list
public SearchResultDto(List<T> results, long totalHits, int resultCount, List<FacetCountDto> facetsList) {
this.results = results;
this.totalHits = totalHits;
this.page = 0;
this.perPage = resultCount;
this.query = "";
this.searchTimeMs = 0;
// Convert list to map if needed - for now just set empty map
this.facets = java.util.Collections.emptyMap();
}
// Getters and Setters
public List<T> getResults() {
return results;
@@ -70,4 +94,12 @@ public class SearchResultDto<T> {
public void setSearchTimeMs(long searchTimeMs) {
this.searchTimeMs = searchTimeMs;
}
public Map<String, List<FacetCountDto>> getFacets() {
return facets;
}
public void setFacets(Map<String, List<FacetCountDto>> facets) {
this.facets = facets;
}
}

View File

@@ -21,13 +21,18 @@ public class StoryDto {
private String description;
private String contentHtml;
private String contentPlain;
// contentPlain removed for performance - use StoryReadingDto when content is needed
private String sourceUrl;
private String coverPath;
private Integer wordCount;
private Integer rating;
private Integer volume;
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Related entities as simple references
private UUID authorId;
private String authorName;
@@ -85,13 +90,6 @@ public class StoryDto {
this.contentHtml = contentHtml;
}
public String getContentPlain() {
return contentPlain;
}
public void setContentPlain(String contentPlain) {
this.contentPlain = contentPlain;
}
public String getSourceUrl() {
return sourceUrl;
@@ -133,6 +131,30 @@ public class StoryDto {
this.volume = volume;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public UUID getAuthorId() {
return authorId;
}

View File

@@ -0,0 +1,202 @@
package com.storycove.dto;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
/**
* Story DTO specifically for reading view.
* Contains contentHtml but excludes contentPlain for performance.
*/
public class StoryReadingDto {
private UUID id;
private String title;
private String summary;
private String description;
private String contentHtml; // For reading - includes HTML
// contentPlain excluded for performance
private String sourceUrl;
private String coverPath;
private Integer wordCount;
private Integer rating;
private Integer volume;
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Related entities as simple references
private UUID authorId;
private String authorName;
private UUID seriesId;
private String seriesName;
private List<TagDto> tags;
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
public StoryReadingDto() {}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getContentHtml() {
return contentHtml;
}
public void setContentHtml(String contentHtml) {
this.contentHtml = contentHtml;
}
public String getSourceUrl() {
return sourceUrl;
}
public void setSourceUrl(String sourceUrl) {
this.sourceUrl = sourceUrl;
}
public String getCoverPath() {
return coverPath;
}
public void setCoverPath(String coverPath) {
this.coverPath = coverPath;
}
public Integer getWordCount() {
return wordCount;
}
public void setWordCount(Integer wordCount) {
this.wordCount = wordCount;
}
public Integer getRating() {
return rating;
}
public void setRating(Integer rating) {
this.rating = rating;
}
public Integer getVolume() {
return volume;
}
public void setVolume(Integer volume) {
this.volume = volume;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public UUID getAuthorId() {
return authorId;
}
public void setAuthorId(UUID authorId) {
this.authorId = authorId;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public UUID getSeriesId() {
return seriesId;
}
public void setSeriesId(UUID seriesId) {
this.seriesId = seriesId;
}
public String getSeriesName() {
return seriesName;
}
public void setSeriesName(String seriesName) {
this.seriesName = seriesName;
}
public List<TagDto> getTags() {
return tags;
}
public void setTags(List<TagDto> tags) {
this.tags = tags;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
}

View File

@@ -9,13 +9,17 @@ public class StorySearchDto {
private UUID id;
private String title;
private String description;
private String contentPlain;
private String sourceUrl;
private String coverPath;
private Integer wordCount;
private Integer rating;
private Integer volume;
// Reading status
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Author info
private UUID authorId;
private String authorName;
@@ -30,6 +34,9 @@ public class StorySearchDto {
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
// Alias for createdAt to match frontend expectations
private LocalDateTime dateAdded;
// Search-specific fields
private double searchScore;
private List<String> highlights;
@@ -61,13 +68,6 @@ public class StorySearchDto {
this.description = description;
}
public String getContentPlain() {
return contentPlain;
}
public void setContentPlain(String contentPlain) {
this.contentPlain = contentPlain;
}
public String getSourceUrl() {
return sourceUrl;
@@ -109,6 +109,30 @@ public class StorySearchDto {
this.volume = volume;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public UUID getAuthorId() {
return authorId;
}
@@ -165,6 +189,14 @@ public class StorySearchDto {
this.updatedAt = updatedAt;
}
public LocalDateTime getDateAdded() {
return dateAdded;
}
public void setDateAdded(LocalDateTime dateAdded) {
this.dateAdded = dateAdded;
}
public double getSearchScore() {
return searchScore;
}

View File

@@ -20,6 +20,11 @@ public class StorySummaryDto {
private Integer rating;
private Integer volume;
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Related entities as simple references
private UUID authorId;
private String authorName;
@@ -106,6 +111,30 @@ public class StorySummaryDto {
this.volume = volume;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public UUID getAuthorId() {
return authorId;
}

View File

@@ -0,0 +1,77 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import java.time.LocalDateTime;
import java.util.UUID;
public class TagAliasDto {
private UUID id;
@NotBlank(message = "Alias name is required")
@Size(max = 100, message = "Alias name must not exceed 100 characters")
private String aliasName;
private UUID canonicalTagId;
private String canonicalTagName; // For convenience in frontend
private Boolean createdFromMerge;
private LocalDateTime createdAt;
public TagAliasDto() {}
public TagAliasDto(String aliasName, UUID canonicalTagId) {
this.aliasName = aliasName;
this.canonicalTagId = canonicalTagId;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getAliasName() {
return aliasName;
}
public void setAliasName(String aliasName) {
this.aliasName = aliasName;
}
public UUID getCanonicalTagId() {
return canonicalTagId;
}
public void setCanonicalTagId(UUID canonicalTagId) {
this.canonicalTagId = canonicalTagId;
}
public String getCanonicalTagName() {
return canonicalTagName;
}
public void setCanonicalTagName(String canonicalTagName) {
this.canonicalTagName = canonicalTagName;
}
public Boolean getCreatedFromMerge() {
return createdFromMerge;
}
public void setCreatedFromMerge(Boolean createdFromMerge) {
this.createdFromMerge = createdFromMerge;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
}

View File

@@ -4,6 +4,7 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
public class TagDto {
@@ -14,7 +15,16 @@ public class TagDto {
@Size(max = 100, message = "Tag name must not exceed 100 characters")
private String name;
@Size(max = 7, message = "Color must be a valid hex color code")
private String color;
@Size(max = 500, message = "Description must not exceed 500 characters")
private String description;
private Integer storyCount;
private Integer collectionCount;
private Integer aliasCount;
private List<TagAliasDto> aliases;
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
@@ -41,6 +51,22 @@ public class TagDto {
this.name = name;
}
public String getColor() {
return color;
}
public void setColor(String color) {
this.color = color;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Integer getStoryCount() {
return storyCount;
}
@@ -49,6 +75,30 @@ public class TagDto {
this.storyCount = storyCount;
}
public Integer getCollectionCount() {
return collectionCount;
}
public void setCollectionCount(Integer collectionCount) {
this.collectionCount = collectionCount;
}
public Integer getAliasCount() {
return aliasCount;
}
public void setAliasCount(Integer aliasCount) {
this.aliasCount = aliasCount;
}
public List<TagAliasDto> getAliases() {
return aliases;
}
public void setAliases(List<TagAliasDto> aliases) {
this.aliases = aliases;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}

View File

@@ -52,6 +52,10 @@ public class Collection {
)
private Set<Tag> tags = new HashSet<>();
// Transient field for search results - tag names only to avoid lazy loading issues
@Transient
private List<String> tagNames;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
@@ -192,6 +196,14 @@ public class Collection {
this.tags = tags;
}
public List<String> getTagNames() {
return tagNames;
}
public void setTagNames(List<String> tagNames) {
this.tagNames = tagNames;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}

View File

@@ -0,0 +1,93 @@
package com.storycove.entity;
public class Library {
private String id;
private String name;
private String description;
private String passwordHash;
private String dbName;
private String typesenseCollection;
private String imagePath;
private boolean initialized;
// Constructors
public Library() {}
public Library(String id, String name, String description, String passwordHash, String dbName) {
this.id = id;
this.name = name;
this.description = description;
this.passwordHash = passwordHash;
this.dbName = dbName;
this.typesenseCollection = "stories_" + id;
this.imagePath = "/images/" + id;
this.initialized = false;
}
// Getters and Setters
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
this.typesenseCollection = "stories_" + id;
this.imagePath = "/images/" + id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getPasswordHash() {
return passwordHash;
}
public void setPasswordHash(String passwordHash) {
this.passwordHash = passwordHash;
}
public String getDbName() {
return dbName;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
public String getTypesenseCollection() {
return typesenseCollection;
}
public void setTypesenseCollection(String typesenseCollection) {
this.typesenseCollection = typesenseCollection;
}
public String getImagePath() {
return imagePath;
}
public void setImagePath(String imagePath) {
this.imagePath = imagePath;
}
public boolean isInitialized() {
return initialized;
}
public void setInitialized(boolean initialized) {
this.initialized = initialized;
}
}

View File

@@ -0,0 +1,230 @@
package com.storycove.entity;
import jakarta.persistence.*;
import jakarta.validation.constraints.NotNull;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import com.fasterxml.jackson.annotation.JsonBackReference;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "reading_positions", indexes = {
@Index(name = "idx_reading_position_story", columnList = "story_id")
})
public class ReadingPosition {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@NotNull
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "story_id", nullable = false)
@JsonBackReference("story-reading-positions")
private Story story;
@Column(name = "chapter_index")
private Integer chapterIndex;
@Column(name = "chapter_title")
private String chapterTitle;
@Column(name = "word_position")
private Integer wordPosition;
@Column(name = "character_position")
private Integer characterPosition;
@Column(name = "percentage_complete")
private Double percentageComplete;
@Column(name = "epub_cfi", columnDefinition = "TEXT")
private String epubCfi;
@Column(name = "context_before", length = 500)
private String contextBefore;
@Column(name = "context_after", length = 500)
private String contextAfter;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
@UpdateTimestamp
@Column(name = "updated_at", nullable = false)
private LocalDateTime updatedAt;
public ReadingPosition() {}
public ReadingPosition(Story story) {
this.story = story;
this.chapterIndex = 0;
this.wordPosition = 0;
this.characterPosition = 0;
this.percentageComplete = 0.0;
}
public ReadingPosition(Story story, Integer chapterIndex, Integer wordPosition) {
this.story = story;
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
this.characterPosition = 0;
this.percentageComplete = 0.0;
}
public void updatePosition(Integer chapterIndex, Integer wordPosition, Integer characterPosition) {
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
this.characterPosition = characterPosition;
calculatePercentageComplete();
}
public void updatePositionWithCfi(String epubCfi, Integer chapterIndex, Integer wordPosition) {
this.epubCfi = epubCfi;
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
calculatePercentageComplete();
}
private void calculatePercentageComplete() {
if (story != null && story.getWordCount() != null && story.getWordCount() > 0) {
int totalWords = story.getWordCount();
int currentPosition = (chapterIndex != null ? chapterIndex * 1000 : 0) +
(wordPosition != null ? wordPosition : 0);
this.percentageComplete = Math.min(100.0, (double) currentPosition / totalWords * 100);
}
}
public boolean isAtBeginning() {
return (chapterIndex == null || chapterIndex == 0) &&
(wordPosition == null || wordPosition == 0);
}
public boolean isCompleted() {
return percentageComplete != null && percentageComplete >= 95.0;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public Story getStory() {
return story;
}
public void setStory(Story story) {
this.story = story;
}
public Integer getChapterIndex() {
return chapterIndex;
}
public void setChapterIndex(Integer chapterIndex) {
this.chapterIndex = chapterIndex;
}
public String getChapterTitle() {
return chapterTitle;
}
public void setChapterTitle(String chapterTitle) {
this.chapterTitle = chapterTitle;
}
public Integer getWordPosition() {
return wordPosition;
}
public void setWordPosition(Integer wordPosition) {
this.wordPosition = wordPosition;
}
public Integer getCharacterPosition() {
return characterPosition;
}
public void setCharacterPosition(Integer characterPosition) {
this.characterPosition = characterPosition;
}
public Double getPercentageComplete() {
return percentageComplete;
}
public void setPercentageComplete(Double percentageComplete) {
this.percentageComplete = percentageComplete;
}
public String getEpubCfi() {
return epubCfi;
}
public void setEpubCfi(String epubCfi) {
this.epubCfi = epubCfi;
}
public String getContextBefore() {
return contextBefore;
}
public void setContextBefore(String contextBefore) {
this.contextBefore = contextBefore;
}
public String getContextAfter() {
return contextAfter;
}
public void setContextAfter(String contextAfter) {
this.contextAfter = contextAfter;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ReadingPosition)) return false;
ReadingPosition that = (ReadingPosition) o;
return id != null && id.equals(that.id);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public String toString() {
return "ReadingPosition{" +
"id=" + id +
", storyId=" + (story != null ? story.getId() : null) +
", chapterIndex=" + chapterIndex +
", wordPosition=" + wordPosition +
", percentageComplete=" + percentageComplete +
'}';
}
}

View File

@@ -55,6 +55,15 @@ public class Story {
@Column(name = "volume")
private Integer volume;
@Column(name = "is_read")
private Boolean isRead = false;
@Column(name = "reading_position")
private Integer readingPosition = 0;
@Column(name = "last_read_at")
private LocalDateTime lastReadAt;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "author_id")
@JsonBackReference("author-stories")
@@ -212,6 +221,30 @@ public class Story {
this.volume = volume;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public Author getAuthor() {
return author;
}
@@ -252,6 +285,37 @@ public class Story {
this.updatedAt = updatedAt;
}
/**
* Updates the reading progress and timestamp
*/
public void updateReadingProgress(Integer position) {
this.readingPosition = position;
this.lastReadAt = LocalDateTime.now();
}
/**
* Marks the story as read and updates the reading position to the end
*/
public void markAsRead() {
this.isRead = true;
this.lastReadAt = LocalDateTime.now();
// Set reading position to the end of content if available
if (contentPlain != null) {
this.readingPosition = contentPlain.length();
} else if (contentHtml != null) {
this.readingPosition = contentHtml.length();
}
}
/**
* Marks the story as unread and resets reading position
*/
public void markAsUnread() {
this.isRead = false;
this.readingPosition = 0;
this.lastReadAt = null;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
@@ -272,6 +336,8 @@ public class Story {
", title='" + title + '\'' +
", wordCount=" + wordCount +
", rating=" + rating +
", isRead=" + isRead +
", readingPosition=" + readingPosition +
'}';
}
}

View File

@@ -5,6 +5,7 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import org.hibernate.annotations.CreationTimestamp;
import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import java.time.LocalDateTime;
import java.util.HashSet;
@@ -24,11 +25,27 @@ public class Tag {
@Column(nullable = false, unique = true)
private String name;
@Size(max = 7, message = "Color must be a valid hex color code")
@Column(length = 7)
private String color; // hex color like #3B82F6
@Size(max = 500, message = "Description must not exceed 500 characters")
@Column(length = 500)
private String description;
@ManyToMany(mappedBy = "tags")
@JsonBackReference("story-tags")
private Set<Story> stories = new HashSet<>();
@ManyToMany(mappedBy = "tags")
@JsonBackReference("collection-tags")
private Set<Collection> collections = new HashSet<>();
@OneToMany(mappedBy = "canonicalTag", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonManagedReference("tag-aliases")
private Set<TagAlias> aliases = new HashSet<>();
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
@@ -39,6 +56,12 @@ public class Tag {
this.name = name;
}
public Tag(String name, String color, String description) {
this.name = name;
this.color = color;
this.description = description;
}
// Getters and Setters
@@ -58,6 +81,22 @@ public class Tag {
this.name = name;
}
public String getColor() {
return color;
}
public void setColor(String color) {
this.color = color;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Set<Story> getStories() {
return stories;
@@ -67,6 +106,22 @@ public class Tag {
this.stories = stories;
}
public Set<Collection> getCollections() {
return collections;
}
public void setCollections(Set<Collection> collections) {
this.collections = collections;
}
public Set<TagAlias> getAliases() {
return aliases;
}
public void setAliases(Set<TagAlias> aliases) {
this.aliases = aliases;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}

View File

@@ -0,0 +1,113 @@
package com.storycove.entity;
import jakarta.persistence.*;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import org.hibernate.annotations.CreationTimestamp;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "tag_aliases")
public class TagAlias {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@NotBlank(message = "Alias name is required")
@Size(max = 100, message = "Alias name must not exceed 100 characters")
@Column(name = "alias_name", nullable = false, unique = true)
private String aliasName;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "canonical_tag_id", nullable = false)
@JsonManagedReference("tag-aliases")
private Tag canonicalTag;
@Column(name = "created_from_merge", nullable = false)
private Boolean createdFromMerge = false;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
public TagAlias() {}
public TagAlias(String aliasName, Tag canonicalTag) {
this.aliasName = aliasName;
this.canonicalTag = canonicalTag;
}
public TagAlias(String aliasName, Tag canonicalTag, Boolean createdFromMerge) {
this.aliasName = aliasName;
this.canonicalTag = canonicalTag;
this.createdFromMerge = createdFromMerge;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getAliasName() {
return aliasName;
}
public void setAliasName(String aliasName) {
this.aliasName = aliasName;
}
public Tag getCanonicalTag() {
return canonicalTag;
}
public void setCanonicalTag(Tag canonicalTag) {
this.canonicalTag = canonicalTag;
}
public Boolean getCreatedFromMerge() {
return createdFromMerge;
}
public void setCreatedFromMerge(Boolean createdFromMerge) {
this.createdFromMerge = createdFromMerge;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof TagAlias)) return false;
TagAlias tagAlias = (TagAlias) o;
return id != null && id.equals(tagAlias.id);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public String toString() {
return "TagAlias{" +
"id=" + id +
", aliasName='" + aliasName + '\'' +
", canonicalTag=" + (canonicalTag != null ? canonicalTag.getName() : null) +
", createdFromMerge=" + createdFromMerge +
'}';
}
}

View File

@@ -0,0 +1,34 @@
package com.storycove.event;
import org.springframework.context.ApplicationEvent;
import java.util.UUID;
/**
* Event published when a story's content is created or updated
*/
public class StoryContentUpdatedEvent extends ApplicationEvent {
private final UUID storyId;
private final String contentHtml;
private final boolean isNewStory;
public StoryContentUpdatedEvent(Object source, UUID storyId, String contentHtml, boolean isNewStory) {
super(source);
this.storyId = storyId;
this.contentHtml = contentHtml;
this.isNewStory = isNewStory;
}
public UUID getStoryId() {
return storyId;
}
public String getContentHtml() {
return contentHtml;
}
public boolean isNewStory() {
return isNewStory;
}
}

View File

@@ -52,4 +52,5 @@ public interface AuthorRepository extends JpaRepository<Author, UUID> {
@Query(value = "SELECT author_rating FROM authors WHERE id = :id", nativeQuery = true)
Integer findAuthorRatingById(@Param("id") UUID id);
}

View File

@@ -45,4 +45,11 @@ public interface CollectionRepository extends JpaRepository<Collection, UUID> {
*/
@Query("SELECT c FROM Collection c WHERE c.isArchived = false ORDER BY c.updatedAt DESC")
List<Collection> findAllActiveCollections();
/**
* Find all collections with tags for reindexing operations
*/
@Query("SELECT c FROM Collection c LEFT JOIN FETCH c.tags ORDER BY c.updatedAt DESC")
List<Collection> findAllWithTags();
}

View File

@@ -0,0 +1,57 @@
package com.storycove.repository;
import com.storycove.entity.ReadingPosition;
import com.storycove.entity.Story;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Repository
public interface ReadingPositionRepository extends JpaRepository<ReadingPosition, UUID> {
Optional<ReadingPosition> findByStoryId(UUID storyId);
Optional<ReadingPosition> findByStory(Story story);
List<ReadingPosition> findByStoryIdIn(List<UUID> storyIds);
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.story.id = :storyId ORDER BY rp.updatedAt DESC")
List<ReadingPosition> findByStoryIdOrderByUpdatedAtDesc(@Param("storyId") UUID storyId);
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete >= :minPercentage")
List<ReadingPosition> findByMinimumPercentageComplete(@Param("minPercentage") Double minPercentage);
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete >= 95.0")
List<ReadingPosition> findCompletedReadings();
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete > 0 AND rp.percentageComplete < 95.0")
List<ReadingPosition> findInProgressReadings();
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.updatedAt >= :since ORDER BY rp.updatedAt DESC")
List<ReadingPosition> findRecentlyUpdated(@Param("since") LocalDateTime since);
@Query("SELECT rp FROM ReadingPosition rp ORDER BY rp.updatedAt DESC")
List<ReadingPosition> findAllOrderByUpdatedAtDesc();
@Query("SELECT COUNT(rp) FROM ReadingPosition rp WHERE rp.percentageComplete >= 95.0")
long countCompletedReadings();
@Query("SELECT COUNT(rp) FROM ReadingPosition rp WHERE rp.percentageComplete > 0 AND rp.percentageComplete < 95.0")
long countInProgressReadings();
@Query("SELECT AVG(rp.percentageComplete) FROM ReadingPosition rp WHERE rp.percentageComplete > 0")
Double findAverageReadingProgress();
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.epubCfi IS NOT NULL")
List<ReadingPosition> findPositionsWithEpubCfi();
boolean existsByStoryId(UUID storyId);
void deleteByStoryId(UUID storyId);
}

View File

@@ -114,4 +114,130 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
"LEFT JOIN FETCH s.series " +
"LEFT JOIN FETCH s.tags")
List<Story> findAllWithAssociations();
@Query("SELECT s FROM Story s WHERE UPPER(s.title) = UPPER(:title) AND UPPER(s.author.name) = UPPER(:authorName)")
List<Story> findByTitleAndAuthorNameIgnoreCase(@Param("title") String title, @Param("authorName") String authorName);
/**
* Count all stories for random selection (no filters)
*/
@Query(value = "SELECT COUNT(*) FROM stories", nativeQuery = true)
long countAllStories();
/**
* Count stories matching tag name filter for random selection
*/
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
"JOIN story_tags st ON s.id = st.story_id " +
"JOIN tags t ON st.tag_id = t.id " +
"WHERE UPPER(t.name) = UPPER(?1)",
nativeQuery = true)
long countStoriesByTagName(String tagName);
/**
* Find a random story using offset (no filters)
*/
@Query(value = "SELECT s.* FROM stories s ORDER BY s.id OFFSET ?1 LIMIT 1", nativeQuery = true)
Optional<Story> findRandomStory(long offset);
/**
* Find a random story matching tag name filter using offset
*/
@Query(value = "SELECT s.* FROM stories s " +
"JOIN story_tags st ON s.id = st.story_id " +
"JOIN tags t ON st.tag_id = t.id " +
"WHERE UPPER(t.name) = UPPER(?1) " +
"ORDER BY s.id OFFSET ?2 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByTagName(String tagName, long offset);
/**
* Count stories matching multiple tags (ALL tags must be present)
*/
@Query(value = "SELECT COUNT(*) FROM (" +
" SELECT DISTINCT s.id FROM stories s " +
" JOIN story_tags st ON s.id = st.story_id " +
" JOIN tags t ON st.tag_id = t.id " +
" WHERE UPPER(t.name) IN (?1) " +
" GROUP BY s.id " +
" HAVING COUNT(DISTINCT t.name) = ?2" +
") as matched_stories",
nativeQuery = true)
long countStoriesByMultipleTags(List<String> upperCaseTagNames, int tagCount);
/**
* Find random story matching multiple tags (ALL tags must be present)
*/
@Query(value = "SELECT s.* FROM stories s " +
"JOIN story_tags st ON s.id = st.story_id " +
"JOIN tags t ON st.tag_id = t.id " +
"WHERE UPPER(t.name) IN (?1) " +
"GROUP BY s.id, s.title, s.summary, s.description, s.content_html, s.content_plain, s.source_url, s.cover_path, s.word_count, s.rating, s.volume, s.is_read, s.reading_position, s.last_read_at, s.author_id, s.series_id, s.created_at, s.updated_at " +
"HAVING COUNT(DISTINCT t.name) = ?2 " +
"ORDER BY s.id OFFSET ?3 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByMultipleTags(List<String> upperCaseTagNames, int tagCount, long offset);
/**
* Count stories matching text search (title, author, tags)
*/
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1))",
nativeQuery = true)
long countStoriesByTextSearch(String searchPattern);
/**
* Find random story matching text search (title, author, tags)
*/
@Query(value = "SELECT DISTINCT s.* FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
"ORDER BY s.id OFFSET ?2 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByTextSearch(String searchPattern, long offset);
/**
* Count stories matching both text search AND tags
*/
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
"AND s.id IN (" +
" SELECT s2.id FROM stories s2 " +
" JOIN story_tags st2 ON s2.id = st2.story_id " +
" JOIN tags t2 ON st2.tag_id = t2.id " +
" WHERE UPPER(t2.name) IN (?2) " +
" GROUP BY s2.id " +
" HAVING COUNT(DISTINCT t2.name) = ?3" +
")",
nativeQuery = true)
long countStoriesByTextSearchAndTags(String searchPattern, List<String> upperCaseTagNames, int tagCount);
/**
* Find random story matching both text search AND tags
*/
@Query(value = "SELECT DISTINCT s.* FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
"AND s.id IN (" +
" SELECT s2.id FROM stories s2 " +
" JOIN story_tags st2 ON s2.id = st2.story_id " +
" JOIN tags t2 ON st2.tag_id = t2.id " +
" WHERE UPPER(t2.name) IN (?2) " +
" GROUP BY s2.id " +
" HAVING COUNT(DISTINCT t2.name) = ?3" +
") " +
"ORDER BY s.id OFFSET ?4 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByTextSearchAndTags(String searchPattern, List<String> upperCaseTagNames, int tagCount, long offset);
}

View File

@@ -0,0 +1,60 @@
package com.storycove.repository;
import com.storycove.entity.TagAlias;
import com.storycove.entity.Tag;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Repository
public interface TagAliasRepository extends JpaRepository<TagAlias, UUID> {
/**
* Find alias by exact alias name (case-insensitive)
*/
@Query("SELECT ta FROM TagAlias ta WHERE LOWER(ta.aliasName) = LOWER(:aliasName)")
Optional<TagAlias> findByAliasNameIgnoreCase(@Param("aliasName") String aliasName);
/**
* Find all aliases for a specific canonical tag
*/
List<TagAlias> findByCanonicalTag(Tag canonicalTag);
/**
* Find all aliases for a specific canonical tag ID
*/
@Query("SELECT ta FROM TagAlias ta WHERE ta.canonicalTag.id = :tagId")
List<TagAlias> findByCanonicalTagId(@Param("tagId") UUID tagId);
/**
* Find aliases created from merge operations
*/
List<TagAlias> findByCreatedFromMergeTrue();
/**
* Check if an alias name already exists
*/
boolean existsByAliasNameIgnoreCase(String aliasName);
/**
* Delete all aliases for a specific tag
*/
void deleteByCanonicalTag(Tag canonicalTag);
/**
* Count aliases for a specific tag
*/
@Query("SELECT COUNT(ta) FROM TagAlias ta WHERE ta.canonicalTag.id = :tagId")
long countByCanonicalTagId(@Param("tagId") UUID tagId);
/**
* Find aliases that start with the given prefix (case-insensitive)
*/
@Query("SELECT ta FROM TagAlias ta WHERE LOWER(ta.aliasName) LIKE LOWER(CONCAT(:prefix, '%'))")
List<TagAlias> findByAliasNameStartingWithIgnoreCase(@Param("prefix") String prefix);
}

View File

@@ -17,8 +17,12 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
Optional<Tag> findByName(String name);
Optional<Tag> findByNameIgnoreCase(String name);
boolean existsByName(String name);
boolean existsByNameIgnoreCase(String name);
List<Tag> findByNameContainingIgnoreCase(String name);
Page<Tag> findByNameContainingIgnoreCase(String name, Pageable pageable);
@@ -54,4 +58,7 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
@Query("SELECT COUNT(t) FROM Tag t WHERE SIZE(t.stories) > 0")
long countUsedTags();
@Query("SELECT t FROM Tag t WHERE SIZE(t.collections) > 0 ORDER BY SIZE(t.collections) DESC, t.name ASC")
List<Tag> findTagsUsedByCollections();
}

View File

@@ -1,84 +0,0 @@
package com.storycove.scheduled;
import com.storycove.entity.Story;
import com.storycove.service.StoryService;
import com.storycove.service.TypesenseService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
import java.util.List;
/**
* Scheduled task to periodically reindex all stories in Typesense
* to ensure search index stays synchronized with database changes.
*/
@Component
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
public class TypesenseIndexScheduler {
private static final Logger logger = LoggerFactory.getLogger(TypesenseIndexScheduler.class);
private final StoryService storyService;
private final TypesenseService typesenseService;
@Autowired
public TypesenseIndexScheduler(StoryService storyService,
@Autowired(required = false) TypesenseService typesenseService) {
this.storyService = storyService;
this.typesenseService = typesenseService;
}
/**
* Scheduled task that runs periodically to reindex all stories in Typesense.
* This ensures the search index stays synchronized with any database changes
* that might have occurred outside of the normal story update flow.
*
* Interval is configurable via storycove.typesense.reindex-interval property (default: 1 hour).
*/
@Scheduled(fixedRateString = "${storycove.typesense.reindex-interval:3600000}")
public void reindexAllStories() {
if (typesenseService == null) {
logger.debug("TypesenseService is not available, skipping scheduled reindexing");
return;
}
logger.info("Starting scheduled Typesense reindexing at {}", LocalDateTime.now());
try {
long startTime = System.currentTimeMillis();
// Get all stories from database with eagerly loaded associations
List<Story> allStories = storyService.findAllWithAssociations();
if (allStories.isEmpty()) {
logger.info("No stories found in database, skipping reindexing");
return;
}
// Perform full reindex
typesenseService.reindexAllStories(allStories);
long endTime = System.currentTimeMillis();
long duration = endTime - startTime;
logger.info("Completed scheduled Typesense reindexing of {} stories in {}ms",
allStories.size(), duration);
} catch (Exception e) {
logger.error("Failed to complete scheduled Typesense reindexing", e);
}
}
/**
* Manual trigger for reindexing - can be called from other services or endpoints if needed
*/
public void triggerManualReindex() {
logger.info("Manual Typesense reindexing triggered");
reindexAllStories();
}
}

View File

@@ -3,6 +3,7 @@ package com.storycove.security;
import com.storycove.util.JwtUtil;
import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
@@ -28,13 +29,27 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
HttpServletResponse response,
FilterChain filterChain) throws ServletException, IOException {
String authHeader = request.getHeader("Authorization");
String token = null;
// First try to get token from Authorization header
String authHeader = request.getHeader("Authorization");
if (authHeader != null && authHeader.startsWith("Bearer ")) {
token = authHeader.substring(7);
}
// If no token in header, try to get from cookies
if (token == null) {
Cookie[] cookies = request.getCookies();
if (cookies != null) {
for (Cookie cookie : cookies) {
if ("token".equals(cookie.getName())) {
token = cookie.getValue();
break;
}
}
}
}
if (token != null && jwtUtil.validateToken(token) && !jwtUtil.isTokenExpired(token)) {
String subject = jwtUtil.getSubjectFromToken(token);

View File

@@ -0,0 +1,122 @@
package com.storycove.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Service
public class AsyncImageProcessingService {
private static final Logger logger = LoggerFactory.getLogger(AsyncImageProcessingService.class);
private final ImageService imageService;
private final StoryService storyService;
private final ImageProcessingProgressService progressService;
@Autowired
public AsyncImageProcessingService(ImageService imageService,
StoryService storyService,
ImageProcessingProgressService progressService) {
this.imageService = imageService;
this.storyService = storyService;
this.progressService = progressService;
}
@Async
public CompletableFuture<Void> processStoryImagesAsync(UUID storyId, String contentHtml) {
logger.info("Starting async image processing for story: {}", storyId);
try {
// Count external images first
int externalImageCount = countExternalImages(contentHtml);
if (externalImageCount == 0) {
logger.debug("No external images found for story {}", storyId);
return CompletableFuture.completedFuture(null);
}
// Start progress tracking
ImageProcessingProgressService.ImageProcessingProgress progress =
progressService.startProgress(storyId, externalImageCount);
// Process images with progress updates
ImageService.ContentImageProcessingResult result =
processImagesWithProgress(contentHtml, storyId, progress);
// Update story with processed content if changed
if (!result.getProcessedContent().equals(contentHtml)) {
progressService.updateProgress(storyId, progress.getTotalImages(),
"Saving processed content", "Updating story content");
storyService.updateContentOnly(storyId, result.getProcessedContent());
progressService.completeProgress(storyId,
String.format("Completed: %d images processed", result.getDownloadedImages().size()));
logger.info("Async image processing completed for story {}: {} images processed",
storyId, result.getDownloadedImages().size());
} else {
progressService.completeProgress(storyId, "Completed: No images needed processing");
}
// Clean up progress after a delay to allow frontend to see completion
CompletableFuture.runAsync(() -> {
try {
Thread.sleep(5000); // 5 seconds delay
progressService.removeProgress(storyId);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
});
} catch (Exception e) {
logger.error("Async image processing failed for story {}: {}", storyId, e.getMessage(), e);
progressService.setError(storyId, e.getMessage());
}
return CompletableFuture.completedFuture(null);
}
private int countExternalImages(String contentHtml) {
if (contentHtml == null || contentHtml.trim().isEmpty()) {
return 0;
}
Pattern imgPattern = Pattern.compile("<img[^>]+src=[\"']([^\"']+)[\"'][^>]*>", Pattern.CASE_INSENSITIVE);
Matcher matcher = imgPattern.matcher(contentHtml);
int count = 0;
while (matcher.find()) {
String src = matcher.group(1);
if (isExternalUrl(src)) {
count++;
}
}
return count;
}
private boolean isExternalUrl(String url) {
return url != null &&
(url.startsWith("http://") || url.startsWith("https://")) &&
!url.contains("/api/files/images/");
}
private ImageService.ContentImageProcessingResult processImagesWithProgress(
String contentHtml, UUID storyId, ImageProcessingProgressService.ImageProcessingProgress progress) {
// Use a custom version of processContentImages that provides progress callbacks
return imageService.processContentImagesWithProgress(contentHtml, storyId,
(currentUrl, processedCount, totalCount) -> {
progressService.updateProgress(storyId, processedCount, currentUrl,
String.format("Processing image %d of %d", processedCount + 1, totalCount));
});
}
}

View File

@@ -11,21 +11,21 @@ import org.springframework.stereotype.Component;
import java.util.List;
@Component
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
@ConditionalOnProperty(name = "storycove.search.enabled", havingValue = "true", matchIfMissing = true)
public class AuthorIndexScheduler {
private static final Logger logger = LoggerFactory.getLogger(AuthorIndexScheduler.class);
private final AuthorService authorService;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
@Autowired
public AuthorIndexScheduler(AuthorService authorService, TypesenseService typesenseService) {
public AuthorIndexScheduler(AuthorService authorService, SearchServiceAdapter searchServiceAdapter) {
this.authorService = authorService;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
}
@Scheduled(fixedRateString = "${storycove.typesense.author-reindex-interval:7200000}") // 2 hours default
@Scheduled(fixedRateString = "${storycove.search.author-reindex-interval:7200000}") // 2 hours default
public void reindexAllAuthors() {
try {
logger.info("Starting scheduled author reindexing...");
@@ -34,7 +34,7 @@ public class AuthorIndexScheduler {
logger.info("Found {} authors to reindex", allAuthors.size());
if (!allAuthors.isEmpty()) {
typesenseService.reindexAllAuthors(allAuthors);
searchServiceAdapter.bulkIndexAuthors(allAuthors);
logger.info("Successfully completed scheduled author reindexing");
} else {
logger.info("No authors found to reindex");

View File

@@ -28,12 +28,12 @@ public class AuthorService {
private static final Logger logger = LoggerFactory.getLogger(AuthorService.class);
private final AuthorRepository authorRepository;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
@Autowired
public AuthorService(AuthorRepository authorRepository, TypesenseService typesenseService) {
public AuthorService(AuthorRepository authorRepository, SearchServiceAdapter searchServiceAdapter) {
this.authorRepository = authorRepository;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
}
@Transactional(readOnly = true)
@@ -132,12 +132,8 @@ public class AuthorService {
validateAuthorForCreate(author);
Author savedAuthor = authorRepository.save(author);
// Index in Typesense
try {
typesenseService.indexAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to index author in Typesense: " + savedAuthor.getName(), e);
}
// Index in Solr
searchServiceAdapter.indexAuthor(savedAuthor);
return savedAuthor;
}
@@ -154,12 +150,8 @@ public class AuthorService {
updateAuthorFields(existingAuthor, authorUpdates);
Author savedAuthor = authorRepository.save(existingAuthor);
// Update in Typesense
try {
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense: " + savedAuthor.getName(), e);
}
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
}
@@ -174,12 +166,8 @@ public class AuthorService {
authorRepository.delete(author);
// Remove from Typesense
try {
typesenseService.deleteAuthor(id.toString());
} catch (Exception e) {
logger.warn("Failed to delete author from Typesense: " + author.getName(), e);
}
// Remove from Solr
searchServiceAdapter.deleteAuthor(id);
}
public Author addUrl(UUID id, String url) {
@@ -187,12 +175,8 @@ public class AuthorService {
author.addUrl(url);
Author savedAuthor = authorRepository.save(author);
// Update in Typesense
try {
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after adding URL: " + savedAuthor.getName(), e);
}
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
}
@@ -202,12 +186,8 @@ public class AuthorService {
author.removeUrl(url);
Author savedAuthor = authorRepository.save(author);
// Update in Typesense
try {
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after removing URL: " + savedAuthor.getName(), e);
}
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
}
@@ -232,7 +212,7 @@ public class AuthorService {
rating, author.getName(), author.getAuthorRating());
author.setAuthorRating(rating);
Author savedAuthor = authorRepository.save(author);
authorRepository.save(author);
// Flush and refresh to ensure the entity is up-to-date
authorRepository.flush();
@@ -241,12 +221,8 @@ public class AuthorService {
logger.debug("Saved author rating: {} for author: {}",
refreshedAuthor.getAuthorRating(), refreshedAuthor.getName());
// Update in Typesense
try {
typesenseService.updateAuthor(refreshedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after rating: " + refreshedAuthor.getName(), e);
}
// Update in Solr
searchServiceAdapter.updateAuthor(refreshedAuthor);
return refreshedAuthor;
}
@@ -289,12 +265,8 @@ public class AuthorService {
author.setAvatarImagePath(avatarPath);
Author savedAuthor = authorRepository.save(author);
// Update in Typesense
try {
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after setting avatar: " + savedAuthor.getName(), e);
}
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
}
@@ -304,12 +276,8 @@ public class AuthorService {
author.setAvatarImagePath(null);
Author savedAuthor = authorRepository.save(author);
// Update in Typesense
try {
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after removing avatar: " + savedAuthor.getName(), e);
}
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
}

View File

@@ -10,6 +10,7 @@ public class CollectionSearchResult extends Collection {
private Integer storedStoryCount;
private Integer storedTotalWordCount;
private int wordsPerMinute = 200; // Default, can be overridden
public CollectionSearchResult(Collection collection) {
this.setId(collection.getId());
@@ -20,6 +21,7 @@ public class CollectionSearchResult extends Collection {
this.setCreatedAt(collection.getCreatedAt());
this.setUpdatedAt(collection.getUpdatedAt());
this.setCoverImagePath(collection.getCoverImagePath());
this.setTagNames(collection.getTagNames()); // Copy tag names for search results
// Note: don't copy collectionStories or tags to avoid lazy loading issues
}
@@ -31,6 +33,10 @@ public class CollectionSearchResult extends Collection {
this.storedTotalWordCount = totalWordCount;
}
public void setWordsPerMinute(int wordsPerMinute) {
this.wordsPerMinute = wordsPerMinute;
}
@Override
public int getStoryCount() {
return storedStoryCount != null ? storedStoryCount : 0;
@@ -43,8 +49,7 @@ public class CollectionSearchResult extends Collection {
@Override
public int getEstimatedReadingTime() {
// Assuming 200 words per minute reading speed
return Math.max(1, getTotalWordCount() / 200);
return Math.max(1, getTotalWordCount() / wordsPerMinute);
}
@Override

View File

@@ -1,6 +1,8 @@
package com.storycove.service;
import com.storycove.dto.SearchResultDto;
import com.storycove.dto.StoryReadingDto;
import com.storycove.dto.TagDto;
import com.storycove.entity.Collection;
import com.storycove.entity.CollectionStory;
import com.storycove.entity.Story;
@@ -9,14 +11,10 @@ import com.storycove.repository.CollectionRepository;
import com.storycove.repository.CollectionStoryRepository;
import com.storycove.repository.StoryRepository;
import com.storycove.repository.TagRepository;
import com.storycove.service.exception.DuplicateResourceException;
import com.storycove.service.exception.ResourceNotFoundException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
@@ -33,19 +31,22 @@ public class CollectionService {
private final CollectionStoryRepository collectionStoryRepository;
private final StoryRepository storyRepository;
private final TagRepository tagRepository;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
private final ReadingTimeService readingTimeService;
@Autowired
public CollectionService(CollectionRepository collectionRepository,
CollectionStoryRepository collectionStoryRepository,
StoryRepository storyRepository,
TagRepository tagRepository,
@Autowired(required = false) TypesenseService typesenseService) {
SearchServiceAdapter searchServiceAdapter,
ReadingTimeService readingTimeService) {
this.collectionRepository = collectionRepository;
this.collectionStoryRepository = collectionStoryRepository;
this.storyRepository = storyRepository;
this.tagRepository = tagRepository;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
this.readingTimeService = readingTimeService;
}
/**
@@ -53,15 +54,12 @@ public class CollectionService {
* This method MUST be used instead of JPA queries for listing collections
*/
public SearchResultDto<Collection> searchCollections(String query, List<String> tags, boolean includeArchived, int page, int limit) {
if (typesenseService == null) {
logger.warn("Typesense service not available, returning empty results");
// Collections are currently handled at database level, not indexed in search engine
// Return empty result for now as collections search is not implemented in Solr
logger.warn("Collections search not yet implemented in Solr, returning empty results");
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
}
// Delegate to TypesenseService for all search operations
return typesenseService.searchCollections(query, tags, includeArchived, page, limit);
}
/**
* Find collection by ID with full details
*/
@@ -78,6 +76,13 @@ public class CollectionService {
.orElseThrow(() -> new ResourceNotFoundException("Collection not found with id: " + id));
}
/**
* Find all collections with tags for reindexing
*/
public List<Collection> findAllWithTags() {
return collectionRepository.findAllWithTags();
}
/**
* Create a new collection with optional initial stories
*/
@@ -99,10 +104,7 @@ public class CollectionService {
savedCollection = findById(savedCollection.getId());
}
// Index in Typesense
if (typesenseService != null) {
typesenseService.indexCollection(savedCollection);
}
// Collections are not indexed in search engine yet
logger.info("Created collection: {} with {} stories", name, initialStoryIds != null ? initialStoryIds.size() : 0);
return savedCollection;
@@ -132,10 +134,7 @@ public class CollectionService {
Collection savedCollection = collectionRepository.save(collection);
// Update in Typesense
if (typesenseService != null) {
typesenseService.indexCollection(savedCollection);
}
// Collections are not indexed in search engine yet
logger.info("Updated collection: {}", id);
return savedCollection;
@@ -147,10 +146,7 @@ public class CollectionService {
public void deleteCollection(UUID id) {
Collection collection = findByIdBasic(id);
// Remove from Typesense first
if (typesenseService != null) {
typesenseService.removeCollection(id);
}
// Collections are not indexed in search engine yet
collectionRepository.delete(collection);
logger.info("Deleted collection: {}", id);
@@ -165,10 +161,7 @@ public class CollectionService {
Collection savedCollection = collectionRepository.save(collection);
// Update in Typesense
if (typesenseService != null) {
typesenseService.indexCollection(savedCollection);
}
// Collections are not indexed in search engine yet
logger.info("{} collection: {}", archived ? "Archived" : "Unarchived", id);
return savedCollection;
@@ -213,10 +206,7 @@ public class CollectionService {
}
// Update collection in Typesense
if (typesenseService != null) {
Collection updatedCollection = findById(collectionId);
typesenseService.indexCollection(updatedCollection);
}
// Collections are not indexed in search engine yet
long totalStories = collectionStoryRepository.countByCollectionId(collectionId);
@@ -241,10 +231,7 @@ public class CollectionService {
collectionStoryRepository.delete(collectionStory);
// Update collection in Typesense
if (typesenseService != null) {
Collection updatedCollection = findById(collectionId);
typesenseService.indexCollection(updatedCollection);
}
// Collections are not indexed in search engine yet
logger.info("Removed story {} from collection {}", storyId, collectionId);
}
@@ -254,7 +241,7 @@ public class CollectionService {
*/
@Transactional
public void reorderStories(UUID collectionId, List<Map<String, Object>> storyOrders) {
Collection collection = findByIdBasic(collectionId);
findByIdBasic(collectionId); // Validate collection exists
// Two-phase update to avoid unique constraint violations:
// Phase 1: Set all positions to negative values (temporary)
@@ -277,10 +264,7 @@ public class CollectionService {
}
// Update collection in Typesense
if (typesenseService != null) {
Collection updatedCollection = findById(collectionId);
typesenseService.indexCollection(updatedCollection);
}
// Collections are not indexed in search engine yet
logger.info("Reordered {} stories in collection {}", storyOrders.size(), collectionId);
}
@@ -326,7 +310,7 @@ public class CollectionService {
);
return Map.of(
"story", story,
"story", convertToReadingDto(story),
"collection", collectionContext
);
}
@@ -344,7 +328,7 @@ public class CollectionService {
int totalWordCount = collectionStories.stream()
.mapToInt(cs -> cs.getStory().getWordCount() != null ? cs.getStory().getWordCount() : 0)
.sum();
int estimatedReadingTime = Math.max(1, totalWordCount / 200); // 200 words per minute
int estimatedReadingTime = readingTimeService.calculateReadingTime(totalWordCount);
double averageStoryRating = collectionStories.stream()
.filter(cs -> cs.getStory().getRating() != null)
@@ -415,9 +399,54 @@ public class CollectionService {
}
/**
* Get all collections for indexing (used by TypesenseService)
* Get all collections for indexing (used by SearchServiceAdapter)
*/
public List<Collection> findAllForIndexing() {
return collectionRepository.findAllActiveCollections();
}
private StoryReadingDto convertToReadingDto(Story story) {
StoryReadingDto dto = new StoryReadingDto();
dto.setId(story.getId());
dto.setTitle(story.getTitle());
dto.setSummary(story.getSummary());
dto.setDescription(story.getDescription());
dto.setContentHtml(story.getContentHtml());
dto.setSourceUrl(story.getSourceUrl());
dto.setCoverPath(story.getCoverPath());
dto.setWordCount(story.getWordCount());
dto.setRating(story.getRating());
dto.setVolume(story.getVolume());
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
}
if (story.getSeries() != null) {
dto.setSeriesId(story.getSeries().getId());
dto.setSeriesName(story.getSeries().getName());
}
dto.setTags(story.getTags().stream()
.map(this::convertTagToDto)
.collect(Collectors.toList()));
return dto;
}
private TagDto convertTagToDto(Tag tag) {
TagDto dto = new TagDto();
dto.setId(tag.getId());
dto.setName(tag.getName());
dto.setStoryCount(tag.getStories().size());
return dto;
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,584 @@
package com.storycove.service;
import com.storycove.dto.EPUBExportRequest;
import com.storycove.entity.Collection;
import com.storycove.entity.ReadingPosition;
import com.storycove.entity.Story;
import com.storycove.repository.ReadingPositionRepository;
import com.storycove.service.exception.ResourceNotFoundException;
import nl.siegmann.epublib.domain.*;
import nl.siegmann.epublib.epub.EpubWriter;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
@Service
@Transactional
public class EPUBExportService {
private final StoryService storyService;
private final ReadingPositionRepository readingPositionRepository;
private final CollectionService collectionService;
@Autowired
public EPUBExportService(StoryService storyService,
ReadingPositionRepository readingPositionRepository,
CollectionService collectionService) {
this.storyService = storyService;
this.readingPositionRepository = readingPositionRepository;
this.collectionService = collectionService;
}
public Resource exportStoryAsEPUB(EPUBExportRequest request) throws IOException {
Story story = storyService.findById(request.getStoryId());
Book book = createEPUBBook(story, request);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
EpubWriter epubWriter = new EpubWriter();
epubWriter.write(book, outputStream);
return new ByteArrayResource(outputStream.toByteArray());
}
public Resource exportCollectionAsEPUB(UUID collectionId, EPUBExportRequest request) throws IOException {
Collection collection = collectionService.findById(collectionId);
List<Story> stories = collection.getCollectionStories().stream()
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
.map(cs -> cs.getStory())
.collect(Collectors.toList());
if (stories.isEmpty()) {
throw new ResourceNotFoundException("Collection contains no stories to export");
}
Book book = createCollectionEPUBBook(collection, stories, request);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
EpubWriter epubWriter = new EpubWriter();
epubWriter.write(book, outputStream);
return new ByteArrayResource(outputStream.toByteArray());
}
private Book createEPUBBook(Story story, EPUBExportRequest request) throws IOException {
Book book = new Book();
setupMetadata(book, story, request);
addCoverImage(book, story, request);
addContent(book, story, request);
addReadingPosition(book, story, request);
return book;
}
private Book createCollectionEPUBBook(Collection collection, List<Story> stories, EPUBExportRequest request) throws IOException {
Book book = new Book();
setupCollectionMetadata(book, collection, stories, request);
addCollectionCoverImage(book, collection, request);
addCollectionContent(book, stories, request);
return book;
}
private void setupMetadata(Book book, Story story, EPUBExportRequest request) {
Metadata metadata = book.getMetadata();
String title = request.getCustomTitle() != null ?
request.getCustomTitle() : story.getTitle();
metadata.addTitle(title);
String authorName = request.getCustomAuthor() != null ?
request.getCustomAuthor() :
(story.getAuthor() != null ? story.getAuthor().getName() : "Unknown Author");
metadata.addAuthor(new Author(authorName));
metadata.setLanguage(request.getLanguage() != null ? request.getLanguage() : "en");
metadata.addIdentifier(new Identifier("storycove", story.getId().toString()));
if (story.getDescription() != null) {
metadata.addDescription(story.getDescription());
}
if (request.getIncludeMetadata()) {
metadata.addDate(new Date(java.util.Date.from(
story.getCreatedAt().atZone(java.time.ZoneId.systemDefault()).toInstant()
), Date.Event.CREATION));
if (story.getSeries() != null) {
// Add series and metadata info to description instead of using addMeta
StringBuilder description = new StringBuilder();
if (story.getDescription() != null) {
description.append(story.getDescription()).append("\n\n");
}
description.append("Series: ").append(story.getSeries().getName());
if (story.getVolume() != null) {
description.append(" (Volume ").append(story.getVolume()).append(")");
}
description.append("\n");
if (story.getWordCount() != null) {
description.append("Word Count: ").append(story.getWordCount()).append("\n");
}
if (story.getRating() != null) {
description.append("Rating: ").append(story.getRating()).append("/5\n");
}
if (!story.getTags().isEmpty()) {
String tags = story.getTags().stream()
.map(tag -> tag.getName())
.reduce((a, b) -> a + ", " + b)
.orElse("");
description.append("Tags: ").append(tags).append("\n");
}
description.append("\nGenerated by StoryCove on ")
.append(LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
metadata.addDescription(description.toString());
}
}
if (request.getCustomMetadata() != null && !request.getCustomMetadata().isEmpty()) {
// Add custom metadata to description since addMeta doesn't exist
StringBuilder customDesc = new StringBuilder();
for (String customMeta : request.getCustomMetadata()) {
String[] parts = customMeta.split(":", 2);
if (parts.length == 2) {
customDesc.append(parts[0].trim()).append(": ").append(parts[1].trim()).append("\n");
}
}
if (customDesc.length() > 0) {
String existingDesc = metadata.getDescriptions().isEmpty() ? "" : metadata.getDescriptions().get(0);
metadata.addDescription(existingDesc + "\n" + customDesc.toString());
}
}
}
private void addCoverImage(Book book, Story story, EPUBExportRequest request) {
if (!request.getIncludeCoverImage() || story.getCoverPath() == null) {
return;
}
try {
Path coverPath = Paths.get(story.getCoverPath());
if (Files.exists(coverPath)) {
byte[] coverImageData = Files.readAllBytes(coverPath);
String mimeType = Files.probeContentType(coverPath);
if (mimeType == null) {
mimeType = "image/jpeg";
}
nl.siegmann.epublib.domain.Resource coverResource =
new nl.siegmann.epublib.domain.Resource(coverImageData, "cover.jpg");
book.setCoverImage(coverResource);
}
} catch (IOException e) {
// Skip cover image on error
}
}
private void addContent(Book book, Story story, EPUBExportRequest request) {
String content = story.getContentHtml();
if (content == null) {
content = story.getContentPlain() != null ?
"<p>" + story.getContentPlain().replace("\n", "</p><p>") + "</p>" :
"<p>No content available</p>";
}
if (request.getSplitByChapters()) {
addChapterizedContent(book, content, request);
} else {
addSingleChapterContent(book, content, story);
}
}
private void addSingleChapterContent(Book book, String content, Story story) {
String html = createChapterHTML(story.getTitle(), content);
nl.siegmann.epublib.domain.Resource chapterResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter.html");
book.addSection(story.getTitle(), chapterResource);
}
private void addChapterizedContent(Book book, String content, EPUBExportRequest request) {
Document doc = Jsoup.parse(content);
Elements chapters = doc.select("div.chapter, h1, h2, h3");
if (chapters.isEmpty()) {
List<String> paragraphs = splitByWords(content,
request.getMaxWordsPerChapter() != null ? request.getMaxWordsPerChapter() : 2000);
for (int i = 0; i < paragraphs.size(); i++) {
String chapterTitle = "Chapter " + (i + 1);
String html = createChapterHTML(chapterTitle, paragraphs.get(i));
nl.siegmann.epublib.domain.Resource chapterResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter" + (i + 1) + ".html");
book.addSection(chapterTitle, chapterResource);
}
} else {
for (int i = 0; i < chapters.size(); i++) {
Element chapter = chapters.get(i);
String chapterTitle = chapter.text();
if (chapterTitle.trim().isEmpty()) {
chapterTitle = "Chapter " + (i + 1);
}
String chapterContent = chapter.html();
String html = createChapterHTML(chapterTitle, chapterContent);
nl.siegmann.epublib.domain.Resource chapterResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter" + (i + 1) + ".html");
book.addSection(chapterTitle, chapterResource);
}
}
}
private List<String> splitByWords(String content, int maxWordsPerChapter) {
String[] words = content.split("\\s+");
List<String> chapters = new ArrayList<>();
StringBuilder currentChapter = new StringBuilder();
int wordCount = 0;
for (String word : words) {
currentChapter.append(word).append(" ");
wordCount++;
if (wordCount >= maxWordsPerChapter) {
chapters.add(currentChapter.toString().trim());
currentChapter = new StringBuilder();
wordCount = 0;
}
}
if (currentChapter.length() > 0) {
chapters.add(currentChapter.toString().trim());
}
return chapters;
}
private String createChapterHTML(String title, String content) {
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1//EN\" " +
"\"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd\">" +
"<html xmlns=\"http://www.w3.org/1999/xhtml\">" +
"<head>" +
"<title>" + escapeHtml(title) + "</title>" +
"<style type=\"text/css\">" +
"body { font-family: serif; margin: 1em; }" +
"h1 { text-align: center; }" +
"p { text-indent: 1em; margin: 0.5em 0; }" +
"</style>" +
"</head>" +
"<body>" +
"<h1>" + escapeHtml(title) + "</h1>" +
fixHtmlForXhtml(content) +
"</body>" +
"</html>";
}
private void addReadingPosition(Book book, Story story, EPUBExportRequest request) {
if (!request.getIncludeReadingPosition()) {
return;
}
Optional<ReadingPosition> positionOpt = readingPositionRepository.findByStoryId(story.getId());
if (positionOpt.isPresent()) {
ReadingPosition position = positionOpt.get();
Metadata metadata = book.getMetadata();
// Add reading position to description since addMeta doesn't exist
StringBuilder positionDesc = new StringBuilder();
if (position.getEpubCfi() != null) {
positionDesc.append("EPUB CFI: ").append(position.getEpubCfi()).append("\n");
}
if (position.getChapterIndex() != null && position.getWordPosition() != null) {
positionDesc.append("Reading Position: Chapter ")
.append(position.getChapterIndex())
.append(", Word ").append(position.getWordPosition()).append("\n");
}
if (position.getPercentageComplete() != null) {
positionDesc.append("Reading Progress: ")
.append(String.format("%.1f%%", position.getPercentageComplete())).append("\n");
}
positionDesc.append("Last Read: ")
.append(position.getUpdatedAt().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
String existingDesc = metadata.getDescriptions().isEmpty() ? "" : metadata.getDescriptions().get(0);
metadata.addDescription(existingDesc + "\n\n--- Reading Position ---\n" + positionDesc.toString());
}
}
private String fixHtmlForXhtml(String html) {
if (html == null) return "";
// Fix common XHTML validation issues
String fixed = html
// Fix self-closing tags to be XHTML compliant
.replaceAll("<br>", "<br />")
.replaceAll("<hr>", "<hr />")
.replaceAll("<img([^>]*)>", "<img$1 />")
.replaceAll("<input([^>]*)>", "<input$1 />")
.replaceAll("<area([^>]*)>", "<area$1 />")
.replaceAll("<base([^>]*)>", "<base$1 />")
.replaceAll("<col([^>]*)>", "<col$1 />")
.replaceAll("<embed([^>]*)>", "<embed$1 />")
.replaceAll("<link([^>]*)>", "<link$1 />")
.replaceAll("<meta([^>]*)>", "<meta$1 />")
.replaceAll("<param([^>]*)>", "<param$1 />")
.replaceAll("<source([^>]*)>", "<source$1 />")
.replaceAll("<track([^>]*)>", "<track$1 />")
.replaceAll("<wbr([^>]*)>", "<wbr$1 />");
return fixed;
}
private String escapeHtml(String text) {
if (text == null) return "";
return text.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\"", "&quot;")
.replace("'", "&#39;");
}
public String getEPUBFilename(Story story) {
StringBuilder filename = new StringBuilder();
if (story.getAuthor() != null) {
filename.append(sanitizeFilename(story.getAuthor().getName()))
.append(" - ");
}
filename.append(sanitizeFilename(story.getTitle()));
if (story.getSeries() != null && story.getVolume() != null) {
filename.append(" (")
.append(sanitizeFilename(story.getSeries().getName()))
.append(" ")
.append(story.getVolume())
.append(")");
}
filename.append(".epub");
return filename.toString();
}
private String sanitizeFilename(String filename) {
if (filename == null) return "unknown";
return filename.replaceAll("[^a-zA-Z0-9._\\- ]", "")
.trim()
.replaceAll("\\s+", "_");
}
private void setupCollectionMetadata(Book book, Collection collection, List<Story> stories, EPUBExportRequest request) {
Metadata metadata = book.getMetadata();
String title = request.getCustomTitle() != null ?
request.getCustomTitle() : collection.getName();
metadata.addTitle(title);
// Use collection creator as author, or combine story authors
String authorName = "Collection";
if (stories.size() == 1) {
Story story = stories.get(0);
authorName = story.getAuthor() != null ? story.getAuthor().getName() : "Unknown Author";
} else {
// For multiple stories, use "Various Authors" or collection name
authorName = "Various Authors";
}
if (request.getCustomAuthor() != null) {
authorName = request.getCustomAuthor();
}
metadata.addAuthor(new Author(authorName));
metadata.setLanguage(request.getLanguage() != null ? request.getLanguage() : "en");
metadata.addIdentifier(new Identifier("storycove-collection", collection.getId().toString()));
// Create description from collection description and story list
StringBuilder description = new StringBuilder();
if (collection.getDescription() != null && !collection.getDescription().trim().isEmpty()) {
description.append(collection.getDescription()).append("\n\n");
}
description.append("This collection contains ").append(stories.size()).append(" stories:\n");
for (int i = 0; i < stories.size() && i < 10; i++) {
Story story = stories.get(i);
description.append((i + 1)).append(". ").append(story.getTitle());
if (story.getAuthor() != null) {
description.append(" by ").append(story.getAuthor().getName());
}
description.append("\n");
}
if (stories.size() > 10) {
description.append("... and ").append(stories.size() - 10).append(" more stories.");
}
metadata.addDescription(description.toString());
if (request.getIncludeMetadata()) {
metadata.addDate(new Date(java.util.Date.from(
collection.getCreatedAt().atZone(java.time.ZoneId.systemDefault()).toInstant()
), Date.Event.CREATION));
// Add collection statistics to description
int totalWordCount = stories.stream().mapToInt(s -> s.getWordCount() != null ? s.getWordCount() : 0).sum();
description.append("\n\nTotal Word Count: ").append(totalWordCount);
description.append("\nGenerated by StoryCove on ")
.append(LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
metadata.addDescription(description.toString());
}
}
private void addCollectionCoverImage(Book book, Collection collection, EPUBExportRequest request) {
if (!request.getIncludeCoverImage()) {
return;
}
try {
// Try to use collection cover first
if (collection.getCoverImagePath() != null) {
Path coverPath = Paths.get(collection.getCoverImagePath());
if (Files.exists(coverPath)) {
byte[] coverImageData = Files.readAllBytes(coverPath);
String mimeType = Files.probeContentType(coverPath);
if (mimeType == null) {
mimeType = "image/jpeg";
}
nl.siegmann.epublib.domain.Resource coverResource =
new nl.siegmann.epublib.domain.Resource(coverImageData, "collection-cover.jpg");
book.setCoverImage(coverResource);
return;
}
}
// TODO: Could generate a composite cover from story covers
// For now, skip cover if collection doesn't have one
} catch (IOException e) {
// Skip cover image on error
}
}
private void addCollectionContent(Book book, List<Story> stories, EPUBExportRequest request) {
// Create table of contents chapter
StringBuilder tocContent = new StringBuilder();
tocContent.append("<h1>Table of Contents</h1>\n<ul>\n");
for (int i = 0; i < stories.size(); i++) {
Story story = stories.get(i);
tocContent.append("<li><a href=\"#story").append(i + 1).append("\">")
.append(escapeHtml(story.getTitle()));
if (story.getAuthor() != null) {
tocContent.append(" by ").append(escapeHtml(story.getAuthor().getName()));
}
tocContent.append("</a></li>\n");
}
tocContent.append("</ul>\n");
String tocHtml = createChapterHTML("Table of Contents", tocContent.toString());
nl.siegmann.epublib.domain.Resource tocResource =
new nl.siegmann.epublib.domain.Resource(tocHtml.getBytes(), "toc.html");
book.addSection("Table of Contents", tocResource);
// Add each story as a chapter
for (int i = 0; i < stories.size(); i++) {
Story story = stories.get(i);
String storyContent = story.getContentHtml();
if (storyContent == null) {
storyContent = story.getContentPlain() != null ?
"<p>" + story.getContentPlain().replace("\n", "</p><p>") + "</p>" :
"<p>No content available</p>";
}
// Add story metadata header
StringBuilder storyHtml = new StringBuilder();
storyHtml.append("<div id=\"story").append(i + 1).append("\">\n");
storyHtml.append("<h1>").append(escapeHtml(story.getTitle())).append("</h1>\n");
if (story.getAuthor() != null) {
storyHtml.append("<p><em>by ").append(escapeHtml(story.getAuthor().getName())).append("</em></p>\n");
}
if (story.getDescription() != null && !story.getDescription().trim().isEmpty()) {
storyHtml.append("<div class=\"summary\">\n")
.append("<p>").append(escapeHtml(story.getDescription())).append("</p>\n")
.append("</div>\n");
}
storyHtml.append("<hr />\n");
storyHtml.append(fixHtmlForXhtml(storyContent));
storyHtml.append("</div>\n");
String chapterTitle = story.getTitle();
if (story.getAuthor() != null) {
chapterTitle += " by " + story.getAuthor().getName();
}
String html = createChapterHTML(chapterTitle, storyHtml.toString());
nl.siegmann.epublib.domain.Resource storyResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "story" + (i + 1) + ".html");
book.addSection(chapterTitle, storyResource);
}
}
public boolean canExportStory(UUID storyId) {
try {
Story story = storyService.findById(storyId);
return story.getContentHtml() != null || story.getContentPlain() != null;
} catch (ResourceNotFoundException e) {
return false;
}
}
public String getCollectionEPUBFilename(Collection collection) {
StringBuilder filename = new StringBuilder();
filename.append(sanitizeFilename(collection.getName()));
filename.append("_collection.epub");
return filename.toString();
}
}

View File

@@ -0,0 +1,551 @@
package com.storycove.service;
import com.storycove.dto.EPUBImportRequest;
import com.storycove.dto.EPUBImportResponse;
import com.storycove.dto.ReadingPositionDto;
import com.storycove.entity.*;
import com.storycove.repository.ReadingPositionRepository;
import com.storycove.service.exception.InvalidFileException;
import com.storycove.service.exception.ResourceNotFoundException;
import nl.siegmann.epublib.domain.Book;
import nl.siegmann.epublib.domain.Metadata;
import nl.siegmann.epublib.domain.Resource;
import nl.siegmann.epublib.domain.SpineReference;
import nl.siegmann.epublib.epub.EpubReader;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Service
@Transactional
public class EPUBImportService {
private static final Logger log = LoggerFactory.getLogger(EPUBImportService.class);
private final StoryService storyService;
private final AuthorService authorService;
private final SeriesService seriesService;
private final TagService tagService;
private final ReadingPositionRepository readingPositionRepository;
private final HtmlSanitizationService sanitizationService;
private final ImageService imageService;
@Autowired
public EPUBImportService(StoryService storyService,
AuthorService authorService,
SeriesService seriesService,
TagService tagService,
ReadingPositionRepository readingPositionRepository,
HtmlSanitizationService sanitizationService,
ImageService imageService) {
this.storyService = storyService;
this.authorService = authorService;
this.seriesService = seriesService;
this.tagService = tagService;
this.readingPositionRepository = readingPositionRepository;
this.sanitizationService = sanitizationService;
this.imageService = imageService;
}
public EPUBImportResponse importEPUB(EPUBImportRequest request) {
try {
MultipartFile epubFile = request.getEpubFile();
if (epubFile == null || epubFile.isEmpty()) {
return EPUBImportResponse.error("EPUB file is required");
}
if (!isValidEPUBFile(epubFile)) {
return EPUBImportResponse.error("Invalid EPUB file format");
}
Book book = parseEPUBFile(epubFile);
Story story = createStoryFromEPUB(book, request);
Story savedStory = storyService.create(story);
// Process embedded images if content contains any
String originalContent = story.getContentHtml();
if (originalContent != null && originalContent.contains("<img")) {
try {
ImageService.ContentImageProcessingResult imageResult =
imageService.processContentImages(originalContent, savedStory.getId());
// Update story content with processed images if changed
if (!imageResult.getProcessedContent().equals(originalContent)) {
savedStory.setContentHtml(imageResult.getProcessedContent());
savedStory = storyService.update(savedStory.getId(), savedStory);
// Log the image processing results
log.debug("EPUB Import - Image processing completed for story {}. Downloaded {} images.",
savedStory.getId(), imageResult.getDownloadedImages().size());
if (imageResult.hasWarnings()) {
log.debug("EPUB Import - Image processing warnings: {}",
String.join(", ", imageResult.getWarnings()));
}
}
} catch (Exception e) {
// Log error but don't fail the import
System.err.println("EPUB Import - Failed to process embedded images for story " +
savedStory.getId() + ": " + e.getMessage());
}
}
EPUBImportResponse response = EPUBImportResponse.success(savedStory.getId(), savedStory.getTitle());
response.setWordCount(savedStory.getWordCount());
response.setTotalChapters(book.getSpine().size());
if (request.getPreserveReadingPosition() != null && request.getPreserveReadingPosition()) {
ReadingPosition readingPosition = extractReadingPosition(book, savedStory);
if (readingPosition != null) {
ReadingPosition savedPosition = readingPositionRepository.save(readingPosition);
response.setReadingPosition(convertToDto(savedPosition));
}
}
return response;
} catch (Exception e) {
return EPUBImportResponse.error("Failed to import EPUB: " + e.getMessage());
}
}
private boolean isValidEPUBFile(MultipartFile file) {
String filename = file.getOriginalFilename();
if (filename == null || !filename.toLowerCase().endsWith(".epub")) {
return false;
}
String contentType = file.getContentType();
return "application/epub+zip".equals(contentType) ||
"application/zip".equals(contentType) ||
contentType == null;
}
private Book parseEPUBFile(MultipartFile epubFile) throws IOException {
try (InputStream inputStream = epubFile.getInputStream()) {
EpubReader epubReader = new EpubReader();
return epubReader.readEpub(inputStream);
} catch (Exception e) {
throw new InvalidFileException("Failed to parse EPUB file: " + e.getMessage());
}
}
private Story createStoryFromEPUB(Book book, EPUBImportRequest request) {
Metadata metadata = book.getMetadata();
String title = extractTitle(metadata);
String authorName = extractAuthorName(metadata, request);
String description = extractDescription(metadata);
String content = extractContent(book);
Story story = new Story();
story.setTitle(title);
story.setDescription(description);
story.setContentHtml(sanitizationService.sanitize(content));
// Extract and process cover image
if (request.getExtractCover() == null || request.getExtractCover()) {
String coverPath = extractAndSaveCoverImage(book);
if (coverPath != null) {
story.setCoverPath(coverPath);
}
}
if (request.getAuthorId() != null) {
try {
Author author = authorService.findById(request.getAuthorId());
story.setAuthor(author);
} catch (ResourceNotFoundException e) {
if (request.getCreateMissingAuthor()) {
Author newAuthor = createAuthor(authorName);
story.setAuthor(newAuthor);
}
}
} else if (authorName != null && request.getCreateMissingAuthor()) {
Author author = findOrCreateAuthor(authorName);
story.setAuthor(author);
}
if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
try {
Series series = seriesService.findById(request.getSeriesId());
story.setSeries(series);
story.setVolume(request.getSeriesVolume());
} catch (ResourceNotFoundException e) {
if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
Series newSeries = createSeries(request.getSeriesName());
story.setSeries(newSeries);
story.setVolume(request.getSeriesVolume());
}
}
}
// Handle tags from request or extract from EPUB metadata
List<String> allTags = new ArrayList<>();
if (request.getTags() != null && !request.getTags().isEmpty()) {
allTags.addAll(request.getTags());
}
// Extract subjects/keywords from EPUB metadata
List<String> epubTags = extractTags(metadata);
if (epubTags != null && !epubTags.isEmpty()) {
allTags.addAll(epubTags);
}
// Remove duplicates and create tags
allTags.stream()
.distinct()
.forEach(tagName -> {
Tag tag = tagService.findOrCreate(tagName.trim());
story.addTag(tag);
});
// Extract additional metadata for potential future use
extractAdditionalMetadata(metadata, story);
return story;
}
private String extractTitle(Metadata metadata) {
List<String> titles = metadata.getTitles();
if (titles != null && !titles.isEmpty()) {
return titles.get(0);
}
return "Untitled EPUB";
}
private String extractAuthorName(Metadata metadata, EPUBImportRequest request) {
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty()) {
return request.getAuthorName().trim();
}
if (metadata.getAuthors() != null && !metadata.getAuthors().isEmpty()) {
return metadata.getAuthors().get(0).getFirstname() + " " + metadata.getAuthors().get(0).getLastname();
}
return "Unknown Author";
}
private String extractDescription(Metadata metadata) {
List<String> descriptions = metadata.getDescriptions();
if (descriptions != null && !descriptions.isEmpty()) {
return descriptions.get(0);
}
return null;
}
private List<String> extractTags(Metadata metadata) {
List<String> tags = new ArrayList<>();
// Extract subjects (main source of tags in EPUB)
List<String> subjects = metadata.getSubjects();
if (subjects != null && !subjects.isEmpty()) {
tags.addAll(subjects);
}
// Extract keywords from meta tags
String keywords = metadata.getMetaAttribute("keywords");
if (keywords != null && !keywords.trim().isEmpty()) {
String[] keywordArray = keywords.split("[,;]");
for (String keyword : keywordArray) {
String trimmed = keyword.trim();
if (!trimmed.isEmpty()) {
tags.add(trimmed);
}
}
}
// Extract genre information
String genre = metadata.getMetaAttribute("genre");
if (genre != null && !genre.trim().isEmpty()) {
tags.add(genre.trim());
}
return tags;
}
private void extractAdditionalMetadata(Metadata metadata, Story story) {
// Extract language (could be useful for future i18n)
String language = metadata.getLanguage();
if (language != null && !language.trim().isEmpty()) {
// Store as metadata in story description if needed
// For now, we'll just log it for potential future use
log.debug("EPUB Language: {}", language);
}
// Extract publisher information
List<String> publishers = metadata.getPublishers();
if (publishers != null && !publishers.isEmpty()) {
String publisher = publishers.get(0);
// Could append to description or store separately in future
log.debug("EPUB Publisher: {}", publisher);
}
// Extract publication date
List<nl.siegmann.epublib.domain.Date> dates = metadata.getDates();
if (dates != null && !dates.isEmpty()) {
for (nl.siegmann.epublib.domain.Date date : dates) {
log.debug("EPUB Date ({}): {}", date.getEvent(), date.getValue());
}
}
// Extract ISBN or other identifiers
List<nl.siegmann.epublib.domain.Identifier> identifiers = metadata.getIdentifiers();
if (identifiers != null && !identifiers.isEmpty()) {
for (nl.siegmann.epublib.domain.Identifier identifier : identifiers) {
log.debug("EPUB Identifier ({}): {}", identifier.getScheme(), identifier.getValue());
}
}
}
private String extractContent(Book book) {
StringBuilder contentBuilder = new StringBuilder();
List<SpineReference> spine = book.getSpine().getSpineReferences();
for (SpineReference spineRef : spine) {
try {
Resource resource = spineRef.getResource();
if (resource != null && resource.getData() != null) {
String html = new String(resource.getData(), "UTF-8");
Document doc = Jsoup.parse(html);
doc.select("script, style").remove();
String chapterContent = doc.body() != null ? doc.body().html() : doc.html();
contentBuilder.append("<div class=\"chapter\">")
.append(chapterContent)
.append("</div>");
}
} catch (Exception e) {
// Skip this chapter on error
continue;
}
}
return contentBuilder.toString();
}
private Author findOrCreateAuthor(String authorName) {
Optional<Author> existingAuthor = authorService.findByNameOptional(authorName);
if (existingAuthor.isPresent()) {
return existingAuthor.get();
}
return createAuthor(authorName);
}
private Author createAuthor(String authorName) {
Author author = new Author();
author.setName(authorName);
return authorService.create(author);
}
private Series createSeries(String seriesName) {
Series series = new Series();
series.setName(seriesName);
return seriesService.create(series);
}
private ReadingPosition extractReadingPosition(Book book, Story story) {
try {
Metadata metadata = book.getMetadata();
String positionMeta = metadata.getMetaAttribute("reading-position");
String cfiMeta = metadata.getMetaAttribute("epub-cfi");
ReadingPosition position = new ReadingPosition(story);
if (cfiMeta != null) {
position.setEpubCfi(cfiMeta);
}
if (positionMeta != null) {
try {
String[] parts = positionMeta.split(":");
if (parts.length >= 2) {
position.setChapterIndex(Integer.parseInt(parts[0]));
position.setWordPosition(Integer.parseInt(parts[1]));
}
} catch (NumberFormatException e) {
// Ignore invalid position format
}
}
return position;
} catch (Exception e) {
// Return null if no reading position found
return null;
}
}
private String extractAndSaveCoverImage(Book book) {
try {
Resource coverResource = book.getCoverImage();
if (coverResource != null && coverResource.getData() != null) {
// Create a temporary MultipartFile from the EPUB cover data
byte[] imageData = coverResource.getData();
String mediaType = coverResource.getMediaType() != null ?
coverResource.getMediaType().toString() : "image/jpeg";
// Determine file extension from media type
String extension = getExtensionFromMediaType(mediaType);
String filename = "epub_cover_" + System.currentTimeMillis() + "." + extension;
// Create a custom MultipartFile implementation for the cover image
MultipartFile coverFile = new EPUBCoverMultipartFile(imageData, filename, mediaType);
// Use ImageService to process and save the cover
return imageService.uploadImage(coverFile, ImageService.ImageType.COVER);
}
} catch (Exception e) {
// Log error but don't fail the import
System.err.println("Failed to extract cover image: " + e.getMessage());
}
return null;
}
private String getExtensionFromMediaType(String mediaType) {
switch (mediaType.toLowerCase()) {
case "image/jpeg":
case "image/jpg":
return "jpg";
case "image/png":
return "png";
case "image/gif":
return "gif";
case "image/webp":
return "webp";
default:
return "jpg"; // Default fallback
}
}
private ReadingPositionDto convertToDto(ReadingPosition position) {
if (position == null) return null;
ReadingPositionDto dto = new ReadingPositionDto();
dto.setId(position.getId());
dto.setStoryId(position.getStory().getId());
dto.setChapterIndex(position.getChapterIndex());
dto.setChapterTitle(position.getChapterTitle());
dto.setWordPosition(position.getWordPosition());
dto.setCharacterPosition(position.getCharacterPosition());
dto.setPercentageComplete(position.getPercentageComplete());
dto.setEpubCfi(position.getEpubCfi());
dto.setContextBefore(position.getContextBefore());
dto.setContextAfter(position.getContextAfter());
dto.setCreatedAt(position.getCreatedAt());
dto.setUpdatedAt(position.getUpdatedAt());
return dto;
}
public List<String> validateEPUBFile(MultipartFile file) {
List<String> errors = new ArrayList<>();
if (file == null || file.isEmpty()) {
errors.add("EPUB file is required");
return errors;
}
if (!isValidEPUBFile(file)) {
errors.add("Invalid EPUB file format. Only .epub files are supported");
}
if (file.getSize() > 100 * 1024 * 1024) { // 100MB limit
errors.add("EPUB file size exceeds 100MB limit");
}
try {
Book book = parseEPUBFile(file);
if (book.getMetadata() == null) {
errors.add("EPUB file contains no metadata");
}
if (book.getSpine() == null || book.getSpine().isEmpty()) {
errors.add("EPUB file contains no readable content");
}
} catch (Exception e) {
errors.add("Failed to parse EPUB file: " + e.getMessage());
}
return errors;
}
/**
* Custom MultipartFile implementation for EPUB cover images
*/
private static class EPUBCoverMultipartFile implements MultipartFile {
private final byte[] data;
private final String filename;
private final String contentType;
public EPUBCoverMultipartFile(byte[] data, String filename, String contentType) {
this.data = data;
this.filename = filename;
this.contentType = contentType;
}
@Override
public String getName() {
return "coverImage";
}
@Override
public String getOriginalFilename() {
return filename;
}
@Override
public String getContentType() {
return contentType;
}
@Override
public boolean isEmpty() {
return data == null || data.length == 0;
}
@Override
public long getSize() {
return data != null ? data.length : 0;
}
@Override
public byte[] getBytes() {
return data;
}
@Override
public InputStream getInputStream() {
return new java.io.ByteArrayInputStream(data);
}
@Override
public void transferTo(java.io.File dest) throws IOException {
try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
fos.write(data);
}
}
@Override
public void transferTo(java.nio.file.Path dest) throws IOException {
java.nio.file.Files.write(dest, data);
}
}
}

View File

@@ -54,7 +54,7 @@ public class HtmlSanitizationService {
"p", "br", "div", "span", "h1", "h2", "h3", "h4", "h5", "h6",
"b", "strong", "i", "em", "u", "s", "strike", "del", "ins",
"sup", "sub", "small", "big", "mark", "pre", "code",
"ul", "ol", "li", "dl", "dt", "dd", "a",
"ul", "ol", "li", "dl", "dt", "dd", "a", "img",
"table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption",
"blockquote", "cite", "q", "hr"
));
@@ -65,7 +65,7 @@ public class HtmlSanitizationService {
}
private void createSafelist() {
this.allowlist = new Safelist();
this.allowlist = Safelist.relaxed();
// Add allowed tags
if (config.getAllowedTags() != null) {
@@ -83,7 +83,34 @@ public class HtmlSanitizationService {
}
}
// Remove specific attributes (like href from links for security)
// Special handling for img tags - allow all src attributes and validate later
allowlist.removeProtocols("img", "src", "http", "https");
// This is the key: preserve relative URLs by not restricting them
allowlist.preserveRelativeLinks(true);
// Configure allowed protocols for other attributes
if (config.getAllowedProtocols() != null) {
for (Map.Entry<String, Map<String, List<String>>> tagEntry : config.getAllowedProtocols().entrySet()) {
String tag = tagEntry.getKey();
Map<String, List<String>> attributeProtocols = tagEntry.getValue();
if (attributeProtocols != null) {
for (Map.Entry<String, List<String>> attrEntry : attributeProtocols.entrySet()) {
String attribute = attrEntry.getKey();
List<String> protocols = attrEntry.getValue();
if (protocols != null && !("img".equals(tag) && "src".equals(attribute))) {
// Skip img src since we handled it above
allowlist.addProtocols(tag, attribute, protocols.toArray(new String[0]));
}
}
}
}
}
logger.info("Configured Jsoup Safelist with preserveRelativeLinks=true for local image URLs");
// Remove specific attributes if needed (deprecated in favor of protocol control)
if (config.getRemovedAttributes() != null) {
for (Map.Entry<String, List<String>> entry : config.getRemovedAttributes().entrySet()) {
String tag = entry.getKey();
@@ -110,12 +137,65 @@ public class HtmlSanitizationService {
return config;
}
/**
* Preprocess HTML to extract images from figure tags before sanitization
*/
private String preprocessFigureTags(String html) {
if (html == null || html.trim().isEmpty()) {
return html;
}
try {
org.jsoup.nodes.Document doc = Jsoup.parse(html);
org.jsoup.select.Elements figures = doc.select("figure");
for (org.jsoup.nodes.Element figure : figures) {
// Find img tags within the figure
org.jsoup.select.Elements images = figure.select("img");
if (!images.isEmpty()) {
// Extract the first image and replace the figure with it
org.jsoup.nodes.Element img = images.first();
// Check if there's a figcaption to preserve as alt text
org.jsoup.select.Elements figcaptions = figure.select("figcaption");
if (!figcaptions.isEmpty() && !img.hasAttr("alt")) {
String captionText = figcaptions.first().text();
if (captionText != null && !captionText.trim().isEmpty()) {
img.attr("alt", captionText);
}
}
// Replace the figure element with just the img
figure.replaceWith(img.clone());
logger.debug("Extracted image from figure tag: {}", img.attr("src"));
} else {
// No images in figure, remove it entirely
figure.remove();
logger.debug("Removed figure tag without images");
}
}
return doc.body().html();
} catch (Exception e) {
logger.warn("Failed to preprocess figure tags, returning original HTML: {}", e.getMessage());
return html;
}
}
public String sanitize(String html) {
if (html == null || html.trim().isEmpty()) {
return "";
}
return Jsoup.clean(html, allowlist);
logger.info("Content before sanitization: "+html);
// Preprocess to extract images from figure tags
String preprocessed = preprocessFigureTags(html);
String saniztedHtml = Jsoup.clean(preprocessed, allowlist.preserveRelativeLinks(true));
logger.info("Content after sanitization: "+saniztedHtml);
return saniztedHtml;
}
public String extractPlainText(String html) {

View File

@@ -0,0 +1,108 @@
package com.storycove.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
@Service
public class ImageProcessingProgressService {
private static final Logger logger = LoggerFactory.getLogger(ImageProcessingProgressService.class);
private final Map<UUID, ImageProcessingProgress> progressMap = new ConcurrentHashMap<>();
public static class ImageProcessingProgress {
private final UUID storyId;
private final int totalImages;
private volatile int processedImages;
private volatile String currentImageUrl;
private volatile String status;
private volatile boolean completed;
private volatile String errorMessage;
public ImageProcessingProgress(UUID storyId, int totalImages) {
this.storyId = storyId;
this.totalImages = totalImages;
this.processedImages = 0;
this.status = "Starting";
this.completed = false;
}
// Getters
public UUID getStoryId() { return storyId; }
public int getTotalImages() { return totalImages; }
public int getProcessedImages() { return processedImages; }
public String getCurrentImageUrl() { return currentImageUrl; }
public String getStatus() { return status; }
public boolean isCompleted() { return completed; }
public String getErrorMessage() { return errorMessage; }
public double getProgressPercentage() {
return totalImages > 0 ? (double) processedImages / totalImages * 100 : 100;
}
// Setters
public void setProcessedImages(int processedImages) { this.processedImages = processedImages; }
public void setCurrentImageUrl(String currentImageUrl) { this.currentImageUrl = currentImageUrl; }
public void setStatus(String status) { this.status = status; }
public void setCompleted(boolean completed) { this.completed = completed; }
public void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; }
public void incrementProcessed() {
this.processedImages++;
}
}
public ImageProcessingProgress startProgress(UUID storyId, int totalImages) {
ImageProcessingProgress progress = new ImageProcessingProgress(storyId, totalImages);
progressMap.put(storyId, progress);
logger.info("Started image processing progress tracking for story {} with {} images", storyId, totalImages);
return progress;
}
public ImageProcessingProgress getProgress(UUID storyId) {
return progressMap.get(storyId);
}
public void updateProgress(UUID storyId, int processedImages, String currentImageUrl, String status) {
ImageProcessingProgress progress = progressMap.get(storyId);
if (progress != null) {
progress.setProcessedImages(processedImages);
progress.setCurrentImageUrl(currentImageUrl);
progress.setStatus(status);
logger.debug("Updated progress for story {}: {}/{} - {}", storyId, processedImages, progress.getTotalImages(), status);
}
}
public void completeProgress(UUID storyId, String finalStatus) {
ImageProcessingProgress progress = progressMap.get(storyId);
if (progress != null) {
progress.setCompleted(true);
progress.setStatus(finalStatus);
logger.info("Completed image processing for story {}: {}", storyId, finalStatus);
}
}
public void setError(UUID storyId, String errorMessage) {
ImageProcessingProgress progress = progressMap.get(storyId);
if (progress != null) {
progress.setErrorMessage(errorMessage);
progress.setStatus("Error: " + errorMessage);
progress.setCompleted(true);
logger.error("Image processing error for story {}: {}", storyId, errorMessage);
}
}
public void removeProgress(UUID storyId) {
progressMap.remove(storyId);
logger.debug("Removed progress tracking for story {}", storyId);
}
public boolean isProcessing(UUID storyId) {
ImageProcessingProgress progress = progressMap.get(storyId);
return progress != null && !progress.isCompleted();
}
}

View File

@@ -1,34 +1,62 @@
package com.storycove.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.event.EventListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import javax.imageio.ImageIO;
import java.awt.*;
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.*;
import java.net.HttpURLConnection;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Set;
import java.util.UUID;
import java.util.*;
import java.util.List;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.storycove.event.StoryContentUpdatedEvent;
@Service
public class ImageService {
private static final Logger logger = LoggerFactory.getLogger(ImageService.class);
private static final Set<String> ALLOWED_CONTENT_TYPES = Set.of(
"image/jpeg", "image/jpg", "image/png", "image/webp"
"image/jpeg", "image/jpg", "image/png"
);
private static final Set<String> ALLOWED_EXTENSIONS = Set.of(
"jpg", "jpeg", "png", "webp"
"jpg", "jpeg", "png"
);
@Value("${storycove.images.upload-dir:/app/images}")
private String uploadDir;
private String baseUploadDir;
@Autowired
private LibraryService libraryService;
@Autowired
private StoryService storyService;
@Autowired
private AuthorService authorService;
@Autowired
private CollectionService collectionService;
private String getUploadDir() {
String libraryPath = libraryService.getCurrentImagePath();
return baseUploadDir + libraryPath;
}
@Value("${storycove.images.cover.max-width:800}")
private int coverMaxWidth;
@@ -44,7 +72,8 @@ public class ImageService {
public enum ImageType {
COVER("covers"),
AVATAR("avatars");
AVATAR("avatars"),
CONTENT("content");
private final String directory;
@@ -61,7 +90,7 @@ public class ImageService {
validateFile(file);
// Create directories if they don't exist
Path typeDir = Paths.get(uploadDir, imageType.getDirectory());
Path typeDir = Paths.get(getUploadDir(), imageType.getDirectory());
Files.createDirectories(typeDir);
// Generate unique filename
@@ -88,7 +117,7 @@ public class ImageService {
}
try {
Path fullPath = Paths.get(uploadDir, imagePath);
Path fullPath = Paths.get(getUploadDir(), imagePath);
return Files.deleteIfExists(fullPath);
} catch (IOException e) {
return false;
@@ -96,7 +125,7 @@ public class ImageService {
}
public Path getImagePath(String imagePath) {
return Paths.get(uploadDir, imagePath);
return Paths.get(getUploadDir(), imagePath);
}
public boolean imageExists(String imagePath) {
@@ -107,6 +136,19 @@ public class ImageService {
return Files.exists(getImagePath(imagePath));
}
public boolean imageExistsInLibrary(String imagePath, String libraryId) {
if (imagePath == null || imagePath.trim().isEmpty() || libraryId == null) {
return false;
}
return Files.exists(getImagePathInLibrary(imagePath, libraryId));
}
public Path getImagePathInLibrary(String imagePath, String libraryId) {
String libraryPath = libraryService.getImagePathForLibrary(libraryId);
return Paths.get(baseUploadDir + libraryPath, imagePath);
}
private void validateFile(MultipartFile file) throws IOException {
if (file == null || file.isEmpty()) {
throw new IllegalArgumentException("File is empty");
@@ -160,6 +202,9 @@ public class ImageService {
maxWidth = avatarMaxSize;
maxHeight = avatarMaxSize;
break;
case CONTENT:
// Content images are not resized
return new Dimension(originalWidth, originalHeight);
default:
return new Dimension(originalWidth, originalHeight);
}
@@ -206,4 +251,819 @@ public class ImageService {
String extension = getFileExtension(filename);
return ALLOWED_EXTENSIONS.contains(extension);
}
// Content image processing methods
/**
* Process HTML content and download all referenced images, replacing URLs with local paths
*/
public ContentImageProcessingResult processContentImages(String htmlContent, UUID storyId) {
logger.debug("Processing content images for story: {}, content length: {}", storyId,
htmlContent != null ? htmlContent.length() : 0);
List<String> warnings = new ArrayList<>();
List<String> downloadedImages = new ArrayList<>();
if (htmlContent == null || htmlContent.trim().isEmpty()) {
logger.debug("No content to process for story: {}", storyId);
return new ContentImageProcessingResult(htmlContent, warnings, downloadedImages);
}
// Find all img tags with src attributes
Pattern imgPattern = Pattern.compile("<img[^>]+src=[\"']([^\"']+)[\"'][^>]*>", Pattern.CASE_INSENSITIVE);
Matcher matcher = imgPattern.matcher(htmlContent);
int imageCount = 0;
int externalImageCount = 0;
StringBuffer processedContent = new StringBuffer();
while (matcher.find()) {
String fullImgTag = matcher.group(0);
String imageUrl = matcher.group(1);
imageCount++;
logger.debug("Found image #{}: {} in tag: {}", imageCount, imageUrl, fullImgTag);
try {
// Skip if it's already a local path or data URL
if (imageUrl.startsWith("/") || imageUrl.startsWith("data:")) {
logger.debug("Skipping local/data URL: {}", imageUrl);
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(fullImgTag));
continue;
}
externalImageCount++;
logger.debug("Processing external image #{}: {}", externalImageCount, imageUrl);
// Download and store the image
String localPath = downloadImageFromUrl(imageUrl, storyId);
downloadedImages.add(localPath);
// Generate local URL
String localUrl = getLocalImageUrl(storyId, localPath);
logger.debug("Downloaded image: {} -> {}", imageUrl, localUrl);
// Replace the src attribute with the local path - handle both single and double quotes
String newImgTag = fullImgTag
.replaceFirst("src=\"" + Pattern.quote(imageUrl) + "\"", "src=\"" + localUrl + "\"")
.replaceFirst("src='" + Pattern.quote(imageUrl) + "'", "src=\"" + localUrl + "\"");
// If replacement didn't work, try a more generic approach
if (newImgTag.equals(fullImgTag)) {
logger.warn("Standard replacement failed for image URL: {}, trying generic replacement", imageUrl);
newImgTag = fullImgTag.replaceAll("src\\s*=\\s*[\"']?" + Pattern.quote(imageUrl) + "[\"']?", "src=\"" + localUrl + "\"");
}
logger.debug("Replaced img tag: {} -> {}", fullImgTag, newImgTag);
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(newImgTag));
} catch (Exception e) {
logger.error("Failed to download image: {} - {}", imageUrl, e.getMessage(), e);
warnings.add("Failed to download image: " + imageUrl + " - " + e.getMessage());
// Keep original URL in case of failure
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(fullImgTag));
}
}
matcher.appendTail(processedContent);
logger.info("Finished processing images for story: {}. Found {} total images, {} external. Downloaded {} images, {} warnings",
storyId, imageCount, externalImageCount, downloadedImages.size(), warnings.size());
return new ContentImageProcessingResult(processedContent.toString(), warnings, downloadedImages);
}
/**
* Functional interface for progress callbacks during image processing
*/
@FunctionalInterface
public interface ImageProcessingProgressCallback {
void onProgress(String currentImageUrl, int processedCount, int totalCount);
}
/**
* Process content images with progress callbacks for async processing
*/
public ContentImageProcessingResult processContentImagesWithProgress(String htmlContent, UUID storyId, ImageProcessingProgressCallback progressCallback) {
logger.debug("Processing content images with progress for story: {}, content length: {}", storyId,
htmlContent != null ? htmlContent.length() : 0);
List<String> warnings = new ArrayList<>();
List<String> downloadedImages = new ArrayList<>();
if (htmlContent == null || htmlContent.trim().isEmpty()) {
logger.debug("No content to process for story: {}", storyId);
return new ContentImageProcessingResult(htmlContent, warnings, downloadedImages);
}
// Find all img tags with src attributes
Pattern imgPattern = Pattern.compile("<img[^>]+src=[\"']([^\"']+)[\"'][^>]*>", Pattern.CASE_INSENSITIVE);
Matcher matcher = imgPattern.matcher(htmlContent);
// First pass: count external images
List<String> externalImages = new ArrayList<>();
Matcher countMatcher = imgPattern.matcher(htmlContent);
while (countMatcher.find()) {
String imageUrl = countMatcher.group(1);
if (!imageUrl.startsWith("/") && !imageUrl.startsWith("data:")) {
externalImages.add(imageUrl);
}
}
int totalExternalImages = externalImages.size();
int processedCount = 0;
StringBuffer processedContent = new StringBuffer();
matcher.reset(); // Reset the matcher for processing
while (matcher.find()) {
String fullImgTag = matcher.group(0);
String imageUrl = matcher.group(1);
logger.debug("Found image: {} in tag: {}", imageUrl, fullImgTag);
try {
// Skip if it's already a local path or data URL
if (imageUrl.startsWith("/") || imageUrl.startsWith("data:")) {
logger.debug("Skipping local/data URL: {}", imageUrl);
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(fullImgTag));
continue;
}
// Call progress callback
if (progressCallback != null) {
progressCallback.onProgress(imageUrl, processedCount, totalExternalImages);
}
logger.debug("Processing external image #{}: {}", processedCount + 1, imageUrl);
// Download and store the image
String localPath = downloadImageFromUrl(imageUrl, storyId);
downloadedImages.add(localPath);
// Generate local URL
String localUrl = getLocalImageUrl(storyId, localPath);
logger.debug("Downloaded image: {} -> {}", imageUrl, localUrl);
// Replace the src attribute with the local path
String newImgTag = fullImgTag
.replaceFirst("src=\"" + Pattern.quote(imageUrl) + "\"", "src=\"" + localUrl + "\"")
.replaceFirst("src='" + Pattern.quote(imageUrl) + "'", "src='" + localUrl + "'");
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(newImgTag));
processedCount++;
} catch (Exception e) {
logger.warn("Failed to download image: {} - Error: {}", imageUrl, e.getMessage());
warnings.add("Failed to download image: " + imageUrl + " - " + e.getMessage());
matcher.appendReplacement(processedContent, Matcher.quoteReplacement(fullImgTag));
}
}
matcher.appendTail(processedContent);
logger.info("Processed {} external images for story: {} (Total: {}, Downloaded: {}, Warnings: {})",
processedCount, storyId, processedCount, downloadedImages.size(), warnings.size());
return new ContentImageProcessingResult(processedContent.toString(), warnings, downloadedImages);
}
/**
* Download an image from a URL and store it locally
*/
private String downloadImageFromUrl(String imageUrl, UUID storyId) throws IOException {
URL url = new URL(imageUrl);
HttpURLConnection connection = (HttpURLConnection) url.openConnection();
// Set a reasonable user agent to avoid blocks
connection.setRequestProperty("User-Agent", "Mozilla/5.0 (StoryCove Image Processor)");
connection.setConnectTimeout(30000); // 30 seconds
connection.setReadTimeout(30000);
try (InputStream inputStream = connection.getInputStream()) {
// Get content type to determine file extension
String contentType = connection.getContentType();
String extension = getExtensionFromContentType(contentType);
if (extension == null) {
// Try to extract from URL
extension = getExtensionFromUrl(imageUrl);
}
if (extension == null || !ALLOWED_EXTENSIONS.contains(extension.toLowerCase())) {
throw new IllegalArgumentException("Unsupported image format: " + contentType);
}
// Create directories for content images
Path contentDir = Paths.get(getUploadDir(), ImageType.CONTENT.getDirectory(), storyId.toString());
Files.createDirectories(contentDir);
// Generate unique filename
String filename = UUID.randomUUID().toString() + "." + extension.toLowerCase();
Path filePath = contentDir.resolve(filename);
// Read and validate the image
byte[] imageData = inputStream.readAllBytes();
ByteArrayInputStream bais = new ByteArrayInputStream(imageData);
BufferedImage image = ImageIO.read(bais);
if (image == null) {
throw new IOException("Invalid image format");
}
// Save the image
Files.write(filePath, imageData);
// Return relative path
return ImageType.CONTENT.getDirectory() + "/" + storyId.toString() + "/" + filename;
} finally {
connection.disconnect();
}
}
/**
* Generate local image URL for serving
*/
private String getLocalImageUrl(UUID storyId, String imagePath) {
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId == null || currentLibraryId.trim().isEmpty()) {
logger.warn("Current library ID is null or empty when generating local image URL for story: {}", storyId);
return "/api/files/images/default/" + imagePath;
}
String localUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
logger.debug("Generated local image URL: {} for story: {}", localUrl, storyId);
return localUrl;
}
/**
* Get file extension from content type
*/
private String getExtensionFromContentType(String contentType) {
if (contentType == null) return null;
switch (contentType.toLowerCase()) {
case "image/jpeg":
case "image/jpg":
return "jpg";
case "image/png":
return "png";
default:
return null;
}
}
/**
* Extract file extension from URL
*/
private String getExtensionFromUrl(String url) {
try {
String path = new URL(url).getPath();
int lastDot = path.lastIndexOf('.');
if (lastDot > 0 && lastDot < path.length() - 1) {
return path.substring(lastDot + 1).toLowerCase();
}
} catch (Exception ignored) {
}
return null;
}
/**
* Cleanup orphaned content images that are no longer referenced in any story
*/
public ContentImageCleanupResult cleanupOrphanedContentImages(boolean dryRun) {
logger.info("Starting orphaned content image cleanup (dryRun: {})", dryRun);
final Set<String> referencedImages;
List<String> orphanedImages = new ArrayList<>();
List<String> errors = new ArrayList<>();
long totalSizeBytes = 0;
int foldersToDelete = 0;
// Step 1: Collect all image references from all story content
logger.debug("Scanning all story content for image references...");
referencedImages = collectAllImageReferences();
logger.debug("Found {} unique image references in story content", referencedImages.size());
try {
// Step 2: Scan the content images directory
Path contentImagesDir = Paths.get(getUploadDir(), ImageType.CONTENT.getDirectory());
if (!Files.exists(contentImagesDir)) {
logger.debug("Content images directory does not exist: {}", contentImagesDir);
return new ContentImageCleanupResult(orphanedImages, 0, 0, referencedImages.size(), errors, dryRun);
}
logger.debug("Scanning content images directory: {}", contentImagesDir);
// Walk through all story directories
Files.walk(contentImagesDir, 2)
.filter(Files::isDirectory)
.filter(path -> !path.equals(contentImagesDir)) // Skip the root content directory
.filter(path -> !isSynologySystemPath(path)) // Skip Synology system directories
.forEach(storyDir -> {
try {
String storyId = storyDir.getFileName().toString();
logger.debug("Checking story directory: {}", storyId);
// Check if this story still exists
boolean storyExists = storyService.findByIdOptional(UUID.fromString(storyId)).isPresent();
if (!storyExists) {
logger.debug("Found orphaned story directory (story deleted): {}", storyId);
// Mark entire directory for deletion
try {
Files.walk(storyDir)
.filter(Files::isRegularFile)
.filter(path -> !isSynologySystemPath(path)) // Skip Synology system files
.filter(path -> isValidImageFile(path)) // Only process actual image files
.forEach(file -> {
try {
long size = Files.size(file);
orphanedImages.add(file.toString());
// Add to total size (will be updated in main scope)
} catch (IOException e) {
errors.add("Failed to get size for " + file + ": " + e.getMessage());
}
});
} catch (IOException e) {
errors.add("Failed to scan orphaned story directory " + storyDir + ": " + e.getMessage());
}
return;
}
// Check individual files in the story directory
try {
Files.walk(storyDir)
.filter(Files::isRegularFile)
.filter(path -> !isSynologySystemPath(path)) // Skip Synology system files
.filter(path -> isValidImageFile(path)) // Only process actual image files
.forEach(imageFile -> {
try {
String filename = imageFile.getFileName().toString();
// Only consider it orphaned if it's not in our referenced filenames
if (!referencedImages.contains(filename)) {
logger.debug("Found orphaned image: {}", filename);
orphanedImages.add(imageFile.toString());
} else {
logger.debug("Image file is referenced, keeping: {}", filename);
}
} catch (Exception e) {
errors.add("Error checking image file " + imageFile + ": " + e.getMessage());
}
});
} catch (IOException e) {
errors.add("Failed to scan story directory " + storyDir + ": " + e.getMessage());
}
} catch (Exception e) {
errors.add("Error processing story directory " + storyDir + ": " + e.getMessage());
}
});
// Calculate total size and count empty directories
for (String orphanedImage : orphanedImages) {
try {
Path imagePath = Paths.get(orphanedImage);
if (Files.exists(imagePath)) {
totalSizeBytes += Files.size(imagePath);
}
} catch (IOException e) {
errors.add("Failed to get size for " + orphanedImage + ": " + e.getMessage());
}
}
// Count empty directories that would be removed
try {
foldersToDelete = (int) Files.walk(contentImagesDir)
.filter(Files::isDirectory)
.filter(path -> !path.equals(contentImagesDir))
.filter(this::isDirectoryEmptyOrWillBeEmpty)
.count();
} catch (IOException e) {
errors.add("Failed to count empty directories: " + e.getMessage());
}
// Step 3: Delete orphaned files if not dry run
if (!dryRun && !orphanedImages.isEmpty()) {
logger.debug("Deleting {} orphaned images...", orphanedImages.size());
Set<Path> directoriesToCheck = new HashSet<>();
for (String orphanedImage : orphanedImages) {
try {
Path imagePath = Paths.get(orphanedImage);
if (Files.exists(imagePath)) {
directoriesToCheck.add(imagePath.getParent());
Files.delete(imagePath);
logger.debug("Deleted orphaned image: {}", imagePath);
}
} catch (IOException e) {
errors.add("Failed to delete " + orphanedImage + ": " + e.getMessage());
}
}
// Clean up empty directories
for (Path dir : directoriesToCheck) {
try {
if (Files.exists(dir) && isDirEmpty(dir)) {
Files.delete(dir);
logger.debug("Deleted empty story directory: {}", dir);
}
} catch (IOException e) {
errors.add("Failed to delete empty directory " + dir + ": " + e.getMessage());
}
}
}
logger.info("Orphaned content image cleanup completed. Found {} orphaned files ({} bytes)",
orphanedImages.size(), totalSizeBytes);
} catch (Exception e) {
logger.error("Error during orphaned content image cleanup", e);
errors.add("General cleanup error: " + e.getMessage());
}
return new ContentImageCleanupResult(orphanedImages, totalSizeBytes, foldersToDelete, referencedImages.size(), errors, dryRun);
}
/**
* Collect all image filenames referenced in content (UUID-based filenames only)
*/
private Set<String> collectAllImageReferences() {
Set<String> referencedFilenames = new HashSet<>();
try {
// Get all stories
List<com.storycove.entity.Story> allStories = storyService.findAllWithAssociations();
// Pattern to match local image URLs in content
Pattern imagePattern = Pattern.compile("src=[\"']([^\"']*(?:content/[^\"']*\\.(jpg|jpeg|png)))[\"']", Pattern.CASE_INSENSITIVE);
for (com.storycove.entity.Story story : allStories) {
// Add story cover image filename if present
if (story.getCoverPath() != null && !story.getCoverPath().trim().isEmpty()) {
String filename = extractFilename(story.getCoverPath());
if (filename != null) {
referencedFilenames.add(filename);
logger.debug("Found cover image filename in story {}: {}", story.getId(), filename);
}
}
// Add author avatar image filename if present
if (story.getAuthor() != null && story.getAuthor().getAvatarImagePath() != null && !story.getAuthor().getAvatarImagePath().trim().isEmpty()) {
String filename = extractFilename(story.getAuthor().getAvatarImagePath());
if (filename != null) {
referencedFilenames.add(filename);
logger.debug("Found avatar image filename for author {}: {}", story.getAuthor().getId(), filename);
}
}
// Add content images from HTML
if (story.getContentHtml() != null) {
Matcher matcher = imagePattern.matcher(story.getContentHtml());
while (matcher.find()) {
String imageSrc = matcher.group(1);
// Extract just the filename from the URL
String filename = extractFilename(imageSrc);
if (filename != null && isUuidBasedFilename(filename)) {
referencedFilenames.add(filename);
logger.debug("Found content image filename in story {}: {}", story.getId(), filename);
}
}
}
}
// Also get all authors separately to catch avatars for authors without stories
List<com.storycove.entity.Author> allAuthors = authorService.findAll();
for (com.storycove.entity.Author author : allAuthors) {
if (author.getAvatarImagePath() != null && !author.getAvatarImagePath().trim().isEmpty()) {
String filename = extractFilename(author.getAvatarImagePath());
if (filename != null) {
referencedFilenames.add(filename);
logger.debug("Found standalone avatar image filename for author {}: {}", author.getId(), filename);
}
}
}
// Also get all collections to catch cover images
List<com.storycove.entity.Collection> allCollections = collectionService.findAllWithTags();
for (com.storycove.entity.Collection collection : allCollections) {
if (collection.getCoverImagePath() != null && !collection.getCoverImagePath().trim().isEmpty()) {
String filename = extractFilename(collection.getCoverImagePath());
if (filename != null) {
referencedFilenames.add(filename);
logger.debug("Found collection cover image filename for collection {}: {}", collection.getId(), filename);
}
}
}
} catch (Exception e) {
logger.error("Error collecting image references from stories", e);
}
return referencedFilenames;
}
/**
* Convert an image src attribute to relative file path
*/
private String convertSrcToRelativePath(String src) {
try {
// Handle both /api/files/images/libraryId/content/... and relative content/... paths
if (src.contains("/content/")) {
int contentIndex = src.indexOf("/content/");
return src.substring(contentIndex + 1); // Remove leading slash, keep "content/..."
}
} catch (Exception e) {
logger.debug("Failed to convert src to relative path: {}", src);
}
return null;
}
/**
* Convert absolute file path to relative path from upload directory
*/
private String convertAbsolutePathToRelative(String absolutePath) {
try {
if (absolutePath == null || absolutePath.trim().isEmpty()) {
return null;
}
Path absPath = Paths.get(absolutePath);
Path uploadDirPath = Paths.get(getUploadDir());
// If the path is already relative to upload dir, return as-is
if (!absPath.isAbsolute()) {
return absolutePath.replace('\\', '/');
}
// Try to make it relative to the upload directory
if (absPath.startsWith(uploadDirPath)) {
Path relativePath = uploadDirPath.relativize(absPath);
return relativePath.toString().replace('\\', '/');
}
// If it's not under upload directory, check if it's library-specific path
String libraryPath = libraryService.getCurrentImagePath();
Path baseUploadPath = Paths.get(baseUploadDir);
if (absPath.startsWith(baseUploadPath)) {
Path relativePath = baseUploadPath.relativize(absPath);
String relativeStr = relativePath.toString().replace('\\', '/');
// Remove library prefix if present to make it library-agnostic for comparison
if (relativeStr.startsWith(libraryPath.substring(1))) { // Remove leading slash from library path
return relativeStr.substring(libraryPath.length() - 1); // Keep the leading slash
}
return relativeStr;
}
// Fallback: just use the filename portion if it's in the right structure
String fileName = absPath.getFileName().toString();
if (fileName.matches(".*\\.(jpg|jpeg|png)$")) {
// Try to preserve directory structure if it looks like covers/ or avatars/
Path parent = absPath.getParent();
if (parent != null) {
String parentName = parent.getFileName().toString();
if (parentName.equals("covers") || parentName.equals("avatars")) {
return parentName + "/" + fileName;
}
}
return fileName;
}
} catch (Exception e) {
logger.debug("Failed to convert absolute path to relative: {}", absolutePath, e);
}
return null;
}
/**
* Get relative image path from absolute file path
*/
private String getRelativeImagePath(Path imageFile) {
try {
Path uploadDir = Paths.get(getUploadDir());
Path relativePath = uploadDir.relativize(imageFile);
return relativePath.toString().replace('\\', '/'); // Normalize path separators
} catch (Exception e) {
logger.debug("Failed to get relative path for: {}", imageFile);
return imageFile.toString();
}
}
/**
* Check if directory is empty or will be empty after cleanup
*/
private boolean isDirectoryEmptyOrWillBeEmpty(Path dir) {
try {
return Files.walk(dir)
.filter(Files::isRegularFile)
.count() == 0;
} catch (IOException e) {
return false;
}
}
/**
* Check if directory is empty
*/
private boolean isDirEmpty(Path dir) {
try {
return Files.list(dir).count() == 0;
} catch (IOException e) {
return false;
}
}
/**
* Clean up content images for a story
*/
public void deleteContentImages(UUID storyId) {
try {
Path contentDir = Paths.get(getUploadDir(), ImageType.CONTENT.getDirectory(), storyId.toString());
if (Files.exists(contentDir)) {
Files.walk(contentDir)
.sorted(Comparator.reverseOrder())
.map(Path::toFile)
.forEach(java.io.File::delete);
}
} catch (IOException e) {
// Log but don't throw - this is cleanup
System.err.println("Failed to clean up content images for story " + storyId + ": " + e.getMessage());
}
}
/**
* Result class for content image processing
*/
public static class ContentImageProcessingResult {
private final String processedContent;
private final List<String> warnings;
private final List<String> downloadedImages;
public ContentImageProcessingResult(String processedContent, List<String> warnings, List<String> downloadedImages) {
this.processedContent = processedContent;
this.warnings = warnings;
this.downloadedImages = downloadedImages;
}
public String getProcessedContent() { return processedContent; }
public List<String> getWarnings() { return warnings; }
public List<String> getDownloadedImages() { return downloadedImages; }
public boolean hasWarnings() { return !warnings.isEmpty(); }
}
/**
* Result class for orphaned image cleanup
*/
public static class ContentImageCleanupResult {
private final List<String> orphanedImages;
private final long totalSizeBytes;
private final int foldersToDelete;
private final int totalReferencedImages;
private final List<String> errors;
private final boolean dryRun;
public ContentImageCleanupResult(List<String> orphanedImages, long totalSizeBytes, int foldersToDelete,
int totalReferencedImages, List<String> errors, boolean dryRun) {
this.orphanedImages = orphanedImages;
this.totalSizeBytes = totalSizeBytes;
this.foldersToDelete = foldersToDelete;
this.totalReferencedImages = totalReferencedImages;
this.errors = errors;
this.dryRun = dryRun;
}
public List<String> getOrphanedImages() { return orphanedImages; }
public long getTotalSizeBytes() { return totalSizeBytes; }
public int getFoldersToDelete() { return foldersToDelete; }
public int getTotalReferencedImages() { return totalReferencedImages; }
public List<String> getErrors() { return errors; }
public boolean isDryRun() { return dryRun; }
public boolean hasErrors() { return !errors.isEmpty(); }
public String getFormattedSize() {
if (totalSizeBytes < 1024) return totalSizeBytes + " B";
if (totalSizeBytes < 1024 * 1024) return String.format("%.1f KB", totalSizeBytes / 1024.0);
if (totalSizeBytes < 1024 * 1024 * 1024) return String.format("%.1f MB", totalSizeBytes / (1024.0 * 1024.0));
return String.format("%.1f GB", totalSizeBytes / (1024.0 * 1024.0 * 1024.0));
}
}
/**
* Check if a path is a Synology system path that should be ignored
*/
private boolean isSynologySystemPath(Path path) {
String pathStr = path.toString();
String fileName = path.getFileName().toString();
// Skip Synology metadata directories and files
return pathStr.contains("@eaDir") ||
fileName.startsWith("@") ||
fileName.contains("@SynoEAStream") ||
fileName.startsWith(".") ||
fileName.equals("Thumbs.db") ||
fileName.equals(".DS_Store");
}
/**
* Check if a file is a valid image file (not a system/metadata file)
*/
private boolean isValidImageFile(Path path) {
if (isSynologySystemPath(path)) {
return false;
}
String fileName = path.getFileName().toString().toLowerCase();
return fileName.endsWith(".jpg") ||
fileName.endsWith(".jpeg") ||
fileName.endsWith(".png") ||
fileName.endsWith(".gif") ||
fileName.endsWith(".webp");
}
/**
* Extract filename from a path or URL
*/
private String extractFilename(String pathOrUrl) {
if (pathOrUrl == null || pathOrUrl.trim().isEmpty()) {
return null;
}
try {
// Remove query parameters if present
if (pathOrUrl.contains("?")) {
pathOrUrl = pathOrUrl.substring(0, pathOrUrl.indexOf("?"));
}
// Get the last part after slash
String filename = pathOrUrl.substring(pathOrUrl.lastIndexOf("/") + 1);
// Remove any special Synology suffixes
filename = filename.replace("@SynoEAStream", "");
return filename.trim().isEmpty() ? null : filename;
} catch (Exception e) {
logger.debug("Failed to extract filename from: {}", pathOrUrl);
return null;
}
}
/**
* Check if a filename follows UUID pattern (indicates it's our generated file)
*/
private boolean isUuidBasedFilename(String filename) {
if (filename == null || filename.trim().isEmpty()) {
return false;
}
// Remove extension
String nameWithoutExt = filename;
int lastDot = filename.lastIndexOf(".");
if (lastDot > 0) {
nameWithoutExt = filename.substring(0, lastDot);
}
// Check if it matches UUID pattern (8-4-4-4-12 hex characters)
return nameWithoutExt.matches("[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}");
}
/**
* Event listener for story content updates - processes external images asynchronously
*/
@EventListener
@Async
public void handleStoryContentUpdated(StoryContentUpdatedEvent event) {
logger.info("Processing images for {} story {} after content update",
event.isNewStory() ? "new" : "updated", event.getStoryId());
try {
ContentImageProcessingResult result = processContentImages(event.getContentHtml(), event.getStoryId());
// If content was changed, we need to update the story (but this could cause circular events)
// Instead, let's just log the results for now and let the controller handle updates if needed
if (result.hasWarnings()) {
logger.warn("Image processing warnings for story {}: {}", event.getStoryId(), result.getWarnings());
}
if (!result.getDownloadedImages().isEmpty()) {
logger.info("Downloaded {} external images for story {}: {}",
result.getDownloadedImages().size(), event.getStoryId(), result.getDownloadedImages());
}
// TODO: If content was changed, we might need a way to update the story without triggering another event
if (!result.getProcessedContent().equals(event.getContentHtml())) {
logger.info("Story {} content was processed and external images were replaced with local URLs", event.getStoryId());
// For now, just log that processing occurred - the original content processing already handles updates
}
} catch (Exception e) {
logger.error("Failed to process images for story {}: {}", event.getStoryId(), e.getMessage(), e);
}
}
}

View File

@@ -0,0 +1,73 @@
package com.storycove.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Base service class that provides library-aware database access.
*
* This approach is safer than routing at the datasource level because:
* 1. It doesn't interfere with Spring's initialization process
* 2. It allows fine-grained control over which operations are library-aware
* 3. It provides clear separation between authentication (uses default DB) and library operations
*/
@Component
public class LibraryAwareService {
@Autowired
private LibraryService libraryService;
@Autowired
@Qualifier("dataSource")
private DataSource defaultDataSource;
/**
* Get a database connection for the current active library.
* Falls back to default datasource if no library is active.
*/
public Connection getCurrentLibraryConnection() throws SQLException {
try {
// Try to get library-specific connection
DataSource libraryDataSource = libraryService.getCurrentDataSource();
return libraryDataSource.getConnection();
} catch (IllegalStateException e) {
// No active library - use default datasource
return defaultDataSource.getConnection();
}
}
/**
* Get a database connection for the default/fallback database.
* Use this for authentication and system-level operations.
*/
public Connection getDefaultConnection() throws SQLException {
return defaultDataSource.getConnection();
}
/**
* Check if a library is currently active
*/
public boolean hasActiveLibrary() {
try {
return libraryService.getCurrentLibraryId() != null;
} catch (Exception e) {
return false;
}
}
/**
* Get the current active library ID, or null if none
*/
public String getCurrentLibraryId() {
try {
return libraryService.getCurrentLibraryId();
} catch (Exception e) {
return null;
}
}
}

View File

@@ -0,0 +1,830 @@
package com.storycove.service;
import com.storycove.entity.Library;
import com.storycove.dto.LibraryDto;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import javax.sql.DataSource;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@Service
public class LibraryService implements ApplicationContextAware {
private static final Logger logger = LoggerFactory.getLogger(LibraryService.class);
@Value("${spring.datasource.url}")
private String baseDbUrl;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
private final ObjectMapper objectMapper = new ObjectMapper();
private final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
private final Map<String, Library> libraries = new ConcurrentHashMap<>();
// Spring ApplicationContext for accessing other services without circular dependencies
private ApplicationContext applicationContext;
// Current active resources
private volatile String currentLibraryId;
// Security: Track if user has explicitly authenticated in this session
private volatile boolean explicitlyAuthenticated = false;
private static final String LIBRARIES_CONFIG_PATH = "/app/config/libraries.json";
private static final Path libraryConfigDir = Paths.get("/app/config");
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
@PostConstruct
public void initialize() {
loadLibrariesFromFile();
// If no libraries exist, create a default one
if (libraries.isEmpty()) {
createDefaultLibrary();
}
// Security: Do NOT automatically switch to any library on startup
// Users must authenticate before accessing any library
explicitlyAuthenticated = false;
currentLibraryId = null;
if (!libraries.isEmpty()) {
logger.info("Loaded {} libraries. Authentication required to access any library.", libraries.size());
} else {
logger.info("No libraries found. A default library will be created on first authentication.");
}
logger.info("Security: Application startup completed. All users must re-authenticate.");
}
@PreDestroy
public void cleanup() {
currentLibraryId = null;
explicitlyAuthenticated = false;
}
/**
* Clear authentication state (for logout)
*/
public void clearAuthentication() {
explicitlyAuthenticated = false;
currentLibraryId = null;
logger.info("Authentication cleared - user must re-authenticate to access libraries");
}
public String authenticateAndGetLibrary(String password) {
for (Library library : libraries.values()) {
if (passwordEncoder.matches(password, library.getPasswordHash())) {
// Mark as explicitly authenticated for this session
explicitlyAuthenticated = true;
logger.info("User explicitly authenticated for library: {}", library.getId());
return library.getId();
}
}
return null; // Authentication failed
}
/**
* Switch to library after authentication with forced reindexing
* This ensures Solr is always up-to-date after login
*/
public synchronized void switchToLibraryAfterAuthentication(String libraryId) throws Exception {
logger.info("Switching to library after authentication: {} (forcing reindex)", libraryId);
switchToLibrary(libraryId, true);
}
public synchronized void switchToLibrary(String libraryId) throws Exception {
switchToLibrary(libraryId, false);
}
public synchronized void switchToLibrary(String libraryId, boolean forceReindex) throws Exception {
// Security: Only allow library switching after explicit authentication
if (!explicitlyAuthenticated) {
throw new IllegalStateException("Library switching requires explicit authentication. Please log in first.");
}
if (libraryId.equals(currentLibraryId) && !forceReindex) {
return; // Already active and no forced reindex requested
}
Library library = libraries.get(libraryId);
if (library == null) {
throw new IllegalArgumentException("Library not found: " + libraryId);
}
String previousLibraryId = currentLibraryId;
if (libraryId.equals(currentLibraryId) && forceReindex) {
logger.debug("Forcing reindex for current library: {} ({})", library.getName(), libraryId);
} else {
logger.debug("Switching to library: {} ({})", library.getName(), libraryId);
}
// Close current resources
closeCurrentResources();
// Set new active library (datasource routing handled by SmartRoutingDataSource)
currentLibraryId = libraryId;
// Solr indexes are global - no per-library initialization needed
logger.debug("Library switched to Solr mode for library: {}", libraryId);
logger.info("Successfully switched to library: {}", library.getName());
// Perform complete reindex AFTER library switch is fully complete
// This ensures database routing is properly established
if (forceReindex || !libraryId.equals(previousLibraryId)) {
logger.debug("Starting post-switch Solr reindex for library: {}", libraryId);
// Run reindex asynchronously to avoid blocking authentication response
// and allow time for database routing to fully stabilize
String finalLibraryId = libraryId;
new Thread(() -> {
try {
// Give routing time to stabilize
Thread.sleep(500);
logger.debug("Starting async Solr reindex for library: {}", finalLibraryId);
SearchServiceAdapter searchService = applicationContext.getBean(SearchServiceAdapter.class);
// Get all stories and authors for reindexing
StoryService storyService = applicationContext.getBean(StoryService.class);
AuthorService authorService = applicationContext.getBean(AuthorService.class);
var allStories = storyService.findAllWithAssociations();
var allAuthors = authorService.findAllWithStories();
searchService.bulkIndexStories(allStories);
searchService.bulkIndexAuthors(allAuthors);
logger.info("Completed async Solr reindexing for library: {} ({} stories, {} authors)",
finalLibraryId, allStories.size(), allAuthors.size());
} catch (Exception e) {
logger.warn("Failed to async reindex Solr for library {}: {}", finalLibraryId, e.getMessage());
}
}, "SolrReindex-" + libraryId).start();
}
}
public DataSource getCurrentDataSource() {
if (currentLibraryId == null) {
throw new IllegalStateException("No active library - please authenticate first");
}
// Return the Spring-managed primary datasource which handles routing automatically
try {
return applicationContext.getBean("dataSource", DataSource.class);
} catch (Exception e) {
throw new IllegalStateException("Failed to get routing datasource", e);
}
}
public String getCurrentLibraryId() {
return currentLibraryId;
}
public Library getCurrentLibrary() {
if (currentLibraryId == null) {
return null;
}
return libraries.get(currentLibraryId);
}
public List<LibraryDto> getAllLibraries() {
List<LibraryDto> result = new ArrayList<>();
for (Library library : libraries.values()) {
boolean isActive = library.getId().equals(currentLibraryId);
result.add(new LibraryDto(
library.getId(),
library.getName(),
library.getDescription(),
isActive,
library.isInitialized()
));
}
return result;
}
public LibraryDto getLibraryById(String libraryId) {
Library library = libraries.get(libraryId);
if (library != null) {
boolean isActive = library.getId().equals(currentLibraryId);
return new LibraryDto(
library.getId(),
library.getName(),
library.getDescription(),
isActive,
library.isInitialized()
);
}
return null;
}
public String getCurrentImagePath() {
Library current = getCurrentLibrary();
return current != null ? current.getImagePath() : "/images/default";
}
public String getImagePathForLibrary(String libraryId) {
if (libraryId == null) {
return "/images/default";
}
Library library = libraries.get(libraryId);
return library != null ? library.getImagePath() : "/images/default";
}
public boolean changeLibraryPassword(String libraryId, String currentPassword, String newPassword) {
Library library = libraries.get(libraryId);
if (library == null) {
return false;
}
// Verify current password
if (!passwordEncoder.matches(currentPassword, library.getPasswordHash())) {
return false;
}
// Update password
library.setPasswordHash(passwordEncoder.encode(newPassword));
saveLibrariesToFile();
logger.info("Password changed for library: {}", library.getName());
return true;
}
public Library createNewLibrary(String name, String description, String password) {
// Generate unique ID
String id = name.toLowerCase().replaceAll("[^a-z0-9]", "");
int counter = 1;
String originalId = id;
while (libraries.containsKey(id)) {
id = originalId + counter++;
}
Library newLibrary = new Library(
id,
name,
description,
passwordEncoder.encode(password),
"storycove_" + id
);
try {
// Test database creation by creating a connection
DataSource testDs = createDataSource(newLibrary.getDbName());
testDs.getConnection().close(); // This will create the database and schema if it doesn't exist
// Initialize library resources (image directories)
initializeNewLibraryResources(id);
newLibrary.setInitialized(true);
logger.info("Database and resources created for library: {}", newLibrary.getDbName());
} catch (Exception e) {
logger.warn("Database/resource creation failed for library {}: {}", id, e.getMessage());
// Continue anyway - resources will be created when needed
}
libraries.put(id, newLibrary);
saveLibrariesToFile();
logger.info("Created new library: {} ({})", name, id);
return newLibrary;
}
private void loadLibrariesFromFile() {
try {
File configFile = new File(LIBRARIES_CONFIG_PATH);
if (configFile.exists()) {
String content = Files.readString(Paths.get(LIBRARIES_CONFIG_PATH));
Map<String, Object> config = objectMapper.readValue(content, new TypeReference<Map<String, Object>>() {});
@SuppressWarnings("unchecked")
Map<String, Map<String, Object>> librariesData = (Map<String, Map<String, Object>>) config.get("libraries");
for (Map.Entry<String, Map<String, Object>> entry : librariesData.entrySet()) {
String id = entry.getKey();
Map<String, Object> data = entry.getValue();
Library library = new Library();
library.setId(id);
library.setName((String) data.get("name"));
library.setDescription((String) data.get("description"));
library.setPasswordHash((String) data.get("passwordHash"));
library.setDbName((String) data.get("dbName"));
library.setInitialized((Boolean) data.getOrDefault("initialized", false));
libraries.put(id, library);
logger.debug("Loaded library: {} ({})", library.getName(), id);
}
} else {
logger.debug("No libraries configuration file found, will create default");
}
} catch (IOException e) {
logger.error("Failed to load libraries configuration", e);
}
}
private void createDefaultLibrary() {
// Check if we're migrating from the old single-library system
String existingDbName = extractDatabaseName(baseDbUrl);
Library defaultLibrary = new Library(
"main",
"Main Library",
"Your existing story collection (migrated)",
passwordEncoder.encode("temp-password-change-me"), // Temporary password
existingDbName // Use existing database name
);
defaultLibrary.setInitialized(true); // Mark as initialized since it has existing data
libraries.put("main", defaultLibrary);
saveLibrariesToFile();
logger.warn("=".repeat(80));
logger.warn("MIGRATION: Created 'Main Library' for your existing data");
logger.warn("Temporary password: 'temp-password-change-me'");
logger.warn("IMPORTANT: Please set a proper password in Settings > Library Settings");
logger.warn("=".repeat(80));
}
private String extractDatabaseName(String jdbcUrl) {
// Extract database name from JDBC URL like "jdbc:postgresql://db:5432/storycove"
int lastSlash = jdbcUrl.lastIndexOf('/');
if (lastSlash != -1 && lastSlash < jdbcUrl.length() - 1) {
String dbPart = jdbcUrl.substring(lastSlash + 1);
// Remove any query parameters
int queryStart = dbPart.indexOf('?');
return queryStart != -1 ? dbPart.substring(0, queryStart) : dbPart;
}
return "storycove"; // fallback
}
private void saveLibrariesToFile() {
try {
Map<String, Object> config = new HashMap<>();
Map<String, Map<String, Object>> librariesData = new HashMap<>();
for (Library library : libraries.values()) {
Map<String, Object> data = new HashMap<>();
data.put("name", library.getName());
data.put("description", library.getDescription());
data.put("passwordHash", library.getPasswordHash());
data.put("dbName", library.getDbName());
data.put("initialized", library.isInitialized());
librariesData.put(library.getId(), data);
}
config.put("libraries", librariesData);
// Ensure config directory exists
new File("/app/config").mkdirs();
String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
Files.writeString(Paths.get(LIBRARIES_CONFIG_PATH), json);
logger.debug("Saved libraries configuration");
} catch (IOException e) {
logger.error("Failed to save libraries configuration", e);
}
}
private DataSource createDataSource(String dbName) {
String url = baseDbUrl.replaceAll("/[^/]*$", "/" + dbName);
logger.debug("Creating DataSource for: {}", url);
// First, ensure the database exists
ensureDatabaseExists(dbName);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(url);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(10);
config.setConnectionTimeout(30000);
return new HikariDataSource(config);
}
private void ensureDatabaseExists(String dbName) {
// Connect to the 'postgres' database to create the new database
String adminUrl = baseDbUrl.replaceAll("/[^/]*$", "/postgres");
HikariConfig adminConfig = new HikariConfig();
adminConfig.setJdbcUrl(adminUrl);
adminConfig.setUsername(dbUsername);
adminConfig.setPassword(dbPassword);
adminConfig.setDriverClassName("org.postgresql.Driver");
adminConfig.setMaximumPoolSize(1);
adminConfig.setConnectionTimeout(30000);
boolean databaseCreated = false;
try (HikariDataSource adminDataSource = new HikariDataSource(adminConfig);
var connection = adminDataSource.getConnection();
var statement = connection.createStatement()) {
// Check if database exists
String checkQuery = "SELECT 1 FROM pg_database WHERE datname = ?";
try (var preparedStatement = connection.prepareStatement(checkQuery)) {
preparedStatement.setString(1, dbName);
try (var resultSet = preparedStatement.executeQuery()) {
if (resultSet.next()) {
logger.debug("Database {} already exists", dbName);
return; // Database exists, nothing to do
}
}
}
// Create database if it doesn't exist
// Note: Database names cannot be parameterized, but we validate the name is safe
if (!dbName.matches("^[a-zA-Z][a-zA-Z0-9_]*$")) {
throw new IllegalArgumentException("Invalid database name: " + dbName);
}
String createQuery = "CREATE DATABASE " + dbName;
statement.executeUpdate(createQuery);
logger.info("Created database: {}", dbName);
databaseCreated = true;
} catch (SQLException e) {
logger.error("Failed to ensure database {} exists: {}", dbName, e.getMessage());
throw new RuntimeException("Database creation failed", e);
}
// If we just created the database, initialize its schema
if (databaseCreated) {
initializeNewDatabaseSchema(dbName);
}
}
private void initializeNewDatabaseSchema(String dbName) {
logger.debug("Initializing schema for new database: {}", dbName);
// Create a temporary DataSource for the new database to initialize schema
String newDbUrl = baseDbUrl.replaceAll("/[^/]*$", "/" + dbName);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(newDbUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(1);
config.setConnectionTimeout(30000);
try (HikariDataSource tempDataSource = new HikariDataSource(config)) {
// Use Hibernate to create the schema
// This mimics what Spring Boot does during startup
createSchemaUsingHibernate(tempDataSource);
logger.debug("Schema initialized for database: {}", dbName);
} catch (Exception e) {
logger.error("Failed to initialize schema for database {}: {}", dbName, e.getMessage());
throw new RuntimeException("Schema initialization failed", e);
}
}
public void initializeNewLibraryResources(String libraryId) {
Library library = libraries.get(libraryId);
if (library == null) {
throw new IllegalArgumentException("Library not found: " + libraryId);
}
try {
logger.debug("Initializing resources for new library: {}", library.getName());
// 1. Create image directory structure
initializeImageDirectories(library);
// 2. Solr indexes are global and managed automatically
// No per-library initialization needed for Solr
logger.debug("Successfully initialized resources for library: {}", library.getName());
} catch (Exception e) {
logger.error("Failed to initialize resources for library {}: {}", libraryId, e.getMessage());
throw new RuntimeException("Library resource initialization failed", e);
}
}
private void initializeImageDirectories(Library library) {
try {
// Create the library-specific image directory
String imagePath = "/app/images/" + library.getId();
java.nio.file.Path libraryImagePath = java.nio.file.Paths.get(imagePath);
if (!java.nio.file.Files.exists(libraryImagePath)) {
java.nio.file.Files.createDirectories(libraryImagePath);
logger.debug("Created image directory: {}", imagePath);
// Create subdirectories for different image types
java.nio.file.Files.createDirectories(libraryImagePath.resolve("stories"));
java.nio.file.Files.createDirectories(libraryImagePath.resolve("authors"));
java.nio.file.Files.createDirectories(libraryImagePath.resolve("collections"));
logger.debug("Created image subdirectories for library: {}", library.getId());
} else {
logger.debug("Image directory already exists: {}", imagePath);
}
} catch (Exception e) {
logger.error("Failed to create image directories for library {}: {}", library.getId(), e.getMessage());
throw new RuntimeException("Image directory creation failed", e);
}
}
private void createSchemaUsingHibernate(DataSource dataSource) {
// Create the essential tables manually using the same DDL that Hibernate would generate
// This is simpler than setting up a full Hibernate configuration for schema creation
String[] createTableStatements = {
// Authors table
"""
CREATE TABLE authors (
author_rating integer,
created_at timestamp(6) not null,
updated_at timestamp(6) not null,
id uuid not null,
avatar_image_path varchar(255),
name varchar(255) not null,
notes TEXT,
primary key (id)
)
""",
// Author URLs table
"""
CREATE TABLE author_urls (
author_id uuid not null,
url varchar(255)
)
""",
// Series table
"""
CREATE TABLE series (
created_at timestamp(6) not null,
id uuid not null,
description varchar(1000),
name varchar(255) not null,
primary key (id)
)
""",
// Tags table
"""
CREATE TABLE tags (
color varchar(7),
created_at timestamp(6) not null,
id uuid not null,
description varchar(500),
name varchar(255) not null unique,
primary key (id)
)
""",
// Tag aliases table
"""
CREATE TABLE tag_aliases (
created_from_merge boolean not null,
created_at timestamp(6) not null,
canonical_tag_id uuid not null,
id uuid not null,
alias_name varchar(255) not null unique,
primary key (id)
)
""",
// Collections table
"""
CREATE TABLE collections (
is_archived boolean not null,
rating integer,
created_at timestamp(6) not null,
updated_at timestamp(6) not null,
id uuid not null,
cover_image_path varchar(500),
name varchar(500) not null,
description TEXT,
primary key (id)
)
""",
// Stories table
"""
CREATE TABLE stories (
is_read boolean,
rating integer,
reading_position integer,
volume integer,
word_count integer,
created_at timestamp(6) not null,
last_read_at timestamp(6),
updated_at timestamp(6) not null,
author_id uuid,
id uuid not null,
series_id uuid,
description varchar(1000),
content_html TEXT,
content_plain TEXT,
cover_path varchar(255),
source_url varchar(255),
summary TEXT,
title varchar(255) not null,
primary key (id)
)
""",
// Reading positions table
"""
CREATE TABLE reading_positions (
chapter_index integer,
character_position integer,
percentage_complete float(53),
word_position integer,
created_at timestamp(6) not null,
updated_at timestamp(6) not null,
id uuid not null,
story_id uuid not null,
context_after varchar(500),
context_before varchar(500),
chapter_title varchar(255),
epub_cfi TEXT,
primary key (id)
)
""",
// Junction tables
"""
CREATE TABLE story_tags (
story_id uuid not null,
tag_id uuid not null,
primary key (story_id, tag_id)
)
""",
"""
CREATE TABLE collection_stories (
position integer not null,
added_at timestamp(6) not null,
collection_id uuid not null,
story_id uuid not null,
primary key (collection_id, story_id),
unique (collection_id, position)
)
""",
"""
CREATE TABLE collection_tags (
collection_id uuid not null,
tag_id uuid not null,
primary key (collection_id, tag_id)
)
"""
};
String[] createIndexStatements = {
"CREATE INDEX idx_reading_position_story ON reading_positions (story_id)"
};
String[] createConstraintStatements = {
// Foreign key constraints
"ALTER TABLE author_urls ADD CONSTRAINT FKdqhp51m0uveybsts098gd79uo FOREIGN KEY (author_id) REFERENCES authors",
"ALTER TABLE stories ADD CONSTRAINT FKhwecpqeaxy40ftrctef1u7gw7 FOREIGN KEY (author_id) REFERENCES authors",
"ALTER TABLE stories ADD CONSTRAINT FK1kulyvy7wwcolp2gkndt57cp7 FOREIGN KEY (series_id) REFERENCES series",
"ALTER TABLE reading_positions ADD CONSTRAINT FKglfhdhflan3pgyr2u0gxi21i5 FOREIGN KEY (story_id) REFERENCES stories",
"ALTER TABLE story_tags ADD CONSTRAINT FKmans33ijt0nf65t0sng2r848j FOREIGN KEY (tag_id) REFERENCES tags",
"ALTER TABLE story_tags ADD CONSTRAINT FKq9guid7swnjxwdpgxj3jo1rsi FOREIGN KEY (story_id) REFERENCES stories",
"ALTER TABLE tag_aliases ADD CONSTRAINT FKqfsawmcj3ey4yycb6958y24ch FOREIGN KEY (canonical_tag_id) REFERENCES tags",
"ALTER TABLE collection_stories ADD CONSTRAINT FKr55ho4vhj0wp03x13iskr1jds FOREIGN KEY (collection_id) REFERENCES collections",
"ALTER TABLE collection_stories ADD CONSTRAINT FK7n41tbbrt7r2e81hpu3612r1o FOREIGN KEY (story_id) REFERENCES stories",
"ALTER TABLE collection_tags ADD CONSTRAINT FKceq7ggev8n8ibjui1x5yo4x67 FOREIGN KEY (tag_id) REFERENCES tags",
"ALTER TABLE collection_tags ADD CONSTRAINT FKq9sa5s8csdpbphrvb48tts8jt FOREIGN KEY (collection_id) REFERENCES collections"
};
try (var connection = dataSource.getConnection();
var statement = connection.createStatement()) {
// Create tables
for (String sql : createTableStatements) {
statement.executeUpdate(sql);
}
// Create indexes
for (String sql : createIndexStatements) {
statement.executeUpdate(sql);
}
// Create constraints
for (String sql : createConstraintStatements) {
statement.executeUpdate(sql);
}
logger.debug("Successfully created all database tables and constraints");
} catch (SQLException e) {
logger.error("Failed to create database schema", e);
throw new RuntimeException("Schema creation failed", e);
}
}
private void closeCurrentResources() {
// No need to close datasource - SmartRoutingDataSource handles this
// Solr service is managed by Spring - no explicit cleanup needed
// Don't clear currentLibraryId here - only when explicitly switching
}
/**
* Update library metadata (name and description)
*/
public synchronized void updateLibraryMetadata(String libraryId, String newName, String newDescription) throws Exception {
if (libraryId == null || libraryId.trim().isEmpty()) {
throw new IllegalArgumentException("Library ID cannot be null or empty");
}
Library library = libraries.get(libraryId);
if (library == null) {
throw new IllegalArgumentException("Library not found: " + libraryId);
}
// Validate new name
if (newName == null || newName.trim().isEmpty()) {
throw new IllegalArgumentException("Library name cannot be null or empty");
}
String oldName = library.getName();
String oldDescription = library.getDescription();
// Update the library object
library.setName(newName.trim());
library.setDescription(newDescription != null ? newDescription.trim() : "");
try {
// Save to configuration file
saveLibraryConfiguration(library);
logger.info("Updated library metadata - ID: {}, Name: '{}' -> '{}', Description: '{}' -> '{}'",
libraryId, oldName, newName, oldDescription, library.getDescription());
} catch (Exception e) {
// Rollback changes on failure
library.setName(oldName);
library.setDescription(oldDescription);
throw new RuntimeException("Failed to update library metadata: " + e.getMessage(), e);
}
}
/**
* Save library configuration to file
*/
private void saveLibraryConfiguration(Library library) throws Exception {
Path libraryConfigPath = libraryConfigDir.resolve(library.getId() + ".json");
// Create library configuration object
Map<String, Object> config = new HashMap<>();
config.put("id", library.getId());
config.put("name", library.getName());
config.put("description", library.getDescription());
config.put("passwordHash", library.getPasswordHash());
config.put("dbName", library.getDbName());
config.put("imagePath", library.getImagePath());
config.put("initialized", library.isInitialized());
// Write to file
ObjectMapper mapper = new ObjectMapper();
String configJson = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
Files.writeString(libraryConfigPath, configJson, StandardCharsets.UTF_8);
logger.debug("Saved library configuration to: {}", libraryConfigPath);
}
}

View File

@@ -1,36 +1,83 @@
package com.storycove.service;
import org.springframework.beans.factory.annotation.Value;
import com.storycove.util.JwtUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service;
@Service
public class PasswordAuthenticationService {
@Value("${storycove.auth.password}")
private String applicationPassword;
private static final Logger logger = LoggerFactory.getLogger(PasswordAuthenticationService.class);
private final PasswordEncoder passwordEncoder;
private final LibraryService libraryService;
private final JwtUtil jwtUtil;
public PasswordAuthenticationService(PasswordEncoder passwordEncoder) {
@Autowired
public PasswordAuthenticationService(
PasswordEncoder passwordEncoder,
LibraryService libraryService,
JwtUtil jwtUtil) {
this.passwordEncoder = passwordEncoder;
this.libraryService = libraryService;
this.jwtUtil = jwtUtil;
}
public boolean authenticate(String providedPassword) {
/**
* Authenticate user and switch to the appropriate library
* Returns JWT token if authentication successful, null otherwise
*/
public String authenticateAndSwitchLibrary(String providedPassword) {
if (providedPassword == null || providedPassword.trim().isEmpty()) {
return false;
return null;
}
// If application password starts with {bcrypt}, it's already encoded
if (applicationPassword.startsWith("{bcrypt}") || applicationPassword.startsWith("$2")) {
return passwordEncoder.matches(providedPassword, applicationPassword);
// Find which library this password belongs to
String libraryId = libraryService.authenticateAndGetLibrary(providedPassword);
if (libraryId == null) {
logger.warn("Authentication failed - invalid password");
return null;
}
// Otherwise, compare directly (for development/testing)
return applicationPassword.equals(providedPassword);
try {
// Switch to the authenticated library with forced reindexing (may take 2-3 seconds)
libraryService.switchToLibraryAfterAuthentication(libraryId);
// Generate JWT token with library context
String token = jwtUtil.generateToken("user", libraryId);
logger.info("Successfully authenticated and switched to library: {}", libraryId);
return token;
} catch (Exception e) {
logger.error("Failed to switch to library: {}", libraryId, e);
return null;
}
}
/**
* Legacy method - kept for backward compatibility
*/
@Deprecated
public boolean authenticate(String providedPassword) {
return authenticateAndSwitchLibrary(providedPassword) != null;
}
public String encodePassword(String rawPassword) {
return passwordEncoder.encode(rawPassword);
}
/**
* Get current library info for authenticated user
*/
public String getCurrentLibraryInfo() {
var library = libraryService.getCurrentLibrary();
if (library != null) {
return String.format("Library: %s (%s)", library.getName(), library.getId());
}
return "No library active";
}
}

View File

@@ -0,0 +1,28 @@
package com.storycove.service;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
@Service
public class ReadingTimeService {
@Value("${app.reading.speed.default:200}")
private int defaultWordsPerMinute;
/**
* Calculate estimated reading time in minutes for the given word count
* @param wordCount the number of words to read
* @return estimated reading time in minutes (minimum 1 minute)
*/
public int calculateReadingTime(int wordCount) {
return Math.max(1, wordCount / defaultWordsPerMinute);
}
/**
* Get the current words per minute setting
* @return words per minute reading speed
*/
public int getWordsPerMinute() {
return defaultWordsPerMinute;
}
}

View File

@@ -0,0 +1,287 @@
package com.storycove.service;
import com.storycove.dto.AuthorSearchDto;
import com.storycove.dto.SearchResultDto;
import com.storycove.dto.StorySearchDto;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.UUID;
/**
* Service adapter that provides a unified interface for search operations.
*
* This adapter directly delegates to SolrService.
*/
@Service
public class SearchServiceAdapter {
private static final Logger logger = LoggerFactory.getLogger(SearchServiceAdapter.class);
@Autowired
private SolrService solrService;
// ===============================
// SEARCH OPERATIONS
// ===============================
/**
* Search stories with unified interface
*/
public SearchResultDto<StorySearchDto> searchStories(String query, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
String sortBy, String sortOrder, int page, int size,
List<String> facetBy,
// Advanced filters
String createdAfter, String createdBefore,
String lastReadAfter, String lastReadBefore,
Boolean unratedOnly, String readingStatus,
Boolean hasReadingProgress, Boolean hasCoverImage,
String sourceDomain, String seriesFilter,
Integer minTagCount, Boolean popularOnly,
Boolean hiddenGemsOnly) {
logger.info("SearchServiceAdapter: delegating search to SolrService");
try {
SearchResultDto<StorySearchDto> result = solrService.searchStories(query, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, sortBy, sortOrder, page, size, facetBy,
createdAfter, createdBefore, lastReadAfter, lastReadBefore, unratedOnly, readingStatus,
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter, minTagCount, popularOnly,
hiddenGemsOnly);
logger.info("SearchServiceAdapter: received result with {} stories and {} facets",
result.getResults().size(), result.getFacets().size());
return result;
} catch (Exception e) {
logger.error("SearchServiceAdapter: error during search", e);
throw e;
}
}
/**
* Get random stories with unified interface
*/
public List<StorySearchDto> getRandomStories(int count, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
Long seed) {
return solrService.getRandomStories(count, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, seed);
}
/**
* Recreate search indices
*/
public void recreateIndices() {
try {
solrService.recreateIndices();
} catch (Exception e) {
logger.error("Failed to recreate search indices", e);
throw new RuntimeException("Failed to recreate search indices", e);
}
}
/**
* Perform complete reindex of all data
*/
public void performCompleteReindex() {
try {
recreateIndices();
logger.info("Search indices recreated successfully");
} catch (Exception e) {
logger.error("Failed to perform complete reindex", e);
throw new RuntimeException("Failed to perform complete reindex", e);
}
}
/**
* Get random story ID with unified interface
*/
public String getRandomStoryId(Long seed) {
return solrService.getRandomStoryId(seed);
}
/**
* Search authors with unified interface
*/
public List<AuthorSearchDto> searchAuthors(String query, int limit) {
return solrService.searchAuthors(query, limit);
}
/**
* Get tag suggestions with unified interface
*/
public List<String> getTagSuggestions(String query, int limit) {
return solrService.getTagSuggestions(query, limit);
}
// ===============================
// INDEX OPERATIONS
// ===============================
/**
* Index a story in Solr
*/
public void indexStory(Story story) {
try {
solrService.indexStory(story);
} catch (Exception e) {
logger.error("Failed to index story {}", story.getId(), e);
}
}
/**
* Update a story in Solr
*/
public void updateStory(Story story) {
try {
solrService.updateStory(story);
} catch (Exception e) {
logger.error("Failed to update story {}", story.getId(), e);
}
}
/**
* Delete a story from Solr
*/
public void deleteStory(UUID storyId) {
try {
solrService.deleteStory(storyId);
} catch (Exception e) {
logger.error("Failed to delete story {}", storyId, e);
}
}
/**
* Index an author in Solr
*/
public void indexAuthor(Author author) {
try {
solrService.indexAuthor(author);
} catch (Exception e) {
logger.error("Failed to index author {}", author.getId(), e);
}
}
/**
* Update an author in Solr
*/
public void updateAuthor(Author author) {
try {
solrService.updateAuthor(author);
} catch (Exception e) {
logger.error("Failed to update author {}", author.getId(), e);
}
}
/**
* Delete an author from Solr
*/
public void deleteAuthor(UUID authorId) {
try {
solrService.deleteAuthor(authorId);
} catch (Exception e) {
logger.error("Failed to delete author {}", authorId, e);
}
}
/**
* Bulk index stories in Solr
*/
public void bulkIndexStories(List<Story> stories) {
try {
solrService.bulkIndexStories(stories);
} catch (Exception e) {
logger.error("Failed to bulk index {} stories", stories.size(), e);
}
}
/**
* Bulk index authors in Solr
*/
public void bulkIndexAuthors(List<Author> authors) {
try {
solrService.bulkIndexAuthors(authors);
} catch (Exception e) {
logger.error("Failed to bulk index {} authors", authors.size(), e);
}
}
// ===============================
// UTILITY METHODS
// ===============================
/**
* Check if search service is available and healthy
*/
public boolean isSearchServiceAvailable() {
return solrService.testConnection();
}
/**
* Get current search engine name
*/
public String getCurrentSearchEngine() {
return "solr";
}
/**
* Check if dual-write is enabled
*/
public boolean isDualWriteEnabled() {
return false; // No longer supported
}
/**
* Check if we can switch to Solr
*/
public boolean canSwitchToSolr() {
return true; // Already using Solr
}
/**
* Check if we can switch to Typesense
*/
public boolean canSwitchToTypesense() {
return false; // Typesense no longer available
}
/**
* Get current search status for admin interface
*/
public SearchStatus getSearchStatus() {
return new SearchStatus(
"solr",
false, // no dual-write
false, // no typesense
solrService.testConnection()
);
}
/**
* DTO for search status
*/
public static class SearchStatus {
private final String primaryEngine;
private final boolean dualWrite;
private final boolean typesenseAvailable;
private final boolean solrAvailable;
public SearchStatus(String primaryEngine, boolean dualWrite,
boolean typesenseAvailable, boolean solrAvailable) {
this.primaryEngine = primaryEngine;
this.dualWrite = dualWrite;
this.typesenseAvailable = typesenseAvailable;
this.solrAvailable = solrAvailable;
}
public String getPrimaryEngine() { return primaryEngine; }
public boolean isDualWrite() { return dualWrite; }
public boolean isTypesenseAvailable() { return typesenseAvailable; }
public boolean isSolrAvailable() { return solrAvailable; }
}
}

View File

@@ -5,6 +5,8 @@ import com.storycove.repository.SeriesRepository;
import com.storycove.service.exception.DuplicateResourceException;
import com.storycove.service.exception.ResourceNotFoundException;
import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -21,6 +23,8 @@ import java.util.UUID;
@Transactional
public class SeriesService {
private static final Logger logger = LoggerFactory.getLogger(SeriesService.class);
private final SeriesRepository seriesRepository;
@Autowired

File diff suppressed because it is too large Load Diff

View File

@@ -4,13 +4,15 @@ import com.storycove.entity.Author;
import com.storycove.entity.Series;
import com.storycove.entity.Story;
import com.storycove.entity.Tag;
import com.storycove.repository.ReadingPositionRepository;
import com.storycove.repository.StoryRepository;
import com.storycove.repository.TagRepository;
import com.storycove.service.exception.DuplicateResourceException;
import com.storycove.service.exception.ResourceNotFoundException;
import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service;
@@ -18,40 +20,47 @@ import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.annotation.Validated;
import java.time.LocalDateTime;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import java.util.stream.Collectors;
@Service
@Validated
@Transactional
public class StoryService {
private static final Logger logger = LoggerFactory.getLogger(StoryService.class);
private final StoryRepository storyRepository;
private final TagRepository tagRepository;
private final ReadingPositionRepository readingPositionRepository;
private final AuthorService authorService;
private final TagService tagService;
private final SeriesService seriesService;
private final HtmlSanitizationService sanitizationService;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
@Autowired
public StoryService(StoryRepository storyRepository,
TagRepository tagRepository,
ReadingPositionRepository readingPositionRepository,
AuthorService authorService,
TagService tagService,
SeriesService seriesService,
HtmlSanitizationService sanitizationService,
@Autowired(required = false) TypesenseService typesenseService) {
SearchServiceAdapter searchServiceAdapter) {
this.storyRepository = storyRepository;
this.tagRepository = tagRepository;
this.readingPositionRepository = readingPositionRepository;
this.authorService = authorService;
this.tagService = tagService;
this.seriesService = seriesService;
this.sanitizationService = sanitizationService;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
}
@Transactional(readOnly = true)
@@ -75,11 +84,13 @@ public class StoryService {
.orElseThrow(() -> new ResourceNotFoundException("Story", id.toString()));
}
@Transactional(readOnly = true)
public Optional<Story> findByIdOptional(UUID id) {
return storyRepository.findById(id);
}
@Transactional(readOnly = true)
public Optional<Story> findByTitle(String title) {
return storyRepository.findByTitle(title);
@@ -114,7 +125,7 @@ public class StoryService {
@Transactional(readOnly = true)
public List<Story> findBySeries(UUID seriesId) {
Series series = seriesService.findById(seriesId);
seriesService.findById(seriesId); // Validate series exists
return storyRepository.findBySeriesOrderByVolume(seriesId);
}
@@ -228,10 +239,8 @@ public class StoryService {
story.addTag(tag);
Story savedStory = storyRepository.save(story);
// Update Typesense index with new tag information
if (typesenseService != null) {
typesenseService.updateStory(savedStory);
}
// Update search index with new tag information
searchServiceAdapter.updateStory(savedStory);
return savedStory;
}
@@ -245,10 +254,8 @@ public class StoryService {
story.removeTag(tag);
Story savedStory = storyRepository.save(story);
// Update Typesense index with updated tag information
if (typesenseService != null) {
typesenseService.updateStory(savedStory);
}
// Update search index with updated tag information
searchServiceAdapter.updateStory(savedStory);
return savedStory;
}
@@ -263,11 +270,44 @@ public class StoryService {
story.setRating(rating);
Story savedStory = storyRepository.save(story);
// Update Typesense index with new rating
if (typesenseService != null) {
typesenseService.updateStory(savedStory);
// Update search index with new rating
searchServiceAdapter.updateStory(savedStory);
return savedStory;
}
@Transactional
public Story updateReadingProgress(UUID id, Integer position) {
if (position != null && position < 0) {
throw new IllegalArgumentException("Reading position must be non-negative");
}
Story story = findById(id);
story.updateReadingProgress(position);
Story savedStory = storyRepository.save(story);
// Update search index with new reading progress
searchServiceAdapter.updateStory(savedStory);
return savedStory;
}
@Transactional
public Story updateReadingStatus(UUID id, Boolean isRead) {
Story story = findById(id);
if (Boolean.TRUE.equals(isRead)) {
story.markAsRead();
} else {
story.setIsRead(false);
story.setLastReadAt(LocalDateTime.now());
}
Story savedStory = storyRepository.save(story);
// Update search index with new reading status
searchServiceAdapter.updateStory(savedStory);
return savedStory;
}
@@ -308,10 +348,8 @@ public class StoryService {
updateStoryTags(savedStory, story.getTags());
}
// Index in Typesense (if available)
if (typesenseService != null) {
typesenseService.indexStory(savedStory);
}
// Index in search engine
searchServiceAdapter.indexStory(savedStory);
return savedStory;
}
@@ -338,10 +376,8 @@ public class StoryService {
updateStoryTagsByNames(savedStory, tagNames);
}
// Index in Typesense (if available)
if (typesenseService != null) {
typesenseService.indexStory(savedStory);
}
// Index in search engine
searchServiceAdapter.indexStory(savedStory);
return savedStory;
}
@@ -359,10 +395,8 @@ public class StoryService {
updateStoryFields(existingStory, storyUpdates);
Story updatedStory = storyRepository.save(existingStory);
// Update in Typesense (if available)
if (typesenseService != null) {
typesenseService.updateStory(updatedStory);
}
// Update in search engine
searchServiceAdapter.updateStory(updatedStory);
return updatedStory;
}
@@ -382,29 +416,41 @@ public class StoryService {
Story updatedStory = storyRepository.save(existingStory);
// Update in Typesense (if available)
if (typesenseService != null) {
typesenseService.updateStory(updatedStory);
// Update in search engine
searchServiceAdapter.updateStory(updatedStory);
return updatedStory;
}
public Story updateContentOnly(UUID id, String contentHtml) {
Story existingStory = findById(id);
existingStory.setContentHtml(contentHtml);
Story updatedStory = storyRepository.save(existingStory);
// Update in search engine since content changed
searchServiceAdapter.updateStory(updatedStory);
return updatedStory;
}
public void delete(UUID id) {
Story story = findById(id);
// Clean up reading positions first (to avoid foreign key constraint violations)
readingPositionRepository.deleteByStoryId(id);
// Remove from series if part of one
if (story.getSeries() != null) {
story.getSeries().removeStory(story);
}
// Remove tags (this will update tag usage counts)
story.getTags().forEach(tag -> story.removeTag(tag));
// Create a copy to avoid ConcurrentModificationException
new ArrayList<>(story.getTags()).forEach(tag -> story.removeTag(tag));
// Delete from Typesense first (if available)
if (typesenseService != null) {
typesenseService.deleteStory(story.getId().toString());
}
// Delete from search engine first
searchServiceAdapter.deleteStory(story.getId());
storyRepository.delete(story);
}
@@ -562,13 +608,29 @@ public class StoryService {
if (updateReq.getVolume() != null) {
story.setVolume(updateReq.getVolume());
}
// Handle author - either by ID or by name
if (updateReq.getAuthorId() != null) {
Author author = authorService.findById(updateReq.getAuthorId());
story.setAuthor(author);
}
// Handle series - either by ID or by name
if (updateReq.getSeriesId() != null) {
Series series = seriesService.findById(updateReq.getSeriesId());
story.setSeries(series);
} else if (updateReq.getSeriesName() != null) {
if (updateReq.getSeriesName().trim().isEmpty()) {
// Empty series name means remove from series
story.setSeries(null);
} else {
// Find or create series by name
Series series = seriesService.findByNameOptional(updateReq.getSeriesName().trim())
.orElseGet(() -> {
Series newSeries = new Series();
newSeries.setName(updateReq.getSeriesName().trim());
return seriesService.create(newSeries);
});
story.setSeries(series);
}
}
}
}
@@ -593,4 +655,140 @@ public class StoryService {
}
}
}
@Transactional(readOnly = true)
public List<Story> findPotentialDuplicates(String title, String authorName) {
if (title == null || title.trim().isEmpty() || authorName == null || authorName.trim().isEmpty()) {
return List.of();
}
return storyRepository.findByTitleAndAuthorNameIgnoreCase(title.trim(), authorName.trim());
}
/**
* Find a random story based on optional filters.
* Uses search service for consistency with Library search functionality.
* Supports text search and multiple tags using the same logic as the Library view.
* @param searchQuery Optional search query
* @param tags Optional list of tags to filter by
* @return Optional containing the random story if found
*/
@Transactional(readOnly = true)
public Optional<Story> findRandomStory(String searchQuery, List<String> tags) {
return findRandomStory(searchQuery, tags, null, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, null, null);
}
public Optional<Story> findRandomStory(String searchQuery, List<String> tags, Long seed) {
return findRandomStory(searchQuery, tags, seed, null, null, null, null, null, null,
null, null, null, null, null, null, null, null, null, null, null);
}
/**
* Find a random story based on optional filters with seed support.
* Uses search service for consistency with Library search functionality.
* Supports text search and multiple tags using the same logic as the Library view.
* @param searchQuery Optional search query
* @param tags Optional list of tags to filter by
* @param seed Optional seed for consistent randomization (null for truly random)
* @return Optional containing the random story if found
*/
@Transactional(readOnly = true)
public Optional<Story> findRandomStory(String searchQuery, List<String> tags, Long seed,
Integer minWordCount, Integer maxWordCount,
String createdAfter, String createdBefore,
String lastReadAfter, String lastReadBefore,
Integer minRating, Integer maxRating, Boolean unratedOnly,
String readingStatus, Boolean hasReadingProgress,
Boolean hasCoverImage, String sourceDomain,
String seriesFilter, Integer minTagCount,
Boolean popularOnly, Boolean hiddenGemsOnly) {
// Use search service for consistency with Library search
try {
String randomStoryId = searchServiceAdapter.getRandomStoryId(seed);
if (randomStoryId != null) {
return storyRepository.findById(UUID.fromString(randomStoryId));
}
return Optional.empty();
} catch (Exception e) {
// Fallback to database queries if search service fails
logger.warn("Search service random story lookup failed, falling back to database queries", e);
}
// Fallback to repository-based implementation (global routing handles library selection)
return findRandomStoryFromRepository(searchQuery, tags);
}
/**
* Find random story using repository methods (for default database or when library-aware fails)
*/
private Optional<Story> findRandomStoryFromRepository(String searchQuery, List<String> tags) {
// Clean up inputs
String cleanSearchQuery = (searchQuery != null && !searchQuery.trim().isEmpty()) ? searchQuery.trim() : null;
List<String> cleanTags = (tags != null) ? tags.stream()
.filter(tag -> tag != null && !tag.trim().isEmpty())
.map(String::trim)
.collect(Collectors.toList()) : List.of();
long totalCount = 0;
Optional<Story> randomStory = Optional.empty();
if (cleanSearchQuery != null && !cleanTags.isEmpty()) {
// Both search query and tags
String searchPattern = "%" + cleanSearchQuery + "%";
List<String> upperCaseTags = cleanTags.stream()
.map(String::toUpperCase)
.collect(Collectors.toList());
totalCount = storyRepository.countStoriesByTextSearchAndTags(searchPattern, upperCaseTags, cleanTags.size());
if (totalCount > 0) {
long randomOffset = (long) (Math.random() * totalCount);
randomStory = storyRepository.findRandomStoryByTextSearchAndTags(searchPattern, upperCaseTags, cleanTags.size(), randomOffset);
}
} else if (cleanSearchQuery != null) {
// Only search query
String searchPattern = "%" + cleanSearchQuery + "%";
totalCount = storyRepository.countStoriesByTextSearch(searchPattern);
if (totalCount > 0) {
long randomOffset = (long) (Math.random() * totalCount);
randomStory = storyRepository.findRandomStoryByTextSearch(searchPattern, randomOffset);
}
} else if (!cleanTags.isEmpty()) {
// Only tags
if (cleanTags.size() == 1) {
// Single tag - use optimized single tag query
totalCount = storyRepository.countStoriesByTagName(cleanTags.get(0));
if (totalCount > 0) {
long randomOffset = (long) (Math.random() * totalCount);
randomStory = storyRepository.findRandomStoryByTagName(cleanTags.get(0), randomOffset);
}
} else {
// Multiple tags
List<String> upperCaseTags = cleanTags.stream()
.map(String::toUpperCase)
.collect(Collectors.toList());
totalCount = storyRepository.countStoriesByMultipleTags(upperCaseTags, cleanTags.size());
if (totalCount > 0) {
long randomOffset = (long) (Math.random() * totalCount);
randomStory = storyRepository.findRandomStoryByMultipleTags(upperCaseTags, cleanTags.size(), randomOffset);
}
}
} else {
// No filters - get random from all stories
totalCount = storyRepository.countAllStories();
if (totalCount > 0) {
long randomOffset = (long) (Math.random() * totalCount);
randomStory = storyRepository.findRandomStory(randomOffset);
}
}
return randomStory;
}
}

View File

@@ -1,10 +1,15 @@
package com.storycove.service;
import com.storycove.entity.Story;
import com.storycove.entity.Tag;
import com.storycove.entity.TagAlias;
import com.storycove.repository.TagRepository;
import com.storycove.repository.TagAliasRepository;
import com.storycove.service.exception.DuplicateResourceException;
import com.storycove.service.exception.ResourceNotFoundException;
import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.Pageable;
@@ -12,8 +17,11 @@ import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.validation.annotation.Validated;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
@Service
@@ -21,11 +29,15 @@ import java.util.UUID;
@Transactional
public class TagService {
private static final Logger logger = LoggerFactory.getLogger(TagService.class);
private final TagRepository tagRepository;
private final TagAliasRepository tagAliasRepository;
@Autowired
public TagService(TagRepository tagRepository) {
public TagService(TagRepository tagRepository, TagAliasRepository tagAliasRepository) {
this.tagRepository = tagRepository;
this.tagAliasRepository = tagAliasRepository;
}
@Transactional(readOnly = true)
@@ -192,6 +204,11 @@ public class TagService {
return tagRepository.countUsedTags();
}
@Transactional(readOnly = true)
public List<Tag> findTagsUsedByCollections() {
return tagRepository.findTagsUsedByCollections();
}
private void validateTagForCreate(Tag tag) {
if (existsByName(tag.getName())) {
throw new DuplicateResourceException("Tag", tag.getName());
@@ -202,5 +219,273 @@ public class TagService {
if (updates.getName() != null) {
existing.setName(updates.getName());
}
if (updates.getColor() != null) {
existing.setColor(updates.getColor());
}
if (updates.getDescription() != null) {
existing.setDescription(updates.getDescription());
}
}
// Tag alias management methods
public TagAlias addAlias(UUID tagId, String aliasName) {
Tag canonicalTag = findById(tagId);
// Check if alias already exists (case-insensitive)
if (tagAliasRepository.existsByAliasNameIgnoreCase(aliasName)) {
throw new DuplicateResourceException("Tag alias", aliasName);
}
// Check if alias name conflicts with existing tag names
if (tagRepository.existsByNameIgnoreCase(aliasName)) {
throw new DuplicateResourceException("Tag alias conflicts with existing tag name", aliasName);
}
TagAlias alias = new TagAlias();
alias.setAliasName(aliasName);
alias.setCanonicalTag(canonicalTag);
alias.setCreatedFromMerge(false);
return tagAliasRepository.save(alias);
}
public void removeAlias(UUID tagId, UUID aliasId) {
findById(tagId); // Validate tag exists
TagAlias alias = tagAliasRepository.findById(aliasId)
.orElseThrow(() -> new ResourceNotFoundException("Tag alias", aliasId.toString()));
// Verify the alias belongs to the specified tag
if (!alias.getCanonicalTag().getId().equals(tagId)) {
throw new IllegalArgumentException("Alias does not belong to the specified tag");
}
tagAliasRepository.delete(alias);
}
@Transactional(readOnly = true)
public Tag resolveTagByName(String name) {
// First try to find exact tag match
Optional<Tag> directMatch = tagRepository.findByNameIgnoreCase(name);
if (directMatch.isPresent()) {
return directMatch.get();
}
// Then try to find by alias
Optional<TagAlias> aliasMatch = tagAliasRepository.findByAliasNameIgnoreCase(name);
if (aliasMatch.isPresent()) {
return aliasMatch.get().getCanonicalTag();
}
return null;
}
@Transactional
public Tag mergeTags(List<UUID> sourceTagIds, UUID targetTagId) {
// Validate target tag exists
Tag targetTag = findById(targetTagId);
// Validate source tags exist and are different from target
List<Tag> sourceTags = sourceTagIds.stream()
.filter(id -> !id.equals(targetTagId)) // Don't merge tag with itself
.map(this::findById)
.toList();
if (sourceTags.isEmpty()) {
throw new IllegalArgumentException("No valid source tags to merge");
}
// Perform the merge atomically
for (Tag sourceTag : sourceTags) {
// Move all stories from source tag to target tag
// Create a copy to avoid ConcurrentModificationException
List<Story> storiesToMove = new ArrayList<>(sourceTag.getStories());
storiesToMove.forEach(story -> {
story.removeTag(sourceTag);
story.addTag(targetTag);
});
// Create alias for the source tag name
TagAlias alias = new TagAlias();
alias.setAliasName(sourceTag.getName());
alias.setCanonicalTag(targetTag);
alias.setCreatedFromMerge(true);
tagAliasRepository.save(alias);
// Delete the source tag
tagRepository.delete(sourceTag);
}
return tagRepository.save(targetTag);
}
@Transactional(readOnly = true)
public List<Tag> findByNameOrAliasStartingWith(String query, int limit) {
// Find tags that start with the query
List<Tag> directMatches = tagRepository.findByNameStartingWithIgnoreCase(query.toLowerCase());
// Find tags via aliases that start with the query
List<TagAlias> aliasMatches = tagAliasRepository.findByAliasNameStartingWithIgnoreCase(query.toLowerCase());
List<Tag> aliasTagMatches = aliasMatches.stream()
.map(TagAlias::getCanonicalTag)
.distinct()
.toList();
// Combine and deduplicate
Set<Tag> allMatches = new HashSet<>(directMatches);
allMatches.addAll(aliasTagMatches);
// Convert to list and limit results
return allMatches.stream()
.sorted((a, b) -> a.getName().compareToIgnoreCase(b.getName()))
.limit(limit)
.toList();
}
@Transactional(readOnly = true)
public com.storycove.controller.TagController.MergePreviewResponse previewMerge(List<UUID> sourceTagIds, UUID targetTagId) {
// Validate target tag exists
Tag targetTag = findById(targetTagId);
// Validate source tags exist and are different from target
List<Tag> sourceTags = sourceTagIds.stream()
.filter(id -> !id.equals(targetTagId))
.map(this::findById)
.toList();
if (sourceTags.isEmpty()) {
throw new IllegalArgumentException("No valid source tags to merge");
}
// Calculate preview data
int targetStoryCount = targetTag.getStories().size();
// Collect all unique stories from all tags (including target) to handle overlaps correctly
Set<Story> allUniqueStories = new HashSet<>(targetTag.getStories());
for (Tag sourceTag : sourceTags) {
allUniqueStories.addAll(sourceTag.getStories());
}
int totalStories = allUniqueStories.size();
List<String> aliasesToCreate = sourceTags.stream()
.map(Tag::getName)
.toList();
// Create response object using the controller's inner class
var preview = new com.storycove.controller.TagController.MergePreviewResponse();
preview.setTargetTagName(targetTag.getName());
preview.setTargetStoryCount(targetStoryCount);
preview.setTotalResultStoryCount(totalStories);
preview.setAliasesToCreate(aliasesToCreate);
return preview;
}
@Transactional(readOnly = true)
public List<com.storycove.controller.TagController.TagSuggestion> suggestTags(String title, String content, String summary, int limit) {
List<com.storycove.controller.TagController.TagSuggestion> suggestions = new ArrayList<>();
// Get all existing tags for matching
List<Tag> existingTags = findAll();
// Combine all text for analysis
String combinedText = (title != null ? title : "") + " " +
(summary != null ? summary : "") + " " +
(content != null ? stripHtml(content) : "");
if (combinedText.trim().isEmpty()) {
return suggestions;
}
String lowerText = combinedText.toLowerCase();
// Score each existing tag based on how well it matches the content
for (Tag tag : existingTags) {
double score = calculateTagRelevanceScore(tag, lowerText, title, summary);
if (score > 0.1) { // Only suggest tags with reasonable confidence
String reason = generateReason(tag, lowerText, title, summary);
suggestions.add(new com.storycove.controller.TagController.TagSuggestion(
tag.getName(), score, reason
));
}
}
// Sort by confidence score (descending) and limit results
return suggestions.stream()
.sorted((a, b) -> Double.compare(b.getConfidence(), a.getConfidence()))
.limit(limit)
.collect(java.util.stream.Collectors.toList());
}
private double calculateTagRelevanceScore(Tag tag, String lowerText, String title, String summary) {
String tagName = tag.getName().toLowerCase();
double score = 0.0;
// Exact matches get highest score
if (lowerText.contains(" " + tagName + " ") || lowerText.startsWith(tagName + " ") || lowerText.endsWith(" " + tagName)) {
score += 0.8;
}
// Partial matches in title get high score
if (title != null && title.toLowerCase().contains(tagName)) {
score += 0.6;
}
// Partial matches in summary get medium score
if (summary != null && summary.toLowerCase().contains(tagName)) {
score += 0.4;
}
// Word-based matching (split tag name and look for individual words)
String[] tagWords = tagName.split("[\\s-_]+");
int matchedWords = 0;
for (String word : tagWords) {
if (word.length() > 2 && lowerText.contains(word)) {
matchedWords++;
}
}
if (tagWords.length > 0) {
score += 0.3 * ((double) matchedWords / tagWords.length);
}
// Boost score based on tag popularity (more used tags are more likely to be relevant)
int storyCount = tag.getStories() != null ? tag.getStories().size() : 0;
if (storyCount > 0) {
score += Math.min(0.2, storyCount * 0.01); // Small boost, capped at 0.2
}
return Math.min(1.0, score); // Cap at 1.0
}
private String generateReason(Tag tag, String lowerText, String title, String summary) {
String tagName = tag.getName().toLowerCase();
if (title != null && title.toLowerCase().contains(tagName)) {
return "Found in title";
}
if (summary != null && summary.toLowerCase().contains(tagName)) {
return "Found in summary";
}
if (lowerText.contains(" " + tagName + " ") || lowerText.startsWith(tagName + " ") || lowerText.endsWith(" " + tagName)) {
return "Exact match in content";
}
String[] tagWords = tagName.split("[\\s-_]+");
for (String word : tagWords) {
if (word.length() > 2 && lowerText.contains(word)) {
return "Related keywords found";
}
}
return "Similar content";
}
private String stripHtml(String html) {
if (html == null) return "";
// Simple HTML tag removal - replace with a proper HTML parser if needed
return html.replaceAll("<[^>]+>", " ").replaceAll("\\s+", " ").trim();
}
}

View File

@@ -0,0 +1,12 @@
package com.storycove.service.exception;
public class InvalidFileException extends RuntimeException {
public InvalidFileException(String message) {
super(message);
}
public InvalidFileException(String message, Throwable cause) {
super(message, cause);
}
}

View File

@@ -3,35 +3,64 @@ package com.storycove.util;
import io.jsonwebtoken.Claims;
import io.jsonwebtoken.Jwts;
import io.jsonwebtoken.security.Keys;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import jakarta.annotation.PostConstruct;
import javax.crypto.SecretKey;
import java.security.SecureRandom;
import java.util.Base64;
import java.util.Date;
@Component
public class JwtUtil {
@Value("${storycove.jwt.secret}")
private static final Logger logger = LoggerFactory.getLogger(JwtUtil.class);
// Security: Generate new secret on each startup to invalidate all existing tokens
private String secret;
@Value("${storycove.jwt.expiration:86400000}") // 24 hours default
private Long expiration;
@PostConstruct
public void initialize() {
// Generate a new random secret on startup to invalidate all existing JWT tokens
// This ensures users must re-authenticate after application restart
SecureRandom random = new SecureRandom();
byte[] secretBytes = new byte[64]; // 512 bits
random.nextBytes(secretBytes);
this.secret = Base64.getEncoder().encodeToString(secretBytes);
logger.info("JWT secret rotated on startup - all existing tokens invalidated");
logger.info("Users will need to re-authenticate after application restart for security");
}
private SecretKey getSigningKey() {
return Keys.hmacShaKeyFor(secret.getBytes());
}
public String generateToken() {
return generateToken("user", null);
}
public String generateToken(String subject, String libraryId) {
Date now = new Date();
Date expiryDate = new Date(now.getTime() + expiration);
return Jwts.builder()
.subject("user")
var builder = Jwts.builder()
.subject(subject)
.issuedAt(now)
.expiration(expiryDate)
.signWith(getSigningKey())
.compact();
.expiration(expiryDate);
// Add library context if provided
if (libraryId != null) {
builder.claim("libraryId", libraryId);
}
return builder.signWith(getSigningKey()).compact();
}
public boolean validateToken(String token) {
@@ -62,4 +91,13 @@ public class JwtUtil {
public String getSubjectFromToken(String token) {
return getClaimsFromToken(token).getSubject();
}
public String getLibraryIdFromToken(String token) {
try {
Claims claims = getClaimsFromToken(token);
return claims.get("libraryId", String.class);
} catch (Exception e) {
return null;
}
}
}

View File

@@ -4,6 +4,11 @@ spring:
username: ${SPRING_DATASOURCE_USERNAME:storycove}
password: ${SPRING_DATASOURCE_PASSWORD:password}
driver-class-name: org.postgresql.Driver
hikari:
connection-timeout: 60000 # 60 seconds
idle-timeout: 300000 # 5 minutes
max-lifetime: 1800000 # 30 minutes
maximum-pool-size: 20
jpa:
hibernate:
@@ -16,11 +21,19 @@ spring:
servlet:
multipart:
max-file-size: 5MB
max-request-size: 10MB
max-file-size: 600MB # Increased for large backup restore (425MB+)
max-request-size: 610MB # Slightly higher to account for form data
jackson:
serialization:
write-dates-as-timestamps: false
deserialization:
adjust-dates-to-context-time-zone: false
server:
port: 8080
tomcat:
max-http-request-size: 650MB # Tomcat HTTP request size limit (separate from multipart)
storycove:
app:
@@ -28,20 +41,70 @@ storycove:
cors:
allowed-origins: ${STORYCOVE_CORS_ALLOWED_ORIGINS:http://localhost:3000,http://localhost:6925}
jwt:
secret: ${JWT_SECRET:default-secret-key}
secret: ${JWT_SECRET} # REQUIRED: Must be at least 32 characters, no default for security
expiration: 86400000 # 24 hours
auth:
password: ${APP_PASSWORD:admin}
typesense:
api-key: ${TYPESENSE_API_KEY:xyz}
host: ${TYPESENSE_HOST:localhost}
port: ${TYPESENSE_PORT:8108}
enabled: ${TYPESENSE_ENABLED:true}
reindex-interval: ${TYPESENSE_REINDEX_INTERVAL:3600000} # 1 hour in milliseconds
password: ${APP_PASSWORD} # REQUIRED: No default password for security
search:
engine: solr # Apache Solr search engine
solr:
# Connection settings
url: ${SOLR_URL:http://solr:8983/solr}
username: ${SOLR_USERNAME:}
password: ${SOLR_PASSWORD:}
# Core configuration
cores:
stories: ${SOLR_STORIES_CORE:storycove_stories}
authors: ${SOLR_AUTHORS_CORE:storycove_authors}
# Connection settings
connection:
timeout: ${SOLR_CONNECTION_TIMEOUT:30000} # 30 seconds
socket-timeout: ${SOLR_SOCKET_TIMEOUT:60000} # 60 seconds
max-connections-per-route: ${SOLR_MAX_CONN_PER_ROUTE:10}
max-connections-total: ${SOLR_MAX_CONN_TOTAL:30}
retry-on-failure: ${SOLR_RETRY_ON_FAILURE:true}
max-retries: ${SOLR_MAX_RETRIES:3}
# Query settings
query:
default-rows: ${SOLR_DEFAULT_ROWS:10}
max-rows: ${SOLR_MAX_ROWS:1000}
default-operator: ${SOLR_DEFAULT_OPERATOR:AND}
highlight: ${SOLR_ENABLE_HIGHLIGHT:true}
facets: ${SOLR_ENABLE_FACETS:true}
# Commit settings
commit:
soft-commit: ${SOLR_SOFT_COMMIT:true}
commit-within: ${SOLR_COMMIT_WITHIN:1000} # 1 second
wait-searcher: ${SOLR_WAIT_SEARCHER:false}
# Health and monitoring
health:
check-interval: ${SOLR_HEALTH_CHECK_INTERVAL:30000} # 30 seconds
slow-query-threshold: ${SOLR_SLOW_QUERY_THRESHOLD:5000} # 5 seconds
enable-metrics: ${SOLR_ENABLE_METRICS:true}
images:
storage-path: ${IMAGE_STORAGE_PATH:/app/images}
management:
endpoints:
web:
exposure:
include: health,info,prometheus
endpoint:
health:
show-details: when-authorized
show-components: always
health:
solr:
enabled: ${SOLR_HEALTH_ENABLED:true}
logging:
level:
com.storycove: DEBUG
org.springframework.security: DEBUG
com.storycove: ${LOG_LEVEL:INFO} # Use INFO for production, DEBUG for development
org.springframework.security: WARN # Reduce security logging
org.springframework.web: WARN
org.hibernate.SQL: ${SQL_LOG_LEVEL:WARN} # Control SQL logging separately

View File

@@ -4,7 +4,7 @@
"b", "strong", "i", "em", "u", "s", "strike", "del", "ins",
"sup", "sub", "small", "big", "mark", "pre", "code", "kbd", "samp", "var",
"ul", "ol", "li", "dl", "dt", "dd",
"a", "table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption", "colgroup", "col",
"a", "img", "table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption", "colgroup", "col",
"blockquote", "cite", "q", "hr", "details", "summary"
],
"allowedAttributes": {
@@ -17,7 +17,8 @@
"h4": ["class", "style"],
"h5": ["class", "style"],
"h6": ["class", "style"],
"a": ["class"],
"a": ["class", "href", "title"],
"img": ["src", "alt", "width", "height", "class", "style"],
"table": ["class", "style"],
"th": ["class", "style", "colspan", "rowspan"],
"td": ["class", "style", "colspan", "rowspan"],
@@ -38,8 +39,13 @@
"font-weight", "font-style", "text-align", "text-decoration", "margin",
"padding", "text-indent", "line-height"
],
"removedAttributes": {
"a": ["href", "target"]
"allowedProtocols": {
"a": {
"href": ["http", "https", "#", "/"]
},
"img": {
"src": ["http", "https", "data", "/", "cid"]
}
},
"description": "HTML sanitization configuration for StoryCove story content. This configuration is shared between frontend (DOMPurify) and backend (Jsoup) to ensure consistency."
}

View File

@@ -1,12 +1,8 @@
package com.storycove.config;
import com.storycove.service.TypesenseService;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.boot.test.mock.mockito.MockBean;
@TestConfiguration
public class TestConfig {
@MockBean
public TypesenseService typesenseService;
// Test configuration
}

View File

@@ -15,10 +15,12 @@ public abstract class BaseRepositoryTest {
private static final PostgreSQLContainer<?> postgres;
static {
postgres = new PostgreSQLContainer<>("postgres:15-alpine")
@SuppressWarnings("resource") // Container is managed by shutdown hook
PostgreSQLContainer<?> container = new PostgreSQLContainer<>("postgres:15-alpine")
.withDatabaseName("storycove_test")
.withUsername("test")
.withPassword("test");
postgres = container;
postgres.start();
// Add shutdown hook to properly close the container

View File

@@ -1,6 +1,7 @@
package com.storycove.service;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import com.storycove.repository.AuthorRepository;
import com.storycove.service.exception.DuplicateResourceException;
import com.storycove.service.exception.ResourceNotFoundException;
@@ -8,7 +9,6 @@ import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.data.domain.Page;
@@ -22,8 +22,8 @@ import java.util.UUID;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.*;
import static org.mockito.Mockito.times;
@ExtendWith(MockitoExtension.class)
@DisplayName("Author Service Unit Tests")
@@ -32,7 +32,6 @@ class AuthorServiceTest {
@Mock
private AuthorRepository authorRepository;
@InjectMocks
private AuthorService authorService;
private Author testAuthor;
@@ -44,6 +43,10 @@ class AuthorServiceTest {
testAuthor = new Author("Test Author");
testAuthor.setId(testId);
testAuthor.setNotes("Test notes");
// Initialize service with mock SearchServiceAdapter
SearchServiceAdapter mockSearchServiceAdapter = mock(SearchServiceAdapter.class);
authorService = new AuthorService(authorRepository, mockSearchServiceAdapter);
}
@Test
@@ -172,7 +175,7 @@ class AuthorServiceTest {
when(authorRepository.existsByName("Updated Author")).thenReturn(false);
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
Author result = authorService.update(testId, updates);
authorService.update(testId, updates);
assertEquals("Updated Author", testAuthor.getName());
assertEquals("Updated notes", testAuthor.getNotes());
@@ -307,4 +310,133 @@ class AuthorServiceTest {
assertEquals(5L, count);
verify(authorRepository).countRecentAuthors(any(java.time.LocalDateTime.class));
}
@Test
@DisplayName("Should set author rating with validation")
void shouldSetAuthorRating() {
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
authorService.setRating(testId, 4);
assertEquals(4, testAuthor.getAuthorRating());
verify(authorRepository, times(2)).findById(testId); // Called twice: once initially, once after flush
verify(authorRepository).save(testAuthor);
verify(authorRepository).flush();
}
@Test
@DisplayName("Should throw exception for invalid rating range")
void shouldThrowExceptionForInvalidRating() {
assertThrows(IllegalArgumentException.class, () -> authorService.setRating(testId, 0));
assertThrows(IllegalArgumentException.class, () -> authorService.setRating(testId, 6));
verify(authorRepository, never()).findById(any());
verify(authorRepository, never()).save(any());
}
@Test
@DisplayName("Should handle null rating")
void shouldHandleNullRating() {
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
authorService.setRating(testId, null);
assertNull(testAuthor.getAuthorRating());
verify(authorRepository, times(2)).findById(testId); // Called twice: once initially, once after flush
verify(authorRepository).save(testAuthor);
}
@Test
@DisplayName("Should find all authors with stories")
void shouldFindAllAuthorsWithStories() {
List<Author> authors = List.of(testAuthor);
when(authorRepository.findAll()).thenReturn(authors);
List<Author> result = authorService.findAllWithStories();
assertEquals(1, result.size());
verify(authorRepository).findAll();
}
@Test
@DisplayName("Should get author rating from database")
void shouldGetAuthorRatingFromDb() {
when(authorRepository.findAuthorRatingById(testId)).thenReturn(4);
Integer rating = authorService.getAuthorRatingFromDb(testId);
assertEquals(4, rating);
verify(authorRepository).findAuthorRatingById(testId);
}
@Test
@DisplayName("Should calculate average story rating")
void shouldCalculateAverageStoryRating() {
// Setup test author with stories
Story story1 = new Story("Story 1");
story1.setRating(4);
Story story2 = new Story("Story 2");
story2.setRating(5);
testAuthor.getStories().add(story1);
testAuthor.getStories().add(story2);
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
Double avgRating = authorService.calculateAverageStoryRating(testId);
assertEquals(4.5, avgRating);
verify(authorRepository).findById(testId);
}
@Test
@DisplayName("Should find authors with stories using repository method")
void shouldFindAuthorsWithStoriesFromRepository() {
List<Author> authors = List.of(testAuthor);
when(authorRepository.findAuthorsWithStories()).thenReturn(authors);
List<Author> result = authorService.findAuthorsWithStories();
assertEquals(1, result.size());
verify(authorRepository).findAuthorsWithStories();
}
@Test
@DisplayName("Should find top rated authors")
void shouldFindTopRatedAuthors() {
List<Author> authors = List.of(testAuthor);
when(authorRepository.findTopRatedAuthors()).thenReturn(authors);
List<Author> result = authorService.findTopRatedAuthors();
assertEquals(1, result.size());
verify(authorRepository).findTopRatedAuthors();
}
@Test
@DisplayName("Should find most prolific authors")
void shouldFindMostProlificAuthors() {
List<Author> authors = List.of(testAuthor);
when(authorRepository.findMostProlificAuthors()).thenReturn(authors);
List<Author> result = authorService.findMostProlificAuthors();
assertEquals(1, result.size());
verify(authorRepository).findMostProlificAuthors();
}
@Test
@DisplayName("Should find authors by URL domain")
void shouldFindAuthorsByUrlDomain() {
List<Author> authors = List.of(testAuthor);
when(authorRepository.findByUrlDomain("example.com")).thenReturn(authors);
List<Author> result = authorService.findByUrlDomain("example.com");
assertEquals(1, result.size());
verify(authorRepository).findByUrlDomain("example.com");
}
}

View File

@@ -0,0 +1,224 @@
package com.storycove.service;
import com.storycove.entity.Story;
import com.storycove.repository.ReadingPositionRepository;
import com.storycove.repository.StoryRepository;
import com.storycove.repository.TagRepository;
import com.storycove.service.exception.ResourceNotFoundException;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.DisplayName;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import java.time.LocalDateTime;
import java.util.Optional;
import java.util.UUID;
import static org.junit.jupiter.api.Assertions.*;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.*;
@ExtendWith(MockitoExtension.class)
@DisplayName("Story Service Unit Tests - Reading Progress")
class StoryServiceTest {
@Mock
private StoryRepository storyRepository;
@Mock
private TagRepository tagRepository;
@Mock
private ReadingPositionRepository readingPositionRepository;
@Mock
private SearchServiceAdapter searchServiceAdapter;
private StoryService storyService;
private Story testStory;
private UUID testId;
@BeforeEach
void setUp() {
testId = UUID.randomUUID();
testStory = new Story("Test Story");
testStory.setId(testId);
testStory.setContentHtml("<p>Test content for reading progress tracking</p>");
// Create StoryService with mocked dependencies
storyService = new StoryService(
storyRepository,
tagRepository,
readingPositionRepository,
null, // authorService - not needed for reading progress tests
null, // tagService - not needed for reading progress tests
null, // seriesService - not needed for reading progress tests
null, // sanitizationService - not needed for reading progress tests
searchServiceAdapter
);
}
@Test
@DisplayName("Should update reading progress successfully")
void shouldUpdateReadingProgress() {
Integer position = 150;
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingProgress(testId, position);
assertEquals(position, result.getReadingPosition());
assertNotNull(result.getLastReadAt());
verify(storyRepository).findById(testId);
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should update reading progress with zero position")
void shouldUpdateReadingProgressWithZeroPosition() {
Integer position = 0;
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingProgress(testId, position);
assertEquals(0, result.getReadingPosition());
assertNotNull(result.getLastReadAt());
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should throw exception for negative reading position")
void shouldThrowExceptionForNegativeReadingPosition() {
Integer position = -1;
assertThrows(IllegalArgumentException.class,
() -> storyService.updateReadingProgress(testId, position));
verify(storyRepository, never()).findById(any());
verify(storyRepository, never()).save(any());
}
@Test
@DisplayName("Should handle null reading position")
void shouldHandleNullReadingPosition() {
Integer position = null;
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingProgress(testId, position);
assertNull(result.getReadingPosition());
assertNotNull(result.getLastReadAt());
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should throw exception when story not found for reading progress update")
void shouldThrowExceptionWhenStoryNotFoundForReadingProgress() {
Integer position = 100;
when(storyRepository.findById(testId)).thenReturn(Optional.empty());
assertThrows(ResourceNotFoundException.class,
() -> storyService.updateReadingProgress(testId, position));
verify(storyRepository).findById(testId);
verify(storyRepository, never()).save(any());
}
@Test
@DisplayName("Should mark story as read")
void shouldMarkStoryAsRead() {
Boolean isRead = true;
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingStatus(testId, isRead);
assertTrue(result.getIsRead());
assertNotNull(result.getLastReadAt());
// When marked as read, position should be set to content length
assertTrue(result.getReadingPosition() > 0);
verify(storyRepository).findById(testId);
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should mark story as unread")
void shouldMarkStoryAsUnread() {
Boolean isRead = false;
// First mark story as read to test transition
testStory.markAsRead();
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingStatus(testId, isRead);
assertFalse(result.getIsRead());
assertNotNull(result.getLastReadAt());
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should handle null reading status")
void shouldHandleNullReadingStatus() {
Boolean isRead = null;
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingStatus(testId, isRead);
assertFalse(result.getIsRead());
assertNotNull(result.getLastReadAt());
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should throw exception when story not found for reading status update")
void shouldThrowExceptionWhenStoryNotFoundForReadingStatus() {
Boolean isRead = true;
when(storyRepository.findById(testId)).thenReturn(Optional.empty());
assertThrows(ResourceNotFoundException.class,
() -> storyService.updateReadingStatus(testId, isRead));
verify(storyRepository).findById(testId);
verify(storyRepository, never()).save(any());
}
@Test
@DisplayName("Should update lastReadAt timestamp when updating progress")
void shouldUpdateLastReadAtWhenUpdatingProgress() {
Integer position = 50;
LocalDateTime beforeUpdate = LocalDateTime.now().minusMinutes(1);
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingProgress(testId, position);
assertNotNull(result.getLastReadAt());
assertTrue(result.getLastReadAt().isAfter(beforeUpdate));
verify(storyRepository).save(testStory);
}
@Test
@DisplayName("Should update lastReadAt timestamp when updating status")
void shouldUpdateLastReadAtWhenUpdatingStatus() {
Boolean isRead = true;
LocalDateTime beforeUpdate = LocalDateTime.now().minusMinutes(1);
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
Story result = storyService.updateReadingStatus(testId, isRead);
assertNotNull(result.getLastReadAt());
assertTrue(result.getLastReadAt().isAfter(beforeUpdate));
verify(storyRepository).save(testStory);
}
}

View File

@@ -18,11 +18,15 @@ storycove:
expiration: 86400000
auth:
password: test-password
typesense:
enabled: false
api-key: test-key
search:
engine: solr
solr:
host: localhost
port: 8108
port: 8983
scheme: http
cores:
stories: storycove_stories
authors: storycove_authors
images:
storage-path: /tmp/test-images

View File

@@ -0,0 +1,7 @@
<html>
<head><title>502 Bad Gateway</title></head>
<body>
<center><h1>502 Bad Gateway</h1></center>
<hr><center>nginx/1.29.0</center>
</body>
</html>

4308
backend/test_results.log Normal file

File diff suppressed because it is too large Load Diff

5
cookies.txt Normal file
View File

@@ -0,0 +1,5 @@
# Netscape HTTP Cookie File
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.
#HttpOnly_localhost FALSE / FALSE 1758433252 token eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJ1c2VyIiwiaWF0IjoxNzU4MzQ2ODUyLCJleHAiOjE3NTg0MzMyNTIsImxpYnJhcnlJZCI6InNlY3JldCJ9.zEAQT5_11-pxPxmIhufSQqE26hvHldde4kFNE2HWWgBa5lT_Wt7jwpoPUMkQGQfShQwDZ9N-hFX3R2ew8jD7WQ

Some files were not shown because too many files have changed in this diff Show More