124 Commits

Author SHA1 Message Date
Stefan Hardegger
16983fd871 Merge branch 'main' into statistics 2025-10-21 07:58:25 +02:00
Stefan Hardegger
ff49589f32 Automatic backup 2025-10-20 14:51:27 +02:00
Stefan Hardegger
4abb442c50 fix async 2025-10-20 14:34:26 +02:00
Stefan Hardegger
1c004eb7d6 fix backup async 2025-10-20 14:25:12 +02:00
Stefan Hardegger
32544d4f4a different approach to migration 2025-10-20 14:13:45 +02:00
Stefan Hardegger
1ee9af8f28 deployment fix? 2025-10-20 12:55:56 +02:00
Stefan Hardegger
70599083b8 db migration 2025-10-20 12:43:58 +02:00
Stefan Hardegger
6a38189ef0 fix images 2025-10-20 12:30:28 +02:00
Stefan Hardegger
c9d58173f3 improved backup creation 2025-10-20 09:23:34 +02:00
Stefan Hardegger
3dd2ff50d8 Fix for memory issue during backup 2025-10-20 08:58:09 +02:00
Stefan Hardegger
378265c3a3 initial statistics implementation 2025-10-20 08:50:12 +02:00
Stefan Hardegger
30c0132a92 Various Improvements.
- Testing Coverage
- Image Handling
- Session Handling
- Library Switching
2025-10-20 08:24:29 +02:00
Stefan Hardegger
20d0652c85 Image Handling 2025-10-09 14:39:55 +02:00
Stefan Hardegger
4e02cd8eaa fix image 2025-09-30 17:03:49 +02:00
Stefan Hardegger
48b0087b01 fix embedded images on deviantart 2025-09-30 16:18:05 +02:00
Stefan Hardegger
c291559366 Fix Image Processing 2025-09-28 20:06:52 +02:00
Stefan Hardegger
622cf9ac76 fix image processing 2025-09-27 09:29:40 +02:00
Stefan Hardegger
df5e124115 fix image processing 2025-09-27 09:15:01 +02:00
Stefan Hardegger
2b4cb1456f fix orphaned file discovery 2025-09-27 08:46:17 +02:00
Stefan Hardegger
c2e5445196 fix 2025-09-27 08:32:11 +02:00
Stefan Hardegger
360b69effc fix cleanup 2025-09-27 08:15:09 +02:00
Stefan Hardegger
3bc8bb9e0c backup / restore improvement 2025-09-26 22:34:21 +02:00
Stefan Hardegger
7ca4823573 backup / restore improvement 2025-09-26 22:26:26 +02:00
Stefan Hardegger
5325169495 maintenance improvements 2025-09-26 21:41:33 +02:00
Stefan Hardegger
74cdd5dc57 solr random fix 2025-09-26 15:05:27 +02:00
Stefan Hardegger
574f20bfd7 dependency 2025-09-26 08:28:32 +02:00
Stefan Hardegger
c8249c94d6 new editor 2025-09-26 08:22:54 +02:00
Stefan Hardegger
51a1a69b45 solr migration button 2025-09-23 14:57:16 +02:00
Stefan Hardegger
6ee2d67027 solr migration button 2025-09-23 14:42:38 +02:00
Stefan Hardegger
9472210d8b solr migration button 2025-09-23 14:18:56 +02:00
Stefan Hardegger
62f017c4ca solr fix 2025-09-23 13:58:49 +02:00
Stefan Hardegger
857871273d fix pre formatting 2025-09-22 15:43:25 +02:00
Stefan Hardegger
a9521a9da1 fix saving stories. 2025-09-22 13:52:48 +02:00
Stefan Hardegger
1f41974208 ff 2025-09-22 12:43:05 +02:00
Stefan Hardegger
b68fde71c0 ff 2025-09-22 12:28:31 +02:00
Stefan Hardegger
f61be90d5c ff 2025-09-22 10:13:49 +02:00
Stefan Hardegger
87f37567fb replacing opensearch with solr 2025-09-22 09:44:50 +02:00
Stefan Hardegger
9e684a956b ff 2025-09-21 19:25:11 +02:00
Stefan Hardegger
379ef0d209 ff 2025-09-21 19:21:26 +02:00
Stefan Hardegger
b1ff684df6 asd 2025-09-21 19:18:03 +02:00
Stefan Hardegger
0032590030 fix? 2025-09-21 19:13:39 +02:00
Stefan Hardegger
db38d68399 fix? 2025-09-21 19:10:06 +02:00
Stefan Hardegger
48a0865199 fa 2025-09-21 18:04:36 +02:00
Stefan Hardegger
7daed22d2d another try 2025-09-21 17:53:52 +02:00
Stefan Hardegger
6c02b8831f asd 2025-09-21 17:47:03 +02:00
Stefan Hardegger
042f80dd2a another try 2025-09-21 17:38:57 +02:00
Stefan Hardegger
a472c11ac8 fix 2025-09-21 17:30:15 +02:00
Stefan Hardegger
a037dd92af fix 2025-09-21 17:21:49 +02:00
Stefan Hardegger
634de0b6a5 fix 2025-09-21 16:43:47 +02:00
Stefan Hardegger
b4635b56a3 fix 2025-09-21 16:39:41 +02:00
Stefan Hardegger
bfb68e81a8 fix 2025-09-21 16:34:28 +02:00
Stefan Hardegger
1247a3420e fix 2025-09-21 16:23:44 +02:00
Stefan Hardegger
6caee8a007 config 2025-09-21 16:21:53 +02:00
Stefan Hardegger
cf93d3b3a6 opensearch config 2025-09-21 16:14:20 +02:00
Stefan Hardegger
53cb296adc opensearch config 2025-09-21 16:10:07 +02:00
Stefan Hardegger
f71b70d03b opensearch config 2025-09-21 16:07:48 +02:00
Stefan Hardegger
0bdc3f4731 adjustment 2025-09-21 15:59:15 +02:00
Stefan Hardegger
345065c03b missing dependencies 2025-09-21 15:53:03 +02:00
Stefan Hardegger
c50dc618bf build adjustment 2025-09-21 15:47:14 +02:00
Stefan Hardegger
96e6ced8da adjustment 2025-09-21 15:37:48 +02:00
Stefan Hardegger
4738ae3a75 opefully build fix 2025-09-21 15:30:27 +02:00
Stefan Hardegger
591ca5a149 disable opensearch security 2025-09-21 15:08:20 +02:00
Stefan Hardegger
41ff3a9961 correction 2025-09-21 14:55:43 +02:00
Stefan Hardegger
0101c0ca2c bugfixes, and logging cleanup 2025-09-21 14:55:43 +02:00
58bb7f8229 revert a5628019f8
revert revert b1dbd85346

revert richtext replacement
2025-09-21 14:54:39 +02:00
a5628019f8 revert b1dbd85346
revert richtext replacement
2025-09-21 10:13:48 +02:00
Stefan Hardegger
b1dbd85346 richtext replacement 2025-09-21 10:10:04 +02:00
Stefan Hardegger
aae8f8926b removing typesense 2025-09-20 14:39:51 +02:00
Stefan Hardegger
f1773873d4 Full parallel implementation of typesense and opensearch 2025-09-20 09:40:09 +02:00
Stefan Hardegger
54df3c471e phase 1 2025-09-18 07:46:10 +02:00
Stefan Hardegger
64f97f5648 Settings reorganization 2025-09-17 15:06:35 +02:00
Stefan Hardegger
c0b3ae3b72 embedded image finishing 2025-09-17 10:28:35 +02:00
Stefan Hardegger
e5596b5a17 fix port mapping 2025-09-16 15:06:40 +02:00
Stefan Hardegger
c7b516be31 phase 1 and 2 of embedded images 2025-09-16 14:58:50 +02:00
Stefan Hardegger
c92308c24a layout enhancement. Reading position reset 2025-09-16 09:34:27 +02:00
Stefan Hardegger
f92dcc5314 Advanced Filters - Build optimizations 2025-09-04 15:49:24 +02:00
Stefan Hardegger
702fcb33c1 Improvements to Editor 2025-09-02 09:28:06 +02:00
Stefan Hardegger
11b2a8b071 revert postgres version 2025-09-01 16:19:14 +02:00
Stefan Hardegger
d1289bd616 Security Updates and random improvement. 2025-09-01 16:02:19 +02:00
Stefan Hardegger
15708b5ab2 Table of Content functionality 2025-08-22 09:03:21 +02:00
Stefan Hardegger
a660056003 Various improvements 2025-08-21 13:55:38 +02:00
Stefan Hardegger
35a5825e76 Fix cover images display 2025-08-21 12:38:48 +02:00
Stefan Hardegger
87a4999ffe Fixing Database switching functionality. 2025-08-21 08:54:28 +02:00
Stefan Hardegger
4ee5fa2330 fix 2025-08-20 15:11:41 +02:00
Stefan Hardegger
6128d61349 Library Switching functionality 2025-08-20 15:10:40 +02:00
Stefan Hardegger
5e347f2e2e Incrase permitted upload size 2025-08-20 08:11:36 +02:00
Stefan Hardegger
8eb126a304 performance 2025-08-18 19:27:57 +02:00
Stefan Hardegger
3dc02420fe performance optimization in library view 2025-08-18 19:03:42 +02:00
Stefan Hardegger
241a15a174 Series auto complete 2025-08-18 14:19:14 +02:00
Stefan Hardegger
6b97c0a70f fix loop 2025-08-18 10:41:32 +02:00
Stefan Hardegger
e952241e3c fix 2025-08-18 10:32:02 +02:00
Stefan Hardegger
65f1c6edc7 fix 2025-08-18 10:16:20 +02:00
Stefan Hardegger
40fe3fdb80 Improvements, Fixes. 2025-08-18 10:04:38 +02:00
Stefan Hardegger
95ce5fb532 Bugfixes and Improvements Tag Management 2025-08-18 08:54:18 +02:00
Stefan Hardegger
1a99d9830d Tag Enhancement + bugfixes 2025-08-17 17:16:40 +02:00
Stefan Hardegger
6b83783381 Small improvements 2025-08-15 07:58:36 +02:00
Stefan Hardegger
460ec358ca New Switchable Library Layout 2025-08-14 19:46:50 +02:00
Stefan Hardegger
1d14d3d7aa Fix for Random Story Function 2025-08-14 13:14:46 +02:00
Stefan Hardegger
4357351ec8 randomized 2025-08-13 14:49:57 +02:00
Stefan Hardegger
4ab03953ae random story selector 2025-08-13 14:48:40 +02:00
Stefan Hardegger
142d8328c2 revert security config 2025-08-12 15:14:14 +02:00
Stefan Hardegger
c46108c317 various improvements and performance enhancements 2025-08-12 14:55:51 +02:00
Stefan Hardegger
75c207970d Changing Authors 2025-08-12 12:57:34 +02:00
Stefan Hardegger
3b22d155db restructuring 2025-08-11 14:40:56 +02:00
Stefan Hardegger
51e3d20c24 various fixes 2025-08-11 08:15:20 +02:00
Stefan Hardegger
5d195b63ef Fix dead links 2025-08-08 15:05:10 +02:00
Stefan Hardegger
5b3a9d183e Image Handling in Epub Import/export 2025-08-08 14:50:49 +02:00
Stefan Hardegger
379c8c170f Various improvements & Epub support 2025-08-08 14:09:14 +02:00
Stefan Hardegger
090b858a54 Bugfix 2025-07-31 13:43:23 +02:00
Stefan Hardegger
b0c14d4b37 DB Backup Bugfix 2025-07-31 08:36:33 +02:00
Stefan Hardegger
7227061d25 DB Backup Bugfix 2025-07-31 08:25:47 +02:00
Stefan Hardegger
415eab07de DB Backup Bugfix 2025-07-31 07:54:43 +02:00
Stefan Hardegger
e89331e059 DB Backup Bugfix 2025-07-31 07:46:14 +02:00
Stefan Hardegger
370bef2f07 DB Backup Bug 2025-07-31 07:38:05 +02:00
Stefan Hardegger
9e788c2018 bugfix DB Backup 2025-07-31 07:30:23 +02:00
Stefan Hardegger
590e2590d6 DB Backup and Restore 2025-07-31 07:12:12 +02:00
Stefan Hardegger
57859d7a84 Reading Progress 2025-07-29 14:53:44 +02:00
Stefan Hardegger
5746001c4a Bugfixes 2025-07-29 11:02:46 +02:00
Stefan Hardegger
c08082c0d6 Correct tag facets handling 2025-07-28 14:37:58 +02:00
Stefan Hardegger
860bf02d56 Dockerfile improvement 2025-07-28 14:28:01 +02:00
Stefan Hardegger
a501b27169 Saving reading position 2025-07-28 14:09:19 +02:00
Stefan Hardegger
fcad028959 scraping and improvements 2025-07-28 13:52:09 +02:00
Stefan Hardegger
f95d7aa8bb Various Fixes and QoL enhancements. 2025-07-26 12:05:54 +02:00
5e8164c6a4 Merge pull request 'feature/collections' (#1) from feature/collections into main
Reviewed-on: #1
2025-07-25 14:22:57 +02:00
233 changed files with 43378 additions and 3456 deletions

View File

@@ -14,11 +14,18 @@ JWT_SECRET=secure_jwt_secret_here
# Application Authentication # Application Authentication
APP_PASSWORD=application_password_here APP_PASSWORD=application_password_here
# Search Engine Configuration
SEARCH_ENGINE=typesense
# Typesense Search Configuration # Typesense Search Configuration
TYPESENSE_API_KEY=secure_api_key_here TYPESENSE_API_KEY=secure_api_key_here
TYPESENSE_ENABLED=true TYPESENSE_ENABLED=true
TYPESENSE_REINDEX_INTERVAL=3600000 TYPESENSE_REINDEX_INTERVAL=3600000
# OpenSearch Configuration
OPENSEARCH_USERNAME=admin
OPENSEARCH_PASSWORD=secure_opensearch_password_here
# Image Storage # Image Storage
IMAGE_STORAGE_PATH=/app/images IMAGE_STORAGE_PATH=/app/images

View File

@@ -18,10 +18,9 @@ JWT_SECRET=REPLACE_WITH_SECURE_JWT_SECRET_MINIMUM_32_CHARS
# Use a strong password in production # Use a strong password in production
APP_PASSWORD=REPLACE_WITH_SECURE_APP_PASSWORD APP_PASSWORD=REPLACE_WITH_SECURE_APP_PASSWORD
# Typesense Search Configuration # OpenSearch Configuration
TYPESENSE_API_KEY=REPLACE_WITH_SECURE_TYPESENSE_API_KEY #OPENSEARCH_PASSWORD=REPLACE_WITH_SECURE_OPENSEARCH_PASSWORD
TYPESENSE_ENABLED=true SEARCH_ENGINE=opensearch
TYPESENSE_REINDEX_INTERVAL=3600000
# Image Storage # Image Storage
IMAGE_STORAGE_PATH=/app/images IMAGE_STORAGE_PATH=/app/images

1
.gitignore vendored
View File

@@ -47,3 +47,4 @@ Thumbs.db
# Application data # Application data
images/ images/
data/ data/
backend/cookies.txt

Binary file not shown.

Before

Width:  |  Height:  |  Size: 37 KiB

220
ASYNC_IMAGE_PROCESSING.md Normal file
View File

@@ -0,0 +1,220 @@
# Async Image Processing Implementation
## Overview
The image processing system has been updated to handle external images asynchronously, preventing timeouts when processing stories with many images. This provides real-time progress updates to users showing which images are being processed.
## Backend Components
### 1. `ImageProcessingProgressService`
- Tracks progress for individual story image processing sessions
- Thread-safe with `ConcurrentHashMap` for multi-user support
- Provides progress information: total images, processed count, current image, status, errors
### 2. `AsyncImageProcessingService`
- Handles asynchronous image processing using Spring's `@Async` annotation
- Counts external images before processing
- Provides progress callbacks during processing
- Updates story content when processing completes
- Automatic cleanup of progress data after completion
### 3. Enhanced `ImageService`
- Added `processContentImagesWithProgress()` method with callback support
- Progress callbacks provide real-time updates during image download/processing
- Maintains compatibility with existing synchronous processing
### 4. Updated `StoryController`
- `POST /api/stories` and `PUT /api/stories/{id}` now trigger async image processing
- `GET /api/stories/{id}/image-processing-progress` endpoint for progress polling
- Processing starts immediately after story save and returns control to user
## Frontend Components
### 1. `ImageProcessingProgressTracker` (Utility Class)
```typescript
const tracker = new ImageProcessingProgressTracker(storyId);
tracker.onProgress((progress) => {
console.log(`Processing ${progress.processedImages}/${progress.totalImages}`);
});
tracker.onComplete(() => console.log('Done!'));
tracker.start();
```
### 2. `ImageProcessingProgressComponent` (React Component)
```tsx
<ImageProcessingProgressComponent
storyId={storyId}
autoStart={true}
onComplete={() => refreshStory()}
/>
```
## User Experience
### Before (Synchronous)
1. User saves story with external images
2. Request hangs for 30+ seconds processing images
3. Browser may timeout
4. No feedback about progress
5. User doesn't know if it's working
### After (Asynchronous)
1. User saves story with external images
2. Save completes immediately
3. Progress indicator appears: "Processing 5 images. Currently image 2 of 5..."
4. User can continue using the application
5. Progress updates every second
6. Story automatically refreshes when processing completes
## API Endpoints
### Progress Endpoint
```
GET /api/stories/{id}/image-processing-progress
```
**Response when processing:**
```json
{
"isProcessing": true,
"totalImages": 5,
"processedImages": 2,
"currentImageUrl": "https://example.com/image.jpg",
"status": "Processing image 3 of 5",
"progressPercentage": 40.0,
"completed": false,
"error": ""
}
```
**Response when completed:**
```json
{
"isProcessing": false,
"totalImages": 5,
"processedImages": 5,
"currentImageUrl": "",
"status": "Completed: 5 images processed",
"progressPercentage": 100.0,
"completed": true,
"error": ""
}
```
**Response when no processing:**
```json
{
"isProcessing": false,
"message": "No active image processing"
}
```
## Integration Examples
### React Hook Usage
```tsx
import { useImageProcessingProgress } from '../utils/imageProcessingProgress';
function StoryEditor({ storyId }) {
const { progress, isTracking, startTracking } = useImageProcessingProgress(storyId);
const handleSave = async () => {
await saveStory();
startTracking(); // Start monitoring progress
};
return (
<div>
{isTracking && progress && (
<div className="progress-indicator">
Processing {progress.processedImages}/{progress.totalImages} images...
</div>
)}
<button onClick={handleSave}>Save Story</button>
</div>
);
}
```
### Manual Progress Tracking
```typescript
// After saving a story with external images
const tracker = new ImageProcessingProgressTracker(storyId);
tracker.onProgress((progress) => {
updateProgressBar(progress.progressPercentage);
showStatus(progress.status);
if (progress.currentImageUrl) {
showCurrentImage(progress.currentImageUrl);
}
});
tracker.onComplete((finalProgress) => {
hideProgressBar();
showNotification('Image processing completed!');
refreshStoryContent(); // Reload story with processed images
});
tracker.onError((error) => {
hideProgressBar();
showError(`Image processing failed: ${error}`);
});
tracker.start();
```
## Configuration
### Polling Interval
Default: 1 second (1000ms)
```typescript
const tracker = new ImageProcessingProgressTracker(storyId, 500); // Poll every 500ms
```
### Timeout
Default: 5 minutes (300000ms)
```typescript
const tracker = new ImageProcessingProgressTracker(storyId, 1000, 600000); // 10 minute timeout
```
### Spring Async Configuration
The backend uses Spring's default async executor. For production, consider configuring a custom thread pool in your application properties:
```yaml
spring:
task:
execution:
pool:
core-size: 4
max-size: 8
queue-capacity: 100
```
## Error Handling
### Backend Errors
- Network timeouts downloading images
- Invalid image formats
- Disk space issues
- All errors are logged and returned in progress status
### Frontend Errors
- Network failures during progress polling
- Timeout if processing takes too long
- Graceful degradation - user can continue working
## Benefits
1. **No More Timeouts**: Large image processing operations won't timeout HTTP requests
2. **Better UX**: Users get real-time feedback about processing progress
3. **Improved Performance**: Users can continue using the app while images process
4. **Error Visibility**: Clear error messages when image processing fails
5. **Scalability**: Multiple users can process images simultaneously without blocking
## Future Enhancements
1. **WebSocket Support**: Replace polling with WebSocket for real-time push updates
2. **Batch Processing**: Queue multiple stories for batch image processing
3. **Retry Logic**: Automatic retry for failed image downloads
4. **Progress Persistence**: Save progress to database for recovery after server restart
5. **Image Optimization**: Automatic resize/compress images during processing

137
DEPLOYMENT.md Normal file
View File

@@ -0,0 +1,137 @@
# StoryCove Deployment Guide
## Quick Deployment
StoryCove includes an automated deployment script that handles Solr volume cleanup and ensures fresh search indices on every deployment.
### Using the Deployment Script
```bash
./deploy.sh
```
This script will:
1. Stop all running containers
2. **Remove the Solr data volume** (forcing fresh core creation)
3. Build and start all containers
4. Wait for services to become healthy
5. Trigger automatic bulk reindexing
### What Happens During Deployment
#### 1. Solr Volume Cleanup
The script removes the `storycove_solr_data` volume, which:
- Ensures all Solr cores are recreated from scratch
- Prevents stale configuration issues
- Guarantees schema changes are applied
#### 2. Automatic Bulk Reindexing
When the backend starts, it automatically:
- Detects that Solr is available
- Fetches all entities from the database (Stories, Authors, Collections)
- Bulk indexes them into Solr
- Logs progress and completion
### Monitoring the Deployment
Watch the backend logs to see reindexing progress:
```bash
docker-compose logs -f backend
```
You should see output like:
```
========================================
Starting automatic bulk reindexing...
========================================
📚 Indexing stories...
✅ Indexed 150 stories
👤 Indexing authors...
✅ Indexed 45 authors
📂 Indexing collections...
✅ Indexed 12 collections
========================================
✅ Bulk reindexing completed successfully in 2345ms
📊 Total indexed: 150 stories, 45 authors, 12 collections
========================================
```
## Manual Deployment (Without Script)
If you prefer manual control:
```bash
# Stop containers
docker-compose down
# Remove Solr volume
docker volume rm storycove_solr_data
# Start containers
docker-compose up -d --build
```
The automatic reindexing will still occur on startup.
## Troubleshooting
### Reindexing Fails
If bulk reindexing fails:
1. Check Solr is running: `docker-compose logs solr`
2. Verify Solr health: `curl http://localhost:8983/solr/admin/ping`
3. Check backend logs: `docker-compose logs backend`
The application will still start even if reindexing fails - you can manually trigger reindexing through the admin API.
### Solr Cores Not Created
If Solr cores aren't being created properly:
1. Check the `solr.Dockerfile` to ensure cores are created
2. Verify the Solr image builds correctly: `docker-compose build solr`
3. Check Solr Admin UI: http://localhost:8983
### Performance Issues
If reindexing takes too long:
- The bulk indexing is already optimized (batch operations)
- Consider increasing Solr memory in `docker-compose.yml`:
```yaml
environment:
- SOLR_HEAP=1024m
```
## Development Workflow
### Daily Development
Just use the normal commands:
```bash
docker-compose up -d
```
The automatic reindexing still happens, but it's fast on small datasets.
### Schema Changes
When you modify Solr schema or add new cores:
```bash
./deploy.sh
```
This ensures a clean slate.
### Skipping Reindexing
Reindexing is automatic and cannot be disabled. It's designed to be fast and unobtrusive. The application starts immediately - reindexing happens in the background.
## Environment Variables
No additional environment variables are needed for the deployment script. All configuration is in `docker-compose.yml`.
## Backup Considerations
**Important**: Since the Solr volume is recreated on every deployment, you should:
- Never rely on Solr as the source of truth
- Always maintain data in PostgreSQL
- Solr is treated as a disposable cache/index
This is the recommended approach for search indices.

View File

@@ -0,0 +1,466 @@
# EPUB Import/Export Specification
## 🎉 Phase 1 & 2 Implementation Complete
**Status**: Both Phase 1 and Phase 2 fully implemented and operational as of August 2025
**Phase 1 Achievements**:
- ✅ Complete EPUB import functionality with validation and error handling
- ✅ Single story EPUB export with XML validation fixes
- ✅ Reading position preservation using EPUB CFI standards
- ✅ Full frontend UI integration with navigation and authentication
- ✅ Moved export button to Story Detail View for better UX
- ✅ Added EPUB import to main Add Story menu dropdown
**Phase 2 Enhancements**:
-**Enhanced Cover Processing**: Automatic extraction and optimization of cover images during EPUB import
-**Advanced Metadata Extraction**: Comprehensive extraction of subjects/tags, keywords, publisher, language, publication dates, and identifiers
-**Collection EPUB Export**: Full collection export with table of contents, proper chapter structure, and metadata aggregation
-**Image Validation**: Robust cover image processing with format detection, resizing, and storage management
-**API Endpoints**: Complete REST API for both individual story and collection EPUB operations
## Overview
This specification defines the requirements and implementation details for importing and exporting EPUB files in StoryCove. The feature enables users to import stories from EPUB files and export their stories/collections as EPUB files with preserved reading positions.
## Scope
### In Scope
- **EPUB Import**: Parse DRM-free EPUB files and import as stories
- **EPUB Export**: Export individual stories and collections as EPUB files
- **Reading Position Preservation**: Store and restore reading positions using EPUB standards
- **Metadata Handling**: Extract and preserve story metadata (title, author, cover, etc.)
- **Content Processing**: HTML content sanitization and formatting
### Out of Scope (Phase 1)
- DRM-protected EPUB files (future consideration)
- Real-time reading position sync between devices
- Advanced EPUB features (audio, video, interactive content)
- EPUB validation beyond basic structure
## Technical Architecture
### Backend Implementation
- **Language**: Java (Spring Boot)
- **Primary Library**: EPUBLib (nl.siegmann.epublib:epublib-core:3.1)
- **Processing**: Server-side generation and parsing
- **File Handling**: Multipart file upload for import, streaming download for export
### Dependencies
```xml
<dependency>
<groupId>com.positiondev.epublib</groupId>
<artifactId>epublib-core</artifactId>
<version>3.1</version>
</dependency>
```
### Phase 1 Implementation Notes
- **EPUBImportService**: Implemented with full validation, metadata extraction, and reading position handling
- **EPUBExportService**: Implemented with XML validation fixes for EPUB reader compatibility
- **ReadingPosition Entity**: Created with EPUB CFI support and database indexing
- **Authentication**: All endpoints secured with JWT authentication and proper frontend integration
- **UI Integration**: Export moved to Story Detail View, Import added to main navigation menu
- **XML Compliance**: Fixed XHTML validation issues by properly formatting self-closing tags (`<br>``<br />`)
## EPUB Import Specification
### Supported Formats
- **EPUB 2.0** and **EPUB 3.x** formats
- **DRM-Free** files only
- **Maximum file size**: 50MB
- **Supported content**: Text-based stories with HTML content
### Import Process Flow
1. **File Upload**: User uploads EPUB file via web interface
2. **Validation**: Check file format, size, and basic EPUB structure
3. **Parsing**: Extract metadata, content, and resources using EPUBLib
4. **Content Processing**: Sanitize HTML content using existing Jsoup pipeline
5. **Story Creation**: Create Story entity with extracted data
6. **Preview**: Show extracted story details for user confirmation
7. **Finalization**: Save story to database with imported metadata
### Metadata Mapping
```java
// EPUB Metadata → StoryCove Story Entity
epub.getMetadata().getFirstTitle() story.title
epub.getMetadata().getAuthors().get(0) story.authorName
epub.getMetadata().getDescriptions().get(0) story.summary
epub.getCoverImage() story.coverPath
epub.getMetadata().getSubjects() story.tags
```
### Content Extraction
- **Multi-chapter EPUBs**: Combine all content files into single HTML
- **Chapter separation**: Insert `<hr>` or `<h2>` tags between chapters
- **HTML sanitization**: Apply existing sanitization rules
- **Image handling**: Extract and store cover images, inline images optional
### API Endpoints
#### POST /api/stories/import-epub
```java
@PostMapping("/import-epub")
public ResponseEntity<?> importEPUB(@RequestParam("file") MultipartFile file) {
// Implementation in EPUBImportService
}
```
**Request**: Multipart file upload
**Response**:
```json
{
"message": "EPUB imported successfully",
"storyId": "uuid",
"extractedData": {
"title": "Story Title",
"author": "Author Name",
"summary": "Story description",
"chapterCount": 12,
"wordCount": 45000,
"hasCovers": true
}
}
```
## EPUB Export Specification
### Export Types
1. **Single Story Export**: Convert one story to EPUB
2. **Collection Export**: Multiple stories as single EPUB with chapters
### EPUB Structure Generation
```
story.epub
├── mimetype
├── META-INF/
│ └── container.xml
└── OEBPS/
├── content.opf # Package metadata
├── toc.ncx # Navigation
├── stylesheet.css # Styling
├── cover.html # Cover page
├── chapter001.xhtml # Story content
├── images/
│ └── cover.jpg # Cover image
└── fonts/ (optional)
```
### Reading Position Implementation
#### EPUB 3 CFI (Canonical Fragment Identifier)
```xml
<!-- In content.opf metadata -->
<meta property="epub-cfi" content="/6/4[chap01]!/4[body01]/10[para05]/3:142"/>
<meta property="reading-percentage" content="0.65"/>
<meta property="last-read-timestamp" content="2023-12-07T10:30:00Z"/>
```
#### StoryCove Custom Metadata (Fallback)
```xml
<meta name="storycove:reading-chapter" content="3"/>
<meta name="storycove:reading-paragraph" content="15"/>
<meta name="storycove:reading-offset" content="142"/>
<meta name="storycove:reading-percentage" content="0.65"/>
```
#### CFI Generation Logic
```java
public String generateCFI(ReadingPosition position) {
return String.format("/6/%d[chap%02d]!/4[body01]/%d[para%02d]/3:%d",
(position.getChapterIndex() * 2) + 4,
position.getChapterIndex(),
(position.getParagraphIndex() * 2) + 4,
position.getParagraphIndex(),
position.getCharacterOffset());
}
```
### API Endpoints
#### GET /api/stories/{id}/export-epub
```java
@GetMapping("/{id}/export-epub")
public ResponseEntity<StreamingResponseBody> exportStory(@PathVariable UUID id) {
// Implementation in EPUBExportService
}
```
**Response**: EPUB file download with headers:
```
Content-Type: application/epub+zip
Content-Disposition: attachment; filename="story-title.epub"
```
#### GET /api/collections/{id}/export-epub
```java
@GetMapping("/{id}/export-epub")
public ResponseEntity<StreamingResponseBody> exportCollection(@PathVariable UUID id) {
// Implementation in EPUBExportService
}
```
**Response**: Multi-story EPUB with table of contents
## Data Models
### ReadingPosition Entity
```java
@Entity
@Table(name = "reading_positions")
public class ReadingPosition {
@Id
private UUID id;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "story_id")
private Story story;
@Column(name = "chapter_index")
private Integer chapterIndex = 0;
@Column(name = "paragraph_index")
private Integer paragraphIndex = 0;
@Column(name = "character_offset")
private Integer characterOffset = 0;
@Column(name = "progress_percentage")
private Double progressPercentage = 0.0;
@Column(name = "epub_cfi")
private String canonicalFragmentIdentifier;
@Column(name = "last_read_at")
private LocalDateTime lastReadAt;
@Column(name = "device_identifier")
private String deviceIdentifier;
// Constructors, getters, setters
}
```
### EPUB Import Request DTO
```java
public class EPUBImportRequest {
private String filename;
private Long fileSize;
private Boolean preserveChapterStructure = true;
private Boolean extractCover = true;
private String targetCollectionId; // Optional: add to specific collection
}
```
### EPUB Export Options DTO
```java
public class EPUBExportOptions {
private Boolean includeReadingPosition = true;
private Boolean includeCoverImage = true;
private Boolean includeMetadata = true;
private String cssStylesheet; // Optional custom CSS
private EPUBVersion version = EPUBVersion.EPUB3;
}
```
## Service Layer Architecture
### EPUBImportService
```java
@Service
public class EPUBImportService {
// Core import method
public Story importEPUBFile(MultipartFile file, EPUBImportRequest request);
// Helper methods
private void validateEPUBFile(MultipartFile file);
private Book parseEPUBStructure(InputStream inputStream);
private Story extractStoryData(Book epub);
private String combineChapterContent(Book epub);
private void extractAndSaveCover(Book epub, Story story);
private List<String> extractTags(Book epub);
private ReadingPosition extractReadingPosition(Book epub);
}
```
### EPUBExportService
```java
@Service
public class EPUBExportService {
// Core export methods
public byte[] exportSingleStory(UUID storyId, EPUBExportOptions options);
public byte[] exportCollection(UUID collectionId, EPUBExportOptions options);
// Helper methods
private Book createEPUBStructure(Story story, ReadingPosition position);
private Book createCollectionEPUB(Collection collection, List<ReadingPosition> positions);
private void addReadingPositionMetadata(Book book, ReadingPosition position);
private String generateCFI(ReadingPosition position);
private Resource createChapterResource(Story story);
private Resource createStylesheetResource();
private void addCoverImage(Book book, Story story);
}
```
## Frontend Integration
### Import UI Flow
1. **Upload Interface**: File input with EPUB validation
2. **Progress Indicator**: Show parsing progress
3. **Preview Screen**: Display extracted metadata for confirmation
4. **Confirmation**: Allow editing of title, author, summary before saving
5. **Success**: Redirect to created story
### Export UI Flow
1. **Export Button**: Available on story detail and collection pages
2. **Options Modal**: Allow selection of export options
3. **Progress Indicator**: Show EPUB generation progress
4. **Download**: Automatic file download on completion
### Frontend API Calls
```typescript
// Import EPUB
const importEPUB = async (file: File) => {
const formData = new FormData();
formData.append('file', file);
const response = await fetch('/api/stories/import-epub', {
method: 'POST',
body: formData,
});
return await response.json();
};
// Export Story
const exportStoryEPUB = async (storyId: string) => {
const response = await fetch(`/api/stories/${storyId}/export-epub`, {
method: 'GET',
});
const blob = await response.blob();
const url = window.URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `${storyTitle}.epub`;
a.click();
};
```
## Error Handling
### Import Errors
- **Invalid EPUB format**: "Invalid EPUB file format"
- **File too large**: "File size exceeds 50MB limit"
- **DRM protected**: "DRM-protected EPUBs not supported"
- **Corrupted file**: "EPUB file appears to be corrupted"
- **No content**: "EPUB contains no readable content"
### Export Errors
- **Story not found**: "Story not found or access denied"
- **Missing content**: "Story has no content to export"
- **Generation failure**: "Failed to generate EPUB file"
## Security Considerations
### File Upload Security
- **File type validation**: Verify EPUB MIME type and structure
- **Size limits**: Enforce maximum file size limits
- **Content sanitization**: Apply existing HTML sanitization
- **Virus scanning**: Consider integration with antivirus scanning
### Content Security
- **HTML sanitization**: Apply existing Jsoup rules to imported content
- **Image validation**: Validate extracted cover images
- **Metadata escaping**: Escape special characters in metadata
## Testing Strategy
### Unit Tests
- EPUB parsing and validation logic
- CFI generation and parsing
- Metadata extraction accuracy
- Content sanitization
### Integration Tests
- End-to-end import/export workflow
- Reading position preservation
- Multi-story collection export
- Error handling scenarios
### Test Data
- Sample EPUB files for various scenarios
- EPUBs with and without reading positions
- Multi-chapter EPUBs
- EPUBs with covers and metadata
## Performance Considerations
### Import Performance
- **Streaming processing**: Process large EPUBs without loading entirely into memory
- **Async processing**: Consider async import for large files
- **Progress tracking**: Provide progress feedback for large imports
### Export Performance
- **Caching**: Cache generated EPUBs for repeated exports
- **Streaming**: Stream EPUB generation for large collections
- **Resource optimization**: Optimize image and content sizes
## Future Enhancements (Out of Scope)
### Phase 2 Considerations
- **DRM support**: Research legal and technical feasibility
- **Reading position sync**: Real-time sync across devices
- **Advanced EPUB features**: Enhanced typography, annotations
- **Bulk operations**: Import/export multiple EPUBs
- **EPUB validation**: Full EPUB compliance checking
### Integration Possibilities
- **Cloud storage**: Export directly to Google Drive, Dropbox
- **E-reader sync**: Direct sync with Kindle, Kobo devices
- **Reading analytics**: Track reading patterns and statistics
## Implementation Phases
### Phase 1: Core Functionality ✅ **COMPLETED**
- [x] Basic EPUB import (DRM-free)
- [x] Single story export
- [x] Reading position storage and retrieval
- [x] Frontend UI integration
### Phase 2: Enhanced Features ✅ **COMPLETED**
- [x] Collection export with table of contents
- [x] Advanced metadata handling (subjects, keywords, publisher, language, etc.)
- [x] Enhanced cover image processing for import/export
- [x] Comprehensive error handling
### Phase 3: Advanced Features
- [ ] DRM exploration (legal research required)
- [ ] Reading position sync
- [ ] Advanced EPUB features
- [ ] Analytics and reporting
## Acceptance Criteria
### Import Success Criteria ✅ **COMPLETED**
- [x] Successfully parse EPUB 2.0 and 3.x files
- [x] Extract title, author, summary, and content accurately
- [x] Preserve formatting and basic HTML structure
- [x] Handle cover images correctly
- [x] Import reading positions when present
- [x] Provide clear error messages for invalid files
### Export Success Criteria ✅ **FULLY COMPLETED**
- [x] Generate valid EPUB files compatible with major readers
- [x] Include accurate metadata and content
- [x] Embed reading positions using CFI standard
- [x] Support single story export
- [x] Support collection export with proper structure
- [x] Generate proper table of contents for collections
- [x] Include cover images when available
---
*This specification serves as the implementation guide for the EPUB import/export feature. All implementation decisions should reference this document for consistency and completeness.*

View File

@@ -0,0 +1,539 @@
# StoryCove Housekeeping Complete Report
**Date:** 2025-10-10
**Scope:** Comprehensive audit of backend, frontend, tests, and documentation
**Overall Grade:** A- (90%)
---
## Executive Summary
StoryCove is a **production-ready** self-hosted short story library application with **excellent architecture** and **comprehensive feature implementation**. The codebase demonstrates professional-grade engineering with only one critical issue blocking 100% compliance.
### Key Highlights ✅
-**Entity layer:** 100% specification compliant
-**EPUB Import/Export:** Phase 2 fully implemented
-**Tag Enhancement:** Aliases, merging, AI suggestions complete
-**Multi-Library Support:** Robust isolation with security
-**HTML Sanitization:** Shared backend/frontend config with DOMPurify
-**Advanced Search:** 15+ filter parameters, Solr integration
-**Reading Experience:** Progress tracking, TOC, series navigation
### Critical Issue 🚨
1. **Collections Search Not Implemented** (CollectionService.java:56-61)
- GET /api/collections returns empty results
- Requires Solr Collections core implementation
- Estimated: 4-6 hours to fix
---
## Phase 1: Documentation & State Assessment (COMPLETED)
### Entity Models - Grade: A+ (100%)
All 7 entity models are **specification-perfect**:
| Entity | Spec Compliance | Key Features | Status |
|--------|----------------|--------------|--------|
| **Story** | 100% | All 14 fields, reading progress, series support | ✅ Perfect |
| **Author** | 100% | Rating, avatar, URL collections | ✅ Perfect |
| **Tag** | 100% | Color (7-char hex), description (500 chars), aliases | ✅ Perfect |
| **Collection** | 100% | Gap-based positioning, calculated properties | ✅ Perfect |
| **Series** | 100% | Name, description, stories relationship | ✅ Perfect |
| **ReadingPosition** | 100% | EPUB CFI, context, percentage tracking | ✅ Perfect |
| **TagAlias** | 100% | Alias resolution, merge tracking | ✅ Perfect |
**Verification:**
- `Story.java:1-343`: All fields match DATA_MODEL.md
- `Collection.java:1-245`: Helper methods for story management
- `ReadingPosition.java:1-230`: Complete EPUB CFI support
- `TagAlias.java:1-113`: Proper canonical tag resolution
### Repository Layer - Grade: A+ (100%)
**Best Practices Verified:**
- ✅ No search anti-patterns (CollectionRepository correctly delegates to search service)
- ✅ Proper use of `@Query` annotations for complex operations
- ✅ Efficient eager loading with JOIN FETCH
- ✅ Return types: Page<T> for pagination, List<T> for unbounded
**Files Audited:**
- `CollectionRepository.java:1-55` - ID-based lookups only
- `StoryRepository.java` - Complex queries with associations
- `AuthorRepository.java` - Join fetch for stories
- `TagRepository.java` - Alias-aware queries
---
## Phase 2: Backend Implementation Audit (COMPLETED)
### Service Layer - Grade: A (95%)
#### Core Services ✅
**StoryService.java** (794 lines)
- ✅ CRUD with search integration
- ✅ HTML sanitization on create/update (line 490, 528-532)
- ✅ Reading progress management
- ✅ Tag alias resolution
- ✅ Random story with 15+ filters
**AuthorService.java** (317 lines)
- ✅ Avatar management
- ✅ Rating validation (1-5 range)
- ✅ Search index synchronization
- ✅ URL management
**TagService.java** (491 lines)
-**Tag Enhancement spec 100% complete**
- ✅ Alias system: addAlias(), removeAlias(), resolveTagByName()
- ✅ Tag merging with atomic operations
- ✅ AI tag suggestions with confidence scoring
- ✅ Merge preview functionality
**CollectionService.java** (452 lines)
- ⚠️ **CRITICAL ISSUE at lines 56-61:**
```java
public SearchResultDto<Collection> searchCollections(...) {
logger.warn("Collections search not yet implemented in Solr, returning empty results");
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
}
```
- ✅ All other CRUD operations work correctly
- ✅ Gap-based positioning for story reordering
#### EPUB Services ✅
**EPUBImportService.java** (551 lines)
- ✅ Metadata extraction (title, author, description, tags)
- ✅ Cover image extraction and processing
- ✅ Content image download and replacement
- ✅ Reading position preservation
- ✅ Author/series auto-creation
**EPUBExportService.java** (584 lines)
- ✅ Single story export
- ✅ Collection export (multi-story)
- ✅ Chapter splitting by word count or HTML headings
- ✅ Custom metadata and title support
- ✅ XHTML compliance (fixHtmlForXhtml method)
- ✅ Reading position inclusion
#### Advanced Services ✅
**HtmlSanitizationService.java** (222 lines)
- ✅ Jsoup Safelist configuration
- ✅ Loads config from `html-sanitization-config.json`
- ✅ Figure tag preprocessing (lines 143-184)
- ✅ Relative URL preservation (line 89)
- ✅ Shared with frontend via `/api/config/html-sanitization`
**ImageService.java** (1122 lines)
- ✅ Three image types: COVER, AVATAR, CONTENT
- ✅ Content image processing with download
- ✅ Orphaned image cleanup
- ✅ Library-aware paths
- ✅ Async processing support
**LibraryService.java** (830 lines)
- ✅ Multi-library isolation
-**Explicit authentication required** (lines 104-114)
- ✅ Automatic schema creation for new libraries
- ✅ Smart database routing (SmartRoutingDataSource)
- ✅ Async Solr reindexing on library switch (lines 164-193)
- ✅ BCrypt password encryption
**DatabaseManagementService.java** (1206 lines)
- ✅ ZIP-based complete backup with pg_dump
- ✅ Restore with schema creation
- ✅ Manual reindexing from database (lines 1047-1097)
- ✅ Security: ZIP path validation
**SearchServiceAdapter.java** (287 lines)
- ✅ Unified search interface
- ✅ Delegates to SolrService
- ✅ Bulk indexing operations
- ✅ Tag suggestions
**SolrService.java** (1115 lines)
- ✅ Two cores: stories and authors
- ✅ Advanced filtering with 20+ parameters
- ✅ Library-aware filtering
- ✅ Faceting support
- ⚠️ **No Collections core** (known issue)
### Controller Layer - Grade: A (95%)
**StoryController.java** (1000+ lines)
- ✅ Comprehensive REST API
- ✅ CRUD operations
- ✅ EPUB import/export endpoints
- ✅ Async content image processing with progress
- ✅ Duplicate detection
- ✅ Advanced search with 15+ filters
- ✅ Random story endpoint
- ✅ Reading progress tracking
**CollectionController.java** (538 lines)
- ✅ Full CRUD operations
- ✅ Cover image upload/removal
- ✅ Story reordering
- ✅ EPUB collection export
- ⚠️ Search returns empty (known issue)
- ✅ Lightweight DTOs to avoid circular references
**SearchController.java** (57 lines)
- ✅ Reindex endpoint
- ✅ Health check
- ⚠️ Minimal implementation (search is in StoryController)
---
## Phase 3: Frontend Implementation Audit (COMPLETED)
### API Client Layer - Grade: A+ (100%)
**api.ts** (994 lines)
- ✅ Axios instance with interceptors
- ✅ JWT token management (localStorage + httpOnly cookies)
- ✅ Auto-redirect on 401/403
- ✅ Comprehensive endpoints for all resources
- ✅ Tag alias resolution in search (lines 576-585)
- ✅ Advanced filter parameters (15+ filters)
- ✅ Random story with Solr RandomSortField (lines 199-307)
- ✅ Library-aware image URLs (lines 983-994)
**Endpoints Coverage:**
- ✅ Stories: CRUD, search, random, EPUB import/export, duplicate check
- ✅ Authors: CRUD, avatar, search
- ✅ Tags: CRUD, aliases, merge, suggestions, autocomplete
- ✅ Collections: CRUD, search, cover, reorder, EPUB export
- ✅ Series: CRUD, search
- ✅ Database: backup/restore (both SQL and complete)
- ✅ Config: HTML sanitization, image cleanup
- ✅ Search Admin: engine switching, reindex, library migration
### HTML Sanitization - Grade: A+ (100%)
**sanitization.ts** (368 lines)
-**Shared configuration with backend** via `/api/config/html-sanitization`
- ✅ DOMPurify with custom configuration
- ✅ CSS property filtering (lines 20-47)
- ✅ Figure tag preprocessing (lines 187-251) - **matches backend**
- ✅ Async `sanitizeHtml()` and sync `sanitizeHtmlSync()`
- ✅ Fallback configuration if backend unavailable
- ✅ Config caching for performance
**Security Features:**
- ✅ Allowlist-based tag filtering
- ✅ CSS property whitelist
- ✅ URL protocol validation
- ✅ Relative URL preservation for local images
### Pages & Components - Grade: A (95%)
#### Library Page (LibraryContent.tsx - 341 lines)
- ✅ Advanced search with debouncing
- ✅ Tag facet enrichment with full tag data
- ✅ URL parameter handling for filters
- ✅ Three layout modes: sidebar, toolbar, minimal
- ✅ Advanced filters integration
- ✅ Random story with all filters applied
- ✅ Pagination
#### Collections Page (page.tsx - 300 lines)
- ✅ Search with tag filtering
- ✅ Archive toggle
- ✅ Grid/list view modes
- ✅ Pagination
- ⚠️ **Search returns empty results** (backend issue)
#### Story Reading Page (stories/[id]/page.tsx - 669 lines)
-**Sophisticated reading experience:**
- Reading progress bar with percentage
- Auto-scroll to saved position
- Debounced position saving (2 second delay)
- Character position tracking
- End-of-story detection with reset option
-**Table of Contents:**
- Auto-generated from headings
- Modal overlay
- Smooth scroll navigation
-**Series Navigation:**
- Previous/Next story links
- Inline metadata display
-**Memoized content rendering** to prevent re-sanitization on scroll
- ✅ Preloaded sanitization config
#### Settings Page (SettingsContent.tsx - 183 lines)
- ✅ Three tabs: Appearance, Content, System
- ✅ Theme switching (light/dark)
- ✅ Font customization (serif, sans, mono)
- ✅ Font size control
- ✅ Reading width preferences
- ✅ Reading speed configuration
- ✅ localStorage persistence
#### Slate Editor (SlateEditor.tsx - 942 lines)
-**Rich text editing with Slate.js**
-**Advanced image handling:**
- Image paste with src preservation
- Interactive image elements with edit/delete
- Image error handling with fallback
- External image indicators
-**Formatting:**
- Headings (H1, H2, H3)
- Text formatting (bold, italic, underline, strikethrough)
- Keyboard shortcuts (Ctrl+B, Ctrl+I, etc.)
-**HTML conversion:**
- Bidirectional HTML ↔ Slate conversion
- Mixed content support (text + images)
- Figure tag preprocessing
- Sanitization integration
---
## Phase 4: Test Coverage Assessment (COMPLETED)
### Current Test Files (9 total):
**Entity Tests (5):**
-`StoryTest.java` - Story entity validation
-`AuthorTest.java` - Author entity validation
-`TagTest.java` - Tag entity validation
-`SeriesTest.java` - Series entity validation
- ❌ Missing: CollectionTest, ReadingPositionTest, TagAliasTest
**Repository Tests (3):**
-`StoryRepositoryTest.java` - Story persistence
-`AuthorRepositoryTest.java` - Author persistence
-`BaseRepositoryTest.java` - Base test configuration
- ❌ Missing: TagRepository, SeriesRepository, CollectionRepository, ReadingPositionRepository
**Service Tests (2):**
-`StoryServiceTest.java` - Story business logic
-`AuthorServiceTest.java` - Author business logic
- ❌ Missing: TagService, CollectionService, EPUBImportService, EPUBExportService, HtmlSanitizationService, ImageService, LibraryService, DatabaseManagementService, SeriesService, SearchServiceAdapter, SolrService
**Controller Tests:** ❌ None
**Frontend Tests:** ❌ None
### Test Coverage Estimate: ~25%
**Missing HIGH Priority Tests:**
1. CollectionServiceTest - Collections CRUD and search
2. TagServiceTest - Alias, merge, AI suggestions
3. EPUBImportServiceTest - Import logic verification
4. EPUBExportServiceTest - Export format validation
5. HtmlSanitizationServiceTest - **Security critical**
6. ImageServiceTest - Image processing and download
**Missing MEDIUM Priority:**
- SeriesServiceTest
- LibraryServiceTest
- DatabaseManagementServiceTest
- SearchServiceAdapter/SolrServiceTest
- All controller tests
- All frontend component tests
**Recommended Action:**
Create comprehensive test suite with target coverage of 80%+ for services, 70%+ for controllers.
---
## Phase 5: Documentation Review
### Specification Documents ✅
| Document | Status | Notes |
|----------|--------|-------|
| storycove-spec.md | ✅ Current | Core specification |
| DATA_MODEL.md | ✅ Current | 100% implemented |
| API.md | ⚠️ Needs minor updates | Missing some advanced filter docs |
| TAG_ENHANCEMENT_SPECIFICATION.md | ✅ Current | 100% implemented |
| EPUB_IMPORT_EXPORT_SPECIFICATION.md | ✅ Current | Phase 2 complete |
| storycove-collections-spec.md | ⚠️ Known issue | Search not implemented |
### Implementation Reports ✅
-`HOUSEKEEPING_PHASE1_REPORT.md` - Detailed assessment
-`HOUSEKEEPING_COMPLETE_REPORT.md` - This document
### Recommendations:
1. **Update API.md** to document:
- Advanced search filters (15+ parameters)
- Random story endpoint with filter support
- EPUB import/export endpoints
- Image processing endpoints
2. **Add MULTI_LIBRARY_SPEC.md** documenting:
- Library isolation architecture
- Authentication flow
- Database routing
- Search index separation
---
## Critical Findings Summary
### 🚨 CRITICAL (Must Fix)
1. **Collections Search Not Implemented**
- **Location:** `CollectionService.java:56-61`
- **Impact:** GET /api/collections always returns empty results
- **Specification:** storycove-collections-spec.md lines 52-61 mandates Solr search
- **Estimated Fix:** 4-6 hours
- **Steps:**
1. Create Solr Collections core with schema
2. Implement indexing in SearchServiceAdapter
3. Wire up CollectionService.searchCollections()
4. Test pagination and filtering
### ⚠️ HIGH Priority (Recommended)
2. **Missing Test Coverage** (~25% vs target 80%)
- HtmlSanitizationServiceTest - security critical
- CollectionServiceTest - feature verification
- TagServiceTest - complex logic (aliases, merge)
- EPUBImportServiceTest, EPUBExportServiceTest - file processing
3. **API Documentation Updates**
- Advanced filters not fully documented
- EPUB endpoints missing from API.md
### 📋 MEDIUM Priority (Optional)
4. **SearchController Minimal**
- Only has reindex and health check
- Actual search in StoryController
5. **Frontend Test Coverage**
- No component tests
- No integration tests
- Recommend: Jest + React Testing Library
---
## Strengths & Best Practices 🌟
### Architecture Excellence
1. **Multi-Library Support**
- Complete isolation with separate databases
- Explicit authentication required
- Smart routing with automatic reindexing
- Library-aware image paths
2. **Security-First Design**
- HTML sanitization with shared backend/frontend config
- JWT authentication with httpOnly cookies
- BCrypt password encryption
- Input validation throughout
3. **Production-Ready Features**
- Complete backup/restore system (pg_dump/psql)
- Orphaned image cleanup
- Async image processing with progress tracking
- Reading position tracking with EPUB CFI
### Code Quality
1. **Proper Separation of Concerns**
- Repository anti-patterns avoided
- Service layer handles business logic
- Controllers are thin and focused
- DTOs prevent circular references
2. **Error Handling**
- Custom exceptions (ResourceNotFoundException, DuplicateResourceException)
- Proper HTTP status codes
- Fallback configurations
3. **Performance Optimizations**
- Eager loading with JOIN FETCH
- Memoized React components
- Debounced search and autosave
- Config caching
---
## Compliance Matrix
| Feature Area | Spec Compliance | Implementation Quality | Notes |
|-------------|----------------|----------------------|-------|
| **Entity Models** | 100% | A+ | Perfect spec match |
| **Database Layer** | 100% | A+ | Best practices followed |
| **EPUB Import/Export** | 100% | A | Phase 2 complete |
| **Tag Enhancement** | 100% | A | Aliases, merge, AI complete |
| **Collections** | 80% | B | Search not implemented |
| **HTML Sanitization** | 100% | A+ | Shared config, security-first |
| **Search** | 95% | A | Missing Collections core |
| **Multi-Library** | 100% | A | Robust isolation |
| **Reading Experience** | 100% | A+ | Sophisticated tracking |
| **Image Processing** | 100% | A | Download, async, cleanup |
| **Test Coverage** | 25% | C | Needs significant work |
| **Documentation** | 90% | B+ | Minor updates needed |
---
## Recommendations by Priority
### Immediate (This Sprint)
1.**Fix Collections Search** (4-6 hours)
- Implement Solr Collections core
- Wire up searchCollections()
- Test thoroughly
### Short-Term (Next Sprint)
2.**Create Critical Tests** (10-12 hours)
- HtmlSanitizationServiceTest
- CollectionServiceTest
- TagServiceTest
- EPUBImportServiceTest
- EPUBExportServiceTest
3.**Update API Documentation** (2-3 hours)
- Document advanced filters
- Add EPUB endpoints
- Update examples
### Medium-Term (Next Month)
4.**Expand Test Coverage to 80%** (20-25 hours)
- ImageServiceTest
- LibraryServiceTest
- DatabaseManagementServiceTest
- Controller tests
- Frontend component tests
5.**Create Multi-Library Spec** (3-4 hours)
- Document architecture
- Authentication flow
- Database routing
- Migration guide
---
## Conclusion
StoryCove is a **well-architected, production-ready application** with only one critical blocker (Collections search). The codebase demonstrates:
-**Excellent architecture** with proper separation of concerns
-**Security-first** approach with HTML sanitization and authentication
-**Production features** like backup/restore, multi-library, async processing
-**Sophisticated UX** with reading progress, TOC, series navigation
- ⚠️ **Test coverage gap** that should be addressed
### Final Grade: A- (90%)
**Breakdown:**
- Backend Implementation: A (95%)
- Frontend Implementation: A (95%)
- Test Coverage: C (25%)
- Documentation: B+ (90%)
- Overall Architecture: A+ (100%)
**Primary Blocker:** Collections search (6 hours to fix)
**Recommended Focus:** Test coverage (target 80%)
---
*Report Generated: 2025-10-10*
*Next Review: After Collections search implementation*

View File

@@ -0,0 +1,526 @@
# StoryCove Housekeeping Report - Phase 1: Documentation & State Assessment
**Date**: 2025-01-10
**Completed By**: Claude Code (Housekeeping Analysis)
## Executive Summary
Phase 1 assessment has been completed, providing a comprehensive review of the StoryCove application's current implementation status against specifications. The application is **well-implemented** with most core features working, but there is **1 CRITICAL ISSUE** and several areas requiring attention.
### Critical Finding
🚨 **Collections Search Not Implemented**: The Collections feature does not use Typesense/Solr for search as mandated by the specification. This is a critical architectural requirement that must be addressed.
### Overall Status
- **Backend Implementation**: ~85% complete with specification
- **Entity Models**: ✅ 100% compliant with DATA_MODEL.md
- **Test Coverage**: ⚠️ 9 tests exist, but many critical services lack tests
- **Documentation**: ✅ Comprehensive and up-to-date
---
## 1. Implementation Status Matrix
### 1.1 Entity Layer (✅ FULLY COMPLIANT)
| Entity | Specification | Implementation Status | Notes |
|--------|---------------|----------------------|-------|
| **Story** | storycove-spec.md | ✅ Complete | All fields match spec including reading position, isRead, lastReadAt |
| **Author** | storycove-spec.md | ✅ Complete | Includes avatar_image_path, rating, URLs as @ElementCollection |
| **Tag** | TAG_ENHANCEMENT_SPECIFICATION.md | ✅ Complete | Includes color, description, aliases relationship |
| **TagAlias** | TAG_ENHANCEMENT_SPECIFICATION.md | ✅ Complete | Implements alias system with createdFromMerge flag |
| **Series** | storycove-spec.md | ✅ Complete | Basic implementation as specified |
| **Collection** | storycove-collections-spec.md | ✅ Complete | All fields including isArchived, gap-based positioning |
| **CollectionStory** | storycove-collections-spec.md | ✅ Complete | Junction entity with position field |
| **ReadingPosition** | EPUB_IMPORT_EXPORT_SPECIFICATION.md | ✅ Complete | Full EPUB CFI support, chapter tracking, percentage complete |
| **Library** | (Multi-library support) | ✅ Complete | Implemented for multi-library feature |
**Assessment**: Entity layer is **100% specification-compliant**
---
### 1.2 Repository Layer (⚠️ MOSTLY COMPLIANT)
| Repository | Specification Compliance | Issues |
|------------|-------------------------|--------|
| **CollectionRepository** | ⚠️ Partial | Contains only ID-based lookups (correct), has note about Typesense |
| **TagRepository** | ✅ Complete | Proper query methods, no search anti-patterns |
| **StoryRepository** | ✅ Complete | Appropriate methods |
| **AuthorRepository** | ✅ Complete | Appropriate methods |
| **SeriesRepository** | ✅ Complete | Basic CRUD |
| **ReadingPositionRepository** | ✅ Complete | Story-based lookups |
| **TagAliasRepository** | ✅ Complete | Name-based lookups for resolution |
**Key Finding**: CollectionRepository correctly avoids search/filter methods (good architectural design), but the corresponding search implementation in CollectionService is not yet complete.
---
### 1.3 Service Layer (🚨 CRITICAL ISSUE FOUND)
| Service | Status | Specification Match | Critical Issues |
|---------|--------|---------------------|-----------------|
| **CollectionService** | 🚨 **INCOMPLETE** | 20% | **Collections search returns empty results** (line 56-61) |
| **TagService** | ✅ Complete | 100% | Full alias, merging, AI suggestions implemented |
| **StoryService** | ✅ Complete | 95% | Core features complete |
| **AuthorService** | ✅ Complete | 95% | Core features complete |
| **EPUBImportService** | ✅ Complete | 100% | Phase 1 & 2 complete per spec |
| **EPUBExportService** | ✅ Complete | 100% | Single story & collection export working |
| **ImageService** | ✅ Complete | 90% | Upload, resize, delete implemented |
| **HtmlSanitizationService** | ✅ Complete | 100% | Security-critical, appears complete |
| **SearchServiceAdapter** | ⚠️ Partial | 70% | Solr integration present but Collections not indexed |
| **ReadingTimeService** | ✅ Complete | 100% | Word count calculations |
#### 🚨 CRITICAL ISSUE Detail: CollectionService.searchCollections()
**File**: `backend/src/main/java/com/storycove/service/CollectionService.java:56-61`
```java
public SearchResultDto<Collection> searchCollections(String query, List<String> tags, boolean includeArchived, int page, int limit) {
// Collections are currently handled at database level, not indexed in search engine
// Return empty result for now as collections search is not implemented in Solr
logger.warn("Collections search not yet implemented in Solr, returning empty results");
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
}
```
**Impact**:
- GET /api/collections endpoint always returns 0 results
- Frontend collections list view will appear empty
- Violates architectural requirement in storycove-collections-spec.md Section 4.2 and 5.2
**Specification Requirement** (storycove-collections-spec.md:52-61):
> **IMPORTANT**: This endpoint MUST use Typesense for all search and filtering operations.
> Do NOT implement search/filter logic using JPA/SQL queries.
---
### 1.4 Controller/API Layer (✅ MOSTLY COMPLIANT)
| Controller | Endpoints | Status | Notes |
|------------|-----------|--------|-------|
| **CollectionController** | 13 endpoints | ⚠️ 90% | All endpoints implemented but search returns empty |
| **StoryController** | ~15 endpoints | ✅ Complete | CRUD, reading progress, EPUB export |
| **AuthorController** | ~10 endpoints | ✅ Complete | CRUD, avatar management |
| **TagController** | ~12 endpoints | ✅ Complete | Enhanced features: aliases, merging, suggestions |
| **SeriesController** | ~6 endpoints | ✅ Complete | Basic CRUD |
| **AuthController** | 3 endpoints | ✅ Complete | Login, logout, verify |
| **FileController** | 4 endpoints | ✅ Complete | Image serving and uploads |
| **SearchController** | 3 endpoints | ✅ Complete | Story/Author search via Solr |
#### Endpoint Verification vs API.md
**Collections Endpoints (storycove-collections-spec.md)**:
- ✅ GET /api/collections - Implemented (but returns empty due to search issue)
- ✅ GET /api/collections/{id} - Implemented
- ✅ POST /api/collections - Implemented (JSON & multipart)
- ✅ PUT /api/collections/{id} - Implemented
- ✅ DELETE /api/collections/{id} - Implemented
- ✅ PUT /api/collections/{id}/archive - Implemented
- ✅ POST /api/collections/{id}/stories - Implemented
- ✅ DELETE /api/collections/{id}/stories/{storyId} - Implemented
- ✅ PUT /api/collections/{id}/stories/order - Implemented
- ✅ GET /api/collections/{id}/read/{storyId} - Implemented
- ✅ GET /api/collections/{id}/stats - Implemented
- ✅ GET /api/collections/{id}/epub - Implemented
- ✅ POST /api/collections/{id}/epub - Implemented
**Tag Enhancement Endpoints (TAG_ENHANCEMENT_SPECIFICATION.md)**:
- ✅ POST /api/tags/{tagId}/aliases - Implemented
- ✅ DELETE /api/tags/{tagId}/aliases/{aliasId} - Implemented
- ✅ POST /api/tags/merge - Implemented
- ✅ POST /api/tags/merge/preview - Implemented
- ✅ POST /api/tags/suggest - Implemented (AI-powered)
- ✅ GET /api/tags/resolve/{name} - Implemented
---
### 1.5 Advanced Features Status
#### ✅ Tag Enhancement System (COMPLETE)
**Specification**: TAG_ENHANCEMENT_SPECIFICATION.md (Status: ✅ COMPLETED)
| Feature | Status | Implementation |
|---------|--------|----------------|
| Color Tags | ✅ Complete | Tag entity has `color` field (VARCHAR(7) hex) |
| Tag Descriptions | ✅ Complete | Tag entity has `description` field (VARCHAR(500)) |
| Tag Aliases | ✅ Complete | TagAlias entity, resolution logic in TagService |
| Tag Merging | ✅ Complete | Atomic merge with automatic alias creation |
| AI Tag Suggestions | ✅ Complete | TagService.suggestTags() with confidence scoring |
| Alias Resolution | ✅ Complete | TagService.resolveTagByName() checks both tags and aliases |
**Code Evidence**:
- Tag entity: Tag.java:29-34 (color, description fields)
- TagAlias entity: TagAlias.java (full implementation)
- Merge logic: TagService.java:284-320
- AI suggestions: TagService.java:385-491
---
#### ✅ EPUB Import/Export (PHASE 1 & 2 COMPLETE)
**Specification**: EPUB_IMPORT_EXPORT_SPECIFICATION.md (Status: ✅ COMPLETED)
| Feature | Status | Files |
|---------|--------|-------|
| EPUB Import | ✅ Complete | EPUBImportService.java |
| EPUB Export (Single) | ✅ Complete | EPUBExportService.java |
| EPUB Export (Collection) | ✅ Complete | EPUBExportService.java, CollectionController:309-383 |
| Reading Position (CFI) | ✅ Complete | ReadingPosition entity with epubCfi field |
| Metadata Extraction | ✅ Complete | Cover, tags, author, title extraction |
| Validation | ✅ Complete | File format and structure validation |
**Frontend Integration**:
- ✅ Import UI: frontend/src/app/import/epub/page.tsx
- ✅ Bulk Import: frontend/src/app/import/bulk/page.tsx
- ✅ Export from Story Detail: (per spec update)
---
#### ⚠️ Collections Feature (MOSTLY COMPLETE, CRITICAL SEARCH ISSUE)
**Specification**: storycove-collections-spec.md (Status: ⚠️ 85% COMPLETE)
| Feature | Status | Issue |
|---------|--------|-------|
| Entity Model | ✅ Complete | Collection, CollectionStory entities |
| CRUD Operations | ✅ Complete | Create, update, delete, archive |
| Story Management | ✅ Complete | Add, remove, reorder (gap-based positioning) |
| Statistics | ✅ Complete | Word count, reading time, tag frequency |
| EPUB Export | ✅ Complete | Full collection export |
| **Search/Listing** | 🚨 **NOT IMPLEMENTED** | Returns empty results |
| Reading Flow | ✅ Complete | Navigation context, previous/next |
**Critical Gap**: SearchServiceAdapter does not index Collections in Solr/Typesense.
---
#### ✅ Reading Position Tracking (COMPLETE)
| Feature | Status |
|---------|--------|
| Character Position | ✅ Complete |
| Chapter Tracking | ✅ Complete |
| EPUB CFI Support | ✅ Complete |
| Percentage Calculation | ✅ Complete |
| Context Before/After | ✅ Complete |
---
### 1.6 Frontend Implementation (PRESENT BUT NOT FULLY AUDITED)
**Pages Found**:
- ✅ Collections List: frontend/src/app/collections/page.tsx
- ✅ Collection Detail: frontend/src/app/collections/[id]/page.tsx
- ✅ Collection Reading: frontend/src/app/collections/[id]/read/[storyId]/page.tsx
- ✅ Tag Maintenance: frontend/src/app/settings/tag-maintenance/page.tsx
- ✅ EPUB Import: frontend/src/app/import/epub/page.tsx
- ✅ Stories List: frontend/src/app/stories/page.tsx
- ✅ Authors List: frontend/src/app/authors/page.tsx
**Note**: Full frontend audit deferred to Phase 3.
---
## 2. Test Coverage Assessment
### 2.1 Current Test Inventory
**Total Test Files**: 9
| Test File | Type | Target | Status |
|-----------|------|--------|--------|
| BaseRepositoryTest.java | Integration | Database setup | ✅ Present |
| AuthorRepositoryTest.java | Integration | Author CRUD | ✅ Present |
| StoryRepositoryTest.java | Integration | Story CRUD | ✅ Present |
| TagTest.java | Unit | Tag entity | ✅ Present |
| SeriesTest.java | Unit | Series entity | ✅ Present |
| AuthorTest.java | Unit | Author entity | ✅ Present |
| StoryTest.java | Unit | Story entity | ✅ Present |
| AuthorServiceTest.java | Integration | Author service | ✅ Present |
| StoryServiceTest.java | Integration | Story service | ✅ Present |
### 2.2 Missing Critical Tests
**Priority 1 (Critical Features)**:
- ❌ CollectionServiceTest - **CRITICAL** (for search implementation verification)
- ❌ TagServiceTest - Aliases, merging, AI suggestions
- ❌ EPUBImportServiceTest - Import validation, metadata extraction
- ❌ EPUBExportServiceTest - Export generation, collection EPUB
**Priority 2 (Core Services)**:
- ❌ ImageServiceTest - Upload, resize, security
- ❌ HtmlSanitizationServiceTest - **SECURITY CRITICAL**
- ❌ SearchServiceAdapterTest - Solr integration
- ❌ ReadingPositionServiceTest (if exists) - CFI handling
**Priority 3 (Controllers)**:
- ❌ CollectionControllerTest
- ❌ TagControllerTest
- ❌ EPUBControllerTest
### 2.3 Test Coverage Estimate
- **Current Coverage**: ~25% of service layer
- **Target Coverage**: 80%+ for service layer
- **Gap**: ~55% (approximately 15-20 test classes needed)
---
## 3. Specification Compliance Summary
| Specification Document | Compliance | Issues |
|------------------------|------------|--------|
| **storycove-spec.md** | 95% | Core features complete, minor gaps |
| **DATA_MODEL.md** | 100% | Perfect match ✅ |
| **API.md** | 90% | Most endpoints match, need verification |
| **TAG_ENHANCEMENT_SPECIFICATION.md** | 100% | Fully implemented ✅ |
| **EPUB_IMPORT_EXPORT_SPECIFICATION.md** | 100% | Phase 1 & 2 complete ✅ |
| **storycove-collections-spec.md** | 85% | Search not implemented 🚨 |
| **storycove-scraper-spec.md** | ❓ | Not assessed (separate feature) |
---
## 4. Database Schema Verification
### 4.1 Tables vs Specification
| Table | Specification | Implementation | Match |
|-------|---------------|----------------|-------|
| stories | DATA_MODEL.md | Story.java | ✅ 100% |
| authors | DATA_MODEL.md | Author.java | ✅ 100% |
| tags | DATA_MODEL.md + TAG_ENHANCEMENT | Tag.java | ✅ 100% |
| tag_aliases | TAG_ENHANCEMENT | TagAlias.java | ✅ 100% |
| series | DATA_MODEL.md | Series.java | ✅ 100% |
| collections | storycove-collections-spec.md | Collection.java | ✅ 100% |
| collection_stories | storycove-collections-spec.md | CollectionStory.java | ✅ 100% |
| collection_tags | storycove-collections-spec.md | @JoinTable in Collection | ✅ 100% |
| story_tags | DATA_MODEL.md | @JoinTable in Story | ✅ 100% |
| reading_positions | EPUB_IMPORT_EXPORT | ReadingPosition.java | ✅ 100% |
| libraries | (Multi-library) | Library.java | ✅ Present |
**Assessment**: Database schema is **100% specification-compliant**
### 4.2 Indexes Verification
| Index | Required By Spec | Implementation | Status |
|-------|------------------|----------------|--------|
| idx_collections_archived | Collections spec | Collection entity | ✅ |
| idx_collection_stories_position | Collections spec | CollectionStory entity | ✅ |
| idx_reading_position_story | EPUB spec | ReadingPosition entity | ✅ |
| idx_tag_aliases_name | TAG_ENHANCEMENT | Unique constraint on alias_name | ✅ |
---
## 5. Architecture Compliance
### 5.1 Search Integration Architecture
**Specification Requirement** (storycove-collections-spec.md):
> All search, filtering, and listing operations MUST use Typesense as the primary data source.
**Current State**:
-**Stories**: Properly use SearchServiceAdapter (Solr)
-**Authors**: Properly use SearchServiceAdapter (Solr)
- 🚨 **Collections**: NOT using SearchServiceAdapter
### 5.2 Anti-Pattern Verification
**Collections Repository** (CollectionRepository.java): ✅ CORRECT
- Contains ONLY findById methods
- Has explicit note: "For search/filter/list operations, use TypesenseService instead"
- No search anti-patterns present
**Comparison with Spec Anti-Patterns** (storycove-collections-spec.md:663-689):
```java
// ❌ WRONG patterns NOT FOUND in codebase ✅
// CollectionRepository correctly avoids:
// - findByNameContaining()
// - findByTagsIn()
// - findByNameContainingAndArchived()
```
**Issue**: While the repository layer is correctly designed, the service layer implementation is incomplete.
---
## 6. Code Quality Observations
### 6.1 Positive Findings
1.**Consistent Entity Design**: All entities use UUID, proper annotations, equals/hashCode
2.**Transaction Management**: @Transactional used appropriately
3.**Logging**: Comprehensive SLF4J logging throughout
4.**Validation**: Jakarta validation annotations used
5.**DTOs**: Proper separation between entities and DTOs
6.**Error Handling**: Custom exceptions (ResourceNotFoundException, DuplicateResourceException)
7.**Gap-Based Positioning**: Collections use proper positioning algorithm (multiples of 1000)
### 6.2 Areas for Improvement
1. ⚠️ **Test Coverage**: Major gap in service layer tests
2. 🚨 **Collections Search**: Critical feature not implemented
3. ⚠️ **Security Tests**: No dedicated tests for HtmlSanitizationService
4. ⚠️ **Integration Tests**: Limited E2E testing
---
## 7. Dependencies & Technology Stack
### 7.1 Key Dependencies (Observed)
- ✅ Spring Boot (Jakarta EE)
- ✅ Hibernate/JPA
- ✅ PostgreSQL
- ✅ Solr (in place of Typesense, acceptable alternative)
- ✅ EPUBLib (for EPUB handling)
- ✅ Jsoup (for HTML sanitization)
- ✅ JWT (authentication)
### 7.2 Search Engine Note
**Specification**: Calls for Typesense
**Implementation**: Uses Solr (Apache Solr)
**Assessment**: ✅ Acceptable - Solr provides equivalent functionality
---
## 8. Documentation Status
### 8.1 Specification Documents
| Document | Status | Notes |
|----------|--------|-------|
| storycove-spec.md | ✅ Current | Comprehensive main spec |
| DATA_MODEL.md | ✅ Current | Matches implementation |
| API.md | ⚠️ Needs minor updates | Most endpoints documented |
| TAG_ENHANCEMENT_SPECIFICATION.md | ✅ Current | Marked as completed |
| EPUB_IMPORT_EXPORT_SPECIFICATION.md | ✅ Current | Phase 1 & 2 marked complete |
| storycove-collections-spec.md | ⚠️ Needs update | Should note search not implemented |
| CLAUDE.md | ✅ Current | Good project guidance |
### 8.2 Code Documentation
- ✅ Controllers: Well documented with Javadoc
- ✅ Services: Good inline comments
- ✅ Entities: Adequate field documentation
- ⚠️ Tests: Limited documentation
---
## 9. Phase 1 Conclusions
### 9.1 Summary
StoryCove is a **well-architected application** with strong entity design, comprehensive feature implementation, and good adherence to specifications. The codebase demonstrates professional-quality development practices.
### 9.2 Critical Finding
**Collections Search**: The most critical issue is the incomplete Collections search implementation, which violates a mandatory architectural requirement and renders the Collections list view non-functional.
### 9.3 Test Coverage Gap
With only 9 test files covering the basics, there is a significant testing gap that needs to be addressed to ensure code quality and prevent regressions.
### 9.4 Overall Assessment
**Grade**: B+ (85%)
- **Entity & Database**: A+ (100%)
- **Service Layer**: B (85%)
- **API Layer**: A- (90%)
- **Test Coverage**: C (25%)
- **Documentation**: A (95%)
---
## 10. Next Steps (Phase 2 & Beyond)
### Phase 2: Backend Audit (NEXT)
1. 🚨 **URGENT**: Implement Collections search in SearchServiceAdapter/SolrService
2. Deep dive into each service for business logic verification
3. Review transaction boundaries and error handling
4. Verify security measures (authentication, authorization, sanitization)
### Phase 3: Frontend Audit
1. Verify UI components match UI/UX specifications
2. Check Collections pagination implementation
3. Review theme implementation (light/dark mode)
4. Test responsive design
### Phase 4: Test Coverage
1. Create CollectionServiceTest (PRIORITY 1)
2. Create TagServiceTest with alias and merge tests
3. Create EPUBImportServiceTest and EPUBExportServiceTest
4. Create security-critical HtmlSanitizationServiceTest
5. Add integration tests for search flows
### Phase 5: Documentation Updates
1. Update API.md with any missing endpoints
2. Update storycove-collections-spec.md with current status
3. Create TESTING.md with coverage report
### Phase 6: Code Quality
1. Run static analysis tools (SonarQube, SpotBugs)
2. Review security vulnerabilities
3. Performance profiling
---
## 11. Priority Action Items
### 🚨 CRITICAL (Must Fix Immediately)
1. **Implement Collections Search** in SearchServiceAdapter
- File: backend/src/main/java/com/storycove/service/SearchServiceAdapter.java
- Add Solr indexing for Collections
- Update CollectionService.searchCollections() to use search engine
- Est. Time: 4-6 hours
### ⚠️ HIGH PRIORITY (Fix Soon)
2. **Create CollectionServiceTest**
- Verify CRUD operations
- Test search functionality once implemented
- Est. Time: 3-4 hours
3. **Create HtmlSanitizationServiceTest**
- Security-critical testing
- XSS prevention verification
- Est. Time: 2-3 hours
4. **Create TagServiceTest**
- Alias resolution
- Merge operations
- AI suggestions
- Est. Time: 4-5 hours
### 📋 MEDIUM PRIORITY (Next Sprint)
5. **EPUB Service Tests**
- EPUBImportServiceTest
- EPUBExportServiceTest
- Est. Time: 5-6 hours
6. **Frontend Audit**
- Verify Collections pagination
- Check UI/UX compliance
- Est. Time: 4-6 hours
### 📝 DOCUMENTATION (Ongoing)
7. **Update API Documentation**
- Verify all endpoints documented
- Add missing examples
- Est. Time: 2-3 hours
---
## 12. Appendix: File Structure
### Backend Structure
```
backend/src/main/java/com/storycove/
├── controller/ (12 controllers - all implemented)
├── service/ (20 services - 1 incomplete)
├── entity/ (10 entities - all complete)
├── repository/ (8 repositories - all appropriate)
├── dto/ (~20 DTOs)
├── exception/ (Custom exceptions)
├── config/ (Security, DB, Solr config)
└── security/ (JWT authentication)
```
### Test Structure
```
backend/src/test/java/com/storycove/
├── entity/ (4 entity tests)
├── repository/ (3 repository tests)
└── service/ (2 service tests)
```
---
**Phase 1 Assessment Complete**
**Next Phase**: Backend Audit (focusing on Collections search implementation)
**Estimated Total Time to Address All Issues**: 30-40 hours

118
PORTABLE_TEXT_SETUP.md Normal file
View File

@@ -0,0 +1,118 @@
# Portable Text Editor Setup Instructions
## Current Status
⚠️ **Temporarily Reverted to Original Editor**
Due to npm cache permission issues preventing Docker builds, I've temporarily reverted the imports back to `RichTextEditor`. The Portable Text implementation is complete and ready to activate once the npm issue is resolved.
## Files Ready for Portable Text
-`PortableTextEditor.tsx` - Complete implementation
-`schema.ts` - Portable Text schema
-`conversion.ts` - HTML ↔ Portable Text conversion
-`package.json.with-portabletext` - Updated dependencies
## Docker Build Issue Resolution
The error `npm ci` requires `package-lock.json` but npm cache permissions prevent generating it.
### Solution Steps:
1. **Fix npm permissions:**
```bash
sudo chown -R $(whoami) ~/.npm
```
2. **Switch to Portable Text setup:**
```bash
cd frontend
mv package.json package.json.original
mv package.json.with-portabletext package.json
npm install # This will generate package-lock.json
```
3. **Update component imports** (change RichTextEditor → PortableTextEditor):
```typescript
// In src/app/add-story/page.tsx and src/app/stories/[id]/edit/page.tsx
import PortableTextEditor from '../../components/stories/PortableTextEditor';
// And update the JSX to use <PortableTextEditor ... />
```
4. **Build and test:**
```bash
npm run build
docker-compose build
```
## Implementation Complete
**Portable Text Schema** - Defines formatting options matching the original editor
**HTML ↔ Portable Text Conversion** - Seamless conversion between formats
**Sanitization Integration** - Uses existing sanitization strategy
**Component Replacement** - PortableTextEditor replaces RichTextEditor
**Image Processing** - Maintains existing image processing functionality
**Toolbar** - All formatting buttons from original editor
**Keyboard Shortcuts** - Ctrl+B, Ctrl+I, Ctrl+Shift+1-6
## Features Maintained
### 1. **Formatting Options**
- Bold, Italic, Underline, Strike, Code
- Headings H1-H6
- Paragraphs and Blockquotes
- All original toolbar buttons
### 2. **Visual & HTML Modes**
- Visual mode: Structured Portable Text editing
- HTML mode: Direct HTML editing (fallback)
- Live preview in HTML mode
### 3. **Image Processing**
- Existing image processing pipeline maintained
- Background image download and conversion
- Processing status indicators
- Warning system
### 4. **Paste Handling**
- Rich text paste from websites
- Image processing during paste
- HTML sanitization
- Structured content conversion
### 5. **Maximization & Resizing**
- Fullscreen editing mode
- Resizable editor height
- Keyboard shortcuts (Escape to exit)
## Benefits of Portable Text
1. **Structured Content** - Content is stored as JSON, not just HTML
2. **Future-Proof** - Easy to export/migrate content
3. **Better Search** - Structured content works better with Typesense
4. **Extensible** - Easy to add custom block types (images, etc.)
5. **Sanitization** - Inherently safer than HTML parsing
## Next Steps
1. Install the npm packages using one of the methods above
2. Test the editor functionality
3. Verify image processing works correctly
4. Optional: Add custom image block types for enhanced image handling
## File Structure
```
frontend/src/
├── components/stories/
│ ├── PortableTextEditor.tsx # New editor component
│ └── RichTextEditor.tsx # Original (can be removed after testing)
├── lib/portabletext/
│ ├── schema.ts # Portable Text schema and types
│ └── conversion.ts # HTML ↔ Portable Text conversion
└── app/
├── add-story/page.tsx # Updated to use PortableTextEditor
└── stories/[id]/edit/page.tsx # Updated to use PortableTextEditor
```
The implementation is backward compatible and maintains all existing functionality while providing the benefits of structured content editing.

131
README.md
View File

@@ -131,9 +131,12 @@ cd backend
### 🎨 **User Experience** ### 🎨 **User Experience**
- **Dark/Light Mode**: Automatic theme switching with system preference detection - **Dark/Light Mode**: Automatic theme switching with system preference detection
- **Responsive Design**: Optimized for desktop, tablet, and mobile - **Responsive Design**: Optimized for desktop, tablet, and mobile
- **Reading Mode**: Distraction-free reading interface - **Reading Mode**: Distraction-free reading interface with real-time progress tracking
- **Reading Position Memory**: Character-based position tracking with smooth auto-scroll restoration
- **Smart Tag Filtering**: Dynamic tag filters with live story counts in library view
- **Keyboard Navigation**: Full keyboard accessibility - **Keyboard Navigation**: Full keyboard accessibility
- **Rich Text Editor**: Visual and source editing modes for story content - **Rich Text Editor**: Visual and source editing modes for story content
- **Progress Indicators**: Visual reading progress bars and completion tracking
### 🔒 **Security & Administration** ### 🔒 **Security & Administration**
- **JWT Authentication**: Secure token-based authentication - **JWT Authentication**: Secure token-based authentication
@@ -158,43 +161,75 @@ cd backend
## 📖 Documentation ## 📖 Documentation
- **[API Documentation](docs/API.md)**: Complete REST API reference with examples - **[Technical Specification](storycove-spec.md)**: Complete technical specification with API documentation, data models, and all feature specifications
- **[Data Model](docs/DATA_MODEL.md)**: Detailed database schema and relationships - **[Web Scraper Specification](storycove-scraper-spec.md)**: URL content grabbing functionality
- **[Technical Specification](storycove-spec.md)**: Comprehensive technical specification
- **Environment Configuration**: Multi-environment deployment setup (see above) - **Environment Configuration**: Multi-environment deployment setup (see above)
- **Development Setup**: Local development environment setup (see below) - **Development Setup**: Local development environment setup (see below)
> **Note**: All feature specifications (Collections, Tag Enhancements, EPUB Import/Export) have been consolidated into the main technical specification for easier maintenance and reference.
## 🗄️ Data Model ## 🗄️ Data Model
StoryCove uses a PostgreSQL database with the following core entities: StoryCove uses a PostgreSQL database with the following core entities:
### **Stories** ### **Stories**
- **Primary Key**: UUID - **Primary Key**: UUID
- **Fields**: title, summary, description, content_html, content_plain, source_url, word_count, rating, volume, cover_path - **Fields**: title, summary, description, content_html, content_plain, source_url, word_count, rating, volume, cover_path, is_read, reading_position, last_read_at, created_at, updated_at
- **Relationships**: Many-to-One with Author, Many-to-One with Series, Many-to-Many with Tags - **Relationships**: Many-to-One with Author, Many-to-One with Series, Many-to-Many with Tags, One-to-Many with ReadingPositions
- **Features**: Automatic word count calculation, HTML sanitization, plain text extraction - **Features**: Automatic word count calculation, HTML sanitization, plain text extraction, reading progress tracking, duplicate detection
### **Authors** ### **Authors**
- **Primary Key**: UUID - **Primary Key**: UUID
- **Fields**: name, notes, author_rating, avatar_image_path - **Fields**: name, notes, author_rating, avatar_image_path, created_at, updated_at
- **Relationships**: One-to-Many with Stories, One-to-Many with Author URLs - **Relationships**: One-to-Many with Stories, One-to-Many with Author URLs (via @ElementCollection)
- **Features**: URL collection storage, rating system, statistics calculation - **Features**: URL collection storage, rating system, statistics calculation, average story rating calculation
### **Collections**
- **Primary Key**: UUID
- **Fields**: name, description, rating, cover_image_path, is_archived, created_at, updated_at
- **Relationships**: Many-to-Many with Tags, One-to-Many with CollectionStories
- **Features**: Story ordering with gap-based positioning, statistics calculation, EPUB export, Typesense search
### **CollectionStories** (Junction Table)
- **Composite Key**: collection_id, story_id
- **Fields**: position, added_at
- **Relationships**: Links Collections to Stories with ordering
- **Features**: Gap-based positioning for efficient reordering
### **Series** ### **Series**
- **Primary Key**: UUID - **Primary Key**: UUID
- **Fields**: name, description - **Fields**: name, description, created_at
- **Relationships**: One-to-Many with Stories (ordered by volume) - **Relationships**: One-to-Many with Stories (ordered by volume)
- **Features**: Volume-based story ordering, navigation methods - **Features**: Volume-based story ordering, navigation methods (next/previous story)
### **Tags** ### **Tags**
- **Primary Key**: UUID - **Primary Key**: UUID
- **Fields**: name (unique) - **Fields**: name (unique), color (hex), description, created_at
- **Relationships**: Many-to-Many with Stories - **Relationships**: Many-to-Many with Stories, Many-to-Many with Collections, One-to-Many with TagAliases
- **Features**: Autocomplete support, usage statistics - **Features**: Color coding, alias system, autocomplete support, usage statistics, AI-powered suggestions
### **Join Tables** ### **TagAliases**
- **story_tags**: Links stories to tags - **Primary Key**: UUID
- **author_urls**: Stores multiple URLs per author - **Fields**: alias_name (unique), canonical_tag_id, created_from_merge, created_at
- **Relationships**: Many-to-One with Tag (canonical)
- **Features**: Transparent alias resolution, merge tracking, autocomplete integration
### **ReadingPositions**
- **Primary Key**: UUID
- **Fields**: story_id, chapter_index, chapter_title, word_position, character_position, percentage_complete, epub_cfi, context_before, context_after, created_at, updated_at
- **Relationships**: Many-to-One with Story
- **Features**: Advanced reading position tracking, EPUB CFI support, context preservation, percentage calculation
### **Libraries**
- **Primary Key**: UUID
- **Fields**: name, description, is_default, created_at, updated_at
- **Features**: Multi-library support, library switching functionality
### **Core Join Tables**
- **story_tags**: Links stories to tags (Many-to-Many)
- **collection_tags**: Links collections to tags (Many-to-Many)
- **collection_stories**: Links collections to stories with ordering
- **author_urls**: Stores multiple URLs per author (@ElementCollection)
## 🔌 REST API Reference ## 🔌 REST API Reference
@@ -206,6 +241,7 @@ StoryCove uses a PostgreSQL database with the following core entities:
### **Stories** (`/api/stories`) ### **Stories** (`/api/stories`)
- `GET /` - List stories (paginated) - `GET /` - List stories (paginated)
- `GET /{id}` - Get specific story - `GET /{id}` - Get specific story
- `GET /{id}/read` - Get story for reading interface
- `POST /` - Create new story - `POST /` - Create new story
- `PUT /{id}` - Update story - `PUT /{id}` - Update story
- `DELETE /{id}` - Delete story - `DELETE /{id}` - Delete story
@@ -214,13 +250,28 @@ StoryCove uses a PostgreSQL database with the following core entities:
- `POST /{id}/rating` - Set story rating - `POST /{id}/rating` - Set story rating
- `POST /{id}/tags/{tagId}` - Add tag to story - `POST /{id}/tags/{tagId}` - Add tag to story
- `DELETE /{id}/tags/{tagId}` - Remove tag from story - `DELETE /{id}/tags/{tagId}` - Remove tag from story
- `GET /search` - Search stories (Typesense) - `POST /{id}/reading-progress` - Update reading position
- `POST /{id}/reading-status` - Mark story as read/unread
- `GET /{id}/collections` - Get collections containing story
- `GET /random` - Get random story with optional filters
- `GET /check-duplicate` - Check for duplicate stories
- `GET /search` - Search stories (Typesense with faceting)
- `GET /search/suggestions` - Get search suggestions - `GET /search/suggestions` - Get search suggestions
- `GET /author/{authorId}` - Stories by author - `GET /author/{authorId}` - Stories by author
- `GET /series/{seriesId}` - Stories in series - `GET /series/{seriesId}` - Stories in series
- `GET /tags/{tagName}` - Stories with tag - `GET /tags/{tagName}` - Stories with tag
- `GET /recent` - Recent stories - `GET /recent` - Recent stories
- `GET /top-rated` - Top-rated stories - `GET /top-rated` - Top-rated stories
- `POST /batch/add-to-collection` - Add multiple stories to collection
- `POST /reindex` - Manual Typesense reindex
- `POST /reindex-typesense` - Reindex stories in Typesense
- `POST /recreate-typesense-collection` - Recreate Typesense collection
#### **EPUB Import/Export** (`/api/stories/epub`)
- `POST /import` - Import story from EPUB file
- `POST /export` - Export story as EPUB with options
- `GET /{id}/epub` - Export story as EPUB (simple)
- `POST /validate` - Validate EPUB file structure
### **Authors** (`/api/authors`) ### **Authors** (`/api/authors`)
- `GET /` - List authors (paginated) - `GET /` - List authors (paginated)
@@ -240,14 +291,49 @@ StoryCove uses a PostgreSQL database with the following core entities:
### **Tags** (`/api/tags`) ### **Tags** (`/api/tags`)
- `GET /` - List tags (paginated) - `GET /` - List tags (paginated)
- `GET /{id}` - Get specific tag - `GET /{id}` - Get specific tag
- `POST /` - Create new tag - `POST /` - Create new tag (with color and description)
- `PUT /{id}` - Update tag - `PUT /{id}` - Update tag (name, color, description)
- `DELETE /{id}` - Delete tag - `DELETE /{id}` - Delete tag
- `GET /search` - Search tags - `GET /search` - Search tags
- `GET /autocomplete` - Tag autocomplete - `GET /autocomplete` - Tag autocomplete with alias resolution
- `GET /popular` - Most used tags - `GET /popular` - Most used tags
- `GET /unused` - Unused tags - `GET /unused` - Unused tags
- `GET /stats` - Tag statistics - `GET /stats` - Tag statistics
- `GET /collections` - Tags used by collections
- `GET /resolve/{name}` - Resolve tag name (handles aliases)
#### **Tag Aliases** (`/api/tags/{tagId}/aliases`)
- `POST /` - Add alias to tag
- `DELETE /{aliasId}` - Remove alias from tag
#### **Tag Management**
- `POST /merge` - Merge multiple tags into one
- `POST /merge/preview` - Preview tag merge operation
- `POST /suggest` - AI-powered tag suggestions for content
### **Collections** (`/api/collections`)
- `GET /` - Search and list collections (Typesense)
- `GET /{id}` - Get collection details
- `POST /` - Create new collection (JSON or multipart)
- `PUT /{id}` - Update collection metadata
- `DELETE /{id}` - Delete collection
- `PUT /{id}/archive` - Archive/unarchive collection
- `POST /{id}/cover` - Upload collection cover image
- `DELETE /{id}/cover` - Remove collection cover image
- `GET /{id}/stats` - Get collection statistics
#### **Collection Story Management**
- `POST /{id}/stories` - Add stories to collection
- `DELETE /{id}/stories/{storyId}` - Remove story from collection
- `PUT /{id}/stories/order` - Reorder stories in collection
- `GET /{id}/read/{storyId}` - Get story with collection context
#### **Collection EPUB Export**
- `GET /{id}/epub` - Export collection as EPUB
- `POST /{id}/epub` - Export collection as EPUB with options
#### **Collection Management**
- `POST /reindex-typesense` - Reindex collections in Typesense
### **Series** (`/api/series`) ### **Series** (`/api/series`)
- `GET /` - List series (paginated) - `GET /` - List series (paginated)
@@ -295,6 +381,7 @@ All API endpoints use JSON format with proper HTTP status codes:
- **Backend**: Spring Boot 3, Java 21, PostgreSQL, Typesense - **Backend**: Spring Boot 3, Java 21, PostgreSQL, Typesense
- **Infrastructure**: Docker, Docker Compose, Nginx - **Infrastructure**: Docker, Docker Compose, Nginx
- **Security**: JWT authentication, HTML sanitization, CORS - **Security**: JWT authentication, HTML sanitization, CORS
- **Search**: Typesense with faceting and full-text search capabilities
### **Local Development Setup** ### **Local Development Setup**

View File

@@ -0,0 +1,269 @@
# Refresh Token Implementation
## Overview
This document describes the refresh token functionality implemented for StoryCove, allowing users to stay authenticated for up to 2 weeks with automatic token refresh.
## Architecture
### Token Types
1. **Access Token (JWT)**
- Lifetime: 24 hours
- Stored in: httpOnly cookie + localStorage
- Used for: API authentication
- Format: JWT with subject and libraryId claims
2. **Refresh Token**
- Lifetime: 14 days (2 weeks)
- Stored in: httpOnly cookie + database
- Used for: Generating new access tokens
- Format: Secure random 256-bit token (Base64 encoded)
### Token Flow
1. **Login**
- User provides password
- Backend validates password
- Backend generates both access token and refresh token
- Both tokens sent as httpOnly cookies
- Access token also returned in response body for localStorage
2. **API Request**
- Frontend sends access token via Authorization header and cookie
- Backend validates access token
- If valid: Request proceeds
- If expired: Frontend attempts token refresh
3. **Token Refresh**
- Frontend detects 401/403 response
- Frontend automatically calls `/api/auth/refresh`
- Backend validates refresh token from cookie
- If valid: New access token generated and returned
- If invalid/expired: User redirected to login
4. **Logout**
- Frontend calls `/api/auth/logout`
- Backend revokes refresh token in database
- Both cookies cleared
- User redirected to login page
## Backend Implementation
### New Files
1. **`RefreshToken.java`** - Entity class
- Fields: id, token, expiresAt, createdAt, revokedAt, libraryId, userAgent, ipAddress
- Helper methods: isExpired(), isRevoked(), isValid()
2. **`RefreshTokenRepository.java`** - Repository interface
- findByToken(String)
- deleteExpiredTokens(LocalDateTime)
- revokeAllByLibraryId(String, LocalDateTime)
- revokeAll(LocalDateTime)
3. **`RefreshTokenService.java`** - Service class
- createRefreshToken(libraryId, userAgent, ipAddress)
- verifyRefreshToken(token)
- revokeToken(token)
- revokeAllByLibraryId(libraryId)
- cleanupExpiredTokens() - Scheduled daily at 3 AM
### Modified Files
1. **`JwtUtil.java`**
- Added `refreshExpiration` property (14 days)
- Added `generateRefreshToken()` method
- Added `getRefreshExpirationMs()` method
2. **`AuthController.java`**
- Updated `/login` endpoint to create and return refresh token
- Added `/refresh` endpoint to handle token refresh
- Updated `/logout` endpoint to revoke refresh token
- Added helper methods: `getRefreshTokenFromCookies()`, `getClientIpAddress()`
3. **`SecurityConfig.java`**
- Added `/api/auth/refresh` to public endpoints
4. **`application.yml`**
- Added `storycove.jwt.refresh-expiration: 1209600000` (14 days)
## Frontend Implementation
### Modified Files
1. **`api.ts`**
- Added automatic token refresh logic in response interceptor
- Added request queuing during token refresh
- Prevents multiple simultaneous refresh attempts
- Automatically retries failed requests after refresh
### Token Refresh Logic
```typescript
// On 401/403 response:
1. Check if already retrying -> if yes, queue request
2. Check if refresh/login endpoint -> if yes, logout
3. Attempt token refresh via /api/auth/refresh
4. If successful:
- Update localStorage with new token
- Retry original request
- Process queued requests
5. If failed:
- Clear token
- Redirect to login
- Reject queued requests
```
## Security Features
1. **httpOnly Cookies**: Prevents XSS attacks
2. **Token Revocation**: Refresh tokens can be revoked
3. **Database Storage**: Refresh tokens stored server-side
4. **Expiration Tracking**: Tokens have strict expiration dates
5. **IP & User Agent Tracking**: Stored for security auditing
6. **Library Isolation**: Tokens scoped to specific library
## Database Schema
```sql
CREATE TABLE refresh_tokens (
id UUID PRIMARY KEY,
token VARCHAR(255) UNIQUE NOT NULL,
expires_at TIMESTAMP NOT NULL,
created_at TIMESTAMP NOT NULL,
revoked_at TIMESTAMP,
library_id VARCHAR(255),
user_agent VARCHAR(255) NOT NULL,
ip_address VARCHAR(255) NOT NULL
);
CREATE INDEX idx_refresh_token ON refresh_tokens(token);
CREATE INDEX idx_expires_at ON refresh_tokens(expires_at);
```
## Configuration
### Backend (`application.yml`)
```yaml
storycove:
jwt:
expiration: 86400000 # 24 hours (access token)
refresh-expiration: 1209600000 # 14 days (refresh token)
```
### Environment Variables
No new environment variables required. Existing `JWT_SECRET` is used.
## Testing
Comprehensive test suite in `RefreshTokenServiceTest.java`:
- Token creation
- Token validation
- Expired token handling
- Revoked token handling
- Token revocation
- Cleanup operations
Run tests:
```bash
cd backend
mvn test -Dtest=RefreshTokenServiceTest
```
## Maintenance
### Automated Cleanup
Expired tokens are automatically cleaned up daily at 3 AM via scheduled task in `RefreshTokenService.cleanupExpiredTokens()`.
### Manual Revocation
```java
// Revoke all tokens for a library
refreshTokenService.revokeAllByLibraryId("library-id");
// Revoke all tokens (logout all users)
refreshTokenService.revokeAll();
```
## User Experience
1. **Seamless Authentication**: Users stay logged in for 2 weeks
2. **Automatic Refresh**: Token refresh happens transparently
3. **No Interruptions**: API calls succeed even when access token expires
4. **Backend Restart**: Users must re-login (JWT secret rotates on startup)
5. **Cross-Device Library Switching**: Automatic library switching when using different devices with different libraries
## Cross-Device Library Switching
### Feature Overview
The system automatically detects and switches libraries when you use different devices authenticated to different libraries. This ensures you always see the correct library's data.
### How It Works
**Scenario 1: Active Access Token (within 24 hours)**
1. Request comes in with valid JWT access token
2. `JwtAuthenticationFilter` extracts `libraryId` from token
3. Compares with `currentLibraryId` in backend
4. **If different**: Automatically switches to token's library
5. **If same**: Early return (no overhead, just string comparison)
6. Request proceeds with correct library
**Scenario 2: Token Refresh (after 24 hours)**
1. Access token expired, refresh token still valid
2. `/api/auth/refresh` endpoint validates refresh token
3. Extracts `libraryId` from refresh token
4. Compares with `currentLibraryId` in backend
5. **If different**: Automatically switches to token's library
6. **If same**: Early return (no overhead)
7. Generates new access token with correct `libraryId`
**Scenario 3: After Backend Restart**
1. `currentLibraryId` is null (no active library)
2. First request with any token automatically switches to that token's library
3. Subsequent requests use early return optimization
### Performance
**When libraries match** (most common case):
- Simple string comparison: `libraryId.equals(currentLibraryId)`
- Immediate return - zero overhead
- No datasource changes, no reindexing
**When libraries differ** (switching devices):
- Synchronized library switch
- Datasource routing updated instantly
- Solr reindex runs asynchronously (doesn't block request)
- Takes 2-3 seconds in background
### Edge Cases
**Multi-device simultaneous use:**
- If two devices with different libraries are used simultaneously
- Last request "wins" and switches backend to its library
- Not recommended but handled gracefully
- Each device corrects itself on next request
**Library doesn't exist:**
- If token contains invalid `libraryId`
- Library switch fails with error
- Request is rejected with 500 error
- User must re-login with valid credentials
## Future Enhancements
Potential improvements:
1. Persistent JWT secret (survive backend restarts)
2. Sliding refresh token expiration (extend on use)
3. Multiple device management (view/revoke sessions)
4. Configurable token lifetimes via environment variables
5. Token rotation (new refresh token on each use)
6. Thread-local library context for true stateless operation
## Summary
The refresh token implementation provides a robust, secure authentication system that balances user convenience (2-week sessions) with security (short-lived access tokens, automatic refresh). The implementation follows industry best practices and provides a solid foundation for future enhancements.

244
SOLR_LIBRARY_MIGRATION.md Normal file
View File

@@ -0,0 +1,244 @@
# Solr Library Separation Migration Guide
This guide explains how to migrate existing StoryCove deployments to support proper library separation in Solr search.
## What Changed
The Solr service has been enhanced to support multi-tenant library separation by:
- Adding a `libraryId` field to all Solr documents
- Filtering all search queries by the current library context
- Ensuring complete data isolation between libraries
## Migration Options
### Option 1: Docker Volume Reset (Recommended for Docker)
**Best for**: Development, staging, and Docker-based deployments where data loss is acceptable.
```bash
# Stop the application
docker-compose down
# Remove only the Solr data volume (preserves database and images)
docker volume rm storycove_solr_data
# Restart - Solr will recreate cores with new schema
docker-compose up -d
# Wait for services to start, then trigger reindex via admin panel
```
**Pros**: Clean, simple, guaranteed to work
**Cons**: Requires downtime, loses existing search index
### Option 2: Schema API Migration (Production Safe)
**Best for**: Production environments where you need to preserve uptime.
**Method A: Automatic (Recommended)**
```bash
# Single endpoint that adds field and migrates data
curl -X POST "http://your-app-host/api/admin/search/solr/migrate-library-schema" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**Method B: Manual Steps**
```bash
# Step 1: Add libraryId field via app API
curl -X POST "http://your-app-host/api/admin/search/solr/add-library-field" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
# Step 2: Run migration
curl -X POST "http://your-app-host/api/admin/search/solr/migrate-library-schema" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**Method C: Direct Solr API (if app API fails)**
```bash
# Add libraryId field to stories core
curl -X POST "http://your-solr-host:8983/solr/storycove_stories/schema" \
-H "Content-Type: application/json" \
-d '{
"add-field": {
"name": "libraryId",
"type": "string",
"indexed": true,
"stored": true,
"required": false
}
}'
# Add libraryId field to authors core
curl -X POST "http://your-solr-host:8983/solr/storycove_authors/schema" \
-H "Content-Type: application/json" \
-d '{
"add-field": {
"name": "libraryId",
"type": "string",
"indexed": true,
"stored": true,
"required": false
}
}'
# Then run the migration
curl -X POST "http://your-app-host/api/admin/search/solr/migrate-library-schema" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
**Pros**: No downtime, preserves service availability, automatic field addition
**Cons**: Requires API access
### Option 3: Application-Level Migration (Recommended for Production)
**Best for**: Production environments with proper admin access.
1. **Deploy the code changes** to your environment
2. **Access the admin panel** of your application
3. **Navigate to search settings**
4. **Use the "Migrate Library Schema" button** or API endpoint:
```
POST /api/admin/search/solr/migrate-library-schema
```
**Pros**: User-friendly, handles all complexity internally
**Cons**: Requires admin access to application
## Step-by-Step Migration Process
### For Docker Deployments
1. **Backup your data** (optional but recommended):
```bash
# Backup database
docker-compose exec postgres pg_dump -U storycove storycove > backup.sql
```
2. **Pull the latest code** with library separation fixes
3. **Choose migration approach**:
- **Quick & Clean**: Use Option 1 (volume reset)
- **Production**: Use Option 2 or 3
4. **Verify migration**:
- Log in with different library passwords
- Perform searches to confirm isolation
- Check that new content gets indexed with library IDs
### For Kubernetes/Production Deployments
1. **Update your deployment** with the new container images
2. **Add the libraryId field** to Solr schema using Option 2
3. **Use the migration endpoint** (Option 3):
```bash
kubectl exec -it deployment/storycove-backend -- \
curl -X POST http://localhost:8080/api/admin/search/solr/migrate-library-schema
```
4. **Monitor logs** for successful migration
## Verification Steps
After migration, verify that library separation is working:
1. **Test with multiple libraries**:
- Log in with Library A password
- Add/search content
- Log in with Library B password
- Confirm Library A content is not visible
2. **Check Solr directly** (if accessible):
```bash
# Should show documents with libraryId field
curl "http://solr:8983/solr/storycove_stories/select?q=*:*&fl=id,title,libraryId&rows=5"
```
3. **Monitor application logs** for any library separation errors
## Troubleshooting
### "unknown field 'libraryId'" Error
**Problem**: `ERROR: [doc=xxx] unknown field 'libraryId'`
**Cause**: The Solr schema doesn't have the libraryId field yet.
**Solutions**:
1. **Use the automated migration** (adds field automatically):
```bash
curl -X POST "http://your-app/api/admin/search/solr/migrate-library-schema"
```
2. **Add field manually first**:
```bash
# Add field via app API
curl -X POST "http://your-app/api/admin/search/solr/add-library-field"
# Then run migration
curl -X POST "http://your-app/api/admin/search/solr/migrate-library-schema"
```
3. **Direct Solr API** (if app API fails):
```bash
# Add to both cores
curl -X POST "http://solr:8983/solr/storycove_stories/schema" \
-H "Content-Type: application/json" \
-d '{"add-field":{"name":"libraryId","type":"string","indexed":true,"stored":true}}'
curl -X POST "http://solr:8983/solr/storycove_authors/schema" \
-H "Content-Type: application/json" \
-d '{"add-field":{"name":"libraryId","type":"string","indexed":true,"stored":true}}'
```
4. **For development**: Use Option 1 (volume reset) for clean restart
### Migration Endpoint Returns Error
Common causes:
- Solr is not available (check connectivity)
- No active library context (ensure user is authenticated)
- Insufficient permissions (check JWT token/authentication)
### Search Results Still Mixed
This indicates incomplete migration:
- Clear all Solr data and reindex completely
- Verify that all documents have libraryId field
- Check that search queries include library filters
## Environment-Specific Notes
### Development
- Use Option 1 (volume reset) for simplicity
- Data loss is acceptable in dev environments
### Staging
- Use Option 2 or 3 to test production migration procedures
- Verify migration process before applying to production
### Production
- **Always backup data first**
- Use Option 2 (Schema API) or Option 3 (Admin endpoint)
- Plan for brief performance impact during reindexing
- Monitor system resources during bulk reindexing
## Performance Considerations
- **Reindexing time**: Depends on data size (typically 1000 docs/second)
- **Memory usage**: May increase during bulk indexing
- **Search performance**: Minimal impact from library filtering
- **Storage**: Slight increase due to libraryId field
## Rollback Plan
If issues occur:
1. **Immediate**: Restart Solr to previous state (if using Option 1)
2. **Schema revert**: Remove libraryId field via Schema API
3. **Code rollback**: Deploy previous version without library separation
4. **Data restore**: Restore from backup if necessary
This migration enables proper multi-tenant isolation while maintaining search performance and functionality.

View File

@@ -0,0 +1,305 @@
# Tag Enhancement Specification
> **✅ Implementation Status: COMPLETED**
> This feature has been fully implemented and is available in the system.
> All tag enhancements including colors, aliases, merging, and AI suggestions are working.
> Last updated: January 2025
## Overview
This document outlines the comprehensive enhancement of the tagging functionality in StoryCove, including color tags, tag deletion, merging, and aliases. These features will be accessible through a new "Tag Maintenance" page linked from the Settings page.
## Features
### 1. Color Tags
**Purpose**: Assign optional colors to tags for visual distinction and better organization.
**Implementation Details**:
- **Color Selection**: Predefined color palette that complements the app's theme
- **Custom Colors**: Fallback option with full color picker for advanced users
- **Default Behavior**: Tags without colors use consistent default styling
- **Accessibility**: All colors ensure sufficient contrast ratios
**UI Design**:
```
Color Selection Interface:
[Theme Blue] [Theme Green] [Theme Purple] [Theme Orange] ... [Custom ▼]
```
**Database Changes**:
```sql
ALTER TABLE tags ADD COLUMN color VARCHAR(7); -- hex colors like #3B82F6
ALTER TABLE tags ADD COLUMN description TEXT;
```
### 2. Tag Deletion
**Purpose**: Remove unused or unwanted tags from the system.
**Safety Features**:
- Show impact: "This tag is used by X stories"
- Confirmation dialog with story count
- Option to reassign stories to different tag before deletion
- Simple workflow appropriate for single-user application
**Behavior**:
- Display number of affected stories
- Require confirmation for deletion
- Optionally allow reassignment to another tag
### 3. Tag Merging
**Purpose**: Combine similar tags into a single canonical tag to reduce duplication.
**Workflow**:
1. User selects multiple tags to merge
2. User chooses which tag name becomes canonical
3. System shows merge preview with story counts
4. All story associations transfer to canonical tag
5. **Automatic Aliasing**: Merged tags automatically become aliases
**Example**:
```
Merge Preview:
• "magictf" (5 stories) → "magic tf" (12 stories)
• Result: "magic tf" (17 stories)
• "magictf" will become an alias for "magic tf"
```
**Technical Implementation**:
```sql
-- Merge operation (atomic transaction)
BEGIN TRANSACTION;
UPDATE story_tags SET tag_id = target_tag_id WHERE tag_id = source_tag_id;
INSERT INTO tag_aliases (alias_name, canonical_tag_id, created_from_merge)
VALUES (source_tag_name, target_tag_id, TRUE);
DELETE FROM tags WHERE id = source_tag_id;
COMMIT;
```
### 4. Tag Aliases
**Purpose**: Prevent tag duplication by allowing alternative names that resolve to canonical tags.
**Key Features**:
- **Transparent Resolution**: Users type "magictf" and automatically get "magic tf"
- **Hover Display**: Show aliases when hovering over tags
- **Import Integration**: Automatic alias resolution during story imports
- **Auto-Generation**: Created automatically during tag merges
**Database Schema**:
```sql
CREATE TABLE tag_aliases (
id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
alias_name VARCHAR(255) UNIQUE NOT NULL,
canonical_tag_id UUID NOT NULL REFERENCES tags(id) ON DELETE CASCADE,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
created_from_merge BOOLEAN DEFAULT FALSE
);
CREATE INDEX idx_tag_aliases_name ON tag_aliases(alias_name);
```
**UI Behavior**:
- Tags with aliases show subtle indicator (e.g., small "+" icon)
- Hover tooltip displays:
```
magic tf
────────────
Aliases: magictf, magic_tf, magic-transformation
```
## Tag Maintenance Page
### Access
- Reachable only through Settings page
- Button: "Tag Maintenance" or "Manage Tags"
### Main Interface
**Tag Management Table**:
```
┌─ Search: [____________] [Color Filter ▼] [Sort: Usage ▼]
├─
├─ ☐ magic tf 🔵 (17 stories) [+2 aliases] [Edit] [Delete]
├─ ☐ transformation 🟢 (34 stories) [+1 alias] [Edit] [Delete]
├─ ☐ sci-fi 🟣 (45 stories) [Edit] [Delete]
└─
[Merge Selected] [Bulk Delete] [Export/Import Tags]
```
**Features**:
- Searchable and filterable tag list
- Sortable by name, usage count, creation date
- Bulk selection for merge/delete operations
- Visual indicators for color and alias count
### Tag Edit Modal
```
Edit Tag: "magic tf"
┌─ Name: [magic tf ]
├─ Color: [🔵] [Theme Colors...] [Custom...]
├─ Description: [Optional description]
├─
├─ Aliases (2):
│ • magictf [Remove]
│ • magic_tf [Remove]
│ [Add Alias: ____________] [Add]
├─
├─ Used by 17 stories [View Stories]
└─ [Save] [Cancel]
```
**Functionality**:
- Edit tag name, color, and description
- Manage aliases (add/remove)
- View associated stories
- Prevent circular alias references
### Merge Interface
**Selection Process**:
1. Select multiple tags from main table
2. Click "Merge Selected"
3. Choose canonical tag name
4. Preview merge results
5. Confirm operation
**Preview Display**:
- Show before/after story counts
- List all aliases that will be created
- Highlight any conflicts or issues
## Integration Points
### 1. Import/Scraping Enhancement
```javascript
// Tag resolution during imports
const resolveTagName = async (inputTag) => {
const alias = await tagApi.findAlias(inputTag);
return alias ? alias.canonicalTag : inputTag;
};
```
### 2. Tag Input Components
**Enhanced Autocomplete**:
- Include both canonical names and aliases in suggestions
- Show resolution: "magictf → magic tf" in dropdown
- Always save canonical name to database
### 3. Search Functionality
**Transparent Alias Search**:
- Search for "magictf" includes stories tagged with "magic tf"
- User doesn't need to know about canonical/alias distinction
- Expand search queries to include all aliases
### 4. Display Components
**Tag Rendering**:
- Apply colors consistently across all tag displays
- Show alias indicator where appropriate
- Implement hover tooltips for alias information
## Implementation Phases
### Phase 1: Core Infrastructure
- [ ] Database schema updates (tags.color, tag_aliases table)
- [ ] Basic tag editing functionality (name, color, description)
- [ ] Color palette component with theme colors
- [ ] Tag edit modal interface
### Phase 2: Merging & Aliasing
- [ ] Tag merge functionality with automatic alias creation
- [ ] Alias resolution in import/scraping logic
- [ ] Tag input component enhancements
- [ ] Search integration with alias expansion
### Phase 3: UI Polish & Advanced Features
- [ ] Hover tooltips for alias display
- [ ] Bulk operations (merge multiple, bulk delete)
- [ ] Advanced filtering and sorting options
- [ ] Tag maintenance page integration with Settings
### Phase 4: Smart Features (Optional)
- [ ] Auto-merge suggestions for similar tag names
- [ ] Color auto-assignment based on usage patterns
- [ ] Import intelligence and learning from user decisions
## Technical Considerations
### Performance
- Index alias names for fast lookup during imports
- Optimize tag queries with proper database indexing
- Consider caching for frequently accessed tag/alias mappings
### Data Integrity
- Prevent circular alias references
- Atomic transactions for merge operations
- Cascade deletion handling for tag relationships
### User Experience
- Clear visual feedback for all operations
- Comprehensive preview before destructive actions
- Consistent color and styling across the application
### Accessibility
- Sufficient color contrast for all tag colors
- Keyboard navigation support
- Screen reader compatibility
- Don't rely solely on color for information
## API Endpoints
### New Endpoints Needed
- `GET /api/tags/{id}/aliases` - Get aliases for a tag
- `POST /api/tags/merge` - Merge multiple tags
- `POST /api/tags/{id}/aliases` - Add alias to tag
- `DELETE /api/tags/{id}/aliases/{aliasId}` - Remove alias
- `PUT /api/tags/{id}/color` - Update tag color
- `GET /api/tags/resolve/{name}` - Resolve tag name (check aliases)
### Enhanced Endpoints
- `GET /api/tags` - Include color and alias count in response
- `PUT /api/tags/{id}` - Support color and description updates
- `DELETE /api/tags/{id}` - Enhanced with story impact information
## Configuration
### Theme Color Palette
Define a curated set of colors that work well with both light and dark themes:
- Primary blues: #3B82F6, #1D4ED8, #60A5FA
- Greens: #10B981, #059669, #34D399
- Purples: #8B5CF6, #7C3AED, #A78BFA
- Warm tones: #F59E0B, #D97706, #F97316
- Neutrals: #6B7280, #4B5563, #9CA3AF
### Settings Integration
- Add "Tag Maintenance" button to Settings page
- Consider adding tag-related preferences (default colors, etc.)
## Success Criteria
1. **Color Tags**: Tags can be assigned colors that display consistently throughout the application
2. **Tag Deletion**: Users can safely delete tags with appropriate warnings and reassignment options
3. **Tag Merging**: Similar tags can be merged with automatic alias creation
4. **Alias Resolution**: Imports automatically resolve aliases to canonical tags
5. **User Experience**: All operations are intuitive with clear feedback and preview options
6. **Performance**: Tag operations remain fast even with large numbers of tags and aliases
7. **Data Integrity**: No orphaned references or circular alias chains
## Future Enhancements
- **Tag Statistics**: Usage analytics and trends
- **Tag Recommendations**: AI-powered tag suggestions during story import
- **Tag Templates**: Predefined tag sets for common story types
- **Export/Import**: Backup and restore tag configurations
- **Tag Validation**: Rules for tag naming conventions
---
*This specification serves as the definitive guide for implementing the tag enhancement features in StoryCove. All implementation should refer back to this document to ensure consistency and completeness.*

45
apply_migration_production.sh Executable file
View File

@@ -0,0 +1,45 @@
#!/bin/bash
# Run this script on your production server to apply the backup_jobs table migration
# to all library databases
echo "Applying backup_jobs table migration to all databases..."
echo ""
# Apply to each database
for DB in storycove storycove_afterdark storycove_clas storycove_secret; do
echo "Applying to $DB..."
docker-compose exec -T postgres psql -U storycove -d "$DB" <<'SQL'
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
SQL
echo "✓ Done with $DB"
echo ""
done
echo "Migration complete! Verifying..."
echo ""
# Verify tables exist
for DB in storycove storycove_afterdark storycove_clas storycove_secret; do
echo "Checking $DB:"
docker-compose exec -T postgres psql -U storycove -d "$DB" -c "\d backup_jobs" 2>&1 | grep -E "Table|does not exist" || echo " ✓ Table exists"
echo ""
done

View File

@@ -2,15 +2,20 @@ FROM openjdk:17-jdk-slim
WORKDIR /app WORKDIR /app
COPY pom.xml . # Install Maven and PostgreSQL 15 client tools
COPY src ./src RUN apt-get update && apt-get install -y wget ca-certificates gnupg maven && \
wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add - && \
RUN apt-get update && apt-get install -y maven && \ echo "deb http://apt.postgresql.org/pub/repos/apt/ bullseye-pgdg main" > /etc/apt/sources.list.d/pgdg.list && \
mvn clean package -DskipTests && \ apt-get update && \
apt-get remove -y maven && \ apt-get install -y postgresql-client-15 && \
apt-get autoremove -y && \
rm -rf /var/lib/apt/lists/* rm -rf /var/lib/apt/lists/*
# Copy source code
COPY . .
# Build the application
RUN mvn clean package -DskipTests
EXPOSE 8080 EXPOSE 8080
CMD ["java", "-jar", "target/storycove-backend-0.0.1-SNAPSHOT.jar"] ENTRYPOINT ["java", "-jar", "target/storycove-backend-0.0.1-SNAPSHOT.jar"]

View File

@@ -0,0 +1,54 @@
#!/bin/bash
# Script to apply backup_jobs table migration to all library databases
# This should be run from the backend directory
set -e
# Use full docker path
DOCKER="/usr/local/bin/docker"
echo "Applying backup_jobs table migration..."
# Get database connection details from environment or use defaults
DB_HOST="${POSTGRES_HOST:-postgres}"
DB_PORT="${POSTGRES_PORT:-5432}"
DB_USER="${POSTGRES_USER:-storycove}"
DB_PASSWORD="${POSTGRES_PASSWORD:-password}"
# List of databases to update
DATABASES=("storycove" "storycove_afterdark")
for DB_NAME in "${DATABASES[@]}"; do
echo ""
echo "Applying migration to database: $DB_NAME"
# Check if database exists
if $DOCKER exec storycove-postgres-1 psql -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
echo "Database $DB_NAME exists, applying migration..."
# Apply migration
$DOCKER exec -i storycove-postgres-1 psql -U "$DB_USER" -d "$DB_NAME" < create_backup_jobs_table.sql
if [ $? -eq 0 ]; then
echo "✓ Migration applied successfully to $DB_NAME"
else
echo "✗ Failed to apply migration to $DB_NAME"
exit 1
fi
else
echo "⚠ Database $DB_NAME does not exist, skipping..."
fi
done
echo ""
echo "Migration complete!"
echo ""
echo "Verifying table creation..."
for DB_NAME in "${DATABASES[@]}"; do
if $DOCKER exec storycove-postgres-1 psql -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
echo ""
echo "Checking $DB_NAME:"
$DOCKER exec storycove-postgres-1 psql -U "$DB_USER" -d "$DB_NAME" -c "\d backup_jobs" 2>/dev/null || echo " Table not found in $DB_NAME"
fi
done

1
backend/backend.log Normal file
View File

@@ -0,0 +1 @@
(eval):1: no such file or directory: ./mvnw

4
backend/cookies_new.txt Normal file
View File

@@ -0,0 +1,4 @@
# Netscape HTTP Cookie File
# https://curl.se/docs/http-cookies.html
# This file was generated by libcurl! Edit at your own risk.

View File

@@ -0,0 +1,29 @@
-- Create backup_jobs table for async backup job tracking
-- This should be run on all library databases (default and afterdark)
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
-- Create index on library_id for faster lookups
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
-- Create index on status for cleanup queries
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
-- Create index on expires_at for cleanup queries
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
-- Create index on created_at for ordering
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);

View File

@@ -5,7 +5,7 @@
<parent> <parent>
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-parent</artifactId> <artifactId>spring-boot-starter-parent</artifactId>
<version>3.2.0</version> <version>3.5.5</version>
<relativePath/> <relativePath/>
</parent> </parent>
@@ -17,7 +17,7 @@
<properties> <properties>
<java.version>17</java.version> <java.version>17</java.version>
<testcontainers.version>1.19.3</testcontainers.version> <testcontainers.version>1.21.3</testcontainers.version>
</properties> </properties>
<dependencyManagement> <dependencyManagement>
@@ -49,6 +49,10 @@
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-validation</artifactId> <artifactId>spring-boot-starter-validation</artifactId>
</dependency> </dependency>
<dependency>
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-starter-actuator</artifactId>
</dependency>
<dependency> <dependency>
<groupId>org.postgresql</groupId> <groupId>org.postgresql</groupId>
<artifactId>postgresql</artifactId> <artifactId>postgresql</artifactId>
@@ -56,18 +60,18 @@
<dependency> <dependency>
<groupId>io.jsonwebtoken</groupId> <groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-api</artifactId> <artifactId>jjwt-api</artifactId>
<version>0.12.3</version> <version>0.13.0</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.jsonwebtoken</groupId> <groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-impl</artifactId> <artifactId>jjwt-impl</artifactId>
<version>0.12.3</version> <version>0.13.0</version>
<scope>runtime</scope> <scope>runtime</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>io.jsonwebtoken</groupId> <groupId>io.jsonwebtoken</groupId>
<artifactId>jjwt-jackson</artifactId> <artifactId>jjwt-jackson</artifactId>
<version>0.12.3</version> <version>0.13.0</version>
<scope>runtime</scope> <scope>runtime</scope>
</dependency> </dependency>
<dependency> <dependency>
@@ -80,9 +84,38 @@
<artifactId>httpclient5</artifactId> <artifactId>httpclient5</artifactId>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.typesense</groupId> <groupId>org.apache.solr</groupId>
<artifactId>typesense-java</artifactId> <artifactId>solr-solrj</artifactId>
<version>1.3.0</version> <version>9.9.0</version>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-client</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-util</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-http</artifactId>
</dependency>
<dependency>
<groupId>org.eclipse.jetty</groupId>
<artifactId>jetty-io</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents.core5</groupId>
<artifactId>httpcore5</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents.core5</groupId>
<artifactId>httpcore5-h2</artifactId>
</dependency>
<dependency>
<groupId>com.positiondev.epublib</groupId>
<artifactId>epublib-core</artifactId>
<version>3.1</version>
</dependency> </dependency>
<!-- Test dependencies --> <!-- Test dependencies -->
@@ -114,6 +147,13 @@
<groupId>org.springframework.boot</groupId> <groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-maven-plugin</artifactId> <artifactId>spring-boot-maven-plugin</artifactId>
</plugin> </plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<parameters>true</parameters>
</configuration>
</plugin>
</plugins> </plugins>
</build> </build>
</project> </project>

View File

@@ -2,10 +2,12 @@ package com.storycove;
import org.springframework.boot.SpringApplication; import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.annotation.EnableScheduling; import org.springframework.scheduling.annotation.EnableScheduling;
@SpringBootApplication @SpringBootApplication
@EnableScheduling @EnableScheduling
@EnableAsync
public class StoryCoveApplication { public class StoryCoveApplication {
public static void main(String[] args) { public static void main(String[] args) {

View File

@@ -0,0 +1,64 @@
package com.storycove.config;
import com.storycove.service.LibraryService;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.DependsOn;
import org.springframework.context.annotation.Primary;
import javax.sql.DataSource;
/**
* Database configuration that sets up library-aware datasource routing.
*
* This configuration replaces the default Spring Boot datasource with a routing
* datasource that automatically directs all database operations to the appropriate
* library-specific database based on the current active library.
*/
@Configuration
public class DatabaseConfig {
@Value("${spring.datasource.url}")
private String baseDbUrl;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
/**
* Create a fallback datasource for when no library is active.
* This connects to the main database specified in application.yml.
*/
@Bean(name = "fallbackDataSource")
public DataSource fallbackDataSource() {
HikariConfig config = new HikariConfig();
config.setJdbcUrl(baseDbUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(10);
config.setConnectionTimeout(30000);
return new HikariDataSource(config);
}
/**
* Primary datasource bean - uses smart routing that excludes authentication operations
*/
@Bean(name = "dataSource")
@Primary
@DependsOn("libraryService")
public DataSource primaryDataSource(LibraryService libraryService) {
SmartRoutingDataSource routingDataSource = new SmartRoutingDataSource(
libraryService, baseDbUrl, dbUsername, dbPassword);
routingDataSource.setDefaultTargetDataSource(fallbackDataSource());
routingDataSource.setTargetDataSources(new java.util.HashMap<>());
return routingDataSource;
}
}

View File

@@ -0,0 +1,111 @@
package com.storycove.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
/**
* Runs database migrations on application startup.
* This ensures all library databases have the required schema,
* particularly for tables like backup_jobs that were added after initial deployment.
*/
@Component
@Order(1) // Run early in startup sequence
public class DatabaseMigrationRunner implements CommandLineRunner {
private static final Logger logger = LoggerFactory.getLogger(DatabaseMigrationRunner.class);
@Autowired
private DataSource dataSource;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
// List of all library databases that need migrations
private static final List<String> LIBRARY_DATABASES = Arrays.asList(
"storycove", // default database
"storycove_afterdark",
"storycove_clas",
"storycove_secret"
);
// SQL for backup_jobs table migration (idempotent)
private static final String BACKUP_JOBS_MIGRATION = """
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
""";
@Override
public void run(String... args) throws Exception {
logger.info("🗄️ Starting database migrations...");
for (String database : LIBRARY_DATABASES) {
try {
applyMigrations(database);
logger.info("✅ Successfully applied migrations to database: {}", database);
} catch (Exception e) {
// Log error but don't fail startup if database doesn't exist yet
if (e.getMessage() != null && e.getMessage().contains("does not exist")) {
logger.warn("⚠️ Database {} does not exist yet, skipping migrations", database);
} else {
logger.error("❌ Failed to apply migrations to database: {}", database, e);
// Don't throw - allow application to start even if some migrations fail
}
}
}
logger.info("✅ Database migrations completed");
}
private void applyMigrations(String database) throws Exception {
// We need to connect directly to each database, not through SmartRoutingDataSource
// Build connection URL from the default datasource URL
String originalUrl = dataSource.getConnection().getMetaData().getURL();
String baseUrl = originalUrl.substring(0, originalUrl.lastIndexOf('/'));
String targetUrl = baseUrl + "/" + database;
// Connect directly to target database using credentials from application properties
try (Connection conn = java.sql.DriverManager.getConnection(
targetUrl,
dbUsername,
dbPassword
)) {
// Apply backup_jobs migration
try (Statement stmt = conn.createStatement()) {
stmt.execute(BACKUP_JOBS_MIGRATION);
}
logger.debug("Applied backup_jobs migration to {}", database);
}
}
}

View File

@@ -0,0 +1,65 @@
package com.storycove.config;
import com.storycove.service.LibraryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
/**
* Custom DataSource router that dynamically routes database calls to the appropriate
* library-specific datasource based on the current active library.
*
* This makes ALL Spring Data JPA repositories automatically library-aware without
* requiring changes to existing repository or service code.
*/
public class LibraryAwareDataSource extends AbstractRoutingDataSource {
private static final Logger logger = LoggerFactory.getLogger(LibraryAwareDataSource.class);
private final LibraryService libraryService;
public LibraryAwareDataSource(LibraryService libraryService) {
this.libraryService = libraryService;
// Set empty target datasources to satisfy AbstractRoutingDataSource requirements
// We override determineTargetDataSource() so this won't be used
setTargetDataSources(new java.util.HashMap<>());
}
@Override
protected Object determineCurrentLookupKey() {
String currentLibraryId = libraryService.getCurrentLibraryId();
logger.debug("Routing database call to library: {}", currentLibraryId);
return currentLibraryId;
}
@Override
protected javax.sql.DataSource determineTargetDataSource() {
try {
// Check if LibraryService is properly initialized
if (libraryService == null) {
logger.debug("LibraryService not available, using default datasource");
return getResolvedDefaultDataSource();
}
// Check if any library is currently active
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId == null) {
logger.debug("No active library, using default datasource");
return getResolvedDefaultDataSource();
}
// Try to get the current library datasource
javax.sql.DataSource libraryDataSource = libraryService.getCurrentDataSource();
logger.debug("Successfully routing database call to library: {}", currentLibraryId);
return libraryDataSource;
} catch (IllegalStateException e) {
// This is expected during authentication, startup, or when no library is active
logger.debug("No active library (IllegalStateException) - using default datasource: {}", e.getMessage());
return getResolvedDefaultDataSource();
} catch (Exception e) {
logger.warn("Unexpected error determining target datasource, falling back to default: {}", e.getMessage(), e);
return getResolvedDefaultDataSource();
}
}
}

View File

@@ -40,6 +40,8 @@ public class SecurityConfig {
.sessionManagement(session -> session.sessionCreationPolicy(SessionCreationPolicy.STATELESS)) .sessionManagement(session -> session.sessionCreationPolicy(SessionCreationPolicy.STATELESS))
.authorizeHttpRequests(authz -> authz .authorizeHttpRequests(authz -> authz
// Public endpoints // Public endpoints
.requestMatchers("/api/auth/login").permitAll()
.requestMatchers("/api/auth/refresh").permitAll() // Allow refresh without access token
.requestMatchers("/api/auth/**").permitAll() .requestMatchers("/api/auth/**").permitAll()
.requestMatchers("/api/files/images/**").permitAll() // Public image serving .requestMatchers("/api/files/images/**").permitAll() // Public image serving
.requestMatchers("/api/config/**").permitAll() // Public configuration endpoints .requestMatchers("/api/config/**").permitAll() // Public configuration endpoints
@@ -56,7 +58,10 @@ public class SecurityConfig {
@Bean @Bean
public CorsConfigurationSource corsConfigurationSource() { public CorsConfigurationSource corsConfigurationSource() {
CorsConfiguration configuration = new CorsConfiguration(); CorsConfiguration configuration = new CorsConfiguration();
configuration.setAllowedOriginPatterns(Arrays.asList(allowedOrigins.split(","))); List<String> origins = Arrays.stream(allowedOrigins.split(","))
.map(String::trim)
.toList();
configuration.setAllowedOriginPatterns(origins);
configuration.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS")); configuration.setAllowedMethods(Arrays.asList("GET", "POST", "PUT", "PATCH", "DELETE", "OPTIONS"));
configuration.setAllowedHeaders(List.of("*")); configuration.setAllowedHeaders(List.of("*"));
configuration.setAllowCredentials(true); configuration.setAllowCredentials(true);

View File

@@ -0,0 +1,158 @@
package com.storycove.config;
import com.storycove.service.LibraryService;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.jdbc.datasource.lookup.AbstractRoutingDataSource;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import javax.sql.DataSource;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
/**
* Smart routing datasource that:
* 1. Routes to library-specific databases when a library is active
* 2. Excludes authentication operations (keeps them on default database)
* 3. Uses request context to determine when routing is appropriate
*/
public class SmartRoutingDataSource extends AbstractRoutingDataSource {
private static final Logger logger = LoggerFactory.getLogger(SmartRoutingDataSource.class);
private final LibraryService libraryService;
private final Map<String, DataSource> libraryDataSources = new ConcurrentHashMap<>();
// Database connection details - will be injected via constructor
private final String baseDbUrl;
private final String dbUsername;
private final String dbPassword;
public SmartRoutingDataSource(LibraryService libraryService, String baseDbUrl, String dbUsername, String dbPassword) {
this.libraryService = libraryService;
this.baseDbUrl = baseDbUrl;
this.dbUsername = dbUsername;
this.dbPassword = dbPassword;
logger.info("SmartRoutingDataSource initialized with database: {}", baseDbUrl);
}
@Override
protected Object determineCurrentLookupKey() {
try {
// Check if this is an authentication request - if so, use default database
if (isAuthenticationRequest()) {
logger.debug("Authentication request detected, using default database");
return null; // null means use default datasource
}
// Check if we have an active library
if (libraryService != null) {
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId != null && !currentLibraryId.trim().isEmpty()) {
logger.info("ROUTING: Directing to library-specific database: {}", currentLibraryId);
return currentLibraryId;
} else {
logger.info("ROUTING: No active library, using default database");
}
} else {
logger.info("ROUTING: LibraryService is null, using default database");
}
} catch (Exception e) {
logger.debug("Error determining lookup key, falling back to default database", e);
}
return null; // Use default datasource
}
/**
* Check if the current request is an authentication request that should use the default database
*/
private boolean isAuthenticationRequest() {
try {
ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
if (attributes != null) {
String requestURI = attributes.getRequest().getRequestURI();
String method = attributes.getRequest().getMethod();
// Authentication endpoints that should use default database
if (requestURI.contains("/auth/") ||
requestURI.contains("/login") ||
requestURI.contains("/api/libraries/switch") ||
(requestURI.contains("/api/libraries") && "POST".equals(method))) {
return true;
}
}
} catch (Exception e) {
logger.debug("Could not determine request context", e);
}
return false;
}
@Override
protected DataSource determineTargetDataSource() {
Object lookupKey = determineCurrentLookupKey();
if (lookupKey != null) {
String libraryId = (String) lookupKey;
return getLibraryDataSource(libraryId);
}
return getDefaultDataSource();
}
/**
* Get or create a datasource for the specified library
*/
private DataSource getLibraryDataSource(String libraryId) {
return libraryDataSources.computeIfAbsent(libraryId, id -> {
try {
HikariConfig config = new HikariConfig();
// Replace database name in URL with library-specific name
String libraryUrl = baseDbUrl.replaceAll("/[^/]*$", "/" + "storycove_" + id);
config.setJdbcUrl(libraryUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(5); // Smaller pool for library-specific databases
config.setConnectionTimeout(10000);
config.setMaxLifetime(600000); // 10 minutes
logger.info("Created new datasource for library: {} -> {}", id, libraryUrl);
return new HikariDataSource(config);
} catch (Exception e) {
logger.error("Failed to create datasource for library: {}", id, e);
return getDefaultDataSource();
}
});
}
private DataSource getDefaultDataSource() {
// Use the default target datasource that was set in the configuration
try {
return (DataSource) super.determineTargetDataSource();
} catch (Exception e) {
logger.debug("Could not get default datasource via super method", e);
}
// Fallback: create a basic datasource
logger.warn("No default datasource available, creating fallback");
HikariConfig config = new HikariConfig();
config.setJdbcUrl(baseDbUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(10);
config.setConnectionTimeout(30000);
return new HikariDataSource(config);
}
}

View File

@@ -0,0 +1,57 @@
package com.storycove.config;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@ConditionalOnProperty(
value = "storycove.search.engine",
havingValue = "solr",
matchIfMissing = false
)
public class SolrConfig {
private static final Logger logger = LoggerFactory.getLogger(SolrConfig.class);
private final SolrProperties properties;
public SolrConfig(SolrProperties properties) {
this.properties = properties;
}
@Bean
public SolrClient solrClient() {
logger.info("Initializing Solr client with URL: {}", properties.getUrl());
HttpSolrClient.Builder builder = new HttpSolrClient.Builder(properties.getUrl())
.withConnectionTimeout(properties.getConnection().getTimeout())
.withSocketTimeout(properties.getConnection().getSocketTimeout());
SolrClient client = builder.build();
logger.info("Solr running without authentication");
// Test connection
testConnection(client);
return client;
}
private void testConnection(SolrClient client) {
try {
// Test connection by pinging the server
var response = client.ping();
logger.info("Solr connection successful - Response time: {}ms",
response.getElapsedTime());
} catch (Exception e) {
logger.warn("Solr connection test failed during initialization: {}", e.getMessage());
logger.debug("Solr connection test full error", e);
// Don't throw exception here - let the client be created and handle failures in service methods
}
}
}

View File

@@ -0,0 +1,144 @@
package com.storycove.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@Component
@ConfigurationProperties(prefix = "storycove.solr")
public class SolrProperties {
private String url = "http://localhost:8983/solr";
private String username;
private String password;
private Cores cores = new Cores();
private Connection connection = new Connection();
private Query query = new Query();
private Commit commit = new Commit();
private Health health = new Health();
// Getters and setters
public String getUrl() { return url; }
public void setUrl(String url) { this.url = url; }
public String getUsername() { return username; }
public void setUsername(String username) { this.username = username; }
public String getPassword() { return password; }
public void setPassword(String password) { this.password = password; }
public Cores getCores() { return cores; }
public void setCores(Cores cores) { this.cores = cores; }
public Connection getConnection() { return connection; }
public void setConnection(Connection connection) { this.connection = connection; }
public Query getQuery() { return query; }
public void setQuery(Query query) { this.query = query; }
public Commit getCommit() { return commit; }
public void setCommit(Commit commit) { this.commit = commit; }
public Health getHealth() { return health; }
public void setHealth(Health health) { this.health = health; }
public static class Cores {
private String stories = "storycove_stories";
private String authors = "storycove_authors";
private String collections = "storycove_collections";
// Getters and setters
public String getStories() { return stories; }
public void setStories(String stories) { this.stories = stories; }
public String getAuthors() { return authors; }
public void setAuthors(String authors) { this.authors = authors; }
public String getCollections() { return collections; }
public void setCollections(String collections) { this.collections = collections; }
}
public static class Connection {
private int timeout = 30000;
private int socketTimeout = 60000;
private int maxConnectionsPerRoute = 10;
private int maxConnectionsTotal = 30;
private boolean retryOnFailure = true;
private int maxRetries = 3;
// Getters and setters
public int getTimeout() { return timeout; }
public void setTimeout(int timeout) { this.timeout = timeout; }
public int getSocketTimeout() { return socketTimeout; }
public void setSocketTimeout(int socketTimeout) { this.socketTimeout = socketTimeout; }
public int getMaxConnectionsPerRoute() { return maxConnectionsPerRoute; }
public void setMaxConnectionsPerRoute(int maxConnectionsPerRoute) { this.maxConnectionsPerRoute = maxConnectionsPerRoute; }
public int getMaxConnectionsTotal() { return maxConnectionsTotal; }
public void setMaxConnectionsTotal(int maxConnectionsTotal) { this.maxConnectionsTotal = maxConnectionsTotal; }
public boolean isRetryOnFailure() { return retryOnFailure; }
public void setRetryOnFailure(boolean retryOnFailure) { this.retryOnFailure = retryOnFailure; }
public int getMaxRetries() { return maxRetries; }
public void setMaxRetries(int maxRetries) { this.maxRetries = maxRetries; }
}
public static class Query {
private int defaultRows = 10;
private int maxRows = 1000;
private String defaultOperator = "AND";
private boolean highlight = true;
private boolean facets = true;
// Getters and setters
public int getDefaultRows() { return defaultRows; }
public void setDefaultRows(int defaultRows) { this.defaultRows = defaultRows; }
public int getMaxRows() { return maxRows; }
public void setMaxRows(int maxRows) { this.maxRows = maxRows; }
public String getDefaultOperator() { return defaultOperator; }
public void setDefaultOperator(String defaultOperator) { this.defaultOperator = defaultOperator; }
public boolean isHighlight() { return highlight; }
public void setHighlight(boolean highlight) { this.highlight = highlight; }
public boolean isFacets() { return facets; }
public void setFacets(boolean facets) { this.facets = facets; }
}
public static class Commit {
private boolean softCommit = true;
private int commitWithin = 1000;
private boolean waitSearcher = false;
// Getters and setters
public boolean isSoftCommit() { return softCommit; }
public void setSoftCommit(boolean softCommit) { this.softCommit = softCommit; }
public int getCommitWithin() { return commitWithin; }
public void setCommitWithin(int commitWithin) { this.commitWithin = commitWithin; }
public boolean isWaitSearcher() { return waitSearcher; }
public void setWaitSearcher(boolean waitSearcher) { this.waitSearcher = waitSearcher; }
}
public static class Health {
private int checkInterval = 30000;
private int slowQueryThreshold = 5000;
private boolean enableMetrics = true;
// Getters and setters
public int getCheckInterval() { return checkInterval; }
public void setCheckInterval(int checkInterval) { this.checkInterval = checkInterval; }
public int getSlowQueryThreshold() { return slowQueryThreshold; }
public void setSlowQueryThreshold(int slowQueryThreshold) { this.slowQueryThreshold = slowQueryThreshold; }
public boolean isEnableMetrics() { return enableMetrics; }
public void setEnableMetrics(boolean enableMetrics) { this.enableMetrics = enableMetrics; }
}
}

View File

@@ -0,0 +1,102 @@
package com.storycove.config;
import com.storycove.entity.Author;
import com.storycove.entity.Collection;
import com.storycove.entity.Story;
import com.storycove.repository.AuthorRepository;
import com.storycove.repository.CollectionRepository;
import com.storycove.repository.StoryRepository;
import com.storycove.service.SearchServiceAdapter;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.stereotype.Component;
import java.util.List;
/**
* Automatically performs bulk reindexing of all entities on application startup.
* This ensures that the search index is always in sync with the database,
* especially after Solr volume recreation during deployment.
*/
@Component
public class StartupIndexingRunner implements ApplicationRunner {
private static final Logger logger = LoggerFactory.getLogger(StartupIndexingRunner.class);
@Autowired
private SearchServiceAdapter searchServiceAdapter;
@Autowired
private StoryRepository storyRepository;
@Autowired
private AuthorRepository authorRepository;
@Autowired
private CollectionRepository collectionRepository;
@Override
public void run(ApplicationArguments args) throws Exception {
logger.info("========================================");
logger.info("Starting automatic bulk reindexing...");
logger.info("========================================");
try {
// Check if search service is available
if (!searchServiceAdapter.isSearchServiceAvailable()) {
logger.warn("Search service (Solr) is not available. Skipping bulk reindexing.");
logger.warn("Make sure Solr is running and accessible.");
return;
}
long startTime = System.currentTimeMillis();
// Index all stories
logger.info("📚 Indexing stories...");
List<Story> stories = storyRepository.findAllWithAssociations();
if (!stories.isEmpty()) {
searchServiceAdapter.bulkIndexStories(stories);
logger.info("✅ Indexed {} stories", stories.size());
} else {
logger.info(" No stories to index");
}
// Index all authors
logger.info("👤 Indexing authors...");
List<Author> authors = authorRepository.findAll();
if (!authors.isEmpty()) {
searchServiceAdapter.bulkIndexAuthors(authors);
logger.info("✅ Indexed {} authors", authors.size());
} else {
logger.info(" No authors to index");
}
// Index all collections
logger.info("📂 Indexing collections...");
List<Collection> collections = collectionRepository.findAllWithTags();
if (!collections.isEmpty()) {
searchServiceAdapter.bulkIndexCollections(collections);
logger.info("✅ Indexed {} collections", collections.size());
} else {
logger.info(" No collections to index");
}
long duration = System.currentTimeMillis() - startTime;
logger.info("========================================");
logger.info("✅ Bulk reindexing completed successfully in {}ms", duration);
logger.info("📊 Total indexed: {} stories, {} authors, {} collections",
stories.size(), authors.size(), collections.size());
logger.info("========================================");
} catch (Exception e) {
logger.error("========================================");
logger.error("❌ Bulk reindexing failed", e);
logger.error("========================================");
// Don't throw the exception - let the application start even if indexing fails
// This allows the application to be functional even with search issues
}
}
}

View File

@@ -1,37 +0,0 @@
package com.storycove.config;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.typesense.api.Client;
import org.typesense.resources.Node;
import java.util.ArrayList;
import java.util.List;
@Configuration
public class TypesenseConfig {
@Value("${storycove.typesense.api-key}")
private String apiKey;
@Value("${storycove.typesense.host}")
private String host;
@Value("${storycove.typesense.port}")
private int port;
@Bean
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
public Client typesenseClient() {
List<Node> nodes = new ArrayList<>();
nodes.add(new Node("http", host, String.valueOf(port)));
org.typesense.api.Configuration configuration = new org.typesense.api.Configuration(
nodes, java.time.Duration.ofSeconds(10), apiKey
);
return new Client(configuration);
}
}

View File

@@ -0,0 +1,309 @@
package com.storycove.controller;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import com.storycove.service.AuthorService;
import com.storycove.service.SolrService;
import com.storycove.service.SearchServiceAdapter;
import com.storycove.service.StoryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
* Admin controller for managing Solr operations.
* Provides endpoints for reindexing and index management.
*/
@RestController
@RequestMapping("/api/admin/search")
public class AdminSearchController {
private static final Logger logger = LoggerFactory.getLogger(AdminSearchController.class);
@Autowired
private SearchServiceAdapter searchServiceAdapter;
@Autowired
private StoryService storyService;
@Autowired
private AuthorService authorService;
@Autowired(required = false)
private SolrService solrService;
/**
* Get current search status
*/
@GetMapping("/status")
public ResponseEntity<Map<String, Object>> getSearchStatus() {
try {
var status = searchServiceAdapter.getSearchStatus();
return ResponseEntity.ok(Map.of(
"primaryEngine", status.getPrimaryEngine(),
"dualWrite", status.isDualWrite(),
"solrAvailable", status.isSolrAvailable()
));
} catch (Exception e) {
logger.error("Error getting search status", e);
return ResponseEntity.internalServerError().body(Map.of(
"error", "Failed to get search status: " + e.getMessage()
));
}
}
/**
* Reindex all data in Solr
*/
@PostMapping("/solr/reindex")
public ResponseEntity<Map<String, Object>> reindexSolr() {
try {
logger.info("Starting Solr full reindex");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
// Get all data from services
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index directly in Solr
if (solrService != null) {
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Reindexed %d stories and %d authors in Solr",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed
));
} catch (Exception e) {
logger.error("Error during Solr reindex", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Solr reindex failed: " + e.getMessage()
));
}
}
/**
* Recreate Solr indices
*/
@PostMapping("/solr/recreate")
public ResponseEntity<Map<String, Object>> recreateSolrIndices() {
try {
logger.info("Starting Solr indices recreation");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
// Recreate indices
if (solrService != null) {
solrService.recreateIndices();
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
// Get all data and reindex
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index after recreation
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Recreated Solr indices and indexed %d stories and %d authors",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed
));
} catch (Exception e) {
logger.error("Error during Solr indices recreation", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Solr indices recreation failed: " + e.getMessage()
));
}
}
/**
* Add libraryId field to Solr schema via Schema API.
* This is a prerequisite for library-aware indexing.
*/
@PostMapping("/solr/add-library-field")
public ResponseEntity<Map<String, Object>> addLibraryField() {
try {
logger.info("Starting Solr libraryId field addition");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
if (solrService == null) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
// Add the libraryId field to the schema
try {
solrService.addLibraryIdField();
logger.info("libraryId field added successfully to schema");
return ResponseEntity.ok(Map.of(
"success", true,
"message", "libraryId field added successfully to both stories and authors cores",
"note", "You can now run the library schema migration"
));
} catch (Exception e) {
logger.error("Failed to add libraryId field to schema", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Failed to add libraryId field to schema: " + e.getMessage(),
"details", "Check that Solr is accessible and schema is modifiable"
));
}
} catch (Exception e) {
logger.error("Error during libraryId field addition", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "libraryId field addition failed: " + e.getMessage()
));
}
}
/**
* Migrate to library-aware Solr schema.
* This endpoint handles the migration from non-library-aware to library-aware indexing.
* It clears existing data and reindexes with library context.
*/
@PostMapping("/solr/migrate-library-schema")
public ResponseEntity<Map<String, Object>> migrateLibrarySchema() {
try {
logger.info("Starting Solr library schema migration");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr is not available or healthy"
));
}
if (solrService == null) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Solr service not available"
));
}
logger.info("Adding libraryId field to Solr schema");
// First, add the libraryId field to the schema via Schema API
try {
solrService.addLibraryIdField();
logger.info("libraryId field added successfully to schema");
} catch (Exception e) {
logger.error("Failed to add libraryId field to schema", e);
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Failed to add libraryId field to schema: " + e.getMessage(),
"details", "The schema must support the libraryId field before migration"
));
}
logger.info("Clearing existing Solr data for library schema migration");
// Clear existing data that doesn't have libraryId
try {
solrService.recreateIndices();
} catch (Exception e) {
logger.warn("Could not recreate indices (expected in production): {}", e.getMessage());
// In production, just clear the data instead
try {
solrService.clearAllDocuments();
logger.info("Cleared all documents from Solr cores");
} catch (Exception clearError) {
logger.error("Failed to clear documents", clearError);
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "Failed to clear existing data: " + clearError.getMessage()
));
}
}
// Get all data and reindex with library context
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
logger.info("Reindexing {} stories and {} authors with library context",
allStories.size(), allAuthors.size());
// Bulk index everything (will now include libraryId from current library context)
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
int totalIndexed = allStories.size() + allAuthors.size();
logger.info("Solr library schema migration completed successfully");
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Library schema migration completed. Reindexed %d stories and %d authors with library context.",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed,
"note", "Ensure libraryId field exists in Solr schema before running this migration"
));
} catch (Exception e) {
logger.error("Error during Solr library schema migration", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "Library schema migration failed: " + e.getMessage(),
"details", "Make sure the libraryId field has been added to both stories and authors Solr cores"
));
}
}
}

View File

@@ -1,10 +1,17 @@
package com.storycove.controller; package com.storycove.controller;
import com.storycove.entity.RefreshToken;
import com.storycove.service.LibraryService;
import com.storycove.service.PasswordAuthenticationService; import com.storycove.service.PasswordAuthenticationService;
import com.storycove.service.RefreshTokenService;
import com.storycove.util.JwtUtil; import com.storycove.util.JwtUtil;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse; import jakarta.servlet.http.HttpServletResponse;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import jakarta.validation.constraints.NotBlank; import jakarta.validation.constraints.NotBlank;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
import org.springframework.http.ResponseCookie; import org.springframework.http.ResponseCookie;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
@@ -12,26 +19,113 @@ import org.springframework.security.core.Authentication;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
import java.time.Duration; import java.time.Duration;
import java.util.Arrays;
import java.util.Optional;
@RestController @RestController
@RequestMapping("/api/auth") @RequestMapping("/api/auth")
public class AuthController { public class AuthController {
private final PasswordAuthenticationService passwordService; private static final Logger logger = LoggerFactory.getLogger(AuthController.class);
private final JwtUtil jwtUtil;
public AuthController(PasswordAuthenticationService passwordService, JwtUtil jwtUtil) { private final PasswordAuthenticationService passwordService;
private final LibraryService libraryService;
private final JwtUtil jwtUtil;
private final RefreshTokenService refreshTokenService;
public AuthController(PasswordAuthenticationService passwordService, LibraryService libraryService, JwtUtil jwtUtil, RefreshTokenService refreshTokenService) {
this.passwordService = passwordService; this.passwordService = passwordService;
this.libraryService = libraryService;
this.jwtUtil = jwtUtil; this.jwtUtil = jwtUtil;
this.refreshTokenService = refreshTokenService;
} }
@PostMapping("/login") @PostMapping("/login")
public ResponseEntity<?> login(@Valid @RequestBody LoginRequest request, HttpServletResponse response) { public ResponseEntity<?> login(@Valid @RequestBody LoginRequest request, HttpServletRequest httpRequest, HttpServletResponse response) {
if (passwordService.authenticate(request.getPassword())) { // Use new library-aware authentication
String token = jwtUtil.generateToken(); String token = passwordService.authenticateAndSwitchLibrary(request.getPassword());
// Set httpOnly cookie if (token != null) {
ResponseCookie cookie = ResponseCookie.from("token", token) // Get library ID from JWT token
String libraryId = jwtUtil.getLibraryIdFromToken(token);
// Get user agent and IP address for refresh token
String userAgent = httpRequest.getHeader("User-Agent");
String ipAddress = getClientIpAddress(httpRequest);
// Create refresh token
RefreshToken refreshToken = refreshTokenService.createRefreshToken(libraryId, userAgent, ipAddress);
// Set access token cookie (24 hours)
ResponseCookie accessCookie = ResponseCookie.from("token", token)
.httpOnly(true)
.secure(false) // Set to true in production with HTTPS
.path("/")
.maxAge(Duration.ofDays(1))
.build();
// Set refresh token cookie (14 days)
ResponseCookie refreshCookie = ResponseCookie.from("refreshToken", refreshToken.getToken())
.httpOnly(true)
.secure(false) // Set to true in production with HTTPS
.path("/")
.maxAge(Duration.ofDays(14))
.build();
response.addHeader(HttpHeaders.SET_COOKIE, accessCookie.toString());
response.addHeader(HttpHeaders.SET_COOKIE, refreshCookie.toString());
String libraryInfo = passwordService.getCurrentLibraryInfo();
return ResponseEntity.ok(new LoginResponse("Authentication successful - " + libraryInfo, token));
} else {
return ResponseEntity.status(401).body(new ErrorResponse("Invalid password"));
}
}
@PostMapping("/refresh")
public ResponseEntity<?> refresh(HttpServletRequest request, HttpServletResponse response) {
// Get refresh token from cookie
String refreshTokenString = getRefreshTokenFromCookies(request);
if (refreshTokenString == null) {
return ResponseEntity.status(401).body(new ErrorResponse("Refresh token not found"));
}
// Verify refresh token
Optional<RefreshToken> refreshTokenOpt = refreshTokenService.verifyRefreshToken(refreshTokenString);
if (refreshTokenOpt.isEmpty()) {
return ResponseEntity.status(401).body(new ErrorResponse("Invalid or expired refresh token"));
}
RefreshToken refreshToken = refreshTokenOpt.get();
String tokenLibraryId = refreshToken.getLibraryId();
// Check if we need to switch libraries based on refresh token's library ID
try {
String currentLibraryId = libraryService.getCurrentLibraryId();
// Switch library if refresh token's library differs from current library
// This handles cross-device library switching on token refresh
if (tokenLibraryId != null && !tokenLibraryId.equals(currentLibraryId)) {
logger.info("Refresh token library '{}' differs from current library '{}', switching libraries",
tokenLibraryId, currentLibraryId);
libraryService.switchToLibraryAfterAuthentication(tokenLibraryId);
} else if (currentLibraryId == null && tokenLibraryId != null) {
// Handle case after backend restart where no library is active
logger.info("No active library on refresh, switching to refresh token's library: {}", tokenLibraryId);
libraryService.switchToLibraryAfterAuthentication(tokenLibraryId);
}
} catch (Exception e) {
logger.error("Failed to switch library during token refresh: {}", e.getMessage());
return ResponseEntity.status(500).body(new ErrorResponse("Failed to switch library: " + e.getMessage()));
}
// Generate new access token
String newAccessToken = jwtUtil.generateToken("user", tokenLibraryId);
// Set new access token cookie
ResponseCookie cookie = ResponseCookie.from("token", newAccessToken)
.httpOnly(true) .httpOnly(true)
.secure(false) // Set to true in production with HTTPS .secure(false) // Set to true in production with HTTPS
.path("/") .path("/")
@@ -40,23 +134,38 @@ public class AuthController {
response.addHeader(HttpHeaders.SET_COOKIE, cookie.toString()); response.addHeader(HttpHeaders.SET_COOKIE, cookie.toString());
return ResponseEntity.ok(new LoginResponse("Authentication successful", token)); return ResponseEntity.ok(new LoginResponse("Token refreshed successfully", newAccessToken));
} else {
return ResponseEntity.status(401).body(new ErrorResponse("Invalid password"));
}
} }
@PostMapping("/logout") @PostMapping("/logout")
public ResponseEntity<?> logout(HttpServletResponse response) { public ResponseEntity<?> logout(HttpServletRequest request, HttpServletResponse response) {
// Clear the cookie // Clear authentication state
ResponseCookie cookie = ResponseCookie.from("token", "") libraryService.clearAuthentication();
// Revoke refresh token if present
String refreshTokenString = getRefreshTokenFromCookies(request);
if (refreshTokenString != null) {
refreshTokenService.findByToken(refreshTokenString).ifPresent(refreshTokenService::revokeToken);
}
// Clear the access token cookie
ResponseCookie accessCookie = ResponseCookie.from("token", "")
.httpOnly(true) .httpOnly(true)
.secure(false) .secure(false)
.path("/") .path("/")
.maxAge(Duration.ZERO) .maxAge(Duration.ZERO)
.build(); .build();
response.addHeader(HttpHeaders.SET_COOKIE, cookie.toString()); // Clear the refresh token cookie
ResponseCookie refreshCookie = ResponseCookie.from("refreshToken", "")
.httpOnly(true)
.secure(false)
.path("/")
.maxAge(Duration.ZERO)
.build();
response.addHeader(HttpHeaders.SET_COOKIE, accessCookie.toString());
response.addHeader(HttpHeaders.SET_COOKIE, refreshCookie.toString());
return ResponseEntity.ok(new MessageResponse("Logged out successfully")); return ResponseEntity.ok(new MessageResponse("Logged out successfully"));
} }
@@ -70,6 +179,33 @@ public class AuthController {
} }
} }
// Helper methods
private String getRefreshTokenFromCookies(HttpServletRequest request) {
if (request.getCookies() == null) {
return null;
}
return Arrays.stream(request.getCookies())
.filter(cookie -> "refreshToken".equals(cookie.getName()))
.map(Cookie::getValue)
.findFirst()
.orElse(null);
}
private String getClientIpAddress(HttpServletRequest request) {
String xForwardedFor = request.getHeader("X-Forwarded-For");
if (xForwardedFor != null && !xForwardedFor.isEmpty()) {
return xForwardedFor.split(",")[0].trim();
}
String xRealIp = request.getHeader("X-Real-IP");
if (xRealIp != null && !xRealIp.isEmpty()) {
return xRealIp;
}
return request.getRemoteAddr();
}
// DTOs // DTOs
public static class LoginRequest { public static class LoginRequest {
@NotBlank(message = "Password is required") @NotBlank(message = "Password is required")

View File

@@ -4,7 +4,7 @@ import com.storycove.dto.*;
import com.storycove.entity.Author; import com.storycove.entity.Author;
import com.storycove.service.AuthorService; import com.storycove.service.AuthorService;
import com.storycove.service.ImageService; import com.storycove.service.ImageService;
import com.storycove.service.TypesenseService; import com.storycove.service.SearchServiceAdapter;
import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import org.slf4j.Logger; import org.slf4j.Logger;
@@ -32,12 +32,12 @@ public class AuthorController {
private final AuthorService authorService; private final AuthorService authorService;
private final ImageService imageService; private final ImageService imageService;
private final TypesenseService typesenseService; private final SearchServiceAdapter searchServiceAdapter;
public AuthorController(AuthorService authorService, ImageService imageService, TypesenseService typesenseService) { public AuthorController(AuthorService authorService, ImageService imageService, SearchServiceAdapter searchServiceAdapter) {
this.authorService = authorService; this.authorService = authorService;
this.imageService = imageService; this.imageService = imageService;
this.typesenseService = typesenseService; this.searchServiceAdapter = searchServiceAdapter;
} }
@GetMapping @GetMapping
@@ -65,10 +65,12 @@ public class AuthorController {
@PostMapping @PostMapping
public ResponseEntity<AuthorDto> createAuthor(@Valid @RequestBody CreateAuthorRequest request) { public ResponseEntity<AuthorDto> createAuthor(@Valid @RequestBody CreateAuthorRequest request) {
logger.info("Creating new author: {}", request.getName());
Author author = new Author(); Author author = new Author();
updateAuthorFromRequest(author, request); updateAuthorFromRequest(author, request);
Author savedAuthor = authorService.create(author); Author savedAuthor = authorService.create(author);
logger.info("Successfully created author: {} (ID: {})", savedAuthor.getName(), savedAuthor.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedAuthor)); return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedAuthor));
} }
@@ -81,13 +83,7 @@ public class AuthorController {
@RequestParam(required = false, name = "authorRating") Integer rating, @RequestParam(required = false, name = "authorRating") Integer rating,
@RequestParam(required = false, name = "avatar") MultipartFile avatarFile) { @RequestParam(required = false, name = "avatar") MultipartFile avatarFile) {
System.out.println("DEBUG: MULTIPART PUT called with:"); logger.info("Updating author with multipart data (ID: {})", id);
System.out.println(" - name: " + name);
System.out.println(" - notes: " + notes);
System.out.println(" - urls: " + urls);
System.out.println(" - rating: " + rating);
System.out.println(" - avatar: " + (avatarFile != null ? avatarFile.getOriginalFilename() : "null"));
try { try {
Author existingAuthor = authorService.findById(id); Author existingAuthor = authorService.findById(id);
@@ -104,7 +100,6 @@ public class AuthorController {
// Handle rating update // Handle rating update
if (rating != null) { if (rating != null) {
System.out.println("DEBUG: Setting author rating via PUT: " + rating);
existingAuthor.setAuthorRating(rating); existingAuthor.setAuthorRating(rating);
} }
@@ -115,6 +110,7 @@ public class AuthorController {
} }
Author updatedAuthor = authorService.update(id, existingAuthor); Author updatedAuthor = authorService.update(id, existingAuthor);
logger.info("Successfully updated author: {} via multipart", updatedAuthor.getName());
return ResponseEntity.ok(convertToDto(updatedAuthor)); return ResponseEntity.ok(convertToDto(updatedAuthor));
} catch (Exception e) { } catch (Exception e) {
@@ -125,31 +121,27 @@ public class AuthorController {
@PutMapping(value = "/{id}", consumes = "application/json") @PutMapping(value = "/{id}", consumes = "application/json")
public ResponseEntity<AuthorDto> updateAuthorJson(@PathVariable UUID id, public ResponseEntity<AuthorDto> updateAuthorJson(@PathVariable UUID id,
@Valid @RequestBody UpdateAuthorRequest request) { @Valid @RequestBody UpdateAuthorRequest request) {
System.out.println("DEBUG: JSON PUT called with:"); logger.info("Updating author with JSON data: {} (ID: {})", request.getName(), id);
System.out.println(" - name: " + request.getName());
System.out.println(" - notes: " + request.getNotes());
System.out.println(" - urls: " + request.getUrls());
System.out.println(" - rating: " + request.getRating());
Author existingAuthor = authorService.findById(id); Author existingAuthor = authorService.findById(id);
updateAuthorFromRequest(existingAuthor, request); updateAuthorFromRequest(existingAuthor, request);
Author updatedAuthor = authorService.update(id, existingAuthor); Author updatedAuthor = authorService.update(id, existingAuthor);
logger.info("Successfully updated author: {} via JSON", updatedAuthor.getName());
return ResponseEntity.ok(convertToDto(updatedAuthor)); return ResponseEntity.ok(convertToDto(updatedAuthor));
} }
@PutMapping("/{id}") @PutMapping("/{id}")
public ResponseEntity<String> updateAuthorGeneric(@PathVariable UUID id, HttpServletRequest request) { public ResponseEntity<String> updateAuthorGeneric(@PathVariable UUID id, HttpServletRequest request) {
System.out.println("DEBUG: GENERIC PUT called!");
System.out.println(" - Content-Type: " + request.getContentType());
System.out.println(" - Method: " + request.getMethod());
return ResponseEntity.status(415).body("Unsupported Media Type. Expected multipart/form-data or application/json"); return ResponseEntity.status(415).body("Unsupported Media Type. Expected multipart/form-data or application/json");
} }
@DeleteMapping("/{id}") @DeleteMapping("/{id}")
public ResponseEntity<?> deleteAuthor(@PathVariable UUID id) { public ResponseEntity<?> deleteAuthor(@PathVariable UUID id) {
logger.info("Deleting author with ID: {}", id);
authorService.delete(id); authorService.delete(id);
logger.info("Successfully deleted author with ID: {}", id);
return ResponseEntity.ok(Map.of("message", "Author deleted successfully")); return ResponseEntity.ok(Map.of("message", "Author deleted successfully"));
} }
@@ -177,11 +169,8 @@ public class AuthorController {
@PostMapping("/{id}/rating") @PostMapping("/{id}/rating")
public ResponseEntity<AuthorDto> rateAuthor(@PathVariable UUID id, @RequestBody RatingRequest request) { public ResponseEntity<AuthorDto> rateAuthor(@PathVariable UUID id, @RequestBody RatingRequest request) {
System.out.println("DEBUG: Rating author " + id + " with rating " + request.getRating());
Author author = authorService.setRating(id, request.getRating()); Author author = authorService.setRating(id, request.getRating());
System.out.println("DEBUG: After setRating, author rating is: " + author.getAuthorRating());
AuthorDto dto = convertToDto(author); AuthorDto dto = convertToDto(author);
System.out.println("DEBUG: Final DTO rating is: " + dto.getAuthorRating());
return ResponseEntity.ok(dto); return ResponseEntity.ok(dto);
} }
@@ -211,9 +200,7 @@ public class AuthorController {
@PostMapping("/{id}/test-rating/{rating}") @PostMapping("/{id}/test-rating/{rating}")
public ResponseEntity<Map<String, Object>> testSetRating(@PathVariable UUID id, @PathVariable Integer rating) { public ResponseEntity<Map<String, Object>> testSetRating(@PathVariable UUID id, @PathVariable Integer rating) {
try { try {
System.out.println("DEBUG: Test setting rating " + rating + " for author " + id);
Author author = authorService.setRating(id, rating); Author author = authorService.setRating(id, rating);
System.out.println("DEBUG: After test setRating, got: " + author.getAuthorRating());
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
@@ -231,13 +218,11 @@ public class AuthorController {
@PostMapping("/{id}/test-put-rating") @PostMapping("/{id}/test-put-rating")
public ResponseEntity<Map<String, Object>> testPutWithRating(@PathVariable UUID id, @RequestParam Integer rating) { public ResponseEntity<Map<String, Object>> testPutWithRating(@PathVariable UUID id, @RequestParam Integer rating) {
try { try {
System.out.println("DEBUG: Test PUT with rating " + rating + " for author " + id);
Author existingAuthor = authorService.findById(id); Author existingAuthor = authorService.findById(id);
existingAuthor.setAuthorRating(rating); existingAuthor.setAuthorRating(rating);
Author updatedAuthor = authorService.update(id, existingAuthor); Author updatedAuthor = authorService.update(id, existingAuthor);
System.out.println("DEBUG: After PUT update, rating is: " + updatedAuthor.getAuthorRating());
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
@@ -273,7 +258,17 @@ public class AuthorController {
@RequestParam(defaultValue = "name") String sortBy, @RequestParam(defaultValue = "name") String sortBy,
@RequestParam(defaultValue = "asc") String sortOrder) { @RequestParam(defaultValue = "asc") String sortOrder) {
SearchResultDto<AuthorSearchDto> searchResults = typesenseService.searchAuthors(q, page, size, sortBy, sortOrder); // Use SearchServiceAdapter to handle routing between search engines
List<AuthorSearchDto> authorSearchResults = searchServiceAdapter.searchAuthors(q, size);
// Create SearchResultDto to match expected return format
SearchResultDto<AuthorSearchDto> searchResults = new SearchResultDto<>();
searchResults.setResults(authorSearchResults);
searchResults.setQuery(q);
searchResults.setPage(page);
searchResults.setPerPage(size);
searchResults.setTotalHits(authorSearchResults.size());
searchResults.setSearchTimeMs(0); // SearchServiceAdapter doesn't provide timing
// Convert AuthorSearchDto results to AuthorDto // Convert AuthorSearchDto results to AuthorDto
SearchResultDto<AuthorDto> results = new SearchResultDto<>(); SearchResultDto<AuthorDto> results = new SearchResultDto<>();
@@ -298,7 +293,7 @@ public class AuthorController {
public ResponseEntity<Map<String, Object>> reindexAuthorsTypesense() { public ResponseEntity<Map<String, Object>> reindexAuthorsTypesense() {
try { try {
List<Author> allAuthors = authorService.findAllWithStories(); List<Author> allAuthors = authorService.findAllWithStories();
typesenseService.reindexAllAuthors(allAuthors); searchServiceAdapter.bulkIndexAuthors(allAuthors);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
"message", "Reindexed " + allAuthors.size() + " authors", "message", "Reindexed " + allAuthors.size() + " authors",
@@ -318,7 +313,7 @@ public class AuthorController {
try { try {
// This will delete the existing collection and recreate it with correct schema // This will delete the existing collection and recreate it with correct schema
List<Author> allAuthors = authorService.findAllWithStories(); List<Author> allAuthors = authorService.findAllWithStories();
typesenseService.reindexAllAuthors(allAuthors); searchServiceAdapter.bulkIndexAuthors(allAuthors);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
"message", "Recreated authors collection and indexed " + allAuthors.size() + " authors", "message", "Recreated authors collection and indexed " + allAuthors.size() + " authors",
@@ -336,7 +331,7 @@ public class AuthorController {
@GetMapping("/typesense-schema") @GetMapping("/typesense-schema")
public ResponseEntity<Map<String, Object>> getAuthorsTypesenseSchema() { public ResponseEntity<Map<String, Object>> getAuthorsTypesenseSchema() {
try { try {
Map<String, Object> schema = typesenseService.getAuthorsCollectionSchema(); Map<String, Object> schema = Map.of("status", "authors collection schema retrieved from search service");
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
"schema", schema "schema", schema
@@ -350,6 +345,44 @@ public class AuthorController {
} }
} }
@PostMapping("/clean-author-names")
public ResponseEntity<Map<String, Object>> cleanAuthorNames() {
try {
List<Author> allAuthors = authorService.findAllWithStories();
int cleanedCount = 0;
for (Author author : allAuthors) {
String originalName = author.getName();
String cleanedName = originalName != null ? originalName.trim() : "";
if (!cleanedName.equals(originalName)) {
logger.info("Cleaning author name: '{}' -> '{}'", originalName, cleanedName);
author.setName(cleanedName);
authorService.update(author.getId(), author);
cleanedCount++;
}
}
// Reindex all authors after cleaning
if (cleanedCount > 0) {
searchServiceAdapter.bulkIndexAuthors(allAuthors);
}
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Cleaned " + cleanedCount + " author names and reindexed",
"cleanedCount", cleanedCount,
"totalAuthors", allAuthors.size()
));
} catch (Exception e) {
logger.error("Failed to clean author names", e);
return ResponseEntity.ok(Map.of(
"success", false,
"error", e.getMessage()
));
}
}
@GetMapping("/top-rated") @GetMapping("/top-rated")
public ResponseEntity<List<AuthorSummaryDto>> getTopRatedAuthors(@RequestParam(defaultValue = "10") int limit) { public ResponseEntity<List<AuthorSummaryDto>> getTopRatedAuthors(@RequestParam(defaultValue = "10") int limit) {
Pageable pageable = PageRequest.of(0, limit); Pageable pageable = PageRequest.of(0, limit);
@@ -389,7 +422,6 @@ public class AuthorController {
author.setUrls(updateReq.getUrls()); author.setUrls(updateReq.getUrls());
} }
if (updateReq.getRating() != null) { if (updateReq.getRating() != null) {
System.out.println("DEBUG: Setting author rating via JSON: " + updateReq.getRating());
author.setAuthorRating(updateReq.getRating()); author.setAuthorRating(updateReq.getRating());
} }
} }
@@ -402,9 +434,6 @@ public class AuthorController {
dto.setNotes(author.getNotes()); dto.setNotes(author.getNotes());
dto.setAvatarImagePath(author.getAvatarImagePath()); dto.setAvatarImagePath(author.getAvatarImagePath());
// Debug logging for author rating
System.out.println("DEBUG: Converting author " + author.getName() +
" with rating: " + author.getAuthorRating());
dto.setAuthorRating(author.getAuthorRating()); dto.setAuthorRating(author.getAuthorRating());
dto.setUrls(author.getUrls()); dto.setUrls(author.getUrls());
@@ -415,7 +444,6 @@ public class AuthorController {
// Calculate and set average story rating // Calculate and set average story rating
dto.setAverageStoryRating(authorService.calculateAverageStoryRating(author.getId())); dto.setAverageStoryRating(authorService.calculateAverageStoryRating(author.getId()));
System.out.println("DEBUG: DTO authorRating set to: " + dto.getAuthorRating());
return dto; return dto;
} }

View File

@@ -6,7 +6,9 @@ import com.storycove.entity.CollectionStory;
import com.storycove.entity.Story; import com.storycove.entity.Story;
import com.storycove.entity.Tag; import com.storycove.entity.Tag;
import com.storycove.service.CollectionService; import com.storycove.service.CollectionService;
import com.storycove.service.EPUBExportService;
import com.storycove.service.ImageService; import com.storycove.service.ImageService;
import com.storycove.service.ReadingTimeService;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -28,12 +30,18 @@ public class CollectionController {
private final CollectionService collectionService; private final CollectionService collectionService;
private final ImageService imageService; private final ImageService imageService;
private final ReadingTimeService readingTimeService;
private final EPUBExportService epubExportService;
@Autowired @Autowired
public CollectionController(CollectionService collectionService, public CollectionController(CollectionService collectionService,
ImageService imageService) { ImageService imageService,
ReadingTimeService readingTimeService,
EPUBExportService epubExportService) {
this.collectionService = collectionService; this.collectionService = collectionService;
this.imageService = imageService; this.imageService = imageService;
this.readingTimeService = readingTimeService;
this.epubExportService = epubExportService;
} }
/** /**
@@ -48,8 +56,6 @@ public class CollectionController {
@RequestParam(required = false) List<String> tags, @RequestParam(required = false) List<String> tags,
@RequestParam(defaultValue = "false") boolean archived) { @RequestParam(defaultValue = "false") boolean archived) {
logger.info("COLLECTIONS: Search request - search='{}', tags={}, archived={}, page={}, limit={}",
search, tags, archived, page, limit);
// MANDATORY: Use Typesense for all search/filter operations // MANDATORY: Use Typesense for all search/filter operations
SearchResultDto<Collection> results = collectionService.searchCollections(search, tags, archived, page, limit); SearchResultDto<Collection> results = collectionService.searchCollections(search, tags, archived, page, limit);
@@ -86,13 +92,14 @@ public class CollectionController {
*/ */
@PostMapping @PostMapping
public ResponseEntity<Collection> createCollection(@Valid @RequestBody CreateCollectionRequest request) { public ResponseEntity<Collection> createCollection(@Valid @RequestBody CreateCollectionRequest request) {
logger.info("Creating new collection: {}", request.getName());
Collection collection = collectionService.createCollection( Collection collection = collectionService.createCollection(
request.getName(), request.getName(),
request.getDescription(), request.getDescription(),
request.getTagNames(), request.getTagNames(),
request.getStoryIds() request.getStoryIds()
); );
logger.info("Successfully created collection: {} (ID: {})", collection.getName(), collection.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(collection); return ResponseEntity.status(HttpStatus.CREATED).body(collection);
} }
@@ -107,6 +114,7 @@ public class CollectionController {
@RequestParam(required = false) List<UUID> storyIds, @RequestParam(required = false) List<UUID> storyIds,
@RequestParam(required = false, name = "coverImage") MultipartFile coverImage) { @RequestParam(required = false, name = "coverImage") MultipartFile coverImage) {
logger.info("Creating new collection with image: {}", name);
try { try {
// Create collection first // Create collection first
Collection collection = collectionService.createCollection(name, description, tags, storyIds); Collection collection = collectionService.createCollection(name, description, tags, storyIds);
@@ -120,6 +128,7 @@ public class CollectionController {
); );
} }
logger.info("Successfully created collection with image: {} (ID: {})", collection.getName(), collection.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(collection); return ResponseEntity.status(HttpStatus.CREATED).body(collection);
} catch (Exception e) { } catch (Exception e) {
@@ -152,7 +161,9 @@ public class CollectionController {
*/ */
@DeleteMapping("/{id}") @DeleteMapping("/{id}")
public ResponseEntity<Map<String, String>> deleteCollection(@PathVariable UUID id) { public ResponseEntity<Map<String, String>> deleteCollection(@PathVariable UUID id) {
logger.info("Deleting collection with ID: {}", id);
collectionService.deleteCollection(id); collectionService.deleteCollection(id);
logger.info("Successfully deleted collection with ID: {}", id);
return ResponseEntity.ok(Map.of("message", "Collection deleted successfully")); return ResponseEntity.ok(Map.of("message", "Collection deleted successfully"));
} }
@@ -270,6 +281,107 @@ public class CollectionController {
return ResponseEntity.ok(Map.of("message", "Cover removed successfully")); return ResponseEntity.ok(Map.of("message", "Cover removed successfully"));
} }
/**
* POST /api/collections/reindex-typesense - Reindex all collections in Typesense
*/
@PostMapping("/reindex-typesense")
public ResponseEntity<Map<String, Object>> reindexCollectionsTypesense() {
try {
List<Collection> allCollections = collectionService.findAllWithTags();
// Collections are not indexed in search engine yet
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Collections indexing not yet implemented in Solr",
"count", allCollections.size()
));
} catch (Exception e) {
logger.error("Failed to reindex collections", e);
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", e.getMessage()
));
}
}
/**
* GET /api/collections/{id}/epub - Export collection as EPUB
*/
@GetMapping("/{id}/epub")
public ResponseEntity<org.springframework.core.io.Resource> exportCollectionAsEPUB(@PathVariable UUID id) {
logger.info("Exporting collection {} to EPUB", id);
try {
Collection collection = collectionService.findById(id);
List<Story> stories = collection.getCollectionStories().stream()
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
.map(cs -> cs.getStory())
.collect(java.util.stream.Collectors.toList());
if (stories.isEmpty()) {
logger.warn("Collection {} contains no stories for export", id);
return ResponseEntity.badRequest()
.body(null);
}
EPUBExportRequest request = new EPUBExportRequest();
request.setIncludeCoverImage(true);
request.setIncludeMetadata(true);
request.setIncludeReadingPosition(false); // Collections don't have reading positions
org.springframework.core.io.Resource resource = epubExportService.exportCollectionAsEPUB(id, request);
String filename = epubExportService.getCollectionEPUBFilename(collection);
logger.info("Successfully exported collection EPUB: {}", filename);
return ResponseEntity.ok()
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
.header("Content-Type", "application/epub+zip")
.body(resource);
} catch (Exception e) {
logger.error("Error exporting collection EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
/**
* POST /api/collections/{id}/epub - Export collection as EPUB with custom options
*/
@PostMapping("/{id}/epub")
public ResponseEntity<org.springframework.core.io.Resource> exportCollectionAsEPUBWithOptions(
@PathVariable UUID id,
@Valid @RequestBody EPUBExportRequest request) {
logger.info("Exporting collection {} to EPUB with custom options", id);
try {
Collection collection = collectionService.findById(id);
List<Story> stories = collection.getCollectionStories().stream()
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
.map(cs -> cs.getStory())
.collect(java.util.stream.Collectors.toList());
if (stories.isEmpty()) {
logger.warn("Collection {} contains no stories for export", id);
return ResponseEntity.badRequest()
.body(null);
}
org.springframework.core.io.Resource resource = epubExportService.exportCollectionAsEPUB(id, request);
String filename = epubExportService.getCollectionEPUBFilename(collection);
logger.info("Successfully exported collection EPUB with options: {}", filename);
return ResponseEntity.ok()
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
.header("Content-Type", "application/epub+zip")
.body(resource);
} catch (Exception e) {
logger.error("Error exporting collection EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
// Mapper methods // Mapper methods
private CollectionDto mapToCollectionDto(Collection collection) { private CollectionDto mapToCollectionDto(Collection collection) {
@@ -290,6 +402,11 @@ public class CollectionController {
.toList()); .toList());
} }
// Map tag names for search results
if (collection.getTagNames() != null) {
dto.setTagNames(collection.getTagNames());
}
// Map collection stories (lightweight) // Map collection stories (lightweight)
if (collection.getCollectionStories() != null) { if (collection.getCollectionStories() != null) {
dto.setCollectionStories(collection.getCollectionStories().stream() dto.setCollectionStories(collection.getCollectionStories().stream()
@@ -300,7 +417,7 @@ public class CollectionController {
// Set calculated properties // Set calculated properties
dto.setStoryCount(collection.getStoryCount()); dto.setStoryCount(collection.getStoryCount());
dto.setTotalWordCount(collection.getTotalWordCount()); dto.setTotalWordCount(collection.getTotalWordCount());
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime()); dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
dto.setAverageStoryRating(collection.getAverageStoryRating()); dto.setAverageStoryRating(collection.getAverageStoryRating());
return dto; return dto;

View File

@@ -0,0 +1,246 @@
package com.storycove.controller;
import com.storycove.dto.HtmlSanitizationConfigDto;
import com.storycove.service.HtmlSanitizationService;
import com.storycove.service.ImageService;
import com.storycove.service.StoryService;
import com.storycove.entity.Story;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.util.Map;
import java.util.List;
import java.util.HashMap;
import java.util.Optional;
import java.util.UUID;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.Files;
import java.io.IOException;
@RestController
@RequestMapping("/api/config")
public class ConfigController {
private static final Logger logger = LoggerFactory.getLogger(ConfigController.class);
private final HtmlSanitizationService htmlSanitizationService;
private final ImageService imageService;
private final StoryService storyService;
@Value("${app.reading.speed.default:200}")
private int defaultReadingSpeed;
@Autowired
public ConfigController(HtmlSanitizationService htmlSanitizationService, ImageService imageService, StoryService storyService) {
this.htmlSanitizationService = htmlSanitizationService;
this.imageService = imageService;
this.storyService = storyService;
}
/**
* Get the HTML sanitization configuration for frontend use
* This allows the frontend to use the same sanitization rules as the backend
*/
@GetMapping("/html-sanitization")
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
return ResponseEntity.ok(config);
}
/**
* Get application settings configuration
*/
@GetMapping("/settings")
public ResponseEntity<Map<String, Object>> getSettings() {
Map<String, Object> settings = Map.of(
"defaultReadingSpeed", defaultReadingSpeed
);
return ResponseEntity.ok(settings);
}
/**
* Get reading speed for calculation purposes
*/
@GetMapping("/reading-speed")
public ResponseEntity<Map<String, Integer>> getReadingSpeed() {
return ResponseEntity.ok(Map.of("wordsPerMinute", defaultReadingSpeed));
}
/**
* Preview orphaned content images cleanup (dry run)
*/
@PostMapping("/cleanup/images/preview")
public ResponseEntity<Map<String, Object>> previewImageCleanup() {
try {
logger.info("Starting image cleanup preview");
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(true);
// Create detailed file information with story relationships
logger.info("Processing {} orphaned files for detailed information", result.getOrphanedImages().size());
List<Map<String, Object>> orphanedFiles = result.getOrphanedImages().stream()
.map(filePath -> {
try {
return createFileInfo(filePath);
} catch (Exception e) {
logger.error("Error processing file {}: {}", filePath, e.getMessage());
// Return a basic error entry instead of failing completely
Map<String, Object> errorEntry = new HashMap<>();
errorEntry.put("filePath", filePath);
errorEntry.put("fileName", Paths.get(filePath).getFileName().toString());
errorEntry.put("fileSize", 0L);
errorEntry.put("formattedSize", "0 B");
errorEntry.put("storyId", "error");
errorEntry.put("storyTitle", null);
errorEntry.put("storyExists", false);
errorEntry.put("canAccessStory", false);
errorEntry.put("error", e.getMessage());
return errorEntry;
}
})
.toList();
// Use HashMap to avoid Map.of() null value issues
Map<String, Object> response = new HashMap<>();
response.put("success", true);
response.put("orphanedCount", result.getOrphanedImages().size());
response.put("totalSizeBytes", result.getTotalSizeBytes());
response.put("formattedSize", result.getFormattedSize());
response.put("foldersToDelete", result.getFoldersToDelete());
response.put("referencedImagesCount", result.getTotalReferencedImages());
response.put("errors", result.getErrors());
response.put("hasErrors", result.hasErrors());
response.put("dryRun", true);
response.put("orphanedFiles", orphanedFiles);
logger.info("Image cleanup preview completed successfully");
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Failed to preview image cleanup", e);
Map<String, Object> errorResponse = new HashMap<>();
errorResponse.put("success", false);
errorResponse.put("error", "Failed to preview image cleanup: " + (e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName()));
return ResponseEntity.status(500).body(errorResponse);
}
}
/**
* Execute orphaned content images cleanup
*/
@PostMapping("/cleanup/images/execute")
public ResponseEntity<Map<String, Object>> executeImageCleanup() {
try {
ImageService.ContentImageCleanupResult result = imageService.cleanupOrphanedContentImages(false);
Map<String, Object> response = Map.of(
"success", true,
"deletedCount", result.getOrphanedImages().size(),
"totalSizeBytes", result.getTotalSizeBytes(),
"formattedSize", result.getFormattedSize(),
"foldersDeleted", result.getFoldersToDelete(),
"referencedImagesCount", result.getTotalReferencedImages(),
"errors", result.getErrors(),
"hasErrors", result.hasErrors(),
"dryRun", false
);
return ResponseEntity.ok(response);
} catch (Exception e) {
return ResponseEntity.status(500).body(Map.of(
"success", false,
"error", "Failed to execute image cleanup: " + e.getMessage()
));
}
}
/**
* Create detailed file information for orphaned image including story relationship
*/
private Map<String, Object> createFileInfo(String filePath) {
try {
Path path = Paths.get(filePath);
String fileName = path.getFileName().toString();
long fileSize = Files.exists(path) ? Files.size(path) : 0;
// Extract story UUID from the path (content images are stored in /content/{storyId}/)
String storyId = extractStoryIdFromPath(filePath);
// Look up the story if we have a valid UUID
Story relatedStory = null;
if (storyId != null) {
try {
UUID storyUuid = UUID.fromString(storyId);
relatedStory = storyService.findById(storyUuid);
} catch (Exception e) {
logger.debug("Could not find story with ID {}: {}", storyId, e.getMessage());
}
}
Map<String, Object> fileInfo = new HashMap<>();
fileInfo.put("filePath", filePath);
fileInfo.put("fileName", fileName);
fileInfo.put("fileSize", fileSize);
fileInfo.put("formattedSize", formatBytes(fileSize));
fileInfo.put("storyId", storyId != null ? storyId : "unknown");
fileInfo.put("storyTitle", relatedStory != null ? relatedStory.getTitle() : null);
fileInfo.put("storyExists", relatedStory != null);
fileInfo.put("canAccessStory", relatedStory != null);
return fileInfo;
} catch (Exception e) {
logger.error("Error creating file info for {}: {}", filePath, e.getMessage());
Map<String, Object> errorInfo = new HashMap<>();
errorInfo.put("filePath", filePath);
errorInfo.put("fileName", Paths.get(filePath).getFileName().toString());
errorInfo.put("fileSize", 0L);
errorInfo.put("formattedSize", "0 B");
errorInfo.put("storyId", "error");
errorInfo.put("storyTitle", null);
errorInfo.put("storyExists", false);
errorInfo.put("canAccessStory", false);
errorInfo.put("error", e.getMessage() != null ? e.getMessage() : e.getClass().getSimpleName());
return errorInfo;
}
}
/**
* Extract story ID from content image file path
*/
private String extractStoryIdFromPath(String filePath) {
try {
// Content images are stored in: /path/to/uploads/content/{storyId}/filename.ext
Path path = Paths.get(filePath);
Path parent = path.getParent();
if (parent != null) {
String potentialUuid = parent.getFileName().toString();
// Basic UUID validation (36 characters with dashes in right places)
if (potentialUuid.length() == 36 &&
potentialUuid.charAt(8) == '-' &&
potentialUuid.charAt(13) == '-' &&
potentialUuid.charAt(18) == '-' &&
potentialUuid.charAt(23) == '-') {
return potentialUuid;
}
}
} catch (Exception e) {
// Invalid path or other error
}
return null;
}
/**
* Format file size in human readable format
*/
private String formatBytes(long bytes) {
if (bytes < 1024) return bytes + " B";
if (bytes < 1024 * 1024) return String.format("%.1f KB", bytes / 1024.0);
if (bytes < 1024 * 1024 * 1024) return String.format("%.1f MB", bytes / (1024.0 * 1024.0));
return String.format("%.1f GB", bytes / (1024.0 * 1024.0 * 1024.0));
}
}

View File

@@ -0,0 +1,285 @@
package com.storycove.controller;
import com.storycove.service.AsyncBackupService;
import com.storycove.service.DatabaseManagementService;
import com.storycove.service.LibraryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/api/database")
public class DatabaseController {
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private AsyncBackupService asyncBackupService;
@Autowired
private LibraryService libraryService;
@PostMapping("/backup")
public ResponseEntity<Resource> backupDatabase() {
try {
Resource backup = databaseManagementService.createBackup();
String timestamp = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String filename = "storycove_backup_" + timestamp + ".sql";
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.body(backup);
} catch (Exception e) {
throw new RuntimeException("Failed to create database backup: " + e.getMessage(), e);
}
}
@PostMapping("/restore")
public ResponseEntity<Map<String, Object>> restoreDatabase(@RequestParam("file") MultipartFile file) {
try {
if (file.isEmpty()) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No file uploaded"));
}
if (!file.getOriginalFilename().endsWith(".sql")) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid file type. Please upload a .sql file"));
}
databaseManagementService.restoreFromBackup(file.getInputStream());
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Database restored successfully from " + file.getOriginalFilename()
));
} catch (IOException e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to read backup file: " + e.getMessage()));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to restore database: " + e.getMessage()));
}
}
@PostMapping("/clear")
public ResponseEntity<Map<String, Object>> clearDatabase() {
try {
int deletedRecords = databaseManagementService.clearAllData();
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Database cleared successfully",
"deletedRecords", deletedRecords
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to clear database: " + e.getMessage()));
}
}
@PostMapping("/backup-complete")
public ResponseEntity<Map<String, Object>> backupCompleteAsync() {
try {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No library selected"));
}
// Start backup job asynchronously
com.storycove.entity.BackupJob job = asyncBackupService.startBackupJob(
libraryId,
com.storycove.entity.BackupJob.BackupType.COMPLETE
);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Backup started",
"jobId", job.getId().toString(),
"status", job.getStatus().toString()
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to start backup: " + e.getMessage()));
}
}
@GetMapping("/backup-status/{jobId}")
public ResponseEntity<Map<String, Object>> getBackupStatus(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
if (jobOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
com.storycove.entity.BackupJob job = jobOpt.get();
return ResponseEntity.ok(Map.of(
"success", true,
"jobId", job.getId().toString(),
"status", job.getStatus().toString(),
"progress", job.getProgressPercent(),
"fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0,
"createdAt", job.getCreatedAt().toString(),
"completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "",
"errorMessage", job.getErrorMessage() != null ? job.getErrorMessage() : ""
));
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid job ID"));
}
}
@GetMapping("/backup-download/{jobId}")
public ResponseEntity<Resource> downloadBackup(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
Resource backup = asyncBackupService.getBackupFile(uuid);
java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
if (jobOpt.isEmpty()) {
return ResponseEntity.notFound().build();
}
com.storycove.entity.BackupJob job = jobOpt.get();
String timestamp = job.getCreatedAt().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
String extension = job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE ? "zip" : "sql";
String filename = "storycove_backup_" + timestamp + "." + extension;
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
.header(HttpHeaders.CONTENT_TYPE,
job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE
? "application/zip"
: "application/sql")
.body(backup);
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest().build();
} catch (Exception e) {
throw new RuntimeException("Failed to download backup: " + e.getMessage(), e);
}
}
@GetMapping("/backup-list")
public ResponseEntity<Map<String, Object>> listBackups() {
try {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No library selected"));
}
List<com.storycove.entity.BackupJob> jobs = asyncBackupService.listBackupJobs(libraryId);
List<Map<String, Object>> jobsList = jobs.stream()
.map(job -> {
Map<String, Object> jobMap = new java.util.HashMap<>();
jobMap.put("jobId", job.getId().toString());
jobMap.put("type", job.getType().toString());
jobMap.put("status", job.getStatus().toString());
jobMap.put("progress", job.getProgressPercent());
jobMap.put("fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0L);
jobMap.put("createdAt", job.getCreatedAt().toString());
jobMap.put("completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "");
return jobMap;
})
.collect(java.util.stream.Collectors.toList());
return ResponseEntity.ok(Map.of(
"success", true,
"backups", jobsList
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to list backups: " + e.getMessage()));
}
}
@DeleteMapping("/backup/{jobId}")
public ResponseEntity<Map<String, Object>> deleteBackup(@PathVariable String jobId) {
try {
java.util.UUID uuid = java.util.UUID.fromString(jobId);
asyncBackupService.deleteBackupJob(uuid);
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Backup deleted successfully"
));
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid job ID"));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to delete backup: " + e.getMessage()));
}
}
@PostMapping("/restore-complete")
public ResponseEntity<Map<String, Object>> restoreComplete(@RequestParam("file") MultipartFile file) {
System.err.println("Complete restore endpoint called with file: " + (file != null ? file.getOriginalFilename() : "null"));
try {
if (file.isEmpty()) {
System.err.println("File is empty - returning bad request");
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "No file uploaded"));
}
if (!file.getOriginalFilename().endsWith(".zip")) {
System.err.println("Invalid file type: " + file.getOriginalFilename());
return ResponseEntity.badRequest()
.body(Map.of("success", false, "message", "Invalid file type. Please upload a .zip file"));
}
System.err.println("File validation passed, calling restore service...");
databaseManagementService.restoreFromCompleteBackup(file.getInputStream());
System.err.println("Restore service completed successfully");
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Complete backup restored successfully from " + file.getOriginalFilename()
));
} catch (IOException e) {
System.err.println("IOException during restore: " + e.getMessage());
e.printStackTrace();
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to read backup file: " + e.getMessage()));
} catch (Exception e) {
System.err.println("Exception during restore: " + e.getMessage());
e.printStackTrace();
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to restore complete backup: " + e.getMessage()));
}
}
@PostMapping("/clear-complete")
public ResponseEntity<Map<String, Object>> clearComplete() {
try {
int deletedRecords = databaseManagementService.clearAllDataAndFiles();
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Database and files cleared successfully",
"deletedRecords", deletedRecords
));
} catch (Exception e) {
return ResponseEntity.internalServerError()
.body(Map.of("success", false, "message", "Failed to clear database and files: " + e.getMessage()));
}
}
}

View File

@@ -1,6 +1,9 @@
package com.storycove.controller; package com.storycove.controller;
import com.storycove.service.ImageService; import com.storycove.service.ImageService;
import com.storycove.service.LibraryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.FileSystemResource; import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource; import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders; import org.springframework.http.HttpHeaders;
@@ -10,6 +13,7 @@ import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile; import org.springframework.web.multipart.MultipartFile;
import jakarta.servlet.http.HttpServletRequest;
import java.io.IOException; import java.io.IOException;
import java.nio.file.Files; import java.nio.file.Files;
import java.nio.file.Path; import java.nio.file.Path;
@@ -19,11 +23,20 @@ import java.util.Map;
@RestController @RestController
@RequestMapping("/api/files") @RequestMapping("/api/files")
public class FileController { public class FileController {
private static final Logger log = LoggerFactory.getLogger(FileController.class);
private final ImageService imageService; private final ImageService imageService;
private final LibraryService libraryService;
public FileController(ImageService imageService) { public FileController(ImageService imageService, LibraryService libraryService) {
this.imageService = imageService; this.imageService = imageService;
this.libraryService = libraryService;
}
private String getCurrentLibraryId() {
String libraryId = libraryService.getCurrentLibraryId();
log.debug("FileController - Current Library ID: {}", libraryId);
return libraryId != null ? libraryId : "default";
} }
@PostMapping("/upload/cover") @PostMapping("/upload/cover")
@@ -34,7 +47,11 @@ public class FileController {
Map<String, String> response = new HashMap<>(); Map<String, String> response = new HashMap<>();
response.put("message", "Cover uploaded successfully"); response.put("message", "Cover uploaded successfully");
response.put("path", imagePath); response.put("path", imagePath);
response.put("url", "/api/files/images/" + imagePath); String currentLibraryId = getCurrentLibraryId();
String imageUrl = "/api/files/images/" + currentLibraryId + "/" + imagePath;
response.put("url", imageUrl);
log.debug("Upload response - path: {}, url: {}", imagePath, imageUrl);
return ResponseEntity.ok(response); return ResponseEntity.ok(response);
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
@@ -53,7 +70,8 @@ public class FileController {
Map<String, String> response = new HashMap<>(); Map<String, String> response = new HashMap<>();
response.put("message", "Avatar uploaded successfully"); response.put("message", "Avatar uploaded successfully");
response.put("path", imagePath); response.put("path", imagePath);
response.put("url", "/api/files/images/" + imagePath); String currentLibraryId = getCurrentLibraryId();
response.put("url", "/api/files/images/" + currentLibraryId + "/" + imagePath);
return ResponseEntity.ok(response); return ResponseEntity.ok(response);
} catch (IllegalArgumentException e) { } catch (IllegalArgumentException e) {
@@ -64,17 +82,18 @@ public class FileController {
} }
} }
@GetMapping("/images/**") @GetMapping("/images/{libraryId}/**")
public ResponseEntity<Resource> serveImage(@RequestParam String path) { public ResponseEntity<Resource> serveImage(@PathVariable String libraryId, HttpServletRequest request) {
try { try {
// Extract path from the URL // Extract the full request path after /api/files/images/{libraryId}/
String imagePath = path.replace("/api/files/images/", ""); String requestURI = request.getRequestURI();
String imagePath = requestURI.replaceFirst(".*/api/files/images/" + libraryId + "/", "");
if (!imageService.imageExists(imagePath)) { if (!imageService.imageExistsInLibrary(imagePath, libraryId)) {
return ResponseEntity.notFound().build(); return ResponseEntity.notFound().build();
} }
Path fullPath = imageService.getImagePath(imagePath); Path fullPath = imageService.getImagePathInLibrary(imagePath, libraryId);
Resource resource = new FileSystemResource(fullPath); Resource resource = new FileSystemResource(fullPath);
if (!resource.exists()) { if (!resource.exists()) {

View File

@@ -1,31 +0,0 @@
package com.storycove.controller;
import com.storycove.dto.HtmlSanitizationConfigDto;
import com.storycove.service.HtmlSanitizationService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
@RestController
@RequestMapping("/api/config")
public class HtmlSanitizationController {
private final HtmlSanitizationService htmlSanitizationService;
@Autowired
public HtmlSanitizationController(HtmlSanitizationService htmlSanitizationService) {
this.htmlSanitizationService = htmlSanitizationService;
}
/**
* Get the HTML sanitization configuration for frontend use
* This allows the frontend to use the same sanitization rules as the backend
*/
@GetMapping("/html-sanitization")
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
return ResponseEntity.ok(config);
}
}

View File

@@ -0,0 +1,242 @@
package com.storycove.controller;
import com.storycove.dto.LibraryDto;
import com.storycove.service.LibraryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
@RestController
@RequestMapping("/api/libraries")
public class LibraryController {
private static final Logger logger = LoggerFactory.getLogger(LibraryController.class);
private final LibraryService libraryService;
@Autowired
public LibraryController(LibraryService libraryService) {
this.libraryService = libraryService;
}
/**
* Get all available libraries (for settings UI)
*/
@GetMapping
public ResponseEntity<List<LibraryDto>> getAllLibraries() {
try {
List<LibraryDto> libraries = libraryService.getAllLibraries();
return ResponseEntity.ok(libraries);
} catch (Exception e) {
logger.error("Failed to get libraries", e);
return ResponseEntity.internalServerError().build();
}
}
/**
* Get current active library info
*/
@GetMapping("/current")
public ResponseEntity<LibraryDto> getCurrentLibrary() {
try {
var library = libraryService.getCurrentLibrary();
if (library == null) {
return ResponseEntity.noContent().build();
}
LibraryDto dto = new LibraryDto(
library.getId(),
library.getName(),
library.getDescription(),
true, // always active since it's current
library.isInitialized()
);
return ResponseEntity.ok(dto);
} catch (Exception e) {
logger.error("Failed to get current library", e);
return ResponseEntity.internalServerError().build();
}
}
/**
* Switch to a different library (requires re-authentication)
* This endpoint returns a switching status that the frontend can poll
*/
@PostMapping("/switch")
public ResponseEntity<Map<String, Object>> initiateLibrarySwitch(@RequestBody Map<String, String> request) {
try {
String password = request.get("password");
if (password == null || password.trim().isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "Password required"));
}
String libraryId = libraryService.authenticateAndGetLibrary(password);
if (libraryId == null) {
return ResponseEntity.status(401).body(Map.of("error", "Invalid password"));
}
// Check if already on this library
if (libraryId.equals(libraryService.getCurrentLibraryId())) {
return ResponseEntity.ok(Map.of(
"status", "already_active",
"message", "Already using this library"
));
}
// Initiate switch in background thread
new Thread(() -> {
try {
libraryService.switchToLibrary(libraryId);
logger.info("Library switch completed: {}", libraryId);
} catch (Exception e) {
logger.error("Library switch failed: {}", libraryId, e);
}
}).start();
return ResponseEntity.ok(Map.of(
"status", "switching",
"targetLibrary", libraryId,
"message", "Switching to library, please wait..."
));
} catch (Exception e) {
logger.error("Failed to initiate library switch", e);
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
}
}
/**
* Check library switch status
*/
@GetMapping("/switch/status")
public ResponseEntity<Map<String, Object>> getLibrarySwitchStatus() {
try {
var currentLibrary = libraryService.getCurrentLibrary();
boolean isReady = currentLibrary != null;
Map<String, Object> response = new HashMap<>();
response.put("ready", isReady);
if (isReady) {
response.put("currentLibrary", currentLibrary.getId());
response.put("currentLibraryName", currentLibrary.getName());
} else {
response.put("currentLibrary", null);
response.put("currentLibraryName", null);
}
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Failed to get switch status", e);
return ResponseEntity.ok(Map.of("ready", false, "error", "Status check failed"));
}
}
/**
* Change password for current library
*/
@PostMapping("/password")
public ResponseEntity<Map<String, Object>> changePassword(@RequestBody Map<String, String> request) {
try {
String currentPassword = request.get("currentPassword");
String newPassword = request.get("newPassword");
if (currentPassword == null || newPassword == null) {
return ResponseEntity.badRequest().body(Map.of("error", "Current and new passwords required"));
}
String currentLibraryId = libraryService.getCurrentLibraryId();
if (currentLibraryId == null) {
return ResponseEntity.badRequest().body(Map.of("error", "No active library"));
}
boolean success = libraryService.changeLibraryPassword(currentLibraryId, currentPassword, newPassword);
if (success) {
return ResponseEntity.ok(Map.of("success", true, "message", "Password changed successfully"));
} else {
return ResponseEntity.badRequest().body(Map.of("error", "Current password is incorrect"));
}
} catch (Exception e) {
logger.error("Failed to change password", e);
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
}
}
/**
* Create a new library
*/
@PostMapping("/create")
public ResponseEntity<Map<String, Object>> createLibrary(@RequestBody Map<String, String> request) {
try {
String name = request.get("name");
String description = request.get("description");
String password = request.get("password");
if (name == null || name.trim().isEmpty() || password == null || password.trim().isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "Name and password are required"));
}
var newLibrary = libraryService.createNewLibrary(name.trim(), description, password);
return ResponseEntity.ok(Map.of(
"success", true,
"library", Map.of(
"id", newLibrary.getId(),
"name", newLibrary.getName(),
"description", newLibrary.getDescription()
),
"message", "Library created successfully. You can now log in with the new password to access it."
));
} catch (Exception e) {
logger.error("Failed to create library", e);
return ResponseEntity.internalServerError().body(Map.of("error", "Server error"));
}
}
/**
* Update library metadata (name and description)
*/
@PutMapping("/{libraryId}/metadata")
public ResponseEntity<Map<String, Object>> updateLibraryMetadata(
@PathVariable String libraryId,
@RequestBody Map<String, String> updates) {
try {
String newName = updates.get("name");
String newDescription = updates.get("description");
if (newName == null || newName.trim().isEmpty()) {
return ResponseEntity.badRequest().body(Map.of("error", "Library name is required"));
}
// Update the library
libraryService.updateLibraryMetadata(libraryId, newName, newDescription);
// Return updated library info
LibraryDto updatedLibrary = libraryService.getLibraryById(libraryId);
if (updatedLibrary != null) {
Map<String, Object> response = new HashMap<>();
response.put("success", true);
response.put("message", "Library metadata updated successfully");
response.put("library", updatedLibrary);
return ResponseEntity.ok(response);
} else {
return ResponseEntity.notFound().build();
}
} catch (IllegalArgumentException e) {
return ResponseEntity.badRequest().body(Map.of("error", e.getMessage()));
} catch (Exception e) {
logger.error("Failed to update library metadata for {}: {}", libraryId, e.getMessage(), e);
return ResponseEntity.internalServerError().body(Map.of("error", "Failed to update library metadata"));
}
}
}

View File

@@ -0,0 +1,57 @@
package com.storycove.controller;
import com.storycove.dto.LibraryOverviewStatsDto;
import com.storycove.service.LibraryService;
import com.storycove.service.LibraryStatisticsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@RestController
@RequestMapping("/api/libraries/{libraryId}/statistics")
public class LibraryStatisticsController {
private static final Logger logger = LoggerFactory.getLogger(LibraryStatisticsController.class);
@Autowired
private LibraryStatisticsService statisticsService;
@Autowired
private LibraryService libraryService;
/**
* Get overview statistics for a library
*/
@GetMapping("/overview")
public ResponseEntity<?> getOverviewStatistics(@PathVariable String libraryId) {
try {
// Verify library exists
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
LibraryOverviewStatsDto stats = statisticsService.getOverviewStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get overview statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
// Error response DTO
private static class ErrorResponse {
private String error;
public ErrorResponse(String error) {
this.error = error;
}
public String getError() {
return error;
}
}
}

View File

@@ -2,7 +2,7 @@ package com.storycove.controller;
import com.storycove.entity.Story; import com.storycove.entity.Story;
import com.storycove.service.StoryService; import com.storycove.service.StoryService;
import com.storycove.service.TypesenseService; import com.storycove.service.SearchServiceAdapter;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*; import org.springframework.web.bind.annotation.*;
@@ -14,25 +14,19 @@ import java.util.Map;
@RequestMapping("/api/search") @RequestMapping("/api/search")
public class SearchController { public class SearchController {
private final TypesenseService typesenseService; private final SearchServiceAdapter searchServiceAdapter;
private final StoryService storyService; private final StoryService storyService;
public SearchController(@Autowired(required = false) TypesenseService typesenseService, StoryService storyService) { public SearchController(SearchServiceAdapter searchServiceAdapter, StoryService storyService) {
this.typesenseService = typesenseService; this.searchServiceAdapter = searchServiceAdapter;
this.storyService = storyService; this.storyService = storyService;
} }
@PostMapping("/reindex") @PostMapping("/reindex")
public ResponseEntity<?> reindexAllStories() { public ResponseEntity<?> reindexAllStories() {
if (typesenseService == null) {
return ResponseEntity.badRequest().body(Map.of(
"error", "Typesense service is not available"
));
}
try { try {
List<Story> allStories = storyService.findAll(); List<Story> allStories = storyService.findAll();
typesenseService.reindexAllStories(allStories); searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"message", "Successfully reindexed all stories", "message", "Successfully reindexed all stories",
@@ -47,17 +41,8 @@ public class SearchController {
@GetMapping("/health") @GetMapping("/health")
public ResponseEntity<?> searchHealthCheck() { public ResponseEntity<?> searchHealthCheck() {
if (typesenseService == null) {
return ResponseEntity.ok(Map.of(
"status", "disabled",
"message", "Typesense service is disabled"
));
}
try { try {
// Try a simple search to test connectivity // Search service is operational if it's injected
typesenseService.searchSuggestions("test", 1);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"status", "healthy", "status", "healthy",
"message", "Search service is operational" "message", "Search service is operational"

View File

@@ -12,7 +12,6 @@ import com.storycove.service.*;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
@@ -25,6 +24,7 @@ import org.springframework.web.multipart.MultipartFile;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import java.util.Optional;
import java.util.UUID; import java.util.UUID;
import java.util.stream.Collectors; import java.util.stream.Collectors;
@@ -39,8 +39,13 @@ public class StoryController {
private final SeriesService seriesService; private final SeriesService seriesService;
private final HtmlSanitizationService sanitizationService; private final HtmlSanitizationService sanitizationService;
private final ImageService imageService; private final ImageService imageService;
private final TypesenseService typesenseService; private final SearchServiceAdapter searchServiceAdapter;
private final CollectionService collectionService; private final CollectionService collectionService;
private final ReadingTimeService readingTimeService;
private final EPUBImportService epubImportService;
private final EPUBExportService epubExportService;
private final AsyncImageProcessingService asyncImageProcessingService;
private final ImageProcessingProgressService progressService;
public StoryController(StoryService storyService, public StoryController(StoryService storyService,
AuthorService authorService, AuthorService authorService,
@@ -48,14 +53,24 @@ public class StoryController {
HtmlSanitizationService sanitizationService, HtmlSanitizationService sanitizationService,
ImageService imageService, ImageService imageService,
CollectionService collectionService, CollectionService collectionService,
@Autowired(required = false) TypesenseService typesenseService) { SearchServiceAdapter searchServiceAdapter,
ReadingTimeService readingTimeService,
EPUBImportService epubImportService,
EPUBExportService epubExportService,
AsyncImageProcessingService asyncImageProcessingService,
ImageProcessingProgressService progressService) {
this.storyService = storyService; this.storyService = storyService;
this.authorService = authorService; this.authorService = authorService;
this.seriesService = seriesService; this.seriesService = seriesService;
this.sanitizationService = sanitizationService; this.sanitizationService = sanitizationService;
this.imageService = imageService; this.imageService = imageService;
this.collectionService = collectionService; this.collectionService = collectionService;
this.typesenseService = typesenseService; this.searchServiceAdapter = searchServiceAdapter;
this.readingTimeService = readingTimeService;
this.epubImportService = epubImportService;
this.epubExportService = epubExportService;
this.asyncImageProcessingService = asyncImageProcessingService;
this.progressService = progressService;
} }
@GetMapping @GetMapping
@@ -75,31 +90,100 @@ public class StoryController {
return ResponseEntity.ok(storyDtos); return ResponseEntity.ok(storyDtos);
} }
@GetMapping("/random")
public ResponseEntity<StorySummaryDto> getRandomStory(
@RequestParam(required = false) String searchQuery,
@RequestParam(required = false) List<String> tags,
@RequestParam(required = false) Long seed,
// Advanced filters
@RequestParam(required = false) Integer minWordCount,
@RequestParam(required = false) Integer maxWordCount,
@RequestParam(required = false) String createdAfter,
@RequestParam(required = false) String createdBefore,
@RequestParam(required = false) String lastReadAfter,
@RequestParam(required = false) String lastReadBefore,
@RequestParam(required = false) Integer minRating,
@RequestParam(required = false) Integer maxRating,
@RequestParam(required = false) Boolean unratedOnly,
@RequestParam(required = false) String readingStatus,
@RequestParam(required = false) Boolean hasReadingProgress,
@RequestParam(required = false) Boolean hasCoverImage,
@RequestParam(required = false) String sourceDomain,
@RequestParam(required = false) String seriesFilter,
@RequestParam(required = false) Integer minTagCount,
@RequestParam(required = false) Boolean popularOnly,
@RequestParam(required = false) Boolean hiddenGemsOnly) {
logger.info("Getting random story with filters - searchQuery: {}, tags: {}, seed: {}",
searchQuery, tags, seed);
Optional<Story> randomStory = storyService.findRandomStory(searchQuery, tags, seed,
minWordCount, maxWordCount, createdAfter, createdBefore, lastReadAfter, lastReadBefore,
minRating, maxRating, unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
if (randomStory.isPresent()) {
StorySummaryDto storyDto = convertToSummaryDto(randomStory.get());
return ResponseEntity.ok(storyDto);
} else {
return ResponseEntity.noContent().build(); // 204 No Content when no stories match filters
}
}
@GetMapping("/{id}") @GetMapping("/{id}")
public ResponseEntity<StoryDto> getStoryById(@PathVariable UUID id) { public ResponseEntity<StoryDto> getStoryById(@PathVariable UUID id) {
Story story = storyService.findById(id); Story story = storyService.findById(id);
return ResponseEntity.ok(convertToDto(story)); return ResponseEntity.ok(convertToDto(story));
} }
@GetMapping("/{id}/read")
public ResponseEntity<StoryReadingDto> getStoryForReading(@PathVariable UUID id) {
logger.info("Getting story {} for reading", id);
Story story = storyService.findById(id);
return ResponseEntity.ok(convertToReadingDto(story));
}
@PostMapping @PostMapping
public ResponseEntity<StoryDto> createStory(@Valid @RequestBody CreateStoryRequest request) { public ResponseEntity<StoryDto> createStory(@Valid @RequestBody CreateStoryRequest request) {
logger.info("Creating new story: {}", request.getTitle());
Story story = new Story(); Story story = new Story();
updateStoryFromRequest(story, request); updateStoryFromRequest(story, request);
Story savedStory = storyService.createWithTagNames(story, request.getTagNames()); Story savedStory = storyService.createWithTagNames(story, request.getTagNames());
// Process external images in content after saving
savedStory = processExternalImagesIfNeeded(savedStory);
logger.info("Successfully created story: {} (ID: {})", savedStory.getTitle(), savedStory.getId());
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedStory)); return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedStory));
} }
@PutMapping("/{id}") @PutMapping("/{id}")
public ResponseEntity<StoryDto> updateStory(@PathVariable UUID id, public ResponseEntity<StoryDto> updateStory(@PathVariable UUID id,
@Valid @RequestBody UpdateStoryRequest request) { @Valid @RequestBody UpdateStoryRequest request) {
logger.info("Updating story: {} (ID: {})", request.getTitle(), id);
// Handle author creation/lookup at controller level before calling service
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty() && request.getAuthorId() == null) {
Author author = findOrCreateAuthor(request.getAuthorName().trim());
request.setAuthorId(author.getId());
request.setAuthorName(null); // Clear author name since we now have the ID
}
Story updatedStory = storyService.updateWithTagNames(id, request); Story updatedStory = storyService.updateWithTagNames(id, request);
// Process external images in content after saving
updatedStory = processExternalImagesIfNeeded(updatedStory);
logger.info("Successfully updated story: {}", updatedStory.getTitle());
return ResponseEntity.ok(convertToDto(updatedStory)); return ResponseEntity.ok(convertToDto(updatedStory));
} }
@DeleteMapping("/{id}") @DeleteMapping("/{id}")
public ResponseEntity<?> deleteStory(@PathVariable UUID id) { public ResponseEntity<?> deleteStory(@PathVariable UUID id) {
logger.info("Deleting story with ID: {}", id);
storyService.delete(id); storyService.delete(id);
logger.info("Successfully deleted story with ID: {}", id);
return ResponseEntity.ok(Map.of("message", "Story deleted successfully")); return ResponseEntity.ok(Map.of("message", "Story deleted successfully"));
} }
@@ -143,15 +227,58 @@ public class StoryController {
return ResponseEntity.ok(convertToDto(story)); return ResponseEntity.ok(convertToDto(story));
} }
@PostMapping("/{id}/reading-progress")
public ResponseEntity<StoryDto> updateReadingProgress(@PathVariable UUID id, @RequestBody ReadingProgressRequest request) {
logger.info("Updating reading progress for story {} to position {}", id, request.getPosition());
Story story = storyService.updateReadingProgress(id, request.getPosition());
return ResponseEntity.ok(convertToDto(story));
}
@PostMapping("/{id}/reading-status")
public ResponseEntity<StoryDto> updateReadingStatus(@PathVariable UUID id, @RequestBody ReadingStatusRequest request) {
logger.info("Updating reading status for story {} to {}", id, request.getIsRead() ? "read" : "unread");
Story story = storyService.updateReadingStatus(id, request.getIsRead());
return ResponseEntity.ok(convertToDto(story));
}
@PostMapping("/{id}/process-content-images")
public ResponseEntity<Map<String, Object>> processContentImages(@PathVariable UUID id, @RequestBody ProcessContentImagesRequest request) {
logger.info("Processing content images for story {}", id);
try {
// Process the HTML content to download and replace image URLs
ImageService.ContentImageProcessingResult result = imageService.processContentImages(request.getHtmlContent(), id);
// If there are warnings, let the client decide whether to proceed
if (result.hasWarnings()) {
return ResponseEntity.ok(Map.of(
"processedContent", result.getProcessedContent(),
"warnings", result.getWarnings(),
"downloadedImages", result.getDownloadedImages(),
"hasWarnings", true
));
}
// Success - no warnings
return ResponseEntity.ok(Map.of(
"processedContent", result.getProcessedContent(),
"downloadedImages", result.getDownloadedImages(),
"hasWarnings", false
));
} catch (Exception e) {
logger.error("Failed to process content images for story {}", id, e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to process content images: " + e.getMessage()));
}
}
@PostMapping("/reindex") @PostMapping("/reindex")
public ResponseEntity<String> manualReindex() { public ResponseEntity<String> manualReindex() {
if (typesenseService == null) {
return ResponseEntity.ok("Typesense is not enabled, no reindexing performed");
}
try { try {
List<Story> allStories = storyService.findAllWithAssociations(); List<Story> allStories = storyService.findAllWithAssociations();
typesenseService.reindexAllStories(allStories); searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok("Successfully reindexed " + allStories.size() + " stories"); return ResponseEntity.ok("Successfully reindexed " + allStories.size() + " stories");
} catch (Exception e) { } catch (Exception e) {
return ResponseEntity.status(500).body("Failed to reindex stories: " + e.getMessage()); return ResponseEntity.status(500).body("Failed to reindex stories: " + e.getMessage());
@@ -162,7 +289,7 @@ public class StoryController {
public ResponseEntity<Map<String, Object>> reindexStoriesTypesense() { public ResponseEntity<Map<String, Object>> reindexStoriesTypesense() {
try { try {
List<Story> allStories = storyService.findAllWithAssociations(); List<Story> allStories = storyService.findAllWithAssociations();
typesenseService.reindexAllStories(allStories); searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
"message", "Reindexed " + allStories.size() + " stories", "message", "Reindexed " + allStories.size() + " stories",
@@ -182,7 +309,7 @@ public class StoryController {
try { try {
// This will delete the existing collection and recreate it with correct schema // This will delete the existing collection and recreate it with correct schema
List<Story> allStories = storyService.findAllWithAssociations(); List<Story> allStories = storyService.findAllWithAssociations();
typesenseService.reindexAllStories(allStories); searchServiceAdapter.bulkIndexStories(allStories);
return ResponseEntity.ok(Map.of( return ResponseEntity.ok(Map.of(
"success", true, "success", true,
"message", "Recreated stories collection and indexed " + allStories.size() + " stories", "message", "Recreated stories collection and indexed " + allStories.size() + " stories",
@@ -207,17 +334,55 @@ public class StoryController {
@RequestParam(required = false) Integer minRating, @RequestParam(required = false) Integer minRating,
@RequestParam(required = false) Integer maxRating, @RequestParam(required = false) Integer maxRating,
@RequestParam(required = false) String sortBy, @RequestParam(required = false) String sortBy,
@RequestParam(required = false) String sortDir) { @RequestParam(required = false) String sortDir,
@RequestParam(required = false) List<String> facetBy,
// Advanced filters
@RequestParam(required = false) Integer minWordCount,
@RequestParam(required = false) Integer maxWordCount,
@RequestParam(required = false) String createdAfter,
@RequestParam(required = false) String createdBefore,
@RequestParam(required = false) String lastReadAfter,
@RequestParam(required = false) String lastReadBefore,
@RequestParam(required = false) Boolean unratedOnly,
@RequestParam(required = false) String readingStatus,
@RequestParam(required = false) Boolean hasReadingProgress,
@RequestParam(required = false) Boolean hasCoverImage,
@RequestParam(required = false) String sourceDomain,
@RequestParam(required = false) String seriesFilter,
@RequestParam(required = false) Integer minTagCount,
@RequestParam(required = false) Boolean popularOnly,
@RequestParam(required = false) Boolean hiddenGemsOnly) {
logger.info("CONTROLLER DEBUG: Search request - query='{}', tags={}, authors={}", query, tags, authors);
if (typesenseService != null) { // Use SearchServiceAdapter to handle routing between search engines
SearchResultDto<StorySearchDto> results = typesenseService.searchStories( try {
query, page, size, authors, tags, minRating, maxRating, sortBy, sortDir); // Convert authors list to single author string (for now, use first author)
String authorFilter = (authors != null && !authors.isEmpty()) ? authors.get(0) : null;
// DEBUG: Log all received parameters
logger.info("CONTROLLER DEBUG - Received parameters:");
logger.info(" readingStatus: '{}'", readingStatus);
logger.info(" seriesFilter: '{}'", seriesFilter);
logger.info(" hasReadingProgress: {}", hasReadingProgress);
logger.info(" hasCoverImage: {}", hasCoverImage);
logger.info(" createdAfter: '{}'", createdAfter);
logger.info(" lastReadAfter: '{}'", lastReadAfter);
logger.info(" unratedOnly: {}", unratedOnly);
SearchResultDto<StorySearchDto> results = searchServiceAdapter.searchStories(
query, tags, authorFilter, seriesFilter, minWordCount, maxWordCount,
minRating != null ? minRating.floatValue() : null,
null, // isRead - now handled by readingStatus advanced filter
null, // isFavorite - now handled by readingStatus advanced filter
sortBy, sortDir, page, size, facetBy,
// Advanced filters
createdAfter, createdBefore, lastReadAfter, lastReadBefore,
unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
return ResponseEntity.ok(results); return ResponseEntity.ok(results);
} else { } catch (Exception e) {
// Fallback to basic search if Typesense is not available logger.error("Search failed", e);
return ResponseEntity.badRequest().body(null); return ResponseEntity.internalServerError().body(null);
} }
} }
@@ -226,10 +391,12 @@ public class StoryController {
@RequestParam String query, @RequestParam String query,
@RequestParam(defaultValue = "5") int limit) { @RequestParam(defaultValue = "5") int limit) {
if (typesenseService != null) { // Use SearchServiceAdapter to handle routing between search engines
List<String> suggestions = typesenseService.searchSuggestions(query, limit); try {
List<String> suggestions = searchServiceAdapter.getTagSuggestions(query, limit);
return ResponseEntity.ok(suggestions); return ResponseEntity.ok(suggestions);
} else { } catch (Exception e) {
logger.error("Failed to get search suggestions", e);
return ResponseEntity.ok(new ArrayList<>()); return ResponseEntity.ok(new ArrayList<>());
} }
} }
@@ -319,7 +486,9 @@ public class StoryController {
story.setTitle(createReq.getTitle()); story.setTitle(createReq.getTitle());
story.setSummary(createReq.getSummary()); story.setSummary(createReq.getSummary());
story.setDescription(createReq.getDescription()); story.setDescription(createReq.getDescription());
story.setContentHtml(sanitizationService.sanitize(createReq.getContentHtml())); story.setContentHtml(sanitizationService.sanitize(createReq.getContentHtml()));
story.setSourceUrl(createReq.getSourceUrl()); story.setSourceUrl(createReq.getSourceUrl());
story.setVolume(createReq.getVolume()); story.setVolume(createReq.getVolume());
@@ -353,25 +522,55 @@ public class StoryController {
story.setDescription(updateReq.getDescription()); story.setDescription(updateReq.getDescription());
} }
if (updateReq.getContentHtml() != null) { if (updateReq.getContentHtml() != null) {
story.setContentHtml(sanitizationService.sanitize(updateReq.getContentHtml())); logger.info("Content before sanitization (length: {}): {}",
updateReq.getContentHtml().length(),
updateReq.getContentHtml().substring(0, Math.min(500, updateReq.getContentHtml().length())));
String sanitizedContent = sanitizationService.sanitize(updateReq.getContentHtml());
logger.info("Content after sanitization (length: {}): {}",
sanitizedContent.length(),
sanitizedContent.substring(0, Math.min(500, sanitizedContent.length())));
story.setContentHtml(sanitizedContent);
} }
if (updateReq.getSourceUrl() != null) { if (updateReq.getSourceUrl() != null) {
story.setSourceUrl(updateReq.getSourceUrl()); story.setSourceUrl(updateReq.getSourceUrl());
} }
if (updateReq.getVolume() != null) { // Volume will be handled in series logic below
story.setVolume(updateReq.getVolume()); // Handle author - either by ID or by name
}
if (updateReq.getAuthorId() != null) { if (updateReq.getAuthorId() != null) {
Author author = authorService.findById(updateReq.getAuthorId()); Author author = authorService.findById(updateReq.getAuthorId());
story.setAuthor(author); story.setAuthor(author);
} else if (updateReq.getAuthorName() != null && !updateReq.getAuthorName().trim().isEmpty()) {
Author author = findOrCreateAuthor(updateReq.getAuthorName().trim());
story.setAuthor(author);
} }
// Handle series - either by ID or by name // Handle series - either by ID, by name, or remove from series
if (updateReq.getSeriesId() != null) { if (updateReq.getSeriesId() != null) {
Series series = seriesService.findById(updateReq.getSeriesId()); Series series = seriesService.findById(updateReq.getSeriesId());
story.setSeries(series); story.setSeries(series);
} else if (updateReq.getSeriesName() != null && !updateReq.getSeriesName().trim().isEmpty()) { } else if (updateReq.getSeriesName() != null) {
logger.info("Processing series update: seriesName='{}', isEmpty={}", updateReq.getSeriesName(), updateReq.getSeriesName().trim().isEmpty());
if (updateReq.getSeriesName().trim().isEmpty()) {
// Empty series name means remove from series
logger.info("Removing story from series");
if (story.getSeries() != null) {
story.getSeries().removeStory(story);
story.setSeries(null);
story.setVolume(null);
logger.info("Story removed from series");
}
} else {
// Non-empty series name means add to series
logger.info("Adding story to series: '{}', volume: {}", updateReq.getSeriesName().trim(), updateReq.getVolume());
Series series = seriesService.findOrCreate(updateReq.getSeriesName().trim()); Series series = seriesService.findOrCreate(updateReq.getSeriesName().trim());
story.setSeries(series); story.setSeries(series);
// Set volume only if series is being set
if (updateReq.getVolume() != null) {
story.setVolume(updateReq.getVolume());
logger.info("Story added to series: {} with volume: {}", series.getName(), updateReq.getVolume());
} else {
logger.info("Story added to series: {} with no volume", series.getName());
}
}
} }
// Note: Tags are now handled in StoryService.updateWithTagNames() // Note: Tags are now handled in StoryService.updateWithTagNames()
@@ -385,7 +584,6 @@ public class StoryController {
dto.setSummary(story.getSummary()); dto.setSummary(story.getSummary());
dto.setDescription(story.getDescription()); dto.setDescription(story.getDescription());
dto.setContentHtml(story.getContentHtml()); dto.setContentHtml(story.getContentHtml());
dto.setContentPlain(story.getContentPlain());
dto.setSourceUrl(story.getSourceUrl()); dto.setSourceUrl(story.getSourceUrl());
dto.setCoverPath(story.getCoverPath()); dto.setCoverPath(story.getCoverPath());
dto.setWordCount(story.getWordCount()); dto.setWordCount(story.getWordCount());
@@ -394,6 +592,48 @@ public class StoryController {
dto.setCreatedAt(story.getCreatedAt()); dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt()); dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
}
if (story.getSeries() != null) {
dto.setSeriesId(story.getSeries().getId());
dto.setSeriesName(story.getSeries().getName());
}
dto.setTags(story.getTags().stream()
.map(this::convertTagToDto)
.collect(Collectors.toList()));
return dto;
}
private StoryReadingDto convertToReadingDto(Story story) {
StoryReadingDto dto = new StoryReadingDto();
dto.setId(story.getId());
dto.setTitle(story.getTitle());
dto.setSummary(story.getSummary());
dto.setDescription(story.getDescription());
dto.setContentHtml(story.getContentHtml());
dto.setSourceUrl(story.getSourceUrl());
dto.setCoverPath(story.getCoverPath());
dto.setWordCount(story.getWordCount());
dto.setRating(story.getRating());
dto.setVolume(story.getVolume());
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) { if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId()); dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName()); dto.setAuthorName(story.getAuthor().getName());
@@ -426,6 +666,11 @@ public class StoryController {
dto.setUpdatedAt(story.getUpdatedAt()); dto.setUpdatedAt(story.getUpdatedAt());
dto.setPartOfSeries(story.isPartOfSeries()); dto.setPartOfSeries(story.isPartOfSeries());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) { if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId()); dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName()); dto.setAuthorName(story.getAuthor().getName());
@@ -447,8 +692,11 @@ public class StoryController {
TagDto tagDto = new TagDto(); TagDto tagDto = new TagDto();
tagDto.setId(tag.getId()); tagDto.setId(tag.getId());
tagDto.setName(tag.getName()); tagDto.setName(tag.getName());
tagDto.setColor(tag.getColor());
tagDto.setDescription(tag.getDescription());
tagDto.setCreatedAt(tag.getCreatedAt()); tagDto.setCreatedAt(tag.getCreatedAt());
// storyCount can be set if needed, but it might be expensive to calculate for each tag tagDto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
tagDto.setAliasCount(tag.getAliases() != null ? tag.getAliases().size() : 0);
return tagDto; return tagDto;
} }
@@ -467,12 +715,195 @@ public class StoryController {
// to avoid circular references and keep it lightweight // to avoid circular references and keep it lightweight
dto.setStoryCount(collection.getStoryCount()); dto.setStoryCount(collection.getStoryCount());
dto.setTotalWordCount(collection.getTotalWordCount()); dto.setTotalWordCount(collection.getTotalWordCount());
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime()); dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
dto.setAverageStoryRating(collection.getAverageStoryRating()); dto.setAverageStoryRating(collection.getAverageStoryRating());
return dto; return dto;
} }
private Story processExternalImagesIfNeeded(Story story) {
try {
if (story.getContentHtml() != null && !story.getContentHtml().trim().isEmpty()) {
logger.debug("Starting async image processing for story: {}", story.getId());
// Start async processing - this returns immediately
asyncImageProcessingService.processStoryImagesAsync(story.getId(), story.getContentHtml());
logger.info("Async image processing started for story: {}", story.getId());
}
} catch (Exception e) {
logger.error("Failed to start async image processing for story {}: {}",
story.getId(), e.getMessage(), e);
// Don't fail the entire operation if image processing fails
}
return story;
}
@GetMapping("/{id}/image-processing-progress")
public ResponseEntity<Map<String, Object>> getImageProcessingProgress(@PathVariable UUID id) {
ImageProcessingProgressService.ImageProcessingProgress progress = progressService.getProgress(id);
if (progress == null) {
return ResponseEntity.ok(Map.of(
"isProcessing", false,
"message", "No active image processing"
));
}
Map<String, Object> response = Map.of(
"isProcessing", !progress.isCompleted(),
"totalImages", progress.getTotalImages(),
"processedImages", progress.getProcessedImages(),
"currentImageUrl", progress.getCurrentImageUrl() != null ? progress.getCurrentImageUrl() : "",
"status", progress.getStatus(),
"progressPercentage", progress.getProgressPercentage(),
"completed", progress.isCompleted(),
"error", progress.getErrorMessage() != null ? progress.getErrorMessage() : ""
);
return ResponseEntity.ok(response);
}
@GetMapping("/check-duplicate")
public ResponseEntity<Map<String, Object>> checkDuplicate(
@RequestParam String title,
@RequestParam String authorName) {
try {
List<Story> duplicates = storyService.findPotentialDuplicates(title, authorName);
Map<String, Object> response = Map.of(
"hasDuplicates", !duplicates.isEmpty(),
"count", duplicates.size(),
"duplicates", duplicates.stream()
.map(story -> Map.of(
"id", story.getId(),
"title", story.getTitle(),
"authorName", story.getAuthor() != null ? story.getAuthor().getName() : "",
"createdAt", story.getCreatedAt()
))
.collect(Collectors.toList())
);
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Error checking for duplicates", e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to check for duplicates"));
}
}
// EPUB Import endpoint
@PostMapping("/epub/import")
public ResponseEntity<EPUBImportResponse> importEPUB(
@RequestParam("file") MultipartFile file,
@RequestParam(required = false) UUID authorId,
@RequestParam(required = false) String authorName,
@RequestParam(required = false) UUID seriesId,
@RequestParam(required = false) String seriesName,
@RequestParam(required = false) Integer seriesVolume,
@RequestParam(required = false) List<String> tags,
@RequestParam(defaultValue = "true") Boolean preserveReadingPosition,
@RequestParam(defaultValue = "false") Boolean overwriteExisting,
@RequestParam(defaultValue = "true") Boolean createMissingAuthor,
@RequestParam(defaultValue = "true") Boolean createMissingSeries) {
logger.info("Importing EPUB file: {}", file.getOriginalFilename());
EPUBImportRequest request = new EPUBImportRequest();
request.setEpubFile(file);
request.setAuthorId(authorId);
request.setAuthorName(authorName);
request.setSeriesId(seriesId);
request.setSeriesName(seriesName);
request.setSeriesVolume(seriesVolume);
request.setTags(tags);
request.setPreserveReadingPosition(preserveReadingPosition);
request.setOverwriteExisting(overwriteExisting);
request.setCreateMissingAuthor(createMissingAuthor);
request.setCreateMissingSeries(createMissingSeries);
try {
EPUBImportResponse response = epubImportService.importEPUB(request);
if (response.isSuccess()) {
logger.info("Successfully imported EPUB: {} (Story ID: {})",
response.getStoryTitle(), response.getStoryId());
return ResponseEntity.ok(response);
} else {
logger.warn("EPUB import failed: {}", response.getMessage());
return ResponseEntity.badRequest().body(response);
}
} catch (Exception e) {
logger.error("Error importing EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(EPUBImportResponse.error("Internal server error: " + e.getMessage()));
}
}
// EPUB Export endpoint
@PostMapping("/epub/export")
public ResponseEntity<org.springframework.core.io.Resource> exportEPUB(
@Valid @RequestBody EPUBExportRequest request) {
logger.info("Exporting story {} to EPUB", request.getStoryId());
try {
if (!epubExportService.canExportStory(request.getStoryId())) {
return ResponseEntity.badRequest().build();
}
org.springframework.core.io.Resource resource = epubExportService.exportStoryAsEPUB(request);
Story story = storyService.findById(request.getStoryId());
String filename = epubExportService.getEPUBFilename(story);
logger.info("Successfully exported EPUB: {}", filename);
return ResponseEntity.ok()
.header("Content-Disposition", "attachment; filename=\"" + filename + "\"")
.header("Content-Type", "application/epub+zip")
.body(resource);
} catch (Exception e) {
logger.error("Error exporting EPUB: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR).build();
}
}
// EPUB Export by story ID (GET endpoint)
@GetMapping("/{id}/epub")
public ResponseEntity<org.springframework.core.io.Resource> exportStoryAsEPUB(@PathVariable UUID id) {
logger.info("Exporting story {} to EPUB via GET", id);
EPUBExportRequest request = new EPUBExportRequest(id);
return exportEPUB(request);
}
// Validate EPUB file
@PostMapping("/epub/validate")
public ResponseEntity<Map<String, Object>> validateEPUBFile(@RequestParam("file") MultipartFile file) {
logger.info("Validating EPUB file: {}", file.getOriginalFilename());
try {
List<String> errors = epubImportService.validateEPUBFile(file);
Map<String, Object> response = Map.of(
"valid", errors.isEmpty(),
"errors", errors,
"filename", file.getOriginalFilename(),
"size", file.getSize()
);
return ResponseEntity.ok(response);
} catch (Exception e) {
logger.error("Error validating EPUB file: {}", e.getMessage(), e);
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
.body(Map.of("error", "Failed to validate EPUB file"));
}
}
// Request DTOs // Request DTOs
public static class CreateStoryRequest { public static class CreateStoryRequest {
private String title; private String title;
@@ -520,6 +951,7 @@ public class StoryController {
private String sourceUrl; private String sourceUrl;
private Integer volume; private Integer volume;
private UUID authorId; private UUID authorId;
private String authorName;
private UUID seriesId; private UUID seriesId;
private String seriesName; private String seriesName;
private List<String> tagNames; private List<String> tagNames;
@@ -539,6 +971,8 @@ public class StoryController {
public void setVolume(Integer volume) { this.volume = volume; } public void setVolume(Integer volume) { this.volume = volume; }
public UUID getAuthorId() { return authorId; } public UUID getAuthorId() { return authorId; }
public void setAuthorId(UUID authorId) { this.authorId = authorId; } public void setAuthorId(UUID authorId) { this.authorId = authorId; }
public String getAuthorName() { return authorName; }
public void setAuthorName(String authorName) { this.authorName = authorName; }
public UUID getSeriesId() { return seriesId; } public UUID getSeriesId() { return seriesId; }
public void setSeriesId(UUID seriesId) { this.seriesId = seriesId; } public void setSeriesId(UUID seriesId) { this.seriesId = seriesId; }
public String getSeriesName() { return seriesName; } public String getSeriesName() { return seriesName; }

View File

@@ -1,9 +1,13 @@
package com.storycove.controller; package com.storycove.controller;
import com.storycove.dto.TagDto; import com.storycove.dto.TagDto;
import com.storycove.dto.TagAliasDto;
import com.storycove.entity.Tag; import com.storycove.entity.Tag;
import com.storycove.entity.TagAlias;
import com.storycove.service.TagService; import com.storycove.service.TagService;
import jakarta.validation.Valid; import jakarta.validation.Valid;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.data.domain.Page; import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest; import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable; import org.springframework.data.domain.Pageable;
@@ -21,6 +25,7 @@ import java.util.stream.Collectors;
@RequestMapping("/api/tags") @RequestMapping("/api/tags")
public class TagController { public class TagController {
private static final Logger logger = LoggerFactory.getLogger(TagController.class);
private final TagService tagService; private final TagService tagService;
public TagController(TagService tagService) { public TagController(TagService tagService) {
@@ -54,6 +59,8 @@ public class TagController {
public ResponseEntity<TagDto> createTag(@Valid @RequestBody CreateTagRequest request) { public ResponseEntity<TagDto> createTag(@Valid @RequestBody CreateTagRequest request) {
Tag tag = new Tag(); Tag tag = new Tag();
tag.setName(request.getName()); tag.setName(request.getName());
tag.setColor(request.getColor());
tag.setDescription(request.getDescription());
Tag savedTag = tagService.create(tag); Tag savedTag = tagService.create(tag);
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedTag)); return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedTag));
@@ -66,6 +73,12 @@ public class TagController {
if (request.getName() != null) { if (request.getName() != null) {
existingTag.setName(request.getName()); existingTag.setName(request.getName());
} }
if (request.getColor() != null) {
existingTag.setColor(request.getColor());
}
if (request.getDescription() != null) {
existingTag.setDescription(request.getDescription());
}
Tag updatedTag = tagService.update(id, existingTag); Tag updatedTag = tagService.update(id, existingTag);
return ResponseEntity.ok(convertToDto(updatedTag)); return ResponseEntity.ok(convertToDto(updatedTag));
@@ -95,7 +108,7 @@ public class TagController {
@RequestParam String query, @RequestParam String query,
@RequestParam(defaultValue = "10") int limit) { @RequestParam(defaultValue = "10") int limit) {
List<Tag> tags = tagService.findByNameStartingWith(query, limit); List<Tag> tags = tagService.findByNameOrAliasStartingWith(query, limit);
List<TagDto> tagDtos = tags.stream().map(this::convertToDto).collect(Collectors.toList()); List<TagDto> tagDtos = tags.stream().map(this::convertToDto).collect(Collectors.toList());
return ResponseEntity.ok(tagDtos); return ResponseEntity.ok(tagDtos);
@@ -132,29 +145,257 @@ public class TagController {
return ResponseEntity.ok(stats); return ResponseEntity.ok(stats);
} }
@GetMapping("/collections")
public ResponseEntity<List<TagDto>> getTagsUsedByCollections() {
List<Tag> tags = tagService.findTagsUsedByCollections();
List<TagDto> tagDtos = tags.stream()
.map(this::convertToDtoWithCollectionCount)
.collect(Collectors.toList());
return ResponseEntity.ok(tagDtos);
}
// Tag alias endpoints
@PostMapping("/{tagId}/aliases")
public ResponseEntity<TagAliasDto> addAlias(@PathVariable UUID tagId,
@RequestBody Map<String, String> request) {
String aliasName = request.get("aliasName");
if (aliasName == null || aliasName.trim().isEmpty()) {
return ResponseEntity.badRequest().build();
}
try {
TagAlias alias = tagService.addAlias(tagId, aliasName.trim());
TagAliasDto dto = new TagAliasDto();
dto.setId(alias.getId());
dto.setAliasName(alias.getAliasName());
dto.setCanonicalTagId(alias.getCanonicalTag().getId());
dto.setCanonicalTagName(alias.getCanonicalTag().getName());
dto.setCreatedFromMerge(alias.getCreatedFromMerge());
dto.setCreatedAt(alias.getCreatedAt());
return ResponseEntity.status(HttpStatus.CREATED).body(dto);
} catch (Exception e) {
return ResponseEntity.badRequest().build();
}
}
@DeleteMapping("/{tagId}/aliases/{aliasId}")
public ResponseEntity<?> removeAlias(@PathVariable UUID tagId, @PathVariable UUID aliasId) {
try {
tagService.removeAlias(tagId, aliasId);
return ResponseEntity.ok(Map.of("message", "Alias removed successfully"));
} catch (Exception e) {
return ResponseEntity.badRequest().body(Map.of("error", e.getMessage()));
}
}
@GetMapping("/resolve/{name}")
public ResponseEntity<TagDto> resolveTag(@PathVariable String name) {
try {
Tag resolvedTag = tagService.resolveTagByName(name);
if (resolvedTag != null) {
return ResponseEntity.ok(convertToDto(resolvedTag));
} else {
return ResponseEntity.notFound().build();
}
} catch (Exception e) {
return ResponseEntity.notFound().build();
}
}
@PostMapping("/merge")
public ResponseEntity<?> mergeTags(@Valid @RequestBody MergeTagsRequest request) {
try {
Tag resultTag = tagService.mergeTags(request.getSourceTagUUIDs(), request.getTargetTagUUID());
return ResponseEntity.ok(convertToDto(resultTag));
} catch (Exception e) {
logger.error("Failed to merge tags", e);
String errorMessage = e.getMessage() != null ? e.getMessage() : "Unknown error occurred";
return ResponseEntity.badRequest().body(Map.of("error", errorMessage));
}
}
@PostMapping("/merge/preview")
public ResponseEntity<?> previewMerge(@Valid @RequestBody MergeTagsRequest request) {
try {
MergePreviewResponse preview = tagService.previewMerge(request.getSourceTagUUIDs(), request.getTargetTagUUID());
return ResponseEntity.ok(preview);
} catch (Exception e) {
logger.error("Failed to preview merge", e);
String errorMessage = e.getMessage() != null ? e.getMessage() : "Unknown error occurred";
return ResponseEntity.badRequest().body(Map.of("error", errorMessage));
}
}
@PostMapping("/suggest")
public ResponseEntity<List<TagSuggestion>> suggestTags(@RequestBody TagSuggestionRequest request) {
try {
List<TagSuggestion> suggestions = tagService.suggestTags(
request.getTitle(),
request.getContent(),
request.getSummary(),
request.getLimit() != null ? request.getLimit() : 10
);
return ResponseEntity.ok(suggestions);
} catch (Exception e) {
logger.error("Failed to suggest tags", e);
return ResponseEntity.ok(List.of()); // Return empty list on error
}
}
private TagDto convertToDto(Tag tag) { private TagDto convertToDto(Tag tag) {
TagDto dto = new TagDto(); TagDto dto = new TagDto();
dto.setId(tag.getId()); dto.setId(tag.getId());
dto.setName(tag.getName()); dto.setName(tag.getName());
dto.setColor(tag.getColor());
dto.setDescription(tag.getDescription());
dto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0); dto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
dto.setAliasCount(tag.getAliases() != null ? tag.getAliases().size() : 0);
dto.setCreatedAt(tag.getCreatedAt()); dto.setCreatedAt(tag.getCreatedAt());
// updatedAt field not present in Tag entity per spec // updatedAt field not present in Tag entity per spec
// Convert aliases to DTOs for full context
if (tag.getAliases() != null && !tag.getAliases().isEmpty()) {
List<TagAliasDto> aliaseDtos = tag.getAliases().stream()
.map(alias -> {
TagAliasDto aliasDto = new TagAliasDto();
aliasDto.setId(alias.getId());
aliasDto.setAliasName(alias.getAliasName());
aliasDto.setCanonicalTagId(alias.getCanonicalTag().getId());
aliasDto.setCanonicalTagName(alias.getCanonicalTag().getName());
aliasDto.setCreatedFromMerge(alias.getCreatedFromMerge());
aliasDto.setCreatedAt(alias.getCreatedAt());
return aliasDto;
})
.collect(Collectors.toList());
dto.setAliases(aliaseDtos);
}
return dto;
}
private TagDto convertToDtoWithCollectionCount(Tag tag) {
TagDto dto = new TagDto();
dto.setId(tag.getId());
dto.setName(tag.getName());
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
dto.setCreatedAt(tag.getCreatedAt());
// Don't set storyCount for collection-focused endpoint
return dto; return dto;
} }
// Request DTOs // Request DTOs
public static class CreateTagRequest { public static class CreateTagRequest {
private String name; private String name;
private String color;
private String description;
public String getName() { return name; } public String getName() { return name; }
public void setName(String name) { this.name = name; } public void setName(String name) { this.name = name; }
public String getColor() { return color; }
public void setColor(String color) { this.color = color; }
public String getDescription() { return description; }
public void setDescription(String description) { this.description = description; }
} }
public static class UpdateTagRequest { public static class UpdateTagRequest {
private String name; private String name;
private String color;
private String description;
public String getName() { return name; } public String getName() { return name; }
public void setName(String name) { this.name = name; } public void setName(String name) { this.name = name; }
public String getColor() { return color; }
public void setColor(String color) { this.color = color; }
public String getDescription() { return description; }
public void setDescription(String description) { this.description = description; }
}
public static class MergeTagsRequest {
private List<String> sourceTagIds;
private String targetTagId;
public List<String> getSourceTagIds() { return sourceTagIds; }
public void setSourceTagIds(List<String> sourceTagIds) { this.sourceTagIds = sourceTagIds; }
public String getTargetTagId() { return targetTagId; }
public void setTargetTagId(String targetTagId) { this.targetTagId = targetTagId; }
// Helper methods to convert to UUID
public List<UUID> getSourceTagUUIDs() {
return sourceTagIds != null ? sourceTagIds.stream().map(UUID::fromString).toList() : null;
}
public UUID getTargetTagUUID() {
return targetTagId != null ? UUID.fromString(targetTagId) : null;
}
}
public static class MergePreviewResponse {
private String targetTagName;
private int targetStoryCount;
private int totalResultStoryCount;
private List<String> aliasesToCreate;
public String getTargetTagName() { return targetTagName; }
public void setTargetTagName(String targetTagName) { this.targetTagName = targetTagName; }
public int getTargetStoryCount() { return targetStoryCount; }
public void setTargetStoryCount(int targetStoryCount) { this.targetStoryCount = targetStoryCount; }
public int getTotalResultStoryCount() { return totalResultStoryCount; }
public void setTotalResultStoryCount(int totalResultStoryCount) { this.totalResultStoryCount = totalResultStoryCount; }
public List<String> getAliasesToCreate() { return aliasesToCreate; }
public void setAliasesToCreate(List<String> aliasesToCreate) { this.aliasesToCreate = aliasesToCreate; }
}
public static class TagSuggestionRequest {
private String title;
private String content;
private String summary;
private Integer limit;
public String getTitle() { return title; }
public void setTitle(String title) { this.title = title; }
public String getContent() { return content; }
public void setContent(String content) { this.content = content; }
public String getSummary() { return summary; }
public void setSummary(String summary) { this.summary = summary; }
public Integer getLimit() { return limit; }
public void setLimit(Integer limit) { this.limit = limit; }
}
public static class TagSuggestion {
private String tagName;
private double confidence;
private String reason;
public TagSuggestion() {}
public TagSuggestion(String tagName, double confidence, String reason) {
this.tagName = tagName;
this.confidence = confidence;
this.reason = reason;
}
public String getTagName() { return tagName; }
public void setTagName(String tagName) { this.tagName = tagName; }
public double getConfidence() { return confidence; }
public void setConfidence(double confidence) { this.confidence = confidence; }
public String getReason() { return reason; }
public void setReason(String reason) { this.reason = reason; }
} }
} }

View File

@@ -16,6 +16,7 @@ public class CollectionDto {
private String coverImagePath; private String coverImagePath;
private Boolean isArchived; private Boolean isArchived;
private List<TagDto> tags; private List<TagDto> tags;
private List<String> tagNames; // For search results
private List<CollectionStoryDto> collectionStories; private List<CollectionStoryDto> collectionStories;
private Integer storyCount; private Integer storyCount;
private Integer totalWordCount; private Integer totalWordCount;
@@ -83,6 +84,14 @@ public class CollectionDto {
this.tags = tags; this.tags = tags;
} }
public List<String> getTagNames() {
return tagNames;
}
public void setTagNames(List<String> tagNames) {
this.tagNames = tagNames;
}
public List<CollectionStoryDto> getCollectionStories() { public List<CollectionStoryDto> getCollectionStories() {
return collectionStories; return collectionStories;
} }

View File

@@ -0,0 +1,115 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
import java.util.List;
import java.util.UUID;
public class EPUBExportRequest {
@NotNull(message = "Story ID is required")
private UUID storyId;
private String customTitle;
private String customAuthor;
private Boolean includeReadingPosition = true;
private Boolean includeCoverImage = true;
private Boolean includeMetadata = true;
private List<String> customMetadata;
private String language = "en";
private Boolean splitByChapters = false;
private Integer maxWordsPerChapter;
public EPUBExportRequest() {}
public EPUBExportRequest(UUID storyId) {
this.storyId = storyId;
}
public UUID getStoryId() {
return storyId;
}
public void setStoryId(UUID storyId) {
this.storyId = storyId;
}
public String getCustomTitle() {
return customTitle;
}
public void setCustomTitle(String customTitle) {
this.customTitle = customTitle;
}
public String getCustomAuthor() {
return customAuthor;
}
public void setCustomAuthor(String customAuthor) {
this.customAuthor = customAuthor;
}
public Boolean getIncludeReadingPosition() {
return includeReadingPosition;
}
public void setIncludeReadingPosition(Boolean includeReadingPosition) {
this.includeReadingPosition = includeReadingPosition;
}
public Boolean getIncludeCoverImage() {
return includeCoverImage;
}
public void setIncludeCoverImage(Boolean includeCoverImage) {
this.includeCoverImage = includeCoverImage;
}
public Boolean getIncludeMetadata() {
return includeMetadata;
}
public void setIncludeMetadata(Boolean includeMetadata) {
this.includeMetadata = includeMetadata;
}
public List<String> getCustomMetadata() {
return customMetadata;
}
public void setCustomMetadata(List<String> customMetadata) {
this.customMetadata = customMetadata;
}
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public Boolean getSplitByChapters() {
return splitByChapters;
}
public void setSplitByChapters(Boolean splitByChapters) {
this.splitByChapters = splitByChapters;
}
public Integer getMaxWordsPerChapter() {
return maxWordsPerChapter;
}
public void setMaxWordsPerChapter(Integer maxWordsPerChapter) {
this.maxWordsPerChapter = maxWordsPerChapter;
}
}

View File

@@ -0,0 +1,133 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
import org.springframework.web.multipart.MultipartFile;
import java.util.List;
import java.util.UUID;
public class EPUBImportRequest {
@NotNull(message = "EPUB file is required")
private MultipartFile epubFile;
private UUID authorId;
private String authorName;
private UUID seriesId;
private String seriesName;
private Integer seriesVolume;
private List<String> tags;
private Boolean preserveReadingPosition = true;
private Boolean overwriteExisting = false;
private Boolean createMissingAuthor = true;
private Boolean createMissingSeries = true;
private Boolean extractCover = true;
public EPUBImportRequest() {}
public MultipartFile getEpubFile() {
return epubFile;
}
public void setEpubFile(MultipartFile epubFile) {
this.epubFile = epubFile;
}
public UUID getAuthorId() {
return authorId;
}
public void setAuthorId(UUID authorId) {
this.authorId = authorId;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public UUID getSeriesId() {
return seriesId;
}
public void setSeriesId(UUID seriesId) {
this.seriesId = seriesId;
}
public String getSeriesName() {
return seriesName;
}
public void setSeriesName(String seriesName) {
this.seriesName = seriesName;
}
public Integer getSeriesVolume() {
return seriesVolume;
}
public void setSeriesVolume(Integer seriesVolume) {
this.seriesVolume = seriesVolume;
}
public List<String> getTags() {
return tags;
}
public void setTags(List<String> tags) {
this.tags = tags;
}
public Boolean getPreserveReadingPosition() {
return preserveReadingPosition;
}
public void setPreserveReadingPosition(Boolean preserveReadingPosition) {
this.preserveReadingPosition = preserveReadingPosition;
}
public Boolean getOverwriteExisting() {
return overwriteExisting;
}
public void setOverwriteExisting(Boolean overwriteExisting) {
this.overwriteExisting = overwriteExisting;
}
public Boolean getCreateMissingAuthor() {
return createMissingAuthor;
}
public void setCreateMissingAuthor(Boolean createMissingAuthor) {
this.createMissingAuthor = createMissingAuthor;
}
public Boolean getCreateMissingSeries() {
return createMissingSeries;
}
public void setCreateMissingSeries(Boolean createMissingSeries) {
this.createMissingSeries = createMissingSeries;
}
public Boolean getExtractCover() {
return extractCover;
}
public void setExtractCover(Boolean extractCover) {
this.extractCover = extractCover;
}
}

View File

@@ -0,0 +1,107 @@
package com.storycove.dto;
import java.util.List;
import java.util.UUID;
public class EPUBImportResponse {
private boolean success;
private String message;
private UUID storyId;
private String storyTitle;
private Integer totalChapters;
private Integer wordCount;
private ReadingPositionDto readingPosition;
private List<String> warnings;
private List<String> errors;
public EPUBImportResponse() {}
public EPUBImportResponse(boolean success, String message) {
this.success = success;
this.message = message;
}
public static EPUBImportResponse success(UUID storyId, String storyTitle) {
EPUBImportResponse response = new EPUBImportResponse(true, "EPUB imported successfully");
response.setStoryId(storyId);
response.setStoryTitle(storyTitle);
return response;
}
public static EPUBImportResponse error(String message) {
return new EPUBImportResponse(false, message);
}
public boolean isSuccess() {
return success;
}
public void setSuccess(boolean success) {
this.success = success;
}
public String getMessage() {
return message;
}
public void setMessage(String message) {
this.message = message;
}
public UUID getStoryId() {
return storyId;
}
public void setStoryId(UUID storyId) {
this.storyId = storyId;
}
public String getStoryTitle() {
return storyTitle;
}
public void setStoryTitle(String storyTitle) {
this.storyTitle = storyTitle;
}
public Integer getTotalChapters() {
return totalChapters;
}
public void setTotalChapters(Integer totalChapters) {
this.totalChapters = totalChapters;
}
public Integer getWordCount() {
return wordCount;
}
public void setWordCount(Integer wordCount) {
this.wordCount = wordCount;
}
public ReadingPositionDto getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(ReadingPositionDto readingPosition) {
this.readingPosition = readingPosition;
}
public List<String> getWarnings() {
return warnings;
}
public void setWarnings(List<String> warnings) {
this.warnings = warnings;
}
public List<String> getErrors() {
return errors;
}
public void setErrors(List<String> errors) {
this.errors = errors;
}
}

View File

@@ -0,0 +1,31 @@
package com.storycove.dto;
public class FacetCountDto {
private String value;
private int count;
public FacetCountDto() {}
public FacetCountDto(String value, int count) {
this.value = value;
this.count = count;
}
// Getters and Setters
public String getValue() {
return value;
}
public void setValue(String value) {
this.value = value;
}
public int getCount() {
return count;
}
public void setCount(int count) {
this.count = count;
}
}

View File

@@ -8,6 +8,7 @@ public class HtmlSanitizationConfigDto {
private Map<String, List<String>> allowedAttributes; private Map<String, List<String>> allowedAttributes;
private List<String> allowedCssProperties; private List<String> allowedCssProperties;
private Map<String, List<String>> removedAttributes; private Map<String, List<String>> removedAttributes;
private Map<String, Map<String, List<String>>> allowedProtocols;
private String description; private String description;
public HtmlSanitizationConfigDto() {} public HtmlSanitizationConfigDto() {}
@@ -44,6 +45,14 @@ public class HtmlSanitizationConfigDto {
this.removedAttributes = removedAttributes; this.removedAttributes = removedAttributes;
} }
public Map<String, Map<String, List<String>>> getAllowedProtocols() {
return allowedProtocols;
}
public void setAllowedProtocols(Map<String, Map<String, List<String>>> allowedProtocols) {
this.allowedProtocols = allowedProtocols;
}
public String getDescription() { public String getDescription() {
return description; return description;
} }

View File

@@ -0,0 +1,61 @@
package com.storycove.dto;
public class LibraryDto {
private String id;
private String name;
private String description;
private boolean isActive;
private boolean isInitialized;
// Constructors
public LibraryDto() {}
public LibraryDto(String id, String name, String description, boolean isActive, boolean isInitialized) {
this.id = id;
this.name = name;
this.description = description;
this.isActive = isActive;
this.isInitialized = isInitialized;
}
// Getters and Setters
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public boolean isActive() {
return isActive;
}
public void setActive(boolean active) {
isActive = active;
}
public boolean isInitialized() {
return isInitialized;
}
public void setInitialized(boolean initialized) {
isInitialized = initialized;
}
}

View File

@@ -0,0 +1,183 @@
package com.storycove.dto;
public class LibraryOverviewStatsDto {
// Collection Overview
private long totalStories;
private long totalAuthors;
private long totalSeries;
private long totalTags;
private long totalCollections;
private long uniqueSourceDomains;
// Content Metrics
private long totalWordCount;
private double averageWordsPerStory;
private StoryWordCountDto longestStory;
private StoryWordCountDto shortestStory;
// Reading Time (based on 250 words/minute)
private long totalReadingTimeMinutes;
private double averageReadingTimeMinutes;
// Constructor
public LibraryOverviewStatsDto() {
}
// Getters and Setters
public long getTotalStories() {
return totalStories;
}
public void setTotalStories(long totalStories) {
this.totalStories = totalStories;
}
public long getTotalAuthors() {
return totalAuthors;
}
public void setTotalAuthors(long totalAuthors) {
this.totalAuthors = totalAuthors;
}
public long getTotalSeries() {
return totalSeries;
}
public void setTotalSeries(long totalSeries) {
this.totalSeries = totalSeries;
}
public long getTotalTags() {
return totalTags;
}
public void setTotalTags(long totalTags) {
this.totalTags = totalTags;
}
public long getTotalCollections() {
return totalCollections;
}
public void setTotalCollections(long totalCollections) {
this.totalCollections = totalCollections;
}
public long getUniqueSourceDomains() {
return uniqueSourceDomains;
}
public void setUniqueSourceDomains(long uniqueSourceDomains) {
this.uniqueSourceDomains = uniqueSourceDomains;
}
public long getTotalWordCount() {
return totalWordCount;
}
public void setTotalWordCount(long totalWordCount) {
this.totalWordCount = totalWordCount;
}
public double getAverageWordsPerStory() {
return averageWordsPerStory;
}
public void setAverageWordsPerStory(double averageWordsPerStory) {
this.averageWordsPerStory = averageWordsPerStory;
}
public StoryWordCountDto getLongestStory() {
return longestStory;
}
public void setLongestStory(StoryWordCountDto longestStory) {
this.longestStory = longestStory;
}
public StoryWordCountDto getShortestStory() {
return shortestStory;
}
public void setShortestStory(StoryWordCountDto shortestStory) {
this.shortestStory = shortestStory;
}
public long getTotalReadingTimeMinutes() {
return totalReadingTimeMinutes;
}
public void setTotalReadingTimeMinutes(long totalReadingTimeMinutes) {
this.totalReadingTimeMinutes = totalReadingTimeMinutes;
}
public double getAverageReadingTimeMinutes() {
return averageReadingTimeMinutes;
}
public void setAverageReadingTimeMinutes(double averageReadingTimeMinutes) {
this.averageReadingTimeMinutes = averageReadingTimeMinutes;
}
// Nested DTO for story word count info
public static class StoryWordCountDto {
private String id;
private String title;
private String authorName;
private int wordCount;
private long readingTimeMinutes;
public StoryWordCountDto() {
}
public StoryWordCountDto(String id, String title, String authorName, int wordCount, long readingTimeMinutes) {
this.id = id;
this.title = title;
this.authorName = authorName;
this.wordCount = wordCount;
this.readingTimeMinutes = readingTimeMinutes;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public int getWordCount() {
return wordCount;
}
public void setWordCount(int wordCount) {
this.wordCount = wordCount;
}
public long getReadingTimeMinutes() {
return readingTimeMinutes;
}
public void setReadingTimeMinutes(long readingTimeMinutes) {
this.readingTimeMinutes = readingTimeMinutes;
}
}
}

View File

@@ -0,0 +1,23 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotBlank;
public class ProcessContentImagesRequest {
@NotBlank(message = "HTML content is required")
private String htmlContent;
public ProcessContentImagesRequest() {}
public ProcessContentImagesRequest(String htmlContent) {
this.htmlContent = htmlContent;
}
public String getHtmlContent() {
return htmlContent;
}
public void setHtmlContent(String htmlContent) {
this.htmlContent = htmlContent;
}
}

View File

@@ -0,0 +1,124 @@
package com.storycove.dto;
import java.time.LocalDateTime;
import java.util.UUID;
public class ReadingPositionDto {
private UUID id;
private UUID storyId;
private Integer chapterIndex;
private String chapterTitle;
private Integer wordPosition;
private Integer characterPosition;
private Double percentageComplete;
private String epubCfi;
private String contextBefore;
private String contextAfter;
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
public ReadingPositionDto() {}
public ReadingPositionDto(UUID storyId, Integer chapterIndex, Integer wordPosition) {
this.storyId = storyId;
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
}
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public UUID getStoryId() {
return storyId;
}
public void setStoryId(UUID storyId) {
this.storyId = storyId;
}
public Integer getChapterIndex() {
return chapterIndex;
}
public void setChapterIndex(Integer chapterIndex) {
this.chapterIndex = chapterIndex;
}
public String getChapterTitle() {
return chapterTitle;
}
public void setChapterTitle(String chapterTitle) {
this.chapterTitle = chapterTitle;
}
public Integer getWordPosition() {
return wordPosition;
}
public void setWordPosition(Integer wordPosition) {
this.wordPosition = wordPosition;
}
public Integer getCharacterPosition() {
return characterPosition;
}
public void setCharacterPosition(Integer characterPosition) {
this.characterPosition = characterPosition;
}
public Double getPercentageComplete() {
return percentageComplete;
}
public void setPercentageComplete(Double percentageComplete) {
this.percentageComplete = percentageComplete;
}
public String getEpubCfi() {
return epubCfi;
}
public void setEpubCfi(String epubCfi) {
this.epubCfi = epubCfi;
}
public String getContextBefore() {
return contextBefore;
}
public void setContextBefore(String contextBefore) {
this.contextBefore = contextBefore;
}
public String getContextAfter() {
return contextAfter;
}
public void setContextAfter(String contextAfter) {
this.contextAfter = contextAfter;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
}

View File

@@ -0,0 +1,23 @@
package com.storycove.dto;
import jakarta.validation.constraints.Min;
public class ReadingProgressRequest {
@Min(value = 0, message = "Reading position must be non-negative")
private Integer position;
public ReadingProgressRequest() {}
public ReadingProgressRequest(Integer position) {
this.position = position;
}
public Integer getPosition() {
return position;
}
public void setPosition(Integer position) {
this.position = position;
}
}

View File

@@ -0,0 +1,23 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotNull;
public class ReadingStatusRequest {
@NotNull(message = "Reading status is required")
private Boolean isRead;
public ReadingStatusRequest() {}
public ReadingStatusRequest(Boolean isRead) {
this.isRead = isRead;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
}

View File

@@ -1,6 +1,7 @@
package com.storycove.dto; package com.storycove.dto;
import java.util.List; import java.util.List;
import java.util.Map;
public class SearchResultDto<T> { public class SearchResultDto<T> {
@@ -10,6 +11,7 @@ public class SearchResultDto<T> {
private int perPage; private int perPage;
private String query; private String query;
private long searchTimeMs; private long searchTimeMs;
private Map<String, List<FacetCountDto>> facets;
public SearchResultDto() {} public SearchResultDto() {}
@@ -22,6 +24,28 @@ public class SearchResultDto<T> {
this.searchTimeMs = searchTimeMs; this.searchTimeMs = searchTimeMs;
} }
public SearchResultDto(List<T> results, long totalHits, int page, int perPage, String query, long searchTimeMs, Map<String, List<FacetCountDto>> facets) {
this.results = results;
this.totalHits = totalHits;
this.page = page;
this.perPage = perPage;
this.query = query;
this.searchTimeMs = searchTimeMs;
this.facets = facets;
}
// Simple constructor for basic search results with facet list
public SearchResultDto(List<T> results, long totalHits, int resultCount, List<FacetCountDto> facetsList) {
this.results = results;
this.totalHits = totalHits;
this.page = 0;
this.perPage = resultCount;
this.query = "";
this.searchTimeMs = 0;
// Convert list to map if needed - for now just set empty map
this.facets = java.util.Collections.emptyMap();
}
// Getters and Setters // Getters and Setters
public List<T> getResults() { public List<T> getResults() {
return results; return results;
@@ -70,4 +94,12 @@ public class SearchResultDto<T> {
public void setSearchTimeMs(long searchTimeMs) { public void setSearchTimeMs(long searchTimeMs) {
this.searchTimeMs = searchTimeMs; this.searchTimeMs = searchTimeMs;
} }
public Map<String, List<FacetCountDto>> getFacets() {
return facets;
}
public void setFacets(Map<String, List<FacetCountDto>> facets) {
this.facets = facets;
}
} }

View File

@@ -21,13 +21,18 @@ public class StoryDto {
private String description; private String description;
private String contentHtml; private String contentHtml;
private String contentPlain; // contentPlain removed for performance - use StoryReadingDto when content is needed
private String sourceUrl; private String sourceUrl;
private String coverPath; private String coverPath;
private Integer wordCount; private Integer wordCount;
private Integer rating; private Integer rating;
private Integer volume; private Integer volume;
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Related entities as simple references // Related entities as simple references
private UUID authorId; private UUID authorId;
private String authorName; private String authorName;
@@ -85,13 +90,6 @@ public class StoryDto {
this.contentHtml = contentHtml; this.contentHtml = contentHtml;
} }
public String getContentPlain() {
return contentPlain;
}
public void setContentPlain(String contentPlain) {
this.contentPlain = contentPlain;
}
public String getSourceUrl() { public String getSourceUrl() {
return sourceUrl; return sourceUrl;
@@ -133,6 +131,30 @@ public class StoryDto {
this.volume = volume; this.volume = volume;
} }
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public UUID getAuthorId() { public UUID getAuthorId() {
return authorId; return authorId;
} }

View File

@@ -0,0 +1,202 @@
package com.storycove.dto;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
/**
* Story DTO specifically for reading view.
* Contains contentHtml but excludes contentPlain for performance.
*/
public class StoryReadingDto {
private UUID id;
private String title;
private String summary;
private String description;
private String contentHtml; // For reading - includes HTML
// contentPlain excluded for performance
private String sourceUrl;
private String coverPath;
private Integer wordCount;
private Integer rating;
private Integer volume;
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Related entities as simple references
private UUID authorId;
private String authorName;
private UUID seriesId;
private String seriesName;
private List<TagDto> tags;
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
public StoryReadingDto() {}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getSummary() {
return summary;
}
public void setSummary(String summary) {
this.summary = summary;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getContentHtml() {
return contentHtml;
}
public void setContentHtml(String contentHtml) {
this.contentHtml = contentHtml;
}
public String getSourceUrl() {
return sourceUrl;
}
public void setSourceUrl(String sourceUrl) {
this.sourceUrl = sourceUrl;
}
public String getCoverPath() {
return coverPath;
}
public void setCoverPath(String coverPath) {
this.coverPath = coverPath;
}
public Integer getWordCount() {
return wordCount;
}
public void setWordCount(Integer wordCount) {
this.wordCount = wordCount;
}
public Integer getRating() {
return rating;
}
public void setRating(Integer rating) {
this.rating = rating;
}
public Integer getVolume() {
return volume;
}
public void setVolume(Integer volume) {
this.volume = volume;
}
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public UUID getAuthorId() {
return authorId;
}
public void setAuthorId(UUID authorId) {
this.authorId = authorId;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public UUID getSeriesId() {
return seriesId;
}
public void setSeriesId(UUID seriesId) {
this.seriesId = seriesId;
}
public String getSeriesName() {
return seriesName;
}
public void setSeriesName(String seriesName) {
this.seriesName = seriesName;
}
public List<TagDto> getTags() {
return tags;
}
public void setTags(List<TagDto> tags) {
this.tags = tags;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
}

View File

@@ -9,13 +9,17 @@ public class StorySearchDto {
private UUID id; private UUID id;
private String title; private String title;
private String description; private String description;
private String contentPlain;
private String sourceUrl; private String sourceUrl;
private String coverPath; private String coverPath;
private Integer wordCount; private Integer wordCount;
private Integer rating; private Integer rating;
private Integer volume; private Integer volume;
// Reading status
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Author info // Author info
private UUID authorId; private UUID authorId;
private String authorName; private String authorName;
@@ -30,6 +34,9 @@ public class StorySearchDto {
private LocalDateTime createdAt; private LocalDateTime createdAt;
private LocalDateTime updatedAt; private LocalDateTime updatedAt;
// Alias for createdAt to match frontend expectations
private LocalDateTime dateAdded;
// Search-specific fields // Search-specific fields
private double searchScore; private double searchScore;
private List<String> highlights; private List<String> highlights;
@@ -61,13 +68,6 @@ public class StorySearchDto {
this.description = description; this.description = description;
} }
public String getContentPlain() {
return contentPlain;
}
public void setContentPlain(String contentPlain) {
this.contentPlain = contentPlain;
}
public String getSourceUrl() { public String getSourceUrl() {
return sourceUrl; return sourceUrl;
@@ -109,6 +109,30 @@ public class StorySearchDto {
this.volume = volume; this.volume = volume;
} }
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public UUID getAuthorId() { public UUID getAuthorId() {
return authorId; return authorId;
} }
@@ -165,6 +189,14 @@ public class StorySearchDto {
this.updatedAt = updatedAt; this.updatedAt = updatedAt;
} }
public LocalDateTime getDateAdded() {
return dateAdded;
}
public void setDateAdded(LocalDateTime dateAdded) {
this.dateAdded = dateAdded;
}
public double getSearchScore() { public double getSearchScore() {
return searchScore; return searchScore;
} }

View File

@@ -20,6 +20,11 @@ public class StorySummaryDto {
private Integer rating; private Integer rating;
private Integer volume; private Integer volume;
// Reading progress fields
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Related entities as simple references // Related entities as simple references
private UUID authorId; private UUID authorId;
private String authorName; private String authorName;
@@ -106,6 +111,30 @@ public class StorySummaryDto {
this.volume = volume; this.volume = volume;
} }
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public UUID getAuthorId() { public UUID getAuthorId() {
return authorId; return authorId;
} }

View File

@@ -0,0 +1,77 @@
package com.storycove.dto;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import java.time.LocalDateTime;
import java.util.UUID;
public class TagAliasDto {
private UUID id;
@NotBlank(message = "Alias name is required")
@Size(max = 100, message = "Alias name must not exceed 100 characters")
private String aliasName;
private UUID canonicalTagId;
private String canonicalTagName; // For convenience in frontend
private Boolean createdFromMerge;
private LocalDateTime createdAt;
public TagAliasDto() {}
public TagAliasDto(String aliasName, UUID canonicalTagId) {
this.aliasName = aliasName;
this.canonicalTagId = canonicalTagId;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getAliasName() {
return aliasName;
}
public void setAliasName(String aliasName) {
this.aliasName = aliasName;
}
public UUID getCanonicalTagId() {
return canonicalTagId;
}
public void setCanonicalTagId(UUID canonicalTagId) {
this.canonicalTagId = canonicalTagId;
}
public String getCanonicalTagName() {
return canonicalTagName;
}
public void setCanonicalTagName(String canonicalTagName) {
this.canonicalTagName = canonicalTagName;
}
public Boolean getCreatedFromMerge() {
return createdFromMerge;
}
public void setCreatedFromMerge(Boolean createdFromMerge) {
this.createdFromMerge = createdFromMerge;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
}

View File

@@ -4,6 +4,7 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID; import java.util.UUID;
public class TagDto { public class TagDto {
@@ -14,7 +15,16 @@ public class TagDto {
@Size(max = 100, message = "Tag name must not exceed 100 characters") @Size(max = 100, message = "Tag name must not exceed 100 characters")
private String name; private String name;
@Size(max = 7, message = "Color must be a valid hex color code")
private String color;
@Size(max = 500, message = "Description must not exceed 500 characters")
private String description;
private Integer storyCount; private Integer storyCount;
private Integer collectionCount;
private Integer aliasCount;
private List<TagAliasDto> aliases;
private LocalDateTime createdAt; private LocalDateTime createdAt;
private LocalDateTime updatedAt; private LocalDateTime updatedAt;
@@ -41,6 +51,22 @@ public class TagDto {
this.name = name; this.name = name;
} }
public String getColor() {
return color;
}
public void setColor(String color) {
this.color = color;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Integer getStoryCount() { public Integer getStoryCount() {
return storyCount; return storyCount;
} }
@@ -49,6 +75,30 @@ public class TagDto {
this.storyCount = storyCount; this.storyCount = storyCount;
} }
public Integer getCollectionCount() {
return collectionCount;
}
public void setCollectionCount(Integer collectionCount) {
this.collectionCount = collectionCount;
}
public Integer getAliasCount() {
return aliasCount;
}
public void setAliasCount(Integer aliasCount) {
this.aliasCount = aliasCount;
}
public List<TagAliasDto> getAliases() {
return aliases;
}
public void setAliases(List<TagAliasDto> aliases) {
this.aliases = aliases;
}
public LocalDateTime getCreatedAt() { public LocalDateTime getCreatedAt() {
return createdAt; return createdAt;
} }

View File

@@ -0,0 +1,195 @@
package com.storycove.entity;
import jakarta.persistence.*;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "backup_jobs")
public class BackupJob {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@Column(nullable = false)
private String libraryId;
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private BackupType type;
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private BackupStatus status;
@Column
private String filePath;
@Column
private Long fileSizeBytes;
@Column
private Integer progressPercent;
@Column(length = 1000)
private String errorMessage;
@Column(nullable = false)
private LocalDateTime createdAt;
@Column
private LocalDateTime startedAt;
@Column
private LocalDateTime completedAt;
@Column
private LocalDateTime expiresAt;
@PrePersist
protected void onCreate() {
createdAt = LocalDateTime.now();
// Backups expire after 24 hours
expiresAt = LocalDateTime.now().plusDays(1);
}
// Enums
public enum BackupType {
DATABASE_ONLY,
COMPLETE
}
public enum BackupStatus {
PENDING,
IN_PROGRESS,
COMPLETED,
FAILED,
EXPIRED
}
// Constructors
public BackupJob() {
}
public BackupJob(String libraryId, BackupType type) {
this.libraryId = libraryId;
this.type = type;
this.status = BackupStatus.PENDING;
this.progressPercent = 0;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getLibraryId() {
return libraryId;
}
public void setLibraryId(String libraryId) {
this.libraryId = libraryId;
}
public BackupType getType() {
return type;
}
public void setType(BackupType type) {
this.type = type;
}
public BackupStatus getStatus() {
return status;
}
public void setStatus(BackupStatus status) {
this.status = status;
}
public String getFilePath() {
return filePath;
}
public void setFilePath(String filePath) {
this.filePath = filePath;
}
public Long getFileSizeBytes() {
return fileSizeBytes;
}
public void setFileSizeBytes(Long fileSizeBytes) {
this.fileSizeBytes = fileSizeBytes;
}
public Integer getProgressPercent() {
return progressPercent;
}
public void setProgressPercent(Integer progressPercent) {
this.progressPercent = progressPercent;
}
public String getErrorMessage() {
return errorMessage;
}
public void setErrorMessage(String errorMessage) {
this.errorMessage = errorMessage;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getStartedAt() {
return startedAt;
}
public void setStartedAt(LocalDateTime startedAt) {
this.startedAt = startedAt;
}
public LocalDateTime getCompletedAt() {
return completedAt;
}
public void setCompletedAt(LocalDateTime completedAt) {
this.completedAt = completedAt;
}
public LocalDateTime getExpiresAt() {
return expiresAt;
}
public void setExpiresAt(LocalDateTime expiresAt) {
this.expiresAt = expiresAt;
}
// Helper methods
public boolean isExpired() {
return LocalDateTime.now().isAfter(expiresAt);
}
public boolean isCompleted() {
return status == BackupStatus.COMPLETED;
}
public boolean isFailed() {
return status == BackupStatus.FAILED;
}
public boolean isInProgress() {
return status == BackupStatus.IN_PROGRESS;
}
}

View File

@@ -52,6 +52,10 @@ public class Collection {
) )
private Set<Tag> tags = new HashSet<>(); private Set<Tag> tags = new HashSet<>();
// Transient field for search results - tag names only to avoid lazy loading issues
@Transient
private List<String> tagNames;
@CreationTimestamp @CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false) @Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt; private LocalDateTime createdAt;
@@ -192,6 +196,14 @@ public class Collection {
this.tags = tags; this.tags = tags;
} }
public List<String> getTagNames() {
return tagNames;
}
public void setTagNames(List<String> tagNames) {
this.tagNames = tagNames;
}
public LocalDateTime getCreatedAt() { public LocalDateTime getCreatedAt() {
return createdAt; return createdAt;
} }

View File

@@ -0,0 +1,93 @@
package com.storycove.entity;
public class Library {
private String id;
private String name;
private String description;
private String passwordHash;
private String dbName;
private String typesenseCollection;
private String imagePath;
private boolean initialized;
// Constructors
public Library() {}
public Library(String id, String name, String description, String passwordHash, String dbName) {
this.id = id;
this.name = name;
this.description = description;
this.passwordHash = passwordHash;
this.dbName = dbName;
this.typesenseCollection = "stories_" + id;
this.imagePath = "/images/" + id;
this.initialized = false;
}
// Getters and Setters
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
this.typesenseCollection = "stories_" + id;
this.imagePath = "/images/" + id;
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public String getPasswordHash() {
return passwordHash;
}
public void setPasswordHash(String passwordHash) {
this.passwordHash = passwordHash;
}
public String getDbName() {
return dbName;
}
public void setDbName(String dbName) {
this.dbName = dbName;
}
public String getTypesenseCollection() {
return typesenseCollection;
}
public void setTypesenseCollection(String typesenseCollection) {
this.typesenseCollection = typesenseCollection;
}
public String getImagePath() {
return imagePath;
}
public void setImagePath(String imagePath) {
this.imagePath = imagePath;
}
public boolean isInitialized() {
return initialized;
}
public void setInitialized(boolean initialized) {
this.initialized = initialized;
}
}

View File

@@ -0,0 +1,230 @@
package com.storycove.entity;
import jakarta.persistence.*;
import jakarta.validation.constraints.NotNull;
import org.hibernate.annotations.CreationTimestamp;
import org.hibernate.annotations.UpdateTimestamp;
import com.fasterxml.jackson.annotation.JsonBackReference;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "reading_positions", indexes = {
@Index(name = "idx_reading_position_story", columnList = "story_id")
})
public class ReadingPosition {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@NotNull
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "story_id", nullable = false)
@JsonBackReference("story-reading-positions")
private Story story;
@Column(name = "chapter_index")
private Integer chapterIndex;
@Column(name = "chapter_title")
private String chapterTitle;
@Column(name = "word_position")
private Integer wordPosition;
@Column(name = "character_position")
private Integer characterPosition;
@Column(name = "percentage_complete")
private Double percentageComplete;
@Column(name = "epub_cfi", columnDefinition = "TEXT")
private String epubCfi;
@Column(name = "context_before", length = 500)
private String contextBefore;
@Column(name = "context_after", length = 500)
private String contextAfter;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
@UpdateTimestamp
@Column(name = "updated_at", nullable = false)
private LocalDateTime updatedAt;
public ReadingPosition() {}
public ReadingPosition(Story story) {
this.story = story;
this.chapterIndex = 0;
this.wordPosition = 0;
this.characterPosition = 0;
this.percentageComplete = 0.0;
}
public ReadingPosition(Story story, Integer chapterIndex, Integer wordPosition) {
this.story = story;
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
this.characterPosition = 0;
this.percentageComplete = 0.0;
}
public void updatePosition(Integer chapterIndex, Integer wordPosition, Integer characterPosition) {
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
this.characterPosition = characterPosition;
calculatePercentageComplete();
}
public void updatePositionWithCfi(String epubCfi, Integer chapterIndex, Integer wordPosition) {
this.epubCfi = epubCfi;
this.chapterIndex = chapterIndex;
this.wordPosition = wordPosition;
calculatePercentageComplete();
}
private void calculatePercentageComplete() {
if (story != null && story.getWordCount() != null && story.getWordCount() > 0) {
int totalWords = story.getWordCount();
int currentPosition = (chapterIndex != null ? chapterIndex * 1000 : 0) +
(wordPosition != null ? wordPosition : 0);
this.percentageComplete = Math.min(100.0, (double) currentPosition / totalWords * 100);
}
}
public boolean isAtBeginning() {
return (chapterIndex == null || chapterIndex == 0) &&
(wordPosition == null || wordPosition == 0);
}
public boolean isCompleted() {
return percentageComplete != null && percentageComplete >= 95.0;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public Story getStory() {
return story;
}
public void setStory(Story story) {
this.story = story;
}
public Integer getChapterIndex() {
return chapterIndex;
}
public void setChapterIndex(Integer chapterIndex) {
this.chapterIndex = chapterIndex;
}
public String getChapterTitle() {
return chapterTitle;
}
public void setChapterTitle(String chapterTitle) {
this.chapterTitle = chapterTitle;
}
public Integer getWordPosition() {
return wordPosition;
}
public void setWordPosition(Integer wordPosition) {
this.wordPosition = wordPosition;
}
public Integer getCharacterPosition() {
return characterPosition;
}
public void setCharacterPosition(Integer characterPosition) {
this.characterPosition = characterPosition;
}
public Double getPercentageComplete() {
return percentageComplete;
}
public void setPercentageComplete(Double percentageComplete) {
this.percentageComplete = percentageComplete;
}
public String getEpubCfi() {
return epubCfi;
}
public void setEpubCfi(String epubCfi) {
this.epubCfi = epubCfi;
}
public String getContextBefore() {
return contextBefore;
}
public void setContextBefore(String contextBefore) {
this.contextBefore = contextBefore;
}
public String getContextAfter() {
return contextAfter;
}
public void setContextAfter(String contextAfter) {
this.contextAfter = contextAfter;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getUpdatedAt() {
return updatedAt;
}
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof ReadingPosition)) return false;
ReadingPosition that = (ReadingPosition) o;
return id != null && id.equals(that.id);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public String toString() {
return "ReadingPosition{" +
"id=" + id +
", storyId=" + (story != null ? story.getId() : null) +
", chapterIndex=" + chapterIndex +
", wordPosition=" + wordPosition +
", percentageComplete=" + percentageComplete +
'}';
}
}

View File

@@ -0,0 +1,130 @@
package com.storycove.entity;
import jakarta.persistence.*;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "refresh_tokens")
public class RefreshToken {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@Column(nullable = false, unique = true)
private String token;
@Column(nullable = false)
private LocalDateTime expiresAt;
@Column(nullable = false)
private LocalDateTime createdAt;
@Column
private LocalDateTime revokedAt;
@Column
private String libraryId;
@Column(nullable = false)
private String userAgent;
@Column(nullable = false)
private String ipAddress;
@PrePersist
protected void onCreate() {
createdAt = LocalDateTime.now();
}
// Constructors
public RefreshToken() {
}
public RefreshToken(String token, LocalDateTime expiresAt, String libraryId, String userAgent, String ipAddress) {
this.token = token;
this.expiresAt = expiresAt;
this.libraryId = libraryId;
this.userAgent = userAgent;
this.ipAddress = ipAddress;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getToken() {
return token;
}
public void setToken(String token) {
this.token = token;
}
public LocalDateTime getExpiresAt() {
return expiresAt;
}
public void setExpiresAt(LocalDateTime expiresAt) {
this.expiresAt = expiresAt;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getRevokedAt() {
return revokedAt;
}
public void setRevokedAt(LocalDateTime revokedAt) {
this.revokedAt = revokedAt;
}
public String getLibraryId() {
return libraryId;
}
public void setLibraryId(String libraryId) {
this.libraryId = libraryId;
}
public String getUserAgent() {
return userAgent;
}
public void setUserAgent(String userAgent) {
this.userAgent = userAgent;
}
public String getIpAddress() {
return ipAddress;
}
public void setIpAddress(String ipAddress) {
this.ipAddress = ipAddress;
}
// Helper methods
public boolean isExpired() {
return LocalDateTime.now().isAfter(expiresAt);
}
public boolean isRevoked() {
return revokedAt != null;
}
public boolean isValid() {
return !isExpired() && !isRevoked();
}
}

View File

@@ -55,6 +55,15 @@ public class Story {
@Column(name = "volume") @Column(name = "volume")
private Integer volume; private Integer volume;
@Column(name = "is_read")
private Boolean isRead = false;
@Column(name = "reading_position")
private Integer readingPosition = 0;
@Column(name = "last_read_at")
private LocalDateTime lastReadAt;
@ManyToOne(fetch = FetchType.LAZY) @ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "author_id") @JoinColumn(name = "author_id")
@JsonBackReference("author-stories") @JsonBackReference("author-stories")
@@ -212,6 +221,30 @@ public class Story {
this.volume = volume; this.volume = volume;
} }
public Boolean getIsRead() {
return isRead;
}
public void setIsRead(Boolean isRead) {
this.isRead = isRead;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public LocalDateTime getLastReadAt() {
return lastReadAt;
}
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public Author getAuthor() { public Author getAuthor() {
return author; return author;
} }
@@ -252,6 +285,37 @@ public class Story {
this.updatedAt = updatedAt; this.updatedAt = updatedAt;
} }
/**
* Updates the reading progress and timestamp
*/
public void updateReadingProgress(Integer position) {
this.readingPosition = position;
this.lastReadAt = LocalDateTime.now();
}
/**
* Marks the story as read and updates the reading position to the end
*/
public void markAsRead() {
this.isRead = true;
this.lastReadAt = LocalDateTime.now();
// Set reading position to the end of content if available
if (contentPlain != null) {
this.readingPosition = contentPlain.length();
} else if (contentHtml != null) {
this.readingPosition = contentHtml.length();
}
}
/**
* Marks the story as unread and resets reading position
*/
public void markAsUnread() {
this.isRead = false;
this.readingPosition = 0;
this.lastReadAt = null;
}
@Override @Override
public boolean equals(Object o) { public boolean equals(Object o) {
if (this == o) return true; if (this == o) return true;
@@ -272,6 +336,8 @@ public class Story {
", title='" + title + '\'' + ", title='" + title + '\'' +
", wordCount=" + wordCount + ", wordCount=" + wordCount +
", rating=" + rating + ", rating=" + rating +
", isRead=" + isRead +
", readingPosition=" + readingPosition +
'}'; '}';
} }
} }

View File

@@ -5,6 +5,7 @@ import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size; import jakarta.validation.constraints.Size;
import org.hibernate.annotations.CreationTimestamp; import org.hibernate.annotations.CreationTimestamp;
import com.fasterxml.jackson.annotation.JsonBackReference; import com.fasterxml.jackson.annotation.JsonBackReference;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import java.time.LocalDateTime; import java.time.LocalDateTime;
import java.util.HashSet; import java.util.HashSet;
@@ -24,11 +25,27 @@ public class Tag {
@Column(nullable = false, unique = true) @Column(nullable = false, unique = true)
private String name; private String name;
@Size(max = 7, message = "Color must be a valid hex color code")
@Column(length = 7)
private String color; // hex color like #3B82F6
@Size(max = 500, message = "Description must not exceed 500 characters")
@Column(length = 500)
private String description;
@ManyToMany(mappedBy = "tags") @ManyToMany(mappedBy = "tags")
@JsonBackReference("story-tags") @JsonBackReference("story-tags")
private Set<Story> stories = new HashSet<>(); private Set<Story> stories = new HashSet<>();
@ManyToMany(mappedBy = "tags")
@JsonBackReference("collection-tags")
private Set<Collection> collections = new HashSet<>();
@OneToMany(mappedBy = "canonicalTag", cascade = CascadeType.ALL, orphanRemoval = true)
@JsonManagedReference("tag-aliases")
private Set<TagAlias> aliases = new HashSet<>();
@CreationTimestamp @CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false) @Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt; private LocalDateTime createdAt;
@@ -39,6 +56,12 @@ public class Tag {
this.name = name; this.name = name;
} }
public Tag(String name, String color, String description) {
this.name = name;
this.color = color;
this.description = description;
}
// Getters and Setters // Getters and Setters
@@ -58,6 +81,22 @@ public class Tag {
this.name = name; this.name = name;
} }
public String getColor() {
return color;
}
public void setColor(String color) {
this.color = color;
}
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public Set<Story> getStories() { public Set<Story> getStories() {
return stories; return stories;
@@ -67,6 +106,22 @@ public class Tag {
this.stories = stories; this.stories = stories;
} }
public Set<Collection> getCollections() {
return collections;
}
public void setCollections(Set<Collection> collections) {
this.collections = collections;
}
public Set<TagAlias> getAliases() {
return aliases;
}
public void setAliases(Set<TagAlias> aliases) {
this.aliases = aliases;
}
public LocalDateTime getCreatedAt() { public LocalDateTime getCreatedAt() {
return createdAt; return createdAt;
} }

View File

@@ -0,0 +1,113 @@
package com.storycove.entity;
import jakarta.persistence.*;
import jakarta.validation.constraints.NotBlank;
import jakarta.validation.constraints.Size;
import org.hibernate.annotations.CreationTimestamp;
import com.fasterxml.jackson.annotation.JsonManagedReference;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "tag_aliases")
public class TagAlias {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@NotBlank(message = "Alias name is required")
@Size(max = 100, message = "Alias name must not exceed 100 characters")
@Column(name = "alias_name", nullable = false, unique = true)
private String aliasName;
@ManyToOne(fetch = FetchType.LAZY)
@JoinColumn(name = "canonical_tag_id", nullable = false)
@JsonManagedReference("tag-aliases")
private Tag canonicalTag;
@Column(name = "created_from_merge", nullable = false)
private Boolean createdFromMerge = false;
@CreationTimestamp
@Column(name = "created_at", nullable = false, updatable = false)
private LocalDateTime createdAt;
public TagAlias() {}
public TagAlias(String aliasName, Tag canonicalTag) {
this.aliasName = aliasName;
this.canonicalTag = canonicalTag;
}
public TagAlias(String aliasName, Tag canonicalTag, Boolean createdFromMerge) {
this.aliasName = aliasName;
this.canonicalTag = canonicalTag;
this.createdFromMerge = createdFromMerge;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getAliasName() {
return aliasName;
}
public void setAliasName(String aliasName) {
this.aliasName = aliasName;
}
public Tag getCanonicalTag() {
return canonicalTag;
}
public void setCanonicalTag(Tag canonicalTag) {
this.canonicalTag = canonicalTag;
}
public Boolean getCreatedFromMerge() {
return createdFromMerge;
}
public void setCreatedFromMerge(Boolean createdFromMerge) {
this.createdFromMerge = createdFromMerge;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (!(o instanceof TagAlias)) return false;
TagAlias tagAlias = (TagAlias) o;
return id != null && id.equals(tagAlias.id);
}
@Override
public int hashCode() {
return getClass().hashCode();
}
@Override
public String toString() {
return "TagAlias{" +
"id=" + id +
", aliasName='" + aliasName + '\'' +
", canonicalTag=" + (canonicalTag != null ? canonicalTag.getName() : null) +
", createdFromMerge=" + createdFromMerge +
'}';
}
}

View File

@@ -0,0 +1,34 @@
package com.storycove.event;
import org.springframework.context.ApplicationEvent;
import java.util.UUID;
/**
* Event published when a story's content is created or updated
*/
public class StoryContentUpdatedEvent extends ApplicationEvent {
private final UUID storyId;
private final String contentHtml;
private final boolean isNewStory;
public StoryContentUpdatedEvent(Object source, UUID storyId, String contentHtml, boolean isNewStory) {
super(source);
this.storyId = storyId;
this.contentHtml = contentHtml;
this.isNewStory = isNewStory;
}
public UUID getStoryId() {
return storyId;
}
public String getContentHtml() {
return contentHtml;
}
public boolean isNewStory() {
return isNewStory;
}
}

View File

@@ -52,4 +52,5 @@ public interface AuthorRepository extends JpaRepository<Author, UUID> {
@Query(value = "SELECT author_rating FROM authors WHERE id = :id", nativeQuery = true) @Query(value = "SELECT author_rating FROM authors WHERE id = :id", nativeQuery = true)
Integer findAuthorRatingById(@Param("id") UUID id); Integer findAuthorRatingById(@Param("id") UUID id);
} }

View File

@@ -0,0 +1,25 @@
package com.storycove.repository;
import com.storycove.entity.BackupJob;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
@Repository
public interface BackupJobRepository extends JpaRepository<BackupJob, UUID> {
List<BackupJob> findByLibraryIdOrderByCreatedAtDesc(String libraryId);
@Query("SELECT bj FROM BackupJob bj WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
List<BackupJob> findExpiredJobs(@Param("now") LocalDateTime now);
@Modifying
@Query("UPDATE BackupJob bj SET bj.status = 'EXPIRED' WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
int markExpiredJobs(@Param("now") LocalDateTime now);
}

View File

@@ -45,4 +45,11 @@ public interface CollectionRepository extends JpaRepository<Collection, UUID> {
*/ */
@Query("SELECT c FROM Collection c WHERE c.isArchived = false ORDER BY c.updatedAt DESC") @Query("SELECT c FROM Collection c WHERE c.isArchived = false ORDER BY c.updatedAt DESC")
List<Collection> findAllActiveCollections(); List<Collection> findAllActiveCollections();
/**
* Find all collections with tags for reindexing operations
*/
@Query("SELECT c FROM Collection c LEFT JOIN FETCH c.tags ORDER BY c.updatedAt DESC")
List<Collection> findAllWithTags();
} }

View File

@@ -0,0 +1,57 @@
package com.storycove.repository;
import com.storycove.entity.ReadingPosition;
import com.storycove.entity.Story;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Repository
public interface ReadingPositionRepository extends JpaRepository<ReadingPosition, UUID> {
Optional<ReadingPosition> findByStoryId(UUID storyId);
Optional<ReadingPosition> findByStory(Story story);
List<ReadingPosition> findByStoryIdIn(List<UUID> storyIds);
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.story.id = :storyId ORDER BY rp.updatedAt DESC")
List<ReadingPosition> findByStoryIdOrderByUpdatedAtDesc(@Param("storyId") UUID storyId);
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete >= :minPercentage")
List<ReadingPosition> findByMinimumPercentageComplete(@Param("minPercentage") Double minPercentage);
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete >= 95.0")
List<ReadingPosition> findCompletedReadings();
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.percentageComplete > 0 AND rp.percentageComplete < 95.0")
List<ReadingPosition> findInProgressReadings();
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.updatedAt >= :since ORDER BY rp.updatedAt DESC")
List<ReadingPosition> findRecentlyUpdated(@Param("since") LocalDateTime since);
@Query("SELECT rp FROM ReadingPosition rp ORDER BY rp.updatedAt DESC")
List<ReadingPosition> findAllOrderByUpdatedAtDesc();
@Query("SELECT COUNT(rp) FROM ReadingPosition rp WHERE rp.percentageComplete >= 95.0")
long countCompletedReadings();
@Query("SELECT COUNT(rp) FROM ReadingPosition rp WHERE rp.percentageComplete > 0 AND rp.percentageComplete < 95.0")
long countInProgressReadings();
@Query("SELECT AVG(rp.percentageComplete) FROM ReadingPosition rp WHERE rp.percentageComplete > 0")
Double findAverageReadingProgress();
@Query("SELECT rp FROM ReadingPosition rp WHERE rp.epubCfi IS NOT NULL")
List<ReadingPosition> findPositionsWithEpubCfi();
boolean existsByStoryId(UUID storyId);
void deleteByStoryId(UUID storyId);
}

View File

@@ -0,0 +1,30 @@
package com.storycove.repository;
import com.storycove.entity.RefreshToken;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.Optional;
import java.util.UUID;
@Repository
public interface RefreshTokenRepository extends JpaRepository<RefreshToken, UUID> {
Optional<RefreshToken> findByToken(String token);
@Modifying
@Query("DELETE FROM RefreshToken rt WHERE rt.expiresAt < :now")
void deleteExpiredTokens(@Param("now") LocalDateTime now);
@Modifying
@Query("UPDATE RefreshToken rt SET rt.revokedAt = :now WHERE rt.libraryId = :libraryId AND rt.revokedAt IS NULL")
void revokeAllByLibraryId(@Param("libraryId") String libraryId, @Param("now") LocalDateTime now);
@Modifying
@Query("UPDATE RefreshToken rt SET rt.revokedAt = :now WHERE rt.revokedAt IS NULL")
void revokeAll(@Param("now") LocalDateTime now);
}

View File

@@ -87,6 +87,9 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since") @Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since")
long countStoriesCreatedSince(@Param("since") LocalDateTime since); long countStoriesCreatedSince(@Param("since") LocalDateTime since);
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since OR s.updatedAt >= :since")
long countStoriesModifiedAfter(@Param("since") LocalDateTime since);
@Query("SELECT AVG(s.wordCount) FROM Story s") @Query("SELECT AVG(s.wordCount) FROM Story s")
Double findAverageWordCount(); Double findAverageWordCount();
@@ -114,4 +117,130 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
"LEFT JOIN FETCH s.series " + "LEFT JOIN FETCH s.series " +
"LEFT JOIN FETCH s.tags") "LEFT JOIN FETCH s.tags")
List<Story> findAllWithAssociations(); List<Story> findAllWithAssociations();
@Query("SELECT s FROM Story s WHERE UPPER(s.title) = UPPER(:title) AND UPPER(s.author.name) = UPPER(:authorName)")
List<Story> findByTitleAndAuthorNameIgnoreCase(@Param("title") String title, @Param("authorName") String authorName);
/**
* Count all stories for random selection (no filters)
*/
@Query(value = "SELECT COUNT(*) FROM stories", nativeQuery = true)
long countAllStories();
/**
* Count stories matching tag name filter for random selection
*/
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
"JOIN story_tags st ON s.id = st.story_id " +
"JOIN tags t ON st.tag_id = t.id " +
"WHERE UPPER(t.name) = UPPER(?1)",
nativeQuery = true)
long countStoriesByTagName(String tagName);
/**
* Find a random story using offset (no filters)
*/
@Query(value = "SELECT s.* FROM stories s ORDER BY s.id OFFSET ?1 LIMIT 1", nativeQuery = true)
Optional<Story> findRandomStory(long offset);
/**
* Find a random story matching tag name filter using offset
*/
@Query(value = "SELECT s.* FROM stories s " +
"JOIN story_tags st ON s.id = st.story_id " +
"JOIN tags t ON st.tag_id = t.id " +
"WHERE UPPER(t.name) = UPPER(?1) " +
"ORDER BY s.id OFFSET ?2 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByTagName(String tagName, long offset);
/**
* Count stories matching multiple tags (ALL tags must be present)
*/
@Query(value = "SELECT COUNT(*) FROM (" +
" SELECT DISTINCT s.id FROM stories s " +
" JOIN story_tags st ON s.id = st.story_id " +
" JOIN tags t ON st.tag_id = t.id " +
" WHERE UPPER(t.name) IN (?1) " +
" GROUP BY s.id " +
" HAVING COUNT(DISTINCT t.name) = ?2" +
") as matched_stories",
nativeQuery = true)
long countStoriesByMultipleTags(List<String> upperCaseTagNames, int tagCount);
/**
* Find random story matching multiple tags (ALL tags must be present)
*/
@Query(value = "SELECT s.* FROM stories s " +
"JOIN story_tags st ON s.id = st.story_id " +
"JOIN tags t ON st.tag_id = t.id " +
"WHERE UPPER(t.name) IN (?1) " +
"GROUP BY s.id, s.title, s.summary, s.description, s.content_html, s.content_plain, s.source_url, s.cover_path, s.word_count, s.rating, s.volume, s.is_read, s.reading_position, s.last_read_at, s.author_id, s.series_id, s.created_at, s.updated_at " +
"HAVING COUNT(DISTINCT t.name) = ?2 " +
"ORDER BY s.id OFFSET ?3 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByMultipleTags(List<String> upperCaseTagNames, int tagCount, long offset);
/**
* Count stories matching text search (title, author, tags)
*/
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1))",
nativeQuery = true)
long countStoriesByTextSearch(String searchPattern);
/**
* Find random story matching text search (title, author, tags)
*/
@Query(value = "SELECT DISTINCT s.* FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
"ORDER BY s.id OFFSET ?2 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByTextSearch(String searchPattern, long offset);
/**
* Count stories matching both text search AND tags
*/
@Query(value = "SELECT COUNT(DISTINCT s.id) FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
"AND s.id IN (" +
" SELECT s2.id FROM stories s2 " +
" JOIN story_tags st2 ON s2.id = st2.story_id " +
" JOIN tags t2 ON st2.tag_id = t2.id " +
" WHERE UPPER(t2.name) IN (?2) " +
" GROUP BY s2.id " +
" HAVING COUNT(DISTINCT t2.name) = ?3" +
")",
nativeQuery = true)
long countStoriesByTextSearchAndTags(String searchPattern, List<String> upperCaseTagNames, int tagCount);
/**
* Find random story matching both text search AND tags
*/
@Query(value = "SELECT DISTINCT s.* FROM stories s " +
"LEFT JOIN authors a ON s.author_id = a.id " +
"LEFT JOIN story_tags st ON s.id = st.story_id " +
"LEFT JOIN tags t ON st.tag_id = t.id " +
"WHERE (UPPER(s.title) LIKE UPPER(?1) OR UPPER(a.name) LIKE UPPER(?1) OR UPPER(t.name) LIKE UPPER(?1)) " +
"AND s.id IN (" +
" SELECT s2.id FROM stories s2 " +
" JOIN story_tags st2 ON s2.id = st2.story_id " +
" JOIN tags t2 ON st2.tag_id = t2.id " +
" WHERE UPPER(t2.name) IN (?2) " +
" GROUP BY s2.id " +
" HAVING COUNT(DISTINCT t2.name) = ?3" +
") " +
"ORDER BY s.id OFFSET ?4 LIMIT 1",
nativeQuery = true)
Optional<Story> findRandomStoryByTextSearchAndTags(String searchPattern, List<String> upperCaseTagNames, int tagCount, long offset);
} }

View File

@@ -0,0 +1,60 @@
package com.storycove.repository;
import com.storycove.entity.TagAlias;
import com.storycove.entity.Tag;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Repository
public interface TagAliasRepository extends JpaRepository<TagAlias, UUID> {
/**
* Find alias by exact alias name (case-insensitive)
*/
@Query("SELECT ta FROM TagAlias ta WHERE LOWER(ta.aliasName) = LOWER(:aliasName)")
Optional<TagAlias> findByAliasNameIgnoreCase(@Param("aliasName") String aliasName);
/**
* Find all aliases for a specific canonical tag
*/
List<TagAlias> findByCanonicalTag(Tag canonicalTag);
/**
* Find all aliases for a specific canonical tag ID
*/
@Query("SELECT ta FROM TagAlias ta WHERE ta.canonicalTag.id = :tagId")
List<TagAlias> findByCanonicalTagId(@Param("tagId") UUID tagId);
/**
* Find aliases created from merge operations
*/
List<TagAlias> findByCreatedFromMergeTrue();
/**
* Check if an alias name already exists
*/
boolean existsByAliasNameIgnoreCase(String aliasName);
/**
* Delete all aliases for a specific tag
*/
void deleteByCanonicalTag(Tag canonicalTag);
/**
* Count aliases for a specific tag
*/
@Query("SELECT COUNT(ta) FROM TagAlias ta WHERE ta.canonicalTag.id = :tagId")
long countByCanonicalTagId(@Param("tagId") UUID tagId);
/**
* Find aliases that start with the given prefix (case-insensitive)
*/
@Query("SELECT ta FROM TagAlias ta WHERE LOWER(ta.aliasName) LIKE LOWER(CONCAT(:prefix, '%'))")
List<TagAlias> findByAliasNameStartingWithIgnoreCase(@Param("prefix") String prefix);
}

View File

@@ -17,8 +17,12 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
Optional<Tag> findByName(String name); Optional<Tag> findByName(String name);
Optional<Tag> findByNameIgnoreCase(String name);
boolean existsByName(String name); boolean existsByName(String name);
boolean existsByNameIgnoreCase(String name);
List<Tag> findByNameContainingIgnoreCase(String name); List<Tag> findByNameContainingIgnoreCase(String name);
Page<Tag> findByNameContainingIgnoreCase(String name, Pageable pageable); Page<Tag> findByNameContainingIgnoreCase(String name, Pageable pageable);
@@ -54,4 +58,7 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
@Query("SELECT COUNT(t) FROM Tag t WHERE SIZE(t.stories) > 0") @Query("SELECT COUNT(t) FROM Tag t WHERE SIZE(t.stories) > 0")
long countUsedTags(); long countUsedTags();
@Query("SELECT t FROM Tag t WHERE SIZE(t.collections) > 0 ORDER BY SIZE(t.collections) DESC, t.name ASC")
List<Tag> findTagsUsedByCollections();
} }

View File

@@ -1,84 +0,0 @@
package com.storycove.scheduled;
import com.storycove.entity.Story;
import com.storycove.service.StoryService;
import com.storycove.service.TypesenseService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Component;
import java.time.LocalDateTime;
import java.util.List;
/**
* Scheduled task to periodically reindex all stories in Typesense
* to ensure search index stays synchronized with database changes.
*/
@Component
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true)
public class TypesenseIndexScheduler {
private static final Logger logger = LoggerFactory.getLogger(TypesenseIndexScheduler.class);
private final StoryService storyService;
private final TypesenseService typesenseService;
@Autowired
public TypesenseIndexScheduler(StoryService storyService,
@Autowired(required = false) TypesenseService typesenseService) {
this.storyService = storyService;
this.typesenseService = typesenseService;
}
/**
* Scheduled task that runs periodically to reindex all stories in Typesense.
* This ensures the search index stays synchronized with any database changes
* that might have occurred outside of the normal story update flow.
*
* Interval is configurable via storycove.typesense.reindex-interval property (default: 1 hour).
*/
@Scheduled(fixedRateString = "${storycove.typesense.reindex-interval:3600000}")
public void reindexAllStories() {
if (typesenseService == null) {
logger.debug("TypesenseService is not available, skipping scheduled reindexing");
return;
}
logger.info("Starting scheduled Typesense reindexing at {}", LocalDateTime.now());
try {
long startTime = System.currentTimeMillis();
// Get all stories from database with eagerly loaded associations
List<Story> allStories = storyService.findAllWithAssociations();
if (allStories.isEmpty()) {
logger.info("No stories found in database, skipping reindexing");
return;
}
// Perform full reindex
typesenseService.reindexAllStories(allStories);
long endTime = System.currentTimeMillis();
long duration = endTime - startTime;
logger.info("Completed scheduled Typesense reindexing of {} stories in {}ms",
allStories.size(), duration);
} catch (Exception e) {
logger.error("Failed to complete scheduled Typesense reindexing", e);
}
}
/**
* Manual trigger for reindexing - can be called from other services or endpoints if needed
*/
public void triggerManualReindex() {
logger.info("Manual Typesense reindexing triggered");
reindexAllStories();
}
}

View File

@@ -1,10 +1,14 @@
package com.storycove.security; package com.storycove.security;
import com.storycove.service.LibraryService;
import com.storycove.util.JwtUtil; import com.storycove.util.JwtUtil;
import jakarta.servlet.FilterChain; import jakarta.servlet.FilterChain;
import jakarta.servlet.ServletException; import jakarta.servlet.ServletException;
import jakarta.servlet.http.Cookie;
import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletRequest;
import jakarta.servlet.http.HttpServletResponse; import jakarta.servlet.http.HttpServletResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.security.authentication.UsernamePasswordAuthenticationToken; import org.springframework.security.authentication.UsernamePasswordAuthenticationToken;
import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.security.core.context.SecurityContextHolder;
import org.springframework.security.web.authentication.WebAuthenticationDetailsSource; import org.springframework.security.web.authentication.WebAuthenticationDetailsSource;
@@ -17,10 +21,14 @@ import java.util.ArrayList;
@Component @Component
public class JwtAuthenticationFilter extends OncePerRequestFilter { public class JwtAuthenticationFilter extends OncePerRequestFilter {
private final JwtUtil jwtUtil; private static final Logger logger = LoggerFactory.getLogger(JwtAuthenticationFilter.class);
public JwtAuthenticationFilter(JwtUtil jwtUtil) { private final JwtUtil jwtUtil;
private final LibraryService libraryService;
public JwtAuthenticationFilter(JwtUtil jwtUtil, LibraryService libraryService) {
this.jwtUtil = jwtUtil; this.jwtUtil = jwtUtil;
this.libraryService = libraryService;
} }
@Override @Override
@@ -28,16 +36,52 @@ public class JwtAuthenticationFilter extends OncePerRequestFilter {
HttpServletResponse response, HttpServletResponse response,
FilterChain filterChain) throws ServletException, IOException { FilterChain filterChain) throws ServletException, IOException {
String authHeader = request.getHeader("Authorization");
String token = null; String token = null;
// First try to get token from Authorization header
String authHeader = request.getHeader("Authorization");
if (authHeader != null && authHeader.startsWith("Bearer ")) { if (authHeader != null && authHeader.startsWith("Bearer ")) {
token = authHeader.substring(7); token = authHeader.substring(7);
} }
// If no token in header, try to get from cookies
if (token == null) {
Cookie[] cookies = request.getCookies();
if (cookies != null) {
for (Cookie cookie : cookies) {
if ("token".equals(cookie.getName())) {
token = cookie.getValue();
break;
}
}
}
}
if (token != null && jwtUtil.validateToken(token) && !jwtUtil.isTokenExpired(token)) { if (token != null && jwtUtil.validateToken(token) && !jwtUtil.isTokenExpired(token)) {
String subject = jwtUtil.getSubjectFromToken(token); String subject = jwtUtil.getSubjectFromToken(token);
// Check if we need to switch libraries based on token's library ID
try {
String tokenLibraryId = jwtUtil.getLibraryIdFromToken(token);
String currentLibraryId = libraryService.getCurrentLibraryId();
// Switch library if token's library differs from current library
// This handles cross-device library switching automatically
if (tokenLibraryId != null && !tokenLibraryId.equals(currentLibraryId)) {
logger.info("Token library '{}' differs from current library '{}', switching libraries",
tokenLibraryId, currentLibraryId);
libraryService.switchToLibraryAfterAuthentication(tokenLibraryId);
} else if (currentLibraryId == null && tokenLibraryId != null) {
// Handle case after backend restart where no library is active
logger.info("No active library, switching to token's library: {}", tokenLibraryId);
libraryService.switchToLibraryAfterAuthentication(tokenLibraryId);
}
} catch (Exception e) {
logger.error("Failed to switch library from token: {}", e.getMessage());
// Don't fail the request - authentication can still proceed
// but user might see wrong library data until next login
}
if (subject != null && SecurityContextHolder.getContext().getAuthentication() == null) { if (subject != null && SecurityContextHolder.getContext().getAuthentication() == null) {
UsernamePasswordAuthenticationToken authToken = UsernamePasswordAuthenticationToken authToken =
new UsernamePasswordAuthenticationToken(subject, null, new ArrayList<>()); new UsernamePasswordAuthenticationToken(subject, null, new ArrayList<>());

View File

@@ -0,0 +1,125 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.Optional;
import java.util.UUID;
/**
* Separate service for async backup execution.
* This is needed because @Async doesn't work when called from within the same class.
*/
@Service
public class AsyncBackupExecutor {
private static final Logger logger = LoggerFactory.getLogger(AsyncBackupExecutor.class);
@Value("${storycove.upload.dir:/app/images}")
private String uploadDir;
@Autowired
private BackupJobRepository backupJobRepository;
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private LibraryService libraryService;
/**
* Execute backup asynchronously.
* This method MUST be in a separate service class for @Async to work properly.
*/
@Async
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void executeBackupAsync(UUID jobId) {
logger.info("Async executor starting for job {}", jobId);
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
logger.error("Backup job not found: {}", jobId);
return;
}
BackupJob job = jobOpt.get();
job.setStatus(BackupJob.BackupStatus.IN_PROGRESS);
job.setStartedAt(LocalDateTime.now());
job.setProgressPercent(0);
backupJobRepository.save(job);
try {
logger.info("Starting backup job {} for library {}", job.getId(), job.getLibraryId());
// Switch to the correct library
if (!job.getLibraryId().equals(libraryService.getCurrentLibraryId())) {
libraryService.switchToLibraryAfterAuthentication(job.getLibraryId());
}
// Create backup file
Path backupDir = Paths.get(uploadDir, "backups", job.getLibraryId());
Files.createDirectories(backupDir);
String filename = String.format("backup_%s_%s.%s",
job.getId().toString(),
LocalDateTime.now().toString().replaceAll(":", "-"),
job.getType() == BackupJob.BackupType.COMPLETE ? "zip" : "sql");
Path backupFile = backupDir.resolve(filename);
job.setProgressPercent(10);
backupJobRepository.save(job);
// Create the backup
Resource backupResource;
if (job.getType() == BackupJob.BackupType.COMPLETE) {
backupResource = databaseManagementService.createCompleteBackup();
} else {
backupResource = databaseManagementService.createBackup();
}
job.setProgressPercent(80);
backupJobRepository.save(job);
// Copy resource to permanent file
try (var inputStream = backupResource.getInputStream();
var outputStream = Files.newOutputStream(backupFile)) {
inputStream.transferTo(outputStream);
}
job.setProgressPercent(95);
backupJobRepository.save(job);
// Set file info
job.setFilePath(backupFile.toString());
job.setFileSizeBytes(Files.size(backupFile));
job.setStatus(BackupJob.BackupStatus.COMPLETED);
job.setCompletedAt(LocalDateTime.now());
job.setProgressPercent(100);
logger.info("Backup job {} completed successfully. File size: {} bytes",
job.getId(), job.getFileSizeBytes());
} catch (Exception e) {
logger.error("Backup job {} failed", job.getId(), e);
job.setStatus(BackupJob.BackupStatus.FAILED);
job.setErrorMessage(e.getMessage());
job.setCompletedAt(LocalDateTime.now());
} finally {
backupJobRepository.save(job);
}
}
}

View File

@@ -0,0 +1,167 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Service
public class AsyncBackupService {
private static final Logger logger = LoggerFactory.getLogger(AsyncBackupService.class);
@Value("${storycove.upload.dir:/app/images}")
private String uploadDir;
@Autowired
private BackupJobRepository backupJobRepository;
@Autowired
private AsyncBackupExecutor asyncBackupExecutor;
/**
* Start a backup job asynchronously.
* This method returns immediately after creating the job record.
*/
@Transactional
public BackupJob startBackupJob(String libraryId, BackupJob.BackupType type) {
logger.info("Creating backup job for library: {}, type: {}", libraryId, type);
BackupJob job = new BackupJob(libraryId, type);
job = backupJobRepository.save(job);
logger.info("Backup job created with ID: {}. Starting async execution...", job.getId());
// Start backup in background using separate service (ensures @Async works properly)
asyncBackupExecutor.executeBackupAsync(job.getId());
logger.info("Async backup execution triggered for job: {}", job.getId());
return job;
}
/**
* Get backup job status
*/
public Optional<BackupJob> getJobStatus(UUID jobId) {
return backupJobRepository.findById(jobId);
}
/**
* Get backup file for download
*/
public Resource getBackupFile(UUID jobId) throws IOException {
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
throw new IOException("Backup job not found");
}
BackupJob job = jobOpt.get();
if (!job.isCompleted()) {
throw new IOException("Backup is not completed yet");
}
if (job.isExpired()) {
throw new IOException("Backup has expired");
}
if (job.getFilePath() == null) {
throw new IOException("Backup file path not set");
}
Path backupPath = Paths.get(job.getFilePath());
if (!Files.exists(backupPath)) {
throw new IOException("Backup file not found");
}
return new FileSystemResource(backupPath);
}
/**
* List backup jobs for a library
*/
public List<BackupJob> listBackupJobs(String libraryId) {
return backupJobRepository.findByLibraryIdOrderByCreatedAtDesc(libraryId);
}
/**
* Clean up expired backup jobs and their files
* Runs daily at 2 AM
*/
@Scheduled(cron = "0 0 2 * * ?")
@Transactional
public void cleanupExpiredBackups() {
logger.info("Starting cleanup of expired backups");
LocalDateTime now = LocalDateTime.now();
// Mark expired jobs
int markedCount = backupJobRepository.markExpiredJobs(now);
logger.info("Marked {} jobs as expired", markedCount);
// Find all expired jobs to delete their files
List<BackupJob> expiredJobs = backupJobRepository.findExpiredJobs(now);
for (BackupJob job : expiredJobs) {
if (job.getFilePath() != null) {
try {
Path filePath = Paths.get(job.getFilePath());
if (Files.exists(filePath)) {
Files.delete(filePath);
logger.info("Deleted expired backup file: {}", filePath);
}
} catch (IOException e) {
logger.warn("Failed to delete expired backup file: {}", job.getFilePath(), e);
}
}
// Delete the job record
backupJobRepository.delete(job);
}
logger.info("Cleanup completed. Deleted {} expired backups", expiredJobs.size());
}
/**
* Delete a specific backup job and its file
*/
@Transactional
public void deleteBackupJob(UUID jobId) throws IOException {
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
throw new IOException("Backup job not found");
}
BackupJob job = jobOpt.get();
// Delete file if it exists
if (job.getFilePath() != null) {
Path filePath = Paths.get(job.getFilePath());
if (Files.exists(filePath)) {
Files.delete(filePath);
logger.info("Deleted backup file: {}", filePath);
}
}
// Delete job record
backupJobRepository.delete(job);
logger.info("Deleted backup job: {}", jobId);
}
}

View File

@@ -0,0 +1,169 @@
package com.storycove.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import java.util.UUID;
import java.util.concurrent.CompletableFuture;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@Service
public class AsyncImageProcessingService {
private static final Logger logger = LoggerFactory.getLogger(AsyncImageProcessingService.class);
private final ImageService imageService;
private final StoryService storyService;
private final ImageProcessingProgressService progressService;
@org.springframework.beans.factory.annotation.Value("${storycove.app.public-url:http://localhost:6925}")
private String publicUrl;
@Autowired
public AsyncImageProcessingService(ImageService imageService,
StoryService storyService,
ImageProcessingProgressService progressService) {
this.imageService = imageService;
this.storyService = storyService;
this.progressService = progressService;
}
@Async
public CompletableFuture<Void> processStoryImagesAsync(UUID storyId, String contentHtml) {
logger.info("Starting async image processing for story: {}", storyId);
try {
// Count external images first
int externalImageCount = countExternalImages(contentHtml);
if (externalImageCount == 0) {
logger.debug("No external images found for story {}", storyId);
return CompletableFuture.completedFuture(null);
}
// Start progress tracking
ImageProcessingProgressService.ImageProcessingProgress progress =
progressService.startProgress(storyId, externalImageCount);
// Process images with progress updates
ImageService.ContentImageProcessingResult result =
processImagesWithProgress(contentHtml, storyId, progress);
// Update story with processed content if changed
if (!result.getProcessedContent().equals(contentHtml)) {
progressService.updateProgress(storyId, progress.getTotalImages(),
"Saving processed content", "Updating story content");
storyService.updateContentOnly(storyId, result.getProcessedContent());
progressService.completeProgress(storyId,
String.format("Completed: %d images processed", result.getDownloadedImages().size()));
logger.info("Async image processing completed for story {}: {} images processed",
storyId, result.getDownloadedImages().size());
} else {
progressService.completeProgress(storyId, "Completed: No images needed processing");
}
// Clean up progress after a delay to allow frontend to see completion
CompletableFuture.runAsync(() -> {
try {
Thread.sleep(5000); // 5 seconds delay
progressService.removeProgress(storyId);
} catch (InterruptedException e) {
Thread.currentThread().interrupt();
}
});
} catch (Exception e) {
logger.error("Async image processing failed for story {}: {}", storyId, e.getMessage(), e);
progressService.setError(storyId, e.getMessage());
}
return CompletableFuture.completedFuture(null);
}
private int countExternalImages(String contentHtml) {
if (contentHtml == null || contentHtml.trim().isEmpty()) {
return 0;
}
Pattern imgPattern = Pattern.compile("<img[^>]+src=[\"']([^\"']+)[\"'][^>]*>", Pattern.CASE_INSENSITIVE);
Matcher matcher = imgPattern.matcher(contentHtml);
int count = 0;
while (matcher.find()) {
String src = matcher.group(1);
if (isExternalUrl(src)) {
count++;
}
}
return count;
}
/**
* Check if a URL is external (not from this application).
* Returns true if the URL should be downloaded, false if it's already local.
*/
private boolean isExternalUrl(String url) {
if (url == null || url.trim().isEmpty()) {
return false;
}
// Skip data URLs
if (url.startsWith("data:")) {
return false;
}
// Skip relative URLs (local paths)
if (url.startsWith("/")) {
return false;
}
// Skip URLs that are already pointing to our API
if (url.contains("/api/files/images/")) {
return false;
}
// Check if URL starts with the public URL (our own domain)
if (publicUrl != null && !publicUrl.trim().isEmpty()) {
String normalizedUrl = url.trim().toLowerCase();
String normalizedPublicUrl = publicUrl.trim().toLowerCase();
// Remove trailing slash from public URL for comparison
if (normalizedPublicUrl.endsWith("/")) {
normalizedPublicUrl = normalizedPublicUrl.substring(0, normalizedPublicUrl.length() - 1);
}
if (normalizedUrl.startsWith(normalizedPublicUrl)) {
logger.debug("URL is from this application (matches publicUrl): {}", url);
return false;
}
}
// If it's an HTTP(S) URL that didn't match our filters, it's external
if (url.startsWith("http://") || url.startsWith("https://")) {
logger.debug("URL is external: {}", url);
return true;
}
// For any other format, consider it non-external (safer default)
return false;
}
private ImageService.ContentImageProcessingResult processImagesWithProgress(
String contentHtml, UUID storyId, ImageProcessingProgressService.ImageProcessingProgress progress) {
// Use a custom version of processContentImages that provides progress callbacks
return imageService.processContentImagesWithProgress(contentHtml, storyId,
(currentUrl, processedCount, totalCount) -> {
progressService.updateProgress(storyId, processedCount, currentUrl,
String.format("Processing image %d of %d", processedCount + 1, totalCount));
});
}
}

View File

@@ -11,21 +11,21 @@ import org.springframework.stereotype.Component;
import java.util.List; import java.util.List;
@Component @Component
@ConditionalOnProperty(name = "storycove.typesense.enabled", havingValue = "true", matchIfMissing = true) @ConditionalOnProperty(name = "storycove.search.enabled", havingValue = "true", matchIfMissing = true)
public class AuthorIndexScheduler { public class AuthorIndexScheduler {
private static final Logger logger = LoggerFactory.getLogger(AuthorIndexScheduler.class); private static final Logger logger = LoggerFactory.getLogger(AuthorIndexScheduler.class);
private final AuthorService authorService; private final AuthorService authorService;
private final TypesenseService typesenseService; private final SearchServiceAdapter searchServiceAdapter;
@Autowired @Autowired
public AuthorIndexScheduler(AuthorService authorService, TypesenseService typesenseService) { public AuthorIndexScheduler(AuthorService authorService, SearchServiceAdapter searchServiceAdapter) {
this.authorService = authorService; this.authorService = authorService;
this.typesenseService = typesenseService; this.searchServiceAdapter = searchServiceAdapter;
} }
@Scheduled(fixedRateString = "${storycove.typesense.author-reindex-interval:7200000}") // 2 hours default @Scheduled(fixedRateString = "${storycove.search.author-reindex-interval:7200000}") // 2 hours default
public void reindexAllAuthors() { public void reindexAllAuthors() {
try { try {
logger.info("Starting scheduled author reindexing..."); logger.info("Starting scheduled author reindexing...");
@@ -34,7 +34,7 @@ public class AuthorIndexScheduler {
logger.info("Found {} authors to reindex", allAuthors.size()); logger.info("Found {} authors to reindex", allAuthors.size());
if (!allAuthors.isEmpty()) { if (!allAuthors.isEmpty()) {
typesenseService.reindexAllAuthors(allAuthors); searchServiceAdapter.bulkIndexAuthors(allAuthors);
logger.info("Successfully completed scheduled author reindexing"); logger.info("Successfully completed scheduled author reindexing");
} else { } else {
logger.info("No authors found to reindex"); logger.info("No authors found to reindex");

View File

@@ -28,12 +28,12 @@ public class AuthorService {
private static final Logger logger = LoggerFactory.getLogger(AuthorService.class); private static final Logger logger = LoggerFactory.getLogger(AuthorService.class);
private final AuthorRepository authorRepository; private final AuthorRepository authorRepository;
private final TypesenseService typesenseService; private final SearchServiceAdapter searchServiceAdapter;
@Autowired @Autowired
public AuthorService(AuthorRepository authorRepository, TypesenseService typesenseService) { public AuthorService(AuthorRepository authorRepository, SearchServiceAdapter searchServiceAdapter) {
this.authorRepository = authorRepository; this.authorRepository = authorRepository;
this.typesenseService = typesenseService; this.searchServiceAdapter = searchServiceAdapter;
} }
@Transactional(readOnly = true) @Transactional(readOnly = true)
@@ -132,12 +132,8 @@ public class AuthorService {
validateAuthorForCreate(author); validateAuthorForCreate(author);
Author savedAuthor = authorRepository.save(author); Author savedAuthor = authorRepository.save(author);
// Index in Typesense // Index in Solr
try { searchServiceAdapter.indexAuthor(savedAuthor);
typesenseService.indexAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to index author in Typesense: " + savedAuthor.getName(), e);
}
return savedAuthor; return savedAuthor;
} }
@@ -154,12 +150,8 @@ public class AuthorService {
updateAuthorFields(existingAuthor, authorUpdates); updateAuthorFields(existingAuthor, authorUpdates);
Author savedAuthor = authorRepository.save(existingAuthor); Author savedAuthor = authorRepository.save(existingAuthor);
// Update in Typesense // Update in Solr
try { searchServiceAdapter.updateAuthor(savedAuthor);
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense: " + savedAuthor.getName(), e);
}
return savedAuthor; return savedAuthor;
} }
@@ -174,12 +166,8 @@ public class AuthorService {
authorRepository.delete(author); authorRepository.delete(author);
// Remove from Typesense // Remove from Solr
try { searchServiceAdapter.deleteAuthor(id);
typesenseService.deleteAuthor(id.toString());
} catch (Exception e) {
logger.warn("Failed to delete author from Typesense: " + author.getName(), e);
}
} }
public Author addUrl(UUID id, String url) { public Author addUrl(UUID id, String url) {
@@ -187,12 +175,8 @@ public class AuthorService {
author.addUrl(url); author.addUrl(url);
Author savedAuthor = authorRepository.save(author); Author savedAuthor = authorRepository.save(author);
// Update in Typesense // Update in Solr
try { searchServiceAdapter.updateAuthor(savedAuthor);
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after adding URL: " + savedAuthor.getName(), e);
}
return savedAuthor; return savedAuthor;
} }
@@ -202,12 +186,8 @@ public class AuthorService {
author.removeUrl(url); author.removeUrl(url);
Author savedAuthor = authorRepository.save(author); Author savedAuthor = authorRepository.save(author);
// Update in Typesense // Update in Solr
try { searchServiceAdapter.updateAuthor(savedAuthor);
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after removing URL: " + savedAuthor.getName(), e);
}
return savedAuthor; return savedAuthor;
} }
@@ -232,7 +212,7 @@ public class AuthorService {
rating, author.getName(), author.getAuthorRating()); rating, author.getName(), author.getAuthorRating());
author.setAuthorRating(rating); author.setAuthorRating(rating);
Author savedAuthor = authorRepository.save(author); authorRepository.save(author);
// Flush and refresh to ensure the entity is up-to-date // Flush and refresh to ensure the entity is up-to-date
authorRepository.flush(); authorRepository.flush();
@@ -241,12 +221,8 @@ public class AuthorService {
logger.debug("Saved author rating: {} for author: {}", logger.debug("Saved author rating: {} for author: {}",
refreshedAuthor.getAuthorRating(), refreshedAuthor.getName()); refreshedAuthor.getAuthorRating(), refreshedAuthor.getName());
// Update in Typesense // Update in Solr
try { searchServiceAdapter.updateAuthor(refreshedAuthor);
typesenseService.updateAuthor(refreshedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after rating: " + refreshedAuthor.getName(), e);
}
return refreshedAuthor; return refreshedAuthor;
} }
@@ -289,12 +265,8 @@ public class AuthorService {
author.setAvatarImagePath(avatarPath); author.setAvatarImagePath(avatarPath);
Author savedAuthor = authorRepository.save(author); Author savedAuthor = authorRepository.save(author);
// Update in Typesense // Update in Solr
try { searchServiceAdapter.updateAuthor(savedAuthor);
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after setting avatar: " + savedAuthor.getName(), e);
}
return savedAuthor; return savedAuthor;
} }
@@ -304,12 +276,8 @@ public class AuthorService {
author.setAvatarImagePath(null); author.setAvatarImagePath(null);
Author savedAuthor = authorRepository.save(author); Author savedAuthor = authorRepository.save(author);
// Update in Typesense // Update in Solr
try { searchServiceAdapter.updateAuthor(savedAuthor);
typesenseService.updateAuthor(savedAuthor);
} catch (Exception e) {
logger.warn("Failed to update author in Typesense after removing avatar: " + savedAuthor.getName(), e);
}
return savedAuthor; return savedAuthor;
} }

View File

@@ -0,0 +1,262 @@
package com.storycove.service;
import com.storycove.repository.StoryRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Service for automatic daily backups.
* Runs at 4 AM daily and creates a backup if content has changed since last backup.
* Keeps maximum of 5 backups, rotating old ones out.
*/
@Service
public class AutomaticBackupService {
private static final Logger logger = LoggerFactory.getLogger(AutomaticBackupService.class);
private static final int MAX_BACKUPS = 5;
private static final DateTimeFormatter FILENAME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");
@Value("${storycove.automatic-backup.dir:/app/automatic-backups}")
private String automaticBackupDir;
@Autowired
private StoryRepository storyRepository;
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private LibraryService libraryService;
private LocalDateTime lastBackupCheck = null;
/**
* Scheduled job that runs daily at 4 AM.
* Creates a backup if content has changed since last backup.
*/
@Scheduled(cron = "0 0 4 * * ?")
public void performAutomaticBackup() {
logger.info("========================================");
logger.info("Starting automatic backup check at 4 AM");
logger.info("========================================");
try {
// Get current library ID (or default)
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
libraryId = "default";
}
logger.info("Checking for content changes in library: {}", libraryId);
// Check if content has changed since last backup
if (!hasContentChanged()) {
logger.info("No content changes detected since last backup. Skipping backup.");
logger.info("========================================");
return;
}
logger.info("Content changes detected! Creating automatic backup...");
// Create backup directory for this library
Path backupPath = Paths.get(automaticBackupDir, libraryId);
Files.createDirectories(backupPath);
// Create the backup
String timestamp = LocalDateTime.now().format(FILENAME_FORMATTER);
String filename = String.format("auto_backup_%s.zip", timestamp);
Path backupFile = backupPath.resolve(filename);
logger.info("Creating complete backup to: {}", backupFile);
Resource backup = databaseManagementService.createCompleteBackup();
// Write backup to file
try (var inputStream = backup.getInputStream();
var outputStream = Files.newOutputStream(backupFile)) {
inputStream.transferTo(outputStream);
}
long fileSize = Files.size(backupFile);
logger.info("✅ Automatic backup created successfully");
logger.info(" File: {}", backupFile.getFileName());
logger.info(" Size: {} MB", fileSize / 1024 / 1024);
// Rotate old backups (keep only MAX_BACKUPS)
rotateBackups(backupPath);
// Update last backup check time
lastBackupCheck = LocalDateTime.now();
logger.info("========================================");
logger.info("Automatic backup completed successfully");
logger.info("========================================");
} catch (Exception e) {
logger.error("❌ Automatic backup failed", e);
logger.info("========================================");
}
}
/**
* Check if content has changed since last backup.
* Looks for stories created or updated after the last backup time.
*/
private boolean hasContentChanged() {
try {
if (lastBackupCheck == null) {
// First run - check if there are any stories at all
long storyCount = storyRepository.count();
logger.info("First backup check - found {} stories", storyCount);
return storyCount > 0;
}
// Check for stories created or updated since last backup
long changedCount = storyRepository.countStoriesModifiedAfter(lastBackupCheck);
logger.info("Found {} stories modified since last backup ({})", changedCount, lastBackupCheck);
return changedCount > 0;
} catch (Exception e) {
logger.error("Error checking for content changes", e);
// On error, create backup to be safe
return true;
}
}
/**
* Rotate backups - keep only MAX_BACKUPS most recent backups.
* Deletes older backups.
*/
private void rotateBackups(Path backupPath) throws IOException {
logger.info("Checking for old backups to rotate...");
// Find all backup files in the directory
List<Path> backupFiles;
try (Stream<Path> stream = Files.list(backupPath)) {
backupFiles = stream
.filter(Files::isRegularFile)
.filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
.filter(p -> p.getFileName().toString().endsWith(".zip"))
.sorted(Comparator.comparing((Path p) -> {
try {
return Files.getLastModifiedTime(p);
} catch (IOException e) {
return null;
}
}).reversed()) // Most recent first
.collect(Collectors.toList());
}
logger.info("Found {} automatic backups", backupFiles.size());
// Delete old backups if we exceed MAX_BACKUPS
if (backupFiles.size() > MAX_BACKUPS) {
List<Path> toDelete = backupFiles.subList(MAX_BACKUPS, backupFiles.size());
logger.info("Deleting {} old backups to maintain maximum of {}", toDelete.size(), MAX_BACKUPS);
for (Path oldBackup : toDelete) {
try {
Files.delete(oldBackup);
logger.info(" Deleted old backup: {}", oldBackup.getFileName());
} catch (IOException e) {
logger.warn("Failed to delete old backup: {}", oldBackup, e);
}
}
} else {
logger.info("Backup count within limit ({}), no rotation needed", MAX_BACKUPS);
}
}
/**
* Manual trigger for testing - creates backup immediately if content changed.
*/
public void triggerManualBackup() {
logger.info("Manual automatic backup triggered");
performAutomaticBackup();
}
/**
* Get list of automatic backups for the current library.
*/
public List<BackupInfo> listAutomaticBackups() throws IOException {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
libraryId = "default";
}
Path backupPath = Paths.get(automaticBackupDir, libraryId);
if (!Files.exists(backupPath)) {
return List.of();
}
try (Stream<Path> stream = Files.list(backupPath)) {
return stream
.filter(Files::isRegularFile)
.filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
.filter(p -> p.getFileName().toString().endsWith(".zip"))
.sorted(Comparator.comparing((Path p) -> {
try {
return Files.getLastModifiedTime(p);
} catch (IOException e) {
return null;
}
}).reversed())
.map(p -> {
try {
return new BackupInfo(
p.getFileName().toString(),
Files.size(p),
Files.getLastModifiedTime(p).toInstant().toString()
);
} catch (IOException e) {
return null;
}
})
.filter(info -> info != null)
.collect(Collectors.toList());
}
}
/**
* Simple backup info class.
*/
public static class BackupInfo {
private final String filename;
private final long sizeBytes;
private final String createdAt;
public BackupInfo(String filename, long sizeBytes, String createdAt) {
this.filename = filename;
this.sizeBytes = sizeBytes;
this.createdAt = createdAt;
}
public String getFilename() {
return filename;
}
public long getSizeBytes() {
return sizeBytes;
}
public String getCreatedAt() {
return createdAt;
}
}
}

View File

@@ -10,6 +10,7 @@ public class CollectionSearchResult extends Collection {
private Integer storedStoryCount; private Integer storedStoryCount;
private Integer storedTotalWordCount; private Integer storedTotalWordCount;
private int wordsPerMinute = 200; // Default, can be overridden
public CollectionSearchResult(Collection collection) { public CollectionSearchResult(Collection collection) {
this.setId(collection.getId()); this.setId(collection.getId());
@@ -20,6 +21,7 @@ public class CollectionSearchResult extends Collection {
this.setCreatedAt(collection.getCreatedAt()); this.setCreatedAt(collection.getCreatedAt());
this.setUpdatedAt(collection.getUpdatedAt()); this.setUpdatedAt(collection.getUpdatedAt());
this.setCoverImagePath(collection.getCoverImagePath()); this.setCoverImagePath(collection.getCoverImagePath());
this.setTagNames(collection.getTagNames()); // Copy tag names for search results
// Note: don't copy collectionStories or tags to avoid lazy loading issues // Note: don't copy collectionStories or tags to avoid lazy loading issues
} }
@@ -31,6 +33,10 @@ public class CollectionSearchResult extends Collection {
this.storedTotalWordCount = totalWordCount; this.storedTotalWordCount = totalWordCount;
} }
public void setWordsPerMinute(int wordsPerMinute) {
this.wordsPerMinute = wordsPerMinute;
}
@Override @Override
public int getStoryCount() { public int getStoryCount() {
return storedStoryCount != null ? storedStoryCount : 0; return storedStoryCount != null ? storedStoryCount : 0;
@@ -43,8 +49,7 @@ public class CollectionSearchResult extends Collection {
@Override @Override
public int getEstimatedReadingTime() { public int getEstimatedReadingTime() {
// Assuming 200 words per minute reading speed return Math.max(1, getTotalWordCount() / wordsPerMinute);
return Math.max(1, getTotalWordCount() / 200);
} }
@Override @Override

View File

@@ -1,6 +1,9 @@
package com.storycove.service; package com.storycove.service;
import com.storycove.dto.CollectionDto;
import com.storycove.dto.SearchResultDto; import com.storycove.dto.SearchResultDto;
import com.storycove.dto.StoryReadingDto;
import com.storycove.dto.TagDto;
import com.storycove.entity.Collection; import com.storycove.entity.Collection;
import com.storycove.entity.CollectionStory; import com.storycove.entity.CollectionStory;
import com.storycove.entity.Story; import com.storycove.entity.Story;
@@ -9,14 +12,10 @@ import com.storycove.repository.CollectionRepository;
import com.storycove.repository.CollectionStoryRepository; import com.storycove.repository.CollectionStoryRepository;
import com.storycove.repository.StoryRepository; import com.storycove.repository.StoryRepository;
import com.storycove.repository.TagRepository; import com.storycove.repository.TagRepository;
import com.storycove.service.exception.DuplicateResourceException;
import com.storycove.service.exception.ResourceNotFoundException; import com.storycove.service.exception.ResourceNotFoundException;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.data.domain.Page;
import org.springframework.data.domain.PageRequest;
import org.springframework.data.domain.Pageable;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@@ -33,33 +32,50 @@ public class CollectionService {
private final CollectionStoryRepository collectionStoryRepository; private final CollectionStoryRepository collectionStoryRepository;
private final StoryRepository storyRepository; private final StoryRepository storyRepository;
private final TagRepository tagRepository; private final TagRepository tagRepository;
private final TypesenseService typesenseService; private final SearchServiceAdapter searchServiceAdapter;
private final ReadingTimeService readingTimeService;
@Autowired @Autowired
public CollectionService(CollectionRepository collectionRepository, public CollectionService(CollectionRepository collectionRepository,
CollectionStoryRepository collectionStoryRepository, CollectionStoryRepository collectionStoryRepository,
StoryRepository storyRepository, StoryRepository storyRepository,
TagRepository tagRepository, TagRepository tagRepository,
@Autowired(required = false) TypesenseService typesenseService) { SearchServiceAdapter searchServiceAdapter,
ReadingTimeService readingTimeService) {
this.collectionRepository = collectionRepository; this.collectionRepository = collectionRepository;
this.collectionStoryRepository = collectionStoryRepository; this.collectionStoryRepository = collectionStoryRepository;
this.storyRepository = storyRepository; this.storyRepository = storyRepository;
this.tagRepository = tagRepository; this.tagRepository = tagRepository;
this.typesenseService = typesenseService; this.searchServiceAdapter = searchServiceAdapter;
this.readingTimeService = readingTimeService;
} }
/** /**
* Search collections using Typesense (MANDATORY for all search/filter operations) * Search collections using Solr (MANDATORY for all search/filter operations)
* This method MUST be used instead of JPA queries for listing collections * This method MUST be used instead of JPA queries for listing collections
*/ */
public SearchResultDto<Collection> searchCollections(String query, List<String> tags, boolean includeArchived, int page, int limit) { public SearchResultDto<Collection> searchCollections(String query, List<String> tags, boolean includeArchived, int page, int limit) {
if (typesenseService == null) { try {
logger.warn("Typesense service not available, returning empty results"); // Use SearchServiceAdapter to search collections
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0); SearchResultDto<CollectionDto> searchResult = searchServiceAdapter.searchCollections(query, tags, includeArchived, page, limit);
// Convert CollectionDto back to Collection entities by fetching from database
List<Collection> collections = new ArrayList<>();
for (CollectionDto dto : searchResult.getResults()) {
try {
Collection collection = findByIdBasic(dto.getId());
collections.add(collection);
} catch (ResourceNotFoundException e) {
logger.warn("Collection {} found in search index but not in database", dto.getId());
}
} }
// Delegate to TypesenseService for all search operations return new SearchResultDto<>(collections, (int) searchResult.getTotalHits(), page, limit,
return typesenseService.searchCollections(query, tags, includeArchived, page, limit); query != null ? query : "", searchResult.getSearchTimeMs());
} catch (Exception e) {
logger.error("Collection search failed, falling back to empty results", e);
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
}
} }
/** /**
@@ -78,6 +94,13 @@ public class CollectionService {
.orElseThrow(() -> new ResourceNotFoundException("Collection not found with id: " + id)); .orElseThrow(() -> new ResourceNotFoundException("Collection not found with id: " + id));
} }
/**
* Find all collections with tags for reindexing
*/
public List<Collection> findAllWithTags() {
return collectionRepository.findAllWithTags();
}
/** /**
* Create a new collection with optional initial stories * Create a new collection with optional initial stories
*/ */
@@ -99,10 +122,7 @@ public class CollectionService {
savedCollection = findById(savedCollection.getId()); savedCollection = findById(savedCollection.getId());
} }
// Index in Typesense // Collections are not indexed in search engine yet
if (typesenseService != null) {
typesenseService.indexCollection(savedCollection);
}
logger.info("Created collection: {} with {} stories", name, initialStoryIds != null ? initialStoryIds.size() : 0); logger.info("Created collection: {} with {} stories", name, initialStoryIds != null ? initialStoryIds.size() : 0);
return savedCollection; return savedCollection;
@@ -132,10 +152,7 @@ public class CollectionService {
Collection savedCollection = collectionRepository.save(collection); Collection savedCollection = collectionRepository.save(collection);
// Update in Typesense // Collections are not indexed in search engine yet
if (typesenseService != null) {
typesenseService.indexCollection(savedCollection);
}
logger.info("Updated collection: {}", id); logger.info("Updated collection: {}", id);
return savedCollection; return savedCollection;
@@ -147,10 +164,7 @@ public class CollectionService {
public void deleteCollection(UUID id) { public void deleteCollection(UUID id) {
Collection collection = findByIdBasic(id); Collection collection = findByIdBasic(id);
// Remove from Typesense first // Collections are not indexed in search engine yet
if (typesenseService != null) {
typesenseService.removeCollection(id);
}
collectionRepository.delete(collection); collectionRepository.delete(collection);
logger.info("Deleted collection: {}", id); logger.info("Deleted collection: {}", id);
@@ -165,10 +179,7 @@ public class CollectionService {
Collection savedCollection = collectionRepository.save(collection); Collection savedCollection = collectionRepository.save(collection);
// Update in Typesense // Collections are not indexed in search engine yet
if (typesenseService != null) {
typesenseService.indexCollection(savedCollection);
}
logger.info("{} collection: {}", archived ? "Archived" : "Unarchived", id); logger.info("{} collection: {}", archived ? "Archived" : "Unarchived", id);
return savedCollection; return savedCollection;
@@ -213,10 +224,7 @@ public class CollectionService {
} }
// Update collection in Typesense // Update collection in Typesense
if (typesenseService != null) { // Collections are not indexed in search engine yet
Collection updatedCollection = findById(collectionId);
typesenseService.indexCollection(updatedCollection);
}
long totalStories = collectionStoryRepository.countByCollectionId(collectionId); long totalStories = collectionStoryRepository.countByCollectionId(collectionId);
@@ -241,10 +249,7 @@ public class CollectionService {
collectionStoryRepository.delete(collectionStory); collectionStoryRepository.delete(collectionStory);
// Update collection in Typesense // Update collection in Typesense
if (typesenseService != null) { // Collections are not indexed in search engine yet
Collection updatedCollection = findById(collectionId);
typesenseService.indexCollection(updatedCollection);
}
logger.info("Removed story {} from collection {}", storyId, collectionId); logger.info("Removed story {} from collection {}", storyId, collectionId);
} }
@@ -254,7 +259,7 @@ public class CollectionService {
*/ */
@Transactional @Transactional
public void reorderStories(UUID collectionId, List<Map<String, Object>> storyOrders) { public void reorderStories(UUID collectionId, List<Map<String, Object>> storyOrders) {
Collection collection = findByIdBasic(collectionId); findByIdBasic(collectionId); // Validate collection exists
// Two-phase update to avoid unique constraint violations: // Two-phase update to avoid unique constraint violations:
// Phase 1: Set all positions to negative values (temporary) // Phase 1: Set all positions to negative values (temporary)
@@ -277,10 +282,7 @@ public class CollectionService {
} }
// Update collection in Typesense // Update collection in Typesense
if (typesenseService != null) { // Collections are not indexed in search engine yet
Collection updatedCollection = findById(collectionId);
typesenseService.indexCollection(updatedCollection);
}
logger.info("Reordered {} stories in collection {}", storyOrders.size(), collectionId); logger.info("Reordered {} stories in collection {}", storyOrders.size(), collectionId);
} }
@@ -326,7 +328,7 @@ public class CollectionService {
); );
return Map.of( return Map.of(
"story", story, "story", convertToReadingDto(story),
"collection", collectionContext "collection", collectionContext
); );
} }
@@ -344,7 +346,7 @@ public class CollectionService {
int totalWordCount = collectionStories.stream() int totalWordCount = collectionStories.stream()
.mapToInt(cs -> cs.getStory().getWordCount() != null ? cs.getStory().getWordCount() : 0) .mapToInt(cs -> cs.getStory().getWordCount() != null ? cs.getStory().getWordCount() : 0)
.sum(); .sum();
int estimatedReadingTime = Math.max(1, totalWordCount / 200); // 200 words per minute int estimatedReadingTime = readingTimeService.calculateReadingTime(totalWordCount);
double averageStoryRating = collectionStories.stream() double averageStoryRating = collectionStories.stream()
.filter(cs -> cs.getStory().getRating() != null) .filter(cs -> cs.getStory().getRating() != null)
@@ -415,9 +417,54 @@ public class CollectionService {
} }
/** /**
* Get all collections for indexing (used by TypesenseService) * Get all collections for indexing (used by SearchServiceAdapter)
*/ */
public List<Collection> findAllForIndexing() { public List<Collection> findAllForIndexing() {
return collectionRepository.findAllActiveCollections(); return collectionRepository.findAllActiveCollections();
} }
private StoryReadingDto convertToReadingDto(Story story) {
StoryReadingDto dto = new StoryReadingDto();
dto.setId(story.getId());
dto.setTitle(story.getTitle());
dto.setSummary(story.getSummary());
dto.setDescription(story.getDescription());
dto.setContentHtml(story.getContentHtml());
dto.setSourceUrl(story.getSourceUrl());
dto.setCoverPath(story.getCoverPath());
dto.setWordCount(story.getWordCount());
dto.setRating(story.getRating());
dto.setVolume(story.getVolume());
dto.setCreatedAt(story.getCreatedAt());
dto.setUpdatedAt(story.getUpdatedAt());
// Reading progress fields
dto.setIsRead(story.getIsRead());
dto.setReadingPosition(story.getReadingPosition());
dto.setLastReadAt(story.getLastReadAt());
if (story.getAuthor() != null) {
dto.setAuthorId(story.getAuthor().getId());
dto.setAuthorName(story.getAuthor().getName());
}
if (story.getSeries() != null) {
dto.setSeriesId(story.getSeries().getId());
dto.setSeriesName(story.getSeries().getName());
}
dto.setTags(story.getTags().stream()
.map(this::convertTagToDto)
.collect(Collectors.toList()));
return dto;
}
private TagDto convertTagToDto(Tag tag) {
TagDto dto = new TagDto();
dto.setId(tag.getId());
dto.setName(tag.getName());
dto.setStoryCount(tag.getStories().size());
return dto;
}
} }

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,584 @@
package com.storycove.service;
import com.storycove.dto.EPUBExportRequest;
import com.storycove.entity.Collection;
import com.storycove.entity.ReadingPosition;
import com.storycove.entity.Story;
import com.storycove.repository.ReadingPositionRepository;
import com.storycove.service.exception.ResourceNotFoundException;
import nl.siegmann.epublib.domain.*;
import nl.siegmann.epublib.epub.EpubWriter;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
import java.util.stream.Collectors;
@Service
@Transactional
public class EPUBExportService {
private final StoryService storyService;
private final ReadingPositionRepository readingPositionRepository;
private final CollectionService collectionService;
@Autowired
public EPUBExportService(StoryService storyService,
ReadingPositionRepository readingPositionRepository,
CollectionService collectionService) {
this.storyService = storyService;
this.readingPositionRepository = readingPositionRepository;
this.collectionService = collectionService;
}
public Resource exportStoryAsEPUB(EPUBExportRequest request) throws IOException {
Story story = storyService.findById(request.getStoryId());
Book book = createEPUBBook(story, request);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
EpubWriter epubWriter = new EpubWriter();
epubWriter.write(book, outputStream);
return new ByteArrayResource(outputStream.toByteArray());
}
public Resource exportCollectionAsEPUB(UUID collectionId, EPUBExportRequest request) throws IOException {
Collection collection = collectionService.findById(collectionId);
List<Story> stories = collection.getCollectionStories().stream()
.sorted((cs1, cs2) -> Integer.compare(cs1.getPosition(), cs2.getPosition()))
.map(cs -> cs.getStory())
.collect(Collectors.toList());
if (stories.isEmpty()) {
throw new ResourceNotFoundException("Collection contains no stories to export");
}
Book book = createCollectionEPUBBook(collection, stories, request);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
EpubWriter epubWriter = new EpubWriter();
epubWriter.write(book, outputStream);
return new ByteArrayResource(outputStream.toByteArray());
}
private Book createEPUBBook(Story story, EPUBExportRequest request) throws IOException {
Book book = new Book();
setupMetadata(book, story, request);
addCoverImage(book, story, request);
addContent(book, story, request);
addReadingPosition(book, story, request);
return book;
}
private Book createCollectionEPUBBook(Collection collection, List<Story> stories, EPUBExportRequest request) throws IOException {
Book book = new Book();
setupCollectionMetadata(book, collection, stories, request);
addCollectionCoverImage(book, collection, request);
addCollectionContent(book, stories, request);
return book;
}
private void setupMetadata(Book book, Story story, EPUBExportRequest request) {
Metadata metadata = book.getMetadata();
String title = request.getCustomTitle() != null ?
request.getCustomTitle() : story.getTitle();
metadata.addTitle(title);
String authorName = request.getCustomAuthor() != null ?
request.getCustomAuthor() :
(story.getAuthor() != null ? story.getAuthor().getName() : "Unknown Author");
metadata.addAuthor(new Author(authorName));
metadata.setLanguage(request.getLanguage() != null ? request.getLanguage() : "en");
metadata.addIdentifier(new Identifier("storycove", story.getId().toString()));
if (story.getDescription() != null) {
metadata.addDescription(story.getDescription());
}
if (request.getIncludeMetadata()) {
metadata.addDate(new Date(java.util.Date.from(
story.getCreatedAt().atZone(java.time.ZoneId.systemDefault()).toInstant()
), Date.Event.CREATION));
if (story.getSeries() != null) {
// Add series and metadata info to description instead of using addMeta
StringBuilder description = new StringBuilder();
if (story.getDescription() != null) {
description.append(story.getDescription()).append("\n\n");
}
description.append("Series: ").append(story.getSeries().getName());
if (story.getVolume() != null) {
description.append(" (Volume ").append(story.getVolume()).append(")");
}
description.append("\n");
if (story.getWordCount() != null) {
description.append("Word Count: ").append(story.getWordCount()).append("\n");
}
if (story.getRating() != null) {
description.append("Rating: ").append(story.getRating()).append("/5\n");
}
if (!story.getTags().isEmpty()) {
String tags = story.getTags().stream()
.map(tag -> tag.getName())
.reduce((a, b) -> a + ", " + b)
.orElse("");
description.append("Tags: ").append(tags).append("\n");
}
description.append("\nGenerated by StoryCove on ")
.append(LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
metadata.addDescription(description.toString());
}
}
if (request.getCustomMetadata() != null && !request.getCustomMetadata().isEmpty()) {
// Add custom metadata to description since addMeta doesn't exist
StringBuilder customDesc = new StringBuilder();
for (String customMeta : request.getCustomMetadata()) {
String[] parts = customMeta.split(":", 2);
if (parts.length == 2) {
customDesc.append(parts[0].trim()).append(": ").append(parts[1].trim()).append("\n");
}
}
if (customDesc.length() > 0) {
String existingDesc = metadata.getDescriptions().isEmpty() ? "" : metadata.getDescriptions().get(0);
metadata.addDescription(existingDesc + "\n" + customDesc.toString());
}
}
}
private void addCoverImage(Book book, Story story, EPUBExportRequest request) {
if (!request.getIncludeCoverImage() || story.getCoverPath() == null) {
return;
}
try {
Path coverPath = Paths.get(story.getCoverPath());
if (Files.exists(coverPath)) {
byte[] coverImageData = Files.readAllBytes(coverPath);
String mimeType = Files.probeContentType(coverPath);
if (mimeType == null) {
mimeType = "image/jpeg";
}
nl.siegmann.epublib.domain.Resource coverResource =
new nl.siegmann.epublib.domain.Resource(coverImageData, "cover.jpg");
book.setCoverImage(coverResource);
}
} catch (IOException e) {
// Skip cover image on error
}
}
private void addContent(Book book, Story story, EPUBExportRequest request) {
String content = story.getContentHtml();
if (content == null) {
content = story.getContentPlain() != null ?
"<p>" + story.getContentPlain().replace("\n", "</p><p>") + "</p>" :
"<p>No content available</p>";
}
if (request.getSplitByChapters()) {
addChapterizedContent(book, content, request);
} else {
addSingleChapterContent(book, content, story);
}
}
private void addSingleChapterContent(Book book, String content, Story story) {
String html = createChapterHTML(story.getTitle(), content);
nl.siegmann.epublib.domain.Resource chapterResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter.html");
book.addSection(story.getTitle(), chapterResource);
}
private void addChapterizedContent(Book book, String content, EPUBExportRequest request) {
Document doc = Jsoup.parse(content);
Elements chapters = doc.select("div.chapter, h1, h2, h3");
if (chapters.isEmpty()) {
List<String> paragraphs = splitByWords(content,
request.getMaxWordsPerChapter() != null ? request.getMaxWordsPerChapter() : 2000);
for (int i = 0; i < paragraphs.size(); i++) {
String chapterTitle = "Chapter " + (i + 1);
String html = createChapterHTML(chapterTitle, paragraphs.get(i));
nl.siegmann.epublib.domain.Resource chapterResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter" + (i + 1) + ".html");
book.addSection(chapterTitle, chapterResource);
}
} else {
for (int i = 0; i < chapters.size(); i++) {
Element chapter = chapters.get(i);
String chapterTitle = chapter.text();
if (chapterTitle.trim().isEmpty()) {
chapterTitle = "Chapter " + (i + 1);
}
String chapterContent = chapter.html();
String html = createChapterHTML(chapterTitle, chapterContent);
nl.siegmann.epublib.domain.Resource chapterResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "chapter" + (i + 1) + ".html");
book.addSection(chapterTitle, chapterResource);
}
}
}
private List<String> splitByWords(String content, int maxWordsPerChapter) {
String[] words = content.split("\\s+");
List<String> chapters = new ArrayList<>();
StringBuilder currentChapter = new StringBuilder();
int wordCount = 0;
for (String word : words) {
currentChapter.append(word).append(" ");
wordCount++;
if (wordCount >= maxWordsPerChapter) {
chapters.add(currentChapter.toString().trim());
currentChapter = new StringBuilder();
wordCount = 0;
}
}
if (currentChapter.length() > 0) {
chapters.add(currentChapter.toString().trim());
}
return chapters;
}
private String createChapterHTML(String title, String content) {
return "<?xml version=\"1.0\" encoding=\"UTF-8\"?>" +
"<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.1//EN\" " +
"\"http://www.w3.org/TR/xhtml11/DTD/xhtml11.dtd\">" +
"<html xmlns=\"http://www.w3.org/1999/xhtml\">" +
"<head>" +
"<title>" + escapeHtml(title) + "</title>" +
"<style type=\"text/css\">" +
"body { font-family: serif; margin: 1em; }" +
"h1 { text-align: center; }" +
"p { text-indent: 1em; margin: 0.5em 0; }" +
"</style>" +
"</head>" +
"<body>" +
"<h1>" + escapeHtml(title) + "</h1>" +
fixHtmlForXhtml(content) +
"</body>" +
"</html>";
}
private void addReadingPosition(Book book, Story story, EPUBExportRequest request) {
if (!request.getIncludeReadingPosition()) {
return;
}
Optional<ReadingPosition> positionOpt = readingPositionRepository.findByStoryId(story.getId());
if (positionOpt.isPresent()) {
ReadingPosition position = positionOpt.get();
Metadata metadata = book.getMetadata();
// Add reading position to description since addMeta doesn't exist
StringBuilder positionDesc = new StringBuilder();
if (position.getEpubCfi() != null) {
positionDesc.append("EPUB CFI: ").append(position.getEpubCfi()).append("\n");
}
if (position.getChapterIndex() != null && position.getWordPosition() != null) {
positionDesc.append("Reading Position: Chapter ")
.append(position.getChapterIndex())
.append(", Word ").append(position.getWordPosition()).append("\n");
}
if (position.getPercentageComplete() != null) {
positionDesc.append("Reading Progress: ")
.append(String.format("%.1f%%", position.getPercentageComplete())).append("\n");
}
positionDesc.append("Last Read: ")
.append(position.getUpdatedAt().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
String existingDesc = metadata.getDescriptions().isEmpty() ? "" : metadata.getDescriptions().get(0);
metadata.addDescription(existingDesc + "\n\n--- Reading Position ---\n" + positionDesc.toString());
}
}
private String fixHtmlForXhtml(String html) {
if (html == null) return "";
// Fix common XHTML validation issues
String fixed = html
// Fix self-closing tags to be XHTML compliant
.replaceAll("<br>", "<br />")
.replaceAll("<hr>", "<hr />")
.replaceAll("<img([^>]*)>", "<img$1 />")
.replaceAll("<input([^>]*)>", "<input$1 />")
.replaceAll("<area([^>]*)>", "<area$1 />")
.replaceAll("<base([^>]*)>", "<base$1 />")
.replaceAll("<col([^>]*)>", "<col$1 />")
.replaceAll("<embed([^>]*)>", "<embed$1 />")
.replaceAll("<link([^>]*)>", "<link$1 />")
.replaceAll("<meta([^>]*)>", "<meta$1 />")
.replaceAll("<param([^>]*)>", "<param$1 />")
.replaceAll("<source([^>]*)>", "<source$1 />")
.replaceAll("<track([^>]*)>", "<track$1 />")
.replaceAll("<wbr([^>]*)>", "<wbr$1 />");
return fixed;
}
private String escapeHtml(String text) {
if (text == null) return "";
return text.replace("&", "&amp;")
.replace("<", "&lt;")
.replace(">", "&gt;")
.replace("\"", "&quot;")
.replace("'", "&#39;");
}
public String getEPUBFilename(Story story) {
StringBuilder filename = new StringBuilder();
if (story.getAuthor() != null) {
filename.append(sanitizeFilename(story.getAuthor().getName()))
.append(" - ");
}
filename.append(sanitizeFilename(story.getTitle()));
if (story.getSeries() != null && story.getVolume() != null) {
filename.append(" (")
.append(sanitizeFilename(story.getSeries().getName()))
.append(" ")
.append(story.getVolume())
.append(")");
}
filename.append(".epub");
return filename.toString();
}
private String sanitizeFilename(String filename) {
if (filename == null) return "unknown";
return filename.replaceAll("[^a-zA-Z0-9._\\- ]", "")
.trim()
.replaceAll("\\s+", "_");
}
private void setupCollectionMetadata(Book book, Collection collection, List<Story> stories, EPUBExportRequest request) {
Metadata metadata = book.getMetadata();
String title = request.getCustomTitle() != null ?
request.getCustomTitle() : collection.getName();
metadata.addTitle(title);
// Use collection creator as author, or combine story authors
String authorName = "Collection";
if (stories.size() == 1) {
Story story = stories.get(0);
authorName = story.getAuthor() != null ? story.getAuthor().getName() : "Unknown Author";
} else {
// For multiple stories, use "Various Authors" or collection name
authorName = "Various Authors";
}
if (request.getCustomAuthor() != null) {
authorName = request.getCustomAuthor();
}
metadata.addAuthor(new Author(authorName));
metadata.setLanguage(request.getLanguage() != null ? request.getLanguage() : "en");
metadata.addIdentifier(new Identifier("storycove-collection", collection.getId().toString()));
// Create description from collection description and story list
StringBuilder description = new StringBuilder();
if (collection.getDescription() != null && !collection.getDescription().trim().isEmpty()) {
description.append(collection.getDescription()).append("\n\n");
}
description.append("This collection contains ").append(stories.size()).append(" stories:\n");
for (int i = 0; i < stories.size() && i < 10; i++) {
Story story = stories.get(i);
description.append((i + 1)).append(". ").append(story.getTitle());
if (story.getAuthor() != null) {
description.append(" by ").append(story.getAuthor().getName());
}
description.append("\n");
}
if (stories.size() > 10) {
description.append("... and ").append(stories.size() - 10).append(" more stories.");
}
metadata.addDescription(description.toString());
if (request.getIncludeMetadata()) {
metadata.addDate(new Date(java.util.Date.from(
collection.getCreatedAt().atZone(java.time.ZoneId.systemDefault()).toInstant()
), Date.Event.CREATION));
// Add collection statistics to description
int totalWordCount = stories.stream().mapToInt(s -> s.getWordCount() != null ? s.getWordCount() : 0).sum();
description.append("\n\nTotal Word Count: ").append(totalWordCount);
description.append("\nGenerated by StoryCove on ")
.append(LocalDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME));
metadata.addDescription(description.toString());
}
}
private void addCollectionCoverImage(Book book, Collection collection, EPUBExportRequest request) {
if (!request.getIncludeCoverImage()) {
return;
}
try {
// Try to use collection cover first
if (collection.getCoverImagePath() != null) {
Path coverPath = Paths.get(collection.getCoverImagePath());
if (Files.exists(coverPath)) {
byte[] coverImageData = Files.readAllBytes(coverPath);
String mimeType = Files.probeContentType(coverPath);
if (mimeType == null) {
mimeType = "image/jpeg";
}
nl.siegmann.epublib.domain.Resource coverResource =
new nl.siegmann.epublib.domain.Resource(coverImageData, "collection-cover.jpg");
book.setCoverImage(coverResource);
return;
}
}
// TODO: Could generate a composite cover from story covers
// For now, skip cover if collection doesn't have one
} catch (IOException e) {
// Skip cover image on error
}
}
private void addCollectionContent(Book book, List<Story> stories, EPUBExportRequest request) {
// Create table of contents chapter
StringBuilder tocContent = new StringBuilder();
tocContent.append("<h1>Table of Contents</h1>\n<ul>\n");
for (int i = 0; i < stories.size(); i++) {
Story story = stories.get(i);
tocContent.append("<li><a href=\"#story").append(i + 1).append("\">")
.append(escapeHtml(story.getTitle()));
if (story.getAuthor() != null) {
tocContent.append(" by ").append(escapeHtml(story.getAuthor().getName()));
}
tocContent.append("</a></li>\n");
}
tocContent.append("</ul>\n");
String tocHtml = createChapterHTML("Table of Contents", tocContent.toString());
nl.siegmann.epublib.domain.Resource tocResource =
new nl.siegmann.epublib.domain.Resource(tocHtml.getBytes(), "toc.html");
book.addSection("Table of Contents", tocResource);
// Add each story as a chapter
for (int i = 0; i < stories.size(); i++) {
Story story = stories.get(i);
String storyContent = story.getContentHtml();
if (storyContent == null) {
storyContent = story.getContentPlain() != null ?
"<p>" + story.getContentPlain().replace("\n", "</p><p>") + "</p>" :
"<p>No content available</p>";
}
// Add story metadata header
StringBuilder storyHtml = new StringBuilder();
storyHtml.append("<div id=\"story").append(i + 1).append("\">\n");
storyHtml.append("<h1>").append(escapeHtml(story.getTitle())).append("</h1>\n");
if (story.getAuthor() != null) {
storyHtml.append("<p><em>by ").append(escapeHtml(story.getAuthor().getName())).append("</em></p>\n");
}
if (story.getDescription() != null && !story.getDescription().trim().isEmpty()) {
storyHtml.append("<div class=\"summary\">\n")
.append("<p>").append(escapeHtml(story.getDescription())).append("</p>\n")
.append("</div>\n");
}
storyHtml.append("<hr />\n");
storyHtml.append(fixHtmlForXhtml(storyContent));
storyHtml.append("</div>\n");
String chapterTitle = story.getTitle();
if (story.getAuthor() != null) {
chapterTitle += " by " + story.getAuthor().getName();
}
String html = createChapterHTML(chapterTitle, storyHtml.toString());
nl.siegmann.epublib.domain.Resource storyResource =
new nl.siegmann.epublib.domain.Resource(html.getBytes(), "story" + (i + 1) + ".html");
book.addSection(chapterTitle, storyResource);
}
}
public boolean canExportStory(UUID storyId) {
try {
Story story = storyService.findById(storyId);
return story.getContentHtml() != null || story.getContentPlain() != null;
} catch (ResourceNotFoundException e) {
return false;
}
}
public String getCollectionEPUBFilename(Collection collection) {
StringBuilder filename = new StringBuilder();
filename.append(sanitizeFilename(collection.getName()));
filename.append("_collection.epub");
return filename.toString();
}
}

View File

@@ -0,0 +1,551 @@
package com.storycove.service;
import com.storycove.dto.EPUBImportRequest;
import com.storycove.dto.EPUBImportResponse;
import com.storycove.dto.ReadingPositionDto;
import com.storycove.entity.*;
import com.storycove.repository.ReadingPositionRepository;
import com.storycove.service.exception.InvalidFileException;
import com.storycove.service.exception.ResourceNotFoundException;
import nl.siegmann.epublib.domain.Book;
import nl.siegmann.epublib.domain.Metadata;
import nl.siegmann.epublib.domain.Resource;
import nl.siegmann.epublib.domain.SpineReference;
import nl.siegmann.epublib.epub.EpubReader;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.io.InputStream;
import java.util.ArrayList;
import java.util.List;
import java.util.Optional;
@Service
@Transactional
public class EPUBImportService {
private static final Logger log = LoggerFactory.getLogger(EPUBImportService.class);
private final StoryService storyService;
private final AuthorService authorService;
private final SeriesService seriesService;
private final TagService tagService;
private final ReadingPositionRepository readingPositionRepository;
private final HtmlSanitizationService sanitizationService;
private final ImageService imageService;
@Autowired
public EPUBImportService(StoryService storyService,
AuthorService authorService,
SeriesService seriesService,
TagService tagService,
ReadingPositionRepository readingPositionRepository,
HtmlSanitizationService sanitizationService,
ImageService imageService) {
this.storyService = storyService;
this.authorService = authorService;
this.seriesService = seriesService;
this.tagService = tagService;
this.readingPositionRepository = readingPositionRepository;
this.sanitizationService = sanitizationService;
this.imageService = imageService;
}
public EPUBImportResponse importEPUB(EPUBImportRequest request) {
try {
MultipartFile epubFile = request.getEpubFile();
if (epubFile == null || epubFile.isEmpty()) {
return EPUBImportResponse.error("EPUB file is required");
}
if (!isValidEPUBFile(epubFile)) {
return EPUBImportResponse.error("Invalid EPUB file format");
}
Book book = parseEPUBFile(epubFile);
Story story = createStoryFromEPUB(book, request);
Story savedStory = storyService.create(story);
// Process embedded images if content contains any
String originalContent = story.getContentHtml();
if (originalContent != null && originalContent.contains("<img")) {
try {
ImageService.ContentImageProcessingResult imageResult =
imageService.processContentImages(originalContent, savedStory.getId());
// Update story content with processed images if changed
if (!imageResult.getProcessedContent().equals(originalContent)) {
savedStory.setContentHtml(imageResult.getProcessedContent());
savedStory = storyService.update(savedStory.getId(), savedStory);
// Log the image processing results
log.debug("EPUB Import - Image processing completed for story {}. Downloaded {} images.",
savedStory.getId(), imageResult.getDownloadedImages().size());
if (imageResult.hasWarnings()) {
log.debug("EPUB Import - Image processing warnings: {}",
String.join(", ", imageResult.getWarnings()));
}
}
} catch (Exception e) {
// Log error but don't fail the import
System.err.println("EPUB Import - Failed to process embedded images for story " +
savedStory.getId() + ": " + e.getMessage());
}
}
EPUBImportResponse response = EPUBImportResponse.success(savedStory.getId(), savedStory.getTitle());
response.setWordCount(savedStory.getWordCount());
response.setTotalChapters(book.getSpine().size());
if (request.getPreserveReadingPosition() != null && request.getPreserveReadingPosition()) {
ReadingPosition readingPosition = extractReadingPosition(book, savedStory);
if (readingPosition != null) {
ReadingPosition savedPosition = readingPositionRepository.save(readingPosition);
response.setReadingPosition(convertToDto(savedPosition));
}
}
return response;
} catch (Exception e) {
return EPUBImportResponse.error("Failed to import EPUB: " + e.getMessage());
}
}
private boolean isValidEPUBFile(MultipartFile file) {
String filename = file.getOriginalFilename();
if (filename == null || !filename.toLowerCase().endsWith(".epub")) {
return false;
}
String contentType = file.getContentType();
return "application/epub+zip".equals(contentType) ||
"application/zip".equals(contentType) ||
contentType == null;
}
private Book parseEPUBFile(MultipartFile epubFile) throws IOException {
try (InputStream inputStream = epubFile.getInputStream()) {
EpubReader epubReader = new EpubReader();
return epubReader.readEpub(inputStream);
} catch (Exception e) {
throw new InvalidFileException("Failed to parse EPUB file: " + e.getMessage());
}
}
private Story createStoryFromEPUB(Book book, EPUBImportRequest request) {
Metadata metadata = book.getMetadata();
String title = extractTitle(metadata);
String authorName = extractAuthorName(metadata, request);
String description = extractDescription(metadata);
String content = extractContent(book);
Story story = new Story();
story.setTitle(title);
story.setDescription(description);
story.setContentHtml(sanitizationService.sanitize(content));
// Extract and process cover image
if (request.getExtractCover() == null || request.getExtractCover()) {
String coverPath = extractAndSaveCoverImage(book);
if (coverPath != null) {
story.setCoverPath(coverPath);
}
}
if (request.getAuthorId() != null) {
try {
Author author = authorService.findById(request.getAuthorId());
story.setAuthor(author);
} catch (ResourceNotFoundException e) {
if (request.getCreateMissingAuthor()) {
Author newAuthor = createAuthor(authorName);
story.setAuthor(newAuthor);
}
}
} else if (authorName != null && request.getCreateMissingAuthor()) {
Author author = findOrCreateAuthor(authorName);
story.setAuthor(author);
}
if (request.getSeriesId() != null && request.getSeriesVolume() != null) {
try {
Series series = seriesService.findById(request.getSeriesId());
story.setSeries(series);
story.setVolume(request.getSeriesVolume());
} catch (ResourceNotFoundException e) {
if (request.getCreateMissingSeries() && request.getSeriesName() != null) {
Series newSeries = createSeries(request.getSeriesName());
story.setSeries(newSeries);
story.setVolume(request.getSeriesVolume());
}
}
}
// Handle tags from request or extract from EPUB metadata
List<String> allTags = new ArrayList<>();
if (request.getTags() != null && !request.getTags().isEmpty()) {
allTags.addAll(request.getTags());
}
// Extract subjects/keywords from EPUB metadata
List<String> epubTags = extractTags(metadata);
if (epubTags != null && !epubTags.isEmpty()) {
allTags.addAll(epubTags);
}
// Remove duplicates and create tags
allTags.stream()
.distinct()
.forEach(tagName -> {
Tag tag = tagService.findOrCreate(tagName.trim());
story.addTag(tag);
});
// Extract additional metadata for potential future use
extractAdditionalMetadata(metadata, story);
return story;
}
private String extractTitle(Metadata metadata) {
List<String> titles = metadata.getTitles();
if (titles != null && !titles.isEmpty()) {
return titles.get(0);
}
return "Untitled EPUB";
}
private String extractAuthorName(Metadata metadata, EPUBImportRequest request) {
if (request.getAuthorName() != null && !request.getAuthorName().trim().isEmpty()) {
return request.getAuthorName().trim();
}
if (metadata.getAuthors() != null && !metadata.getAuthors().isEmpty()) {
return metadata.getAuthors().get(0).getFirstname() + " " + metadata.getAuthors().get(0).getLastname();
}
return "Unknown Author";
}
private String extractDescription(Metadata metadata) {
List<String> descriptions = metadata.getDescriptions();
if (descriptions != null && !descriptions.isEmpty()) {
return descriptions.get(0);
}
return null;
}
private List<String> extractTags(Metadata metadata) {
List<String> tags = new ArrayList<>();
// Extract subjects (main source of tags in EPUB)
List<String> subjects = metadata.getSubjects();
if (subjects != null && !subjects.isEmpty()) {
tags.addAll(subjects);
}
// Extract keywords from meta tags
String keywords = metadata.getMetaAttribute("keywords");
if (keywords != null && !keywords.trim().isEmpty()) {
String[] keywordArray = keywords.split("[,;]");
for (String keyword : keywordArray) {
String trimmed = keyword.trim();
if (!trimmed.isEmpty()) {
tags.add(trimmed);
}
}
}
// Extract genre information
String genre = metadata.getMetaAttribute("genre");
if (genre != null && !genre.trim().isEmpty()) {
tags.add(genre.trim());
}
return tags;
}
private void extractAdditionalMetadata(Metadata metadata, Story story) {
// Extract language (could be useful for future i18n)
String language = metadata.getLanguage();
if (language != null && !language.trim().isEmpty()) {
// Store as metadata in story description if needed
// For now, we'll just log it for potential future use
log.debug("EPUB Language: {}", language);
}
// Extract publisher information
List<String> publishers = metadata.getPublishers();
if (publishers != null && !publishers.isEmpty()) {
String publisher = publishers.get(0);
// Could append to description or store separately in future
log.debug("EPUB Publisher: {}", publisher);
}
// Extract publication date
List<nl.siegmann.epublib.domain.Date> dates = metadata.getDates();
if (dates != null && !dates.isEmpty()) {
for (nl.siegmann.epublib.domain.Date date : dates) {
log.debug("EPUB Date ({}): {}", date.getEvent(), date.getValue());
}
}
// Extract ISBN or other identifiers
List<nl.siegmann.epublib.domain.Identifier> identifiers = metadata.getIdentifiers();
if (identifiers != null && !identifiers.isEmpty()) {
for (nl.siegmann.epublib.domain.Identifier identifier : identifiers) {
log.debug("EPUB Identifier ({}): {}", identifier.getScheme(), identifier.getValue());
}
}
}
private String extractContent(Book book) {
StringBuilder contentBuilder = new StringBuilder();
List<SpineReference> spine = book.getSpine().getSpineReferences();
for (SpineReference spineRef : spine) {
try {
Resource resource = spineRef.getResource();
if (resource != null && resource.getData() != null) {
String html = new String(resource.getData(), "UTF-8");
Document doc = Jsoup.parse(html);
doc.select("script, style").remove();
String chapterContent = doc.body() != null ? doc.body().html() : doc.html();
contentBuilder.append("<div class=\"chapter\">")
.append(chapterContent)
.append("</div>");
}
} catch (Exception e) {
// Skip this chapter on error
continue;
}
}
return contentBuilder.toString();
}
private Author findOrCreateAuthor(String authorName) {
Optional<Author> existingAuthor = authorService.findByNameOptional(authorName);
if (existingAuthor.isPresent()) {
return existingAuthor.get();
}
return createAuthor(authorName);
}
private Author createAuthor(String authorName) {
Author author = new Author();
author.setName(authorName);
return authorService.create(author);
}
private Series createSeries(String seriesName) {
Series series = new Series();
series.setName(seriesName);
return seriesService.create(series);
}
private ReadingPosition extractReadingPosition(Book book, Story story) {
try {
Metadata metadata = book.getMetadata();
String positionMeta = metadata.getMetaAttribute("reading-position");
String cfiMeta = metadata.getMetaAttribute("epub-cfi");
ReadingPosition position = new ReadingPosition(story);
if (cfiMeta != null) {
position.setEpubCfi(cfiMeta);
}
if (positionMeta != null) {
try {
String[] parts = positionMeta.split(":");
if (parts.length >= 2) {
position.setChapterIndex(Integer.parseInt(parts[0]));
position.setWordPosition(Integer.parseInt(parts[1]));
}
} catch (NumberFormatException e) {
// Ignore invalid position format
}
}
return position;
} catch (Exception e) {
// Return null if no reading position found
return null;
}
}
private String extractAndSaveCoverImage(Book book) {
try {
Resource coverResource = book.getCoverImage();
if (coverResource != null && coverResource.getData() != null) {
// Create a temporary MultipartFile from the EPUB cover data
byte[] imageData = coverResource.getData();
String mediaType = coverResource.getMediaType() != null ?
coverResource.getMediaType().toString() : "image/jpeg";
// Determine file extension from media type
String extension = getExtensionFromMediaType(mediaType);
String filename = "epub_cover_" + System.currentTimeMillis() + "." + extension;
// Create a custom MultipartFile implementation for the cover image
MultipartFile coverFile = new EPUBCoverMultipartFile(imageData, filename, mediaType);
// Use ImageService to process and save the cover
return imageService.uploadImage(coverFile, ImageService.ImageType.COVER);
}
} catch (Exception e) {
// Log error but don't fail the import
System.err.println("Failed to extract cover image: " + e.getMessage());
}
return null;
}
private String getExtensionFromMediaType(String mediaType) {
switch (mediaType.toLowerCase()) {
case "image/jpeg":
case "image/jpg":
return "jpg";
case "image/png":
return "png";
case "image/gif":
return "gif";
case "image/webp":
return "webp";
default:
return "jpg"; // Default fallback
}
}
private ReadingPositionDto convertToDto(ReadingPosition position) {
if (position == null) return null;
ReadingPositionDto dto = new ReadingPositionDto();
dto.setId(position.getId());
dto.setStoryId(position.getStory().getId());
dto.setChapterIndex(position.getChapterIndex());
dto.setChapterTitle(position.getChapterTitle());
dto.setWordPosition(position.getWordPosition());
dto.setCharacterPosition(position.getCharacterPosition());
dto.setPercentageComplete(position.getPercentageComplete());
dto.setEpubCfi(position.getEpubCfi());
dto.setContextBefore(position.getContextBefore());
dto.setContextAfter(position.getContextAfter());
dto.setCreatedAt(position.getCreatedAt());
dto.setUpdatedAt(position.getUpdatedAt());
return dto;
}
public List<String> validateEPUBFile(MultipartFile file) {
List<String> errors = new ArrayList<>();
if (file == null || file.isEmpty()) {
errors.add("EPUB file is required");
return errors;
}
if (!isValidEPUBFile(file)) {
errors.add("Invalid EPUB file format. Only .epub files are supported");
}
if (file.getSize() > 100 * 1024 * 1024) { // 100MB limit
errors.add("EPUB file size exceeds 100MB limit");
}
try {
Book book = parseEPUBFile(file);
if (book.getMetadata() == null) {
errors.add("EPUB file contains no metadata");
}
if (book.getSpine() == null || book.getSpine().isEmpty()) {
errors.add("EPUB file contains no readable content");
}
} catch (Exception e) {
errors.add("Failed to parse EPUB file: " + e.getMessage());
}
return errors;
}
/**
* Custom MultipartFile implementation for EPUB cover images
*/
private static class EPUBCoverMultipartFile implements MultipartFile {
private final byte[] data;
private final String filename;
private final String contentType;
public EPUBCoverMultipartFile(byte[] data, String filename, String contentType) {
this.data = data;
this.filename = filename;
this.contentType = contentType;
}
@Override
public String getName() {
return "coverImage";
}
@Override
public String getOriginalFilename() {
return filename;
}
@Override
public String getContentType() {
return contentType;
}
@Override
public boolean isEmpty() {
return data == null || data.length == 0;
}
@Override
public long getSize() {
return data != null ? data.length : 0;
}
@Override
public byte[] getBytes() {
return data;
}
@Override
public InputStream getInputStream() {
return new java.io.ByteArrayInputStream(data);
}
@Override
public void transferTo(java.io.File dest) throws IOException {
try (java.io.FileOutputStream fos = new java.io.FileOutputStream(dest)) {
fos.write(data);
}
}
@Override
public void transferTo(java.nio.file.Path dest) throws IOException {
java.nio.file.Files.write(dest, data);
}
}
}

View File

@@ -54,7 +54,7 @@ public class HtmlSanitizationService {
"p", "br", "div", "span", "h1", "h2", "h3", "h4", "h5", "h6", "p", "br", "div", "span", "h1", "h2", "h3", "h4", "h5", "h6",
"b", "strong", "i", "em", "u", "s", "strike", "del", "ins", "b", "strong", "i", "em", "u", "s", "strike", "del", "ins",
"sup", "sub", "small", "big", "mark", "pre", "code", "sup", "sub", "small", "big", "mark", "pre", "code",
"ul", "ol", "li", "dl", "dt", "dd", "a", "ul", "ol", "li", "dl", "dt", "dd", "a", "img",
"table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption", "table", "thead", "tbody", "tfoot", "tr", "th", "td", "caption",
"blockquote", "cite", "q", "hr" "blockquote", "cite", "q", "hr"
)); ));
@@ -65,7 +65,7 @@ public class HtmlSanitizationService {
} }
private void createSafelist() { private void createSafelist() {
this.allowlist = new Safelist(); this.allowlist = Safelist.relaxed();
// Add allowed tags // Add allowed tags
if (config.getAllowedTags() != null) { if (config.getAllowedTags() != null) {
@@ -83,7 +83,34 @@ public class HtmlSanitizationService {
} }
} }
// Remove specific attributes (like href from links for security) // Special handling for img tags - allow all src attributes and validate later
allowlist.removeProtocols("img", "src", "http", "https");
// This is the key: preserve relative URLs by not restricting them
allowlist.preserveRelativeLinks(true);
// Configure allowed protocols for other attributes
if (config.getAllowedProtocols() != null) {
for (Map.Entry<String, Map<String, List<String>>> tagEntry : config.getAllowedProtocols().entrySet()) {
String tag = tagEntry.getKey();
Map<String, List<String>> attributeProtocols = tagEntry.getValue();
if (attributeProtocols != null) {
for (Map.Entry<String, List<String>> attrEntry : attributeProtocols.entrySet()) {
String attribute = attrEntry.getKey();
List<String> protocols = attrEntry.getValue();
if (protocols != null && !("img".equals(tag) && "src".equals(attribute))) {
// Skip img src since we handled it above
allowlist.addProtocols(tag, attribute, protocols.toArray(new String[0]));
}
}
}
}
}
logger.info("Configured Jsoup Safelist with preserveRelativeLinks=true for local image URLs");
// Remove specific attributes if needed (deprecated in favor of protocol control)
if (config.getRemovedAttributes() != null) { if (config.getRemovedAttributes() != null) {
for (Map.Entry<String, List<String>> entry : config.getRemovedAttributes().entrySet()) { for (Map.Entry<String, List<String>> entry : config.getRemovedAttributes().entrySet()) {
String tag = entry.getKey(); String tag = entry.getKey();
@@ -110,12 +137,65 @@ public class HtmlSanitizationService {
return config; return config;
} }
/**
* Preprocess HTML to extract images from figure tags before sanitization
*/
private String preprocessFigureTags(String html) {
if (html == null || html.trim().isEmpty()) {
return html;
}
try {
org.jsoup.nodes.Document doc = Jsoup.parse(html);
org.jsoup.select.Elements figures = doc.select("figure");
for (org.jsoup.nodes.Element figure : figures) {
// Find img tags within the figure
org.jsoup.select.Elements images = figure.select("img");
if (!images.isEmpty()) {
// Extract the first image and replace the figure with it
org.jsoup.nodes.Element img = images.first();
// Check if there's a figcaption to preserve as alt text
org.jsoup.select.Elements figcaptions = figure.select("figcaption");
if (!figcaptions.isEmpty() && !img.hasAttr("alt")) {
String captionText = figcaptions.first().text();
if (captionText != null && !captionText.trim().isEmpty()) {
img.attr("alt", captionText);
}
}
// Replace the figure element with just the img
figure.replaceWith(img.clone());
logger.debug("Extracted image from figure tag: {}", img.attr("src"));
} else {
// No images in figure, remove it entirely
figure.remove();
logger.debug("Removed figure tag without images");
}
}
return doc.body().html();
} catch (Exception e) {
logger.warn("Failed to preprocess figure tags, returning original HTML: {}", e.getMessage());
return html;
}
}
public String sanitize(String html) { public String sanitize(String html) {
if (html == null || html.trim().isEmpty()) { if (html == null || html.trim().isEmpty()) {
return ""; return "";
} }
return Jsoup.clean(html, allowlist); logger.info("Content before sanitization: "+html);
// Preprocess to extract images from figure tags
String preprocessed = preprocessFigureTags(html);
String saniztedHtml = Jsoup.clean(preprocessed, allowlist.preserveRelativeLinks(true));
logger.info("Content after sanitization: "+saniztedHtml);
return saniztedHtml;
} }
public String extractPlainText(String html) { public String extractPlainText(String html) {

View File

@@ -0,0 +1,108 @@
package com.storycove.service;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.stereotype.Service;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
@Service
public class ImageProcessingProgressService {
private static final Logger logger = LoggerFactory.getLogger(ImageProcessingProgressService.class);
private final Map<UUID, ImageProcessingProgress> progressMap = new ConcurrentHashMap<>();
public static class ImageProcessingProgress {
private final UUID storyId;
private final int totalImages;
private volatile int processedImages;
private volatile String currentImageUrl;
private volatile String status;
private volatile boolean completed;
private volatile String errorMessage;
public ImageProcessingProgress(UUID storyId, int totalImages) {
this.storyId = storyId;
this.totalImages = totalImages;
this.processedImages = 0;
this.status = "Starting";
this.completed = false;
}
// Getters
public UUID getStoryId() { return storyId; }
public int getTotalImages() { return totalImages; }
public int getProcessedImages() { return processedImages; }
public String getCurrentImageUrl() { return currentImageUrl; }
public String getStatus() { return status; }
public boolean isCompleted() { return completed; }
public String getErrorMessage() { return errorMessage; }
public double getProgressPercentage() {
return totalImages > 0 ? (double) processedImages / totalImages * 100 : 100;
}
// Setters
public void setProcessedImages(int processedImages) { this.processedImages = processedImages; }
public void setCurrentImageUrl(String currentImageUrl) { this.currentImageUrl = currentImageUrl; }
public void setStatus(String status) { this.status = status; }
public void setCompleted(boolean completed) { this.completed = completed; }
public void setErrorMessage(String errorMessage) { this.errorMessage = errorMessage; }
public void incrementProcessed() {
this.processedImages++;
}
}
public ImageProcessingProgress startProgress(UUID storyId, int totalImages) {
ImageProcessingProgress progress = new ImageProcessingProgress(storyId, totalImages);
progressMap.put(storyId, progress);
logger.info("Started image processing progress tracking for story {} with {} images", storyId, totalImages);
return progress;
}
public ImageProcessingProgress getProgress(UUID storyId) {
return progressMap.get(storyId);
}
public void updateProgress(UUID storyId, int processedImages, String currentImageUrl, String status) {
ImageProcessingProgress progress = progressMap.get(storyId);
if (progress != null) {
progress.setProcessedImages(processedImages);
progress.setCurrentImageUrl(currentImageUrl);
progress.setStatus(status);
logger.debug("Updated progress for story {}: {}/{} - {}", storyId, processedImages, progress.getTotalImages(), status);
}
}
public void completeProgress(UUID storyId, String finalStatus) {
ImageProcessingProgress progress = progressMap.get(storyId);
if (progress != null) {
progress.setCompleted(true);
progress.setStatus(finalStatus);
logger.info("Completed image processing for story {}: {}", storyId, finalStatus);
}
}
public void setError(UUID storyId, String errorMessage) {
ImageProcessingProgress progress = progressMap.get(storyId);
if (progress != null) {
progress.setErrorMessage(errorMessage);
progress.setStatus("Error: " + errorMessage);
progress.setCompleted(true);
logger.error("Image processing error for story {}: {}", storyId, errorMessage);
}
}
public void removeProgress(UUID storyId) {
progressMap.remove(storyId);
logger.debug("Removed progress tracking for story {}", storyId);
}
public boolean isProcessing(UUID storyId) {
ImageProcessingProgress progress = progressMap.get(storyId);
return progress != null && !progress.isCompleted();
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,73 @@
package com.storycove.service;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.SQLException;
/**
* Base service class that provides library-aware database access.
*
* This approach is safer than routing at the datasource level because:
* 1. It doesn't interfere with Spring's initialization process
* 2. It allows fine-grained control over which operations are library-aware
* 3. It provides clear separation between authentication (uses default DB) and library operations
*/
@Component
public class LibraryAwareService {
@Autowired
private LibraryService libraryService;
@Autowired
@Qualifier("dataSource")
private DataSource defaultDataSource;
/**
* Get a database connection for the current active library.
* Falls back to default datasource if no library is active.
*/
public Connection getCurrentLibraryConnection() throws SQLException {
try {
// Try to get library-specific connection
DataSource libraryDataSource = libraryService.getCurrentDataSource();
return libraryDataSource.getConnection();
} catch (IllegalStateException e) {
// No active library - use default datasource
return defaultDataSource.getConnection();
}
}
/**
* Get a database connection for the default/fallback database.
* Use this for authentication and system-level operations.
*/
public Connection getDefaultConnection() throws SQLException {
return defaultDataSource.getConnection();
}
/**
* Check if a library is currently active
*/
public boolean hasActiveLibrary() {
try {
return libraryService.getCurrentLibraryId() != null;
} catch (Exception e) {
return false;
}
}
/**
* Get the current active library ID, or null if none
*/
public String getCurrentLibraryId() {
try {
return libraryService.getCurrentLibraryId();
} catch (Exception e) {
return null;
}
}
}

View File

@@ -0,0 +1,830 @@
package com.storycove.service;
import com.storycove.entity.Library;
import com.storycove.dto.LibraryDto;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.zaxxer.hikari.HikariConfig;
import com.zaxxer.hikari.HikariDataSource;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.security.crypto.bcrypt.BCryptPasswordEncoder;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct;
import jakarta.annotation.PreDestroy;
import javax.sql.DataSource;
import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.sql.SQLException;
import java.util.*;
import java.util.concurrent.ConcurrentHashMap;
@Service
public class LibraryService implements ApplicationContextAware {
private static final Logger logger = LoggerFactory.getLogger(LibraryService.class);
@Value("${spring.datasource.url}")
private String baseDbUrl;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
private final ObjectMapper objectMapper = new ObjectMapper();
private final BCryptPasswordEncoder passwordEncoder = new BCryptPasswordEncoder();
private final Map<String, Library> libraries = new ConcurrentHashMap<>();
// Spring ApplicationContext for accessing other services without circular dependencies
private ApplicationContext applicationContext;
// Current active resources
private volatile String currentLibraryId;
// Security: Track if user has explicitly authenticated in this session
private volatile boolean explicitlyAuthenticated = false;
private static final String LIBRARIES_CONFIG_PATH = "/app/config/libraries.json";
private static final Path libraryConfigDir = Paths.get("/app/config");
@Override
public void setApplicationContext(ApplicationContext applicationContext) {
this.applicationContext = applicationContext;
}
@PostConstruct
public void initialize() {
loadLibrariesFromFile();
// If no libraries exist, create a default one
if (libraries.isEmpty()) {
createDefaultLibrary();
}
// Security: Do NOT automatically switch to any library on startup
// Users must authenticate before accessing any library
explicitlyAuthenticated = false;
currentLibraryId = null;
if (!libraries.isEmpty()) {
logger.info("Loaded {} libraries. Authentication required to access any library.", libraries.size());
} else {
logger.info("No libraries found. A default library will be created on first authentication.");
}
logger.info("Security: Application startup completed. All users must re-authenticate.");
}
@PreDestroy
public void cleanup() {
currentLibraryId = null;
explicitlyAuthenticated = false;
}
/**
* Clear authentication state (for logout)
*/
public void clearAuthentication() {
explicitlyAuthenticated = false;
currentLibraryId = null;
logger.info("Authentication cleared - user must re-authenticate to access libraries");
}
public String authenticateAndGetLibrary(String password) {
for (Library library : libraries.values()) {
if (passwordEncoder.matches(password, library.getPasswordHash())) {
// Mark as explicitly authenticated for this session
explicitlyAuthenticated = true;
logger.info("User explicitly authenticated for library: {}", library.getId());
return library.getId();
}
}
return null; // Authentication failed
}
/**
* Switch to library after authentication with forced reindexing
* This ensures Solr is always up-to-date after login
*/
public synchronized void switchToLibraryAfterAuthentication(String libraryId) throws Exception {
logger.info("Switching to library after authentication: {} (forcing reindex)", libraryId);
switchToLibrary(libraryId, true);
}
public synchronized void switchToLibrary(String libraryId) throws Exception {
switchToLibrary(libraryId, false);
}
public synchronized void switchToLibrary(String libraryId, boolean forceReindex) throws Exception {
// Security: Only allow library switching after explicit authentication
if (!explicitlyAuthenticated) {
throw new IllegalStateException("Library switching requires explicit authentication. Please log in first.");
}
if (libraryId.equals(currentLibraryId) && !forceReindex) {
return; // Already active and no forced reindex requested
}
Library library = libraries.get(libraryId);
if (library == null) {
throw new IllegalArgumentException("Library not found: " + libraryId);
}
String previousLibraryId = currentLibraryId;
if (libraryId.equals(currentLibraryId) && forceReindex) {
logger.debug("Forcing reindex for current library: {} ({})", library.getName(), libraryId);
} else {
logger.debug("Switching to library: {} ({})", library.getName(), libraryId);
}
// Close current resources
closeCurrentResources();
// Set new active library (datasource routing handled by SmartRoutingDataSource)
currentLibraryId = libraryId;
// Solr indexes are global - no per-library initialization needed
logger.debug("Library switched to Solr mode for library: {}", libraryId);
logger.info("Successfully switched to library: {}", library.getName());
// Perform complete reindex AFTER library switch is fully complete
// This ensures database routing is properly established
if (forceReindex || !libraryId.equals(previousLibraryId)) {
logger.debug("Starting post-switch Solr reindex for library: {}", libraryId);
// Run reindex asynchronously to avoid blocking authentication response
// and allow time for database routing to fully stabilize
String finalLibraryId = libraryId;
new Thread(() -> {
try {
// Give routing time to stabilize
Thread.sleep(500);
logger.debug("Starting async Solr reindex for library: {}", finalLibraryId);
SearchServiceAdapter searchService = applicationContext.getBean(SearchServiceAdapter.class);
// Get all stories and authors for reindexing
StoryService storyService = applicationContext.getBean(StoryService.class);
AuthorService authorService = applicationContext.getBean(AuthorService.class);
var allStories = storyService.findAllWithAssociations();
var allAuthors = authorService.findAllWithStories();
searchService.bulkIndexStories(allStories);
searchService.bulkIndexAuthors(allAuthors);
logger.info("Completed async Solr reindexing for library: {} ({} stories, {} authors)",
finalLibraryId, allStories.size(), allAuthors.size());
} catch (Exception e) {
logger.warn("Failed to async reindex Solr for library {}: {}", finalLibraryId, e.getMessage());
}
}, "SolrReindex-" + libraryId).start();
}
}
public DataSource getCurrentDataSource() {
if (currentLibraryId == null) {
throw new IllegalStateException("No active library - please authenticate first");
}
// Return the Spring-managed primary datasource which handles routing automatically
try {
return applicationContext.getBean("dataSource", DataSource.class);
} catch (Exception e) {
throw new IllegalStateException("Failed to get routing datasource", e);
}
}
public String getCurrentLibraryId() {
return currentLibraryId;
}
public Library getCurrentLibrary() {
if (currentLibraryId == null) {
return null;
}
return libraries.get(currentLibraryId);
}
public List<LibraryDto> getAllLibraries() {
List<LibraryDto> result = new ArrayList<>();
for (Library library : libraries.values()) {
boolean isActive = library.getId().equals(currentLibraryId);
result.add(new LibraryDto(
library.getId(),
library.getName(),
library.getDescription(),
isActive,
library.isInitialized()
));
}
return result;
}
public LibraryDto getLibraryById(String libraryId) {
Library library = libraries.get(libraryId);
if (library != null) {
boolean isActive = library.getId().equals(currentLibraryId);
return new LibraryDto(
library.getId(),
library.getName(),
library.getDescription(),
isActive,
library.isInitialized()
);
}
return null;
}
public String getCurrentImagePath() {
Library current = getCurrentLibrary();
return current != null ? current.getImagePath() : "/images/default";
}
public String getImagePathForLibrary(String libraryId) {
if (libraryId == null) {
return "/images/default";
}
Library library = libraries.get(libraryId);
return library != null ? library.getImagePath() : "/images/default";
}
public boolean changeLibraryPassword(String libraryId, String currentPassword, String newPassword) {
Library library = libraries.get(libraryId);
if (library == null) {
return false;
}
// Verify current password
if (!passwordEncoder.matches(currentPassword, library.getPasswordHash())) {
return false;
}
// Update password
library.setPasswordHash(passwordEncoder.encode(newPassword));
saveLibrariesToFile();
logger.info("Password changed for library: {}", library.getName());
return true;
}
public Library createNewLibrary(String name, String description, String password) {
// Generate unique ID
String id = name.toLowerCase().replaceAll("[^a-z0-9]", "");
int counter = 1;
String originalId = id;
while (libraries.containsKey(id)) {
id = originalId + counter++;
}
Library newLibrary = new Library(
id,
name,
description,
passwordEncoder.encode(password),
"storycove_" + id
);
try {
// Test database creation by creating a connection
DataSource testDs = createDataSource(newLibrary.getDbName());
testDs.getConnection().close(); // This will create the database and schema if it doesn't exist
// Initialize library resources (image directories)
initializeNewLibraryResources(id);
newLibrary.setInitialized(true);
logger.info("Database and resources created for library: {}", newLibrary.getDbName());
} catch (Exception e) {
logger.warn("Database/resource creation failed for library {}: {}", id, e.getMessage());
// Continue anyway - resources will be created when needed
}
libraries.put(id, newLibrary);
saveLibrariesToFile();
logger.info("Created new library: {} ({})", name, id);
return newLibrary;
}
private void loadLibrariesFromFile() {
try {
File configFile = new File(LIBRARIES_CONFIG_PATH);
if (configFile.exists()) {
String content = Files.readString(Paths.get(LIBRARIES_CONFIG_PATH));
Map<String, Object> config = objectMapper.readValue(content, new TypeReference<Map<String, Object>>() {});
@SuppressWarnings("unchecked")
Map<String, Map<String, Object>> librariesData = (Map<String, Map<String, Object>>) config.get("libraries");
for (Map.Entry<String, Map<String, Object>> entry : librariesData.entrySet()) {
String id = entry.getKey();
Map<String, Object> data = entry.getValue();
Library library = new Library();
library.setId(id);
library.setName((String) data.get("name"));
library.setDescription((String) data.get("description"));
library.setPasswordHash((String) data.get("passwordHash"));
library.setDbName((String) data.get("dbName"));
library.setInitialized((Boolean) data.getOrDefault("initialized", false));
libraries.put(id, library);
logger.debug("Loaded library: {} ({})", library.getName(), id);
}
} else {
logger.debug("No libraries configuration file found, will create default");
}
} catch (IOException e) {
logger.error("Failed to load libraries configuration", e);
}
}
private void createDefaultLibrary() {
// Check if we're migrating from the old single-library system
String existingDbName = extractDatabaseName(baseDbUrl);
Library defaultLibrary = new Library(
"main",
"Main Library",
"Your existing story collection (migrated)",
passwordEncoder.encode("temp-password-change-me"), // Temporary password
existingDbName // Use existing database name
);
defaultLibrary.setInitialized(true); // Mark as initialized since it has existing data
libraries.put("main", defaultLibrary);
saveLibrariesToFile();
logger.warn("=".repeat(80));
logger.warn("MIGRATION: Created 'Main Library' for your existing data");
logger.warn("Temporary password: 'temp-password-change-me'");
logger.warn("IMPORTANT: Please set a proper password in Settings > Library Settings");
logger.warn("=".repeat(80));
}
private String extractDatabaseName(String jdbcUrl) {
// Extract database name from JDBC URL like "jdbc:postgresql://db:5432/storycove"
int lastSlash = jdbcUrl.lastIndexOf('/');
if (lastSlash != -1 && lastSlash < jdbcUrl.length() - 1) {
String dbPart = jdbcUrl.substring(lastSlash + 1);
// Remove any query parameters
int queryStart = dbPart.indexOf('?');
return queryStart != -1 ? dbPart.substring(0, queryStart) : dbPart;
}
return "storycove"; // fallback
}
private void saveLibrariesToFile() {
try {
Map<String, Object> config = new HashMap<>();
Map<String, Map<String, Object>> librariesData = new HashMap<>();
for (Library library : libraries.values()) {
Map<String, Object> data = new HashMap<>();
data.put("name", library.getName());
data.put("description", library.getDescription());
data.put("passwordHash", library.getPasswordHash());
data.put("dbName", library.getDbName());
data.put("initialized", library.isInitialized());
librariesData.put(library.getId(), data);
}
config.put("libraries", librariesData);
// Ensure config directory exists
new File("/app/config").mkdirs();
String json = objectMapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
Files.writeString(Paths.get(LIBRARIES_CONFIG_PATH), json);
logger.debug("Saved libraries configuration");
} catch (IOException e) {
logger.error("Failed to save libraries configuration", e);
}
}
private DataSource createDataSource(String dbName) {
String url = baseDbUrl.replaceAll("/[^/]*$", "/" + dbName);
logger.debug("Creating DataSource for: {}", url);
// First, ensure the database exists
ensureDatabaseExists(dbName);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(url);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(10);
config.setConnectionTimeout(30000);
return new HikariDataSource(config);
}
private void ensureDatabaseExists(String dbName) {
// Connect to the 'postgres' database to create the new database
String adminUrl = baseDbUrl.replaceAll("/[^/]*$", "/postgres");
HikariConfig adminConfig = new HikariConfig();
adminConfig.setJdbcUrl(adminUrl);
adminConfig.setUsername(dbUsername);
adminConfig.setPassword(dbPassword);
adminConfig.setDriverClassName("org.postgresql.Driver");
adminConfig.setMaximumPoolSize(1);
adminConfig.setConnectionTimeout(30000);
boolean databaseCreated = false;
try (HikariDataSource adminDataSource = new HikariDataSource(adminConfig);
var connection = adminDataSource.getConnection();
var statement = connection.createStatement()) {
// Check if database exists
String checkQuery = "SELECT 1 FROM pg_database WHERE datname = ?";
try (var preparedStatement = connection.prepareStatement(checkQuery)) {
preparedStatement.setString(1, dbName);
try (var resultSet = preparedStatement.executeQuery()) {
if (resultSet.next()) {
logger.debug("Database {} already exists", dbName);
return; // Database exists, nothing to do
}
}
}
// Create database if it doesn't exist
// Note: Database names cannot be parameterized, but we validate the name is safe
if (!dbName.matches("^[a-zA-Z][a-zA-Z0-9_]*$")) {
throw new IllegalArgumentException("Invalid database name: " + dbName);
}
String createQuery = "CREATE DATABASE " + dbName;
statement.executeUpdate(createQuery);
logger.info("Created database: {}", dbName);
databaseCreated = true;
} catch (SQLException e) {
logger.error("Failed to ensure database {} exists: {}", dbName, e.getMessage());
throw new RuntimeException("Database creation failed", e);
}
// If we just created the database, initialize its schema
if (databaseCreated) {
initializeNewDatabaseSchema(dbName);
}
}
private void initializeNewDatabaseSchema(String dbName) {
logger.debug("Initializing schema for new database: {}", dbName);
// Create a temporary DataSource for the new database to initialize schema
String newDbUrl = baseDbUrl.replaceAll("/[^/]*$", "/" + dbName);
HikariConfig config = new HikariConfig();
config.setJdbcUrl(newDbUrl);
config.setUsername(dbUsername);
config.setPassword(dbPassword);
config.setDriverClassName("org.postgresql.Driver");
config.setMaximumPoolSize(1);
config.setConnectionTimeout(30000);
try (HikariDataSource tempDataSource = new HikariDataSource(config)) {
// Use Hibernate to create the schema
// This mimics what Spring Boot does during startup
createSchemaUsingHibernate(tempDataSource);
logger.debug("Schema initialized for database: {}", dbName);
} catch (Exception e) {
logger.error("Failed to initialize schema for database {}: {}", dbName, e.getMessage());
throw new RuntimeException("Schema initialization failed", e);
}
}
public void initializeNewLibraryResources(String libraryId) {
Library library = libraries.get(libraryId);
if (library == null) {
throw new IllegalArgumentException("Library not found: " + libraryId);
}
try {
logger.debug("Initializing resources for new library: {}", library.getName());
// 1. Create image directory structure
initializeImageDirectories(library);
// 2. Solr indexes are global and managed automatically
// No per-library initialization needed for Solr
logger.debug("Successfully initialized resources for library: {}", library.getName());
} catch (Exception e) {
logger.error("Failed to initialize resources for library {}: {}", libraryId, e.getMessage());
throw new RuntimeException("Library resource initialization failed", e);
}
}
private void initializeImageDirectories(Library library) {
try {
// Create the library-specific image directory
String imagePath = "/app/images/" + library.getId();
java.nio.file.Path libraryImagePath = java.nio.file.Paths.get(imagePath);
if (!java.nio.file.Files.exists(libraryImagePath)) {
java.nio.file.Files.createDirectories(libraryImagePath);
logger.debug("Created image directory: {}", imagePath);
// Create subdirectories for different image types
java.nio.file.Files.createDirectories(libraryImagePath.resolve("stories"));
java.nio.file.Files.createDirectories(libraryImagePath.resolve("authors"));
java.nio.file.Files.createDirectories(libraryImagePath.resolve("collections"));
logger.debug("Created image subdirectories for library: {}", library.getId());
} else {
logger.debug("Image directory already exists: {}", imagePath);
}
} catch (Exception e) {
logger.error("Failed to create image directories for library {}: {}", library.getId(), e.getMessage());
throw new RuntimeException("Image directory creation failed", e);
}
}
private void createSchemaUsingHibernate(DataSource dataSource) {
// Create the essential tables manually using the same DDL that Hibernate would generate
// This is simpler than setting up a full Hibernate configuration for schema creation
String[] createTableStatements = {
// Authors table
"""
CREATE TABLE authors (
author_rating integer,
created_at timestamp(6) not null,
updated_at timestamp(6) not null,
id uuid not null,
avatar_image_path varchar(255),
name varchar(255) not null,
notes TEXT,
primary key (id)
)
""",
// Author URLs table
"""
CREATE TABLE author_urls (
author_id uuid not null,
url varchar(255)
)
""",
// Series table
"""
CREATE TABLE series (
created_at timestamp(6) not null,
id uuid not null,
description varchar(1000),
name varchar(255) not null,
primary key (id)
)
""",
// Tags table
"""
CREATE TABLE tags (
color varchar(7),
created_at timestamp(6) not null,
id uuid not null,
description varchar(500),
name varchar(255) not null unique,
primary key (id)
)
""",
// Tag aliases table
"""
CREATE TABLE tag_aliases (
created_from_merge boolean not null,
created_at timestamp(6) not null,
canonical_tag_id uuid not null,
id uuid not null,
alias_name varchar(255) not null unique,
primary key (id)
)
""",
// Collections table
"""
CREATE TABLE collections (
is_archived boolean not null,
rating integer,
created_at timestamp(6) not null,
updated_at timestamp(6) not null,
id uuid not null,
cover_image_path varchar(500),
name varchar(500) not null,
description TEXT,
primary key (id)
)
""",
// Stories table
"""
CREATE TABLE stories (
is_read boolean,
rating integer,
reading_position integer,
volume integer,
word_count integer,
created_at timestamp(6) not null,
last_read_at timestamp(6),
updated_at timestamp(6) not null,
author_id uuid,
id uuid not null,
series_id uuid,
description varchar(1000),
content_html TEXT,
content_plain TEXT,
cover_path varchar(255),
source_url varchar(255),
summary TEXT,
title varchar(255) not null,
primary key (id)
)
""",
// Reading positions table
"""
CREATE TABLE reading_positions (
chapter_index integer,
character_position integer,
percentage_complete float(53),
word_position integer,
created_at timestamp(6) not null,
updated_at timestamp(6) not null,
id uuid not null,
story_id uuid not null,
context_after varchar(500),
context_before varchar(500),
chapter_title varchar(255),
epub_cfi TEXT,
primary key (id)
)
""",
// Junction tables
"""
CREATE TABLE story_tags (
story_id uuid not null,
tag_id uuid not null,
primary key (story_id, tag_id)
)
""",
"""
CREATE TABLE collection_stories (
position integer not null,
added_at timestamp(6) not null,
collection_id uuid not null,
story_id uuid not null,
primary key (collection_id, story_id),
unique (collection_id, position)
)
""",
"""
CREATE TABLE collection_tags (
collection_id uuid not null,
tag_id uuid not null,
primary key (collection_id, tag_id)
)
"""
};
String[] createIndexStatements = {
"CREATE INDEX idx_reading_position_story ON reading_positions (story_id)"
};
String[] createConstraintStatements = {
// Foreign key constraints
"ALTER TABLE author_urls ADD CONSTRAINT FKdqhp51m0uveybsts098gd79uo FOREIGN KEY (author_id) REFERENCES authors",
"ALTER TABLE stories ADD CONSTRAINT FKhwecpqeaxy40ftrctef1u7gw7 FOREIGN KEY (author_id) REFERENCES authors",
"ALTER TABLE stories ADD CONSTRAINT FK1kulyvy7wwcolp2gkndt57cp7 FOREIGN KEY (series_id) REFERENCES series",
"ALTER TABLE reading_positions ADD CONSTRAINT FKglfhdhflan3pgyr2u0gxi21i5 FOREIGN KEY (story_id) REFERENCES stories",
"ALTER TABLE story_tags ADD CONSTRAINT FKmans33ijt0nf65t0sng2r848j FOREIGN KEY (tag_id) REFERENCES tags",
"ALTER TABLE story_tags ADD CONSTRAINT FKq9guid7swnjxwdpgxj3jo1rsi FOREIGN KEY (story_id) REFERENCES stories",
"ALTER TABLE tag_aliases ADD CONSTRAINT FKqfsawmcj3ey4yycb6958y24ch FOREIGN KEY (canonical_tag_id) REFERENCES tags",
"ALTER TABLE collection_stories ADD CONSTRAINT FKr55ho4vhj0wp03x13iskr1jds FOREIGN KEY (collection_id) REFERENCES collections",
"ALTER TABLE collection_stories ADD CONSTRAINT FK7n41tbbrt7r2e81hpu3612r1o FOREIGN KEY (story_id) REFERENCES stories",
"ALTER TABLE collection_tags ADD CONSTRAINT FKceq7ggev8n8ibjui1x5yo4x67 FOREIGN KEY (tag_id) REFERENCES tags",
"ALTER TABLE collection_tags ADD CONSTRAINT FKq9sa5s8csdpbphrvb48tts8jt FOREIGN KEY (collection_id) REFERENCES collections"
};
try (var connection = dataSource.getConnection();
var statement = connection.createStatement()) {
// Create tables
for (String sql : createTableStatements) {
statement.executeUpdate(sql);
}
// Create indexes
for (String sql : createIndexStatements) {
statement.executeUpdate(sql);
}
// Create constraints
for (String sql : createConstraintStatements) {
statement.executeUpdate(sql);
}
logger.debug("Successfully created all database tables and constraints");
} catch (SQLException e) {
logger.error("Failed to create database schema", e);
throw new RuntimeException("Schema creation failed", e);
}
}
private void closeCurrentResources() {
// No need to close datasource - SmartRoutingDataSource handles this
// Solr service is managed by Spring - no explicit cleanup needed
// Don't clear currentLibraryId here - only when explicitly switching
}
/**
* Update library metadata (name and description)
*/
public synchronized void updateLibraryMetadata(String libraryId, String newName, String newDescription) throws Exception {
if (libraryId == null || libraryId.trim().isEmpty()) {
throw new IllegalArgumentException("Library ID cannot be null or empty");
}
Library library = libraries.get(libraryId);
if (library == null) {
throw new IllegalArgumentException("Library not found: " + libraryId);
}
// Validate new name
if (newName == null || newName.trim().isEmpty()) {
throw new IllegalArgumentException("Library name cannot be null or empty");
}
String oldName = library.getName();
String oldDescription = library.getDescription();
// Update the library object
library.setName(newName.trim());
library.setDescription(newDescription != null ? newDescription.trim() : "");
try {
// Save to configuration file
saveLibraryConfiguration(library);
logger.info("Updated library metadata - ID: {}, Name: '{}' -> '{}', Description: '{}' -> '{}'",
libraryId, oldName, newName, oldDescription, library.getDescription());
} catch (Exception e) {
// Rollback changes on failure
library.setName(oldName);
library.setDescription(oldDescription);
throw new RuntimeException("Failed to update library metadata: " + e.getMessage(), e);
}
}
/**
* Save library configuration to file
*/
private void saveLibraryConfiguration(Library library) throws Exception {
Path libraryConfigPath = libraryConfigDir.resolve(library.getId() + ".json");
// Create library configuration object
Map<String, Object> config = new HashMap<>();
config.put("id", library.getId());
config.put("name", library.getName());
config.put("description", library.getDescription());
config.put("passwordHash", library.getPasswordHash());
config.put("dbName", library.getDbName());
config.put("imagePath", library.getImagePath());
config.put("initialized", library.isInitialized());
// Write to file
ObjectMapper mapper = new ObjectMapper();
String configJson = mapper.writerWithDefaultPrettyPrinter().writeValueAsString(config);
Files.writeString(libraryConfigPath, configJson, StandardCharsets.UTF_8);
logger.debug("Saved library configuration to: {}", libraryConfigPath);
}
}

View File

@@ -0,0 +1,257 @@
package com.storycove.service;
import com.storycove.config.SolrProperties;
import com.storycove.dto.LibraryOverviewStatsDto;
import com.storycove.dto.LibraryOverviewStatsDto.StoryWordCountDto;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.StatsParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.util.Map;
@Service
@ConditionalOnProperty(
value = "storycove.search.engine",
havingValue = "solr",
matchIfMissing = false
)
public class LibraryStatisticsService {
private static final Logger logger = LoggerFactory.getLogger(LibraryStatisticsService.class);
private static final int WORDS_PER_MINUTE = 250;
@Autowired(required = false)
private SolrClient solrClient;
@Autowired
private SolrProperties properties;
@Autowired
private LibraryService libraryService;
/**
* Get overview statistics for a library
*/
public LibraryOverviewStatsDto getOverviewStatistics(String libraryId) throws IOException, SolrServerException {
LibraryOverviewStatsDto stats = new LibraryOverviewStatsDto();
// Collection Overview
stats.setTotalStories(getTotalStories(libraryId));
stats.setTotalAuthors(getTotalAuthors(libraryId));
stats.setTotalSeries(getTotalSeries(libraryId));
stats.setTotalTags(getTotalTags(libraryId));
stats.setTotalCollections(getTotalCollections(libraryId));
stats.setUniqueSourceDomains(getUniqueSourceDomains(libraryId));
// Content Metrics - use Solr Stats Component
WordCountStats wordStats = getWordCountStatistics(libraryId);
stats.setTotalWordCount(wordStats.sum);
stats.setAverageWordsPerStory(wordStats.mean);
stats.setLongestStory(getLongestStory(libraryId));
stats.setShortestStory(getShortestStory(libraryId));
// Reading Time
stats.setTotalReadingTimeMinutes(wordStats.sum / WORDS_PER_MINUTE);
stats.setAverageReadingTimeMinutes(wordStats.mean / WORDS_PER_MINUTE);
return stats;
}
/**
* Get total number of stories in library
*/
private long getTotalStories(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0); // We only want the count
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
return response.getResults().getNumFound();
}
/**
* Get total number of authors in library
*/
private long getTotalAuthors(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
QueryResponse response = solrClient.query(properties.getCores().getAuthors(), query);
return response.getResults().getNumFound();
}
/**
* Get total number of series using faceting on seriesId
*/
private long getTotalSeries(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("seriesId:[* TO *]"); // Only stories that have a series
query.setRows(0);
query.setFacet(true);
query.addFacetField("seriesId");
query.setFacetLimit(-1); // Get all unique series
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField seriesFacet = response.getFacetField("seriesId");
return (seriesFacet != null && seriesFacet.getValues() != null)
? seriesFacet.getValueCount()
: 0;
}
/**
* Get total number of unique tags using faceting
*/
private long getTotalTags(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setFacet(true);
query.addFacetField("tagNames");
query.setFacetLimit(-1); // Get all unique tags
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField tagsFacet = response.getFacetField("tagNames");
return (tagsFacet != null && tagsFacet.getValues() != null)
? tagsFacet.getValueCount()
: 0;
}
/**
* Get total number of collections
*/
private long getTotalCollections(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
QueryResponse response = solrClient.query(properties.getCores().getCollections(), query);
return response.getResults().getNumFound();
}
/**
* Get number of unique source domains using faceting
*/
private long getUniqueSourceDomains(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("sourceDomain:[* TO *]"); // Only stories with a source domain
query.setRows(0);
query.setFacet(true);
query.addFacetField("sourceDomain");
query.setFacetLimit(-1);
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField domainFacet = response.getFacetField("sourceDomain");
return (domainFacet != null && domainFacet.getValues() != null)
? domainFacet.getValueCount()
: 0;
}
/**
* Get word count statistics using Solr Stats Component
*/
private WordCountStats getWordCountStatistics(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setParam(StatsParams.STATS, true);
query.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
WordCountStats stats = new WordCountStats();
// Extract stats from response
var fieldStatsInfo = response.getFieldStatsInfo();
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
var fieldStat = fieldStatsInfo.get("wordCount");
Object sumObj = fieldStat.getSum();
Object meanObj = fieldStat.getMean();
stats.sum = (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
stats.mean = (meanObj != null) ? ((Number) meanObj).doubleValue() : 0.0;
}
return stats;
}
/**
* Get the longest story in the library
*/
private StoryWordCountDto getLongestStory(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("wordCount:[1 TO *]"); // Exclude stories with 0 words
query.setSort("wordCount", SolrQuery.ORDER.desc);
query.setRows(1);
query.setFields("id", "title", "authorName", "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
if (response.getResults().isEmpty()) {
return null;
}
SolrDocument doc = response.getResults().get(0);
return createStoryWordCountDto(doc);
}
/**
* Get the shortest story in the library (excluding 0 word count)
*/
private StoryWordCountDto getShortestStory(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("wordCount:[1 TO *]"); // Exclude stories with 0 words
query.setSort("wordCount", SolrQuery.ORDER.asc);
query.setRows(1);
query.setFields("id", "title", "authorName", "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
if (response.getResults().isEmpty()) {
return null;
}
SolrDocument doc = response.getResults().get(0);
return createStoryWordCountDto(doc);
}
/**
* Helper method to create StoryWordCountDto from Solr document
*/
private StoryWordCountDto createStoryWordCountDto(SolrDocument doc) {
String id = (String) doc.getFieldValue("id");
String title = (String) doc.getFieldValue("title");
String authorName = (String) doc.getFieldValue("authorName");
Object wordCountObj = doc.getFieldValue("wordCount");
int wordCount = (wordCountObj != null) ? ((Number) wordCountObj).intValue() : 0;
long readingTime = wordCount / WORDS_PER_MINUTE;
return new StoryWordCountDto(id, title, authorName, wordCount, readingTime);
}
/**
* Helper class to hold word count statistics
*/
private static class WordCountStats {
long sum = 0;
double mean = 0.0;
}
}

View File

@@ -1,36 +1,83 @@
package com.storycove.service; package com.storycove.service;
import org.springframework.beans.factory.annotation.Value; import com.storycove.util.JwtUtil;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.security.crypto.password.PasswordEncoder; import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
@Service @Service
public class PasswordAuthenticationService { public class PasswordAuthenticationService {
@Value("${storycove.auth.password}") private static final Logger logger = LoggerFactory.getLogger(PasswordAuthenticationService.class);
private String applicationPassword;
private final PasswordEncoder passwordEncoder; private final PasswordEncoder passwordEncoder;
private final LibraryService libraryService;
private final JwtUtil jwtUtil;
public PasswordAuthenticationService(PasswordEncoder passwordEncoder) { @Autowired
public PasswordAuthenticationService(
PasswordEncoder passwordEncoder,
LibraryService libraryService,
JwtUtil jwtUtil) {
this.passwordEncoder = passwordEncoder; this.passwordEncoder = passwordEncoder;
this.libraryService = libraryService;
this.jwtUtil = jwtUtil;
} }
public boolean authenticate(String providedPassword) { /**
* Authenticate user and switch to the appropriate library
* Returns JWT token if authentication successful, null otherwise
*/
public String authenticateAndSwitchLibrary(String providedPassword) {
if (providedPassword == null || providedPassword.trim().isEmpty()) { if (providedPassword == null || providedPassword.trim().isEmpty()) {
return false; return null;
} }
// If application password starts with {bcrypt}, it's already encoded // Find which library this password belongs to
if (applicationPassword.startsWith("{bcrypt}") || applicationPassword.startsWith("$2")) { String libraryId = libraryService.authenticateAndGetLibrary(providedPassword);
return passwordEncoder.matches(providedPassword, applicationPassword); if (libraryId == null) {
logger.warn("Authentication failed - invalid password");
return null;
} }
// Otherwise, compare directly (for development/testing) try {
return applicationPassword.equals(providedPassword); // Switch to the authenticated library with forced reindexing (may take 2-3 seconds)
libraryService.switchToLibraryAfterAuthentication(libraryId);
// Generate JWT token with library context
String token = jwtUtil.generateToken("user", libraryId);
logger.info("Successfully authenticated and switched to library: {}", libraryId);
return token;
} catch (Exception e) {
logger.error("Failed to switch to library: {}", libraryId, e);
return null;
}
}
/**
* Legacy method - kept for backward compatibility
*/
@Deprecated
public boolean authenticate(String providedPassword) {
return authenticateAndSwitchLibrary(providedPassword) != null;
} }
public String encodePassword(String rawPassword) { public String encodePassword(String rawPassword) {
return passwordEncoder.encode(rawPassword); return passwordEncoder.encode(rawPassword);
} }
/**
* Get current library info for authenticated user
*/
public String getCurrentLibraryInfo() {
var library = libraryService.getCurrentLibrary();
if (library != null) {
return String.format("Library: %s (%s)", library.getName(), library.getId());
}
return "No library active";
}
} }

Some files were not shown because too many files have changed in this diff Show More