Various improvements & Epub support

This commit is contained in:
Stefan Hardegger
2025-08-08 14:09:14 +02:00
parent 090b858a54
commit 379c8c170f
37 changed files with 4069 additions and 298 deletions

View File

@@ -64,6 +64,32 @@ export default function AddStoryPage() {
}
}, [searchParams]);
// Load pending story data from bulk combine operation
useEffect(() => {
  // Only runs when we were redirected here from the bulk-combine flow
  // (the bulk import page appends ?from=bulk-combine).
  const fromBulkCombine = searchParams.get('from') === 'bulk-combine';
  if (fromBulkCombine) {
    // The bulk-combine flow stashes the combined story JSON in
    // localStorage under 'pendingStory' before redirecting here.
    const pendingStoryData = localStorage.getItem('pendingStory');
    if (pendingStoryData) {
      try {
        const storyData = JSON.parse(pendingStoryData);
        // Pre-fill the form; fall back to empty values for missing fields.
        // Note the key mapping: author -> authorName, content -> contentHtml.
        setFormData(prev => ({
          ...prev,
          title: storyData.title || '',
          authorName: storyData.author || '',
          contentHtml: storyData.content || '',
          sourceUrl: storyData.sourceUrl || '',
          summary: storyData.summary || '',
          tags: storyData.tags || []
        }));
        // Clear the pending data so a refresh doesn't re-apply it
        localStorage.removeItem('pendingStory');
      } catch (error) {
        // Malformed JSON in storage: log and leave the form untouched
        console.error('Failed to load pending story data:', error);
      }
    }
  }
}, [searchParams]);
// Check for duplicates when title and author are both present
useEffect(() => {
const checkDuplicates = async () => {
@@ -442,7 +468,7 @@ export default function AddStoryPage() {
</label>
<ImageUpload
onImageSelect={setCoverImage}
accept="image/jpeg,image/png,image/webp"
accept="image/jpeg,image/png"
maxSizeMB={5}
aspectRatio="3:4"
placeholder="Drop a cover image here or click to select"

View File

@@ -269,7 +269,7 @@ export default function EditAuthorPage() {
</label>
<ImageUpload
onImageSelect={setAvatarImage}
accept="image/jpeg,image/png,image/webp"
accept="image/jpeg,image/png"
maxSizeMB={5}
aspectRatio="1:1"
placeholder="Drop an avatar image here or click to select"

View File

@@ -11,16 +11,17 @@ import TagFilter from '../../components/stories/TagFilter';
import LoadingSpinner from '../../components/ui/LoadingSpinner';
type ViewMode = 'grid' | 'list';
type SortOption = 'createdAt' | 'title' | 'authorName' | 'rating' | 'wordCount';
type SortOption = 'createdAt' | 'title' | 'authorName' | 'rating' | 'wordCount' | 'lastRead';
export default function LibraryPage() {
const [stories, setStories] = useState<Story[]>([]);
const [tags, setTags] = useState<Tag[]>([]);
const [loading, setLoading] = useState(false);
const [searchLoading, setSearchLoading] = useState(false);
const [searchQuery, setSearchQuery] = useState('');
const [selectedTags, setSelectedTags] = useState<string[]>([]);
const [viewMode, setViewMode] = useState<ViewMode>('list');
const [sortOption, setSortOption] = useState<SortOption>('createdAt');
const [sortOption, setSortOption] = useState<SortOption>('lastRead');
const [sortDirection, setSortDirection] = useState<'asc' | 'desc'>('desc');
const [page, setPage] = useState(0);
const [totalPages, setTotalPages] = useState(1);
@@ -47,7 +48,13 @@ export default function LibraryPage() {
const debounceTimer = setTimeout(() => {
const performSearch = async () => {
try {
setLoading(true);
// Use searchLoading for background search, loading only for initial load
const isInitialLoad = stories.length === 0 && !searchQuery && selectedTags.length === 0;
if (isInitialLoad) {
setLoading(true);
} else {
setSearchLoading(true);
}
// Always use search API for consistency - use '*' for match-all when no query
const result = await searchApi.search({
@@ -73,11 +80,12 @@ export default function LibraryPage() {
setStories([]);
} finally {
setLoading(false);
setSearchLoading(false);
}
};
performSearch();
}, searchQuery ? 300 : 0); // Debounce search, but not other changes
}, searchQuery ? 500 : 0); // 500ms debounce for search, immediate for other changes
return () => clearTimeout(debounceTimer);
}, [searchQuery, selectedTags, page, sortOption, sortDirection, refreshTrigger]);
@@ -154,16 +162,21 @@ export default function LibraryPage() {
</p>
</div>
<Button href="/add-story">
Add New Story
</Button>
<div className="flex gap-2">
<Button href="/add-story">
Add New Story
</Button>
<Button href="/stories/import/epub" variant="secondary">
📖 Import EPUB
</Button>
</div>
</div>
{/* Search and Filters */}
<div className="space-y-4">
{/* Search Bar */}
<div className="flex flex-col sm:flex-row gap-4">
<div className="flex-1">
<div className="flex-1 relative">
<Input
type="search"
placeholder="Search by title, author, or tags..."
@@ -171,6 +184,11 @@ export default function LibraryPage() {
onChange={handleSearchChange}
className="w-full"
/>
{searchLoading && (
<div className="absolute right-3 top-1/2 transform -translate-y-1/2">
<div className="animate-spin h-4 w-4 border-2 border-theme-accent border-t-transparent rounded-full"></div>
</div>
)}
</div>
{/* View Mode Toggle */}
@@ -215,6 +233,7 @@ export default function LibraryPage() {
<option value="authorName">Author</option>
<option value="rating">Rating</option>
<option value="wordCount">Word Count</option>
<option value="lastRead">Last Read</option>
</select>
{/* Sort Direction Toggle */}

View File

@@ -0,0 +1,93 @@
import { NextRequest } from 'next/server';
// Configure route timeout for long-running progress streams
export const maxDuration = 900; // 15 minutes (900 seconds)
// Shape of a single progress event queued for delivery over the SSE stream.
interface ProgressUpdate {
  // Discriminator: 'completed' and 'error' terminate the stream.
  type: 'progress' | 'completed' | 'error';
  current: number;   // Items processed so far
  total: number;     // Total items in the batch
  message: string;   // Human-readable status line for the client UI
  url?: string;      // URL currently being processed, when applicable
  title?: string;    // Scraped story title, when available
  author?: string;   // Scraped story author, when available
  wordCount?: number;      // Word count of the most recently scraped story
  totalWordCount?: number; // Running word count across the whole batch
  error?: string;          // Error detail, set when type === 'error'
  combinedStory?: any;     // Combine-mode payload — presumably {title, author, content, ...}; TODO type precisely
  results?: any[];         // Individual-mode per-URL results — presumably ImportResult[]; TODO confirm
  summary?: any;           // Batch summary counts — presumably {total, imported, skipped, errors}; TODO confirm
}
// Global progress storage (in production, use Redis or database)
const progressStore = new Map<string, ProgressUpdate[]>();
/**
 * SSE endpoint streaming progress updates for a bulk-import session.
 *
 * Polls the in-memory progress store every 500ms and forwards queued updates
 * as `data:` events. The stream ends when a 'completed' or 'error' update is
 * delivered, or after a 15-minute safety timeout.
 *
 * @param request Incoming request; must carry a `sessionId` query parameter.
 * @returns 400 when sessionId is missing, otherwise a text/event-stream response.
 */
export async function GET(request: NextRequest) {
  const searchParams = request.nextUrl.searchParams;
  const sessionId = searchParams.get('sessionId');

  if (!sessionId) {
    return new Response('Session ID required', { status: 400 });
  }

  // Set up Server-Sent Events
  const stream = new ReadableStream({
    start(controller) {
      const encoder = new TextEncoder();
      let closed = false;

      // Tear the stream down exactly once. Previously the 15-minute cleanup
      // timeout was never cancelled, so after a 'completed'/'error' close it
      // still fired and called controller.close() on an already-closed
      // controller, which throws.
      const shutdown = () => {
        if (closed) return;
        closed = true;
        clearInterval(interval);
        clearTimeout(timeout);
        progressStore.delete(sessionId);
        try {
          controller.close();
        } catch {
          // The controller may already be closed (e.g. client disconnected).
        }
      };

      // Send initial connection message
      const hello = `data: ${JSON.stringify({ type: 'connected', sessionId })}\n\n`;
      controller.enqueue(encoder.encode(hello));

      // Check for progress updates every 500ms
      const interval = setInterval(() => {
        if (closed) return; // never enqueue after close
        const updates = progressStore.get(sessionId);
        if (updates && updates.length > 0) {
          // Send all pending updates
          updates.forEach(update => {
            const data = `data: ${JSON.stringify(update)}\n\n`;
            controller.enqueue(encoder.encode(data));
          });
          // Clear sent updates
          progressStore.delete(sessionId);

          // If this was a completion or error, close the stream
          const lastUpdate = updates[updates.length - 1];
          if (lastUpdate.type === 'completed' || lastUpdate.type === 'error') {
            shutdown();
          }
        }
      }, 500);

      // Safety cleanup in case a terminal update never arrives
      const timeout = setTimeout(shutdown, 900000); // 15 minutes
    }
  });

  return new Response(stream, {
    headers: {
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive',
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Headers': 'Cache-Control',
    },
  });
}
// Queue a progress update so the SSE poller can deliver it to the client.
// Creates the session's queue on first use.
export function sendProgressUpdate(sessionId: string, update: ProgressUpdate) {
  const pending = progressStore.get(sessionId);
  if (pending) {
    pending.push(update);
  } else {
    progressStore.set(sessionId, [update]);
  }
}
// Export the helper for other modules to use
export { progressStore };

View File

@@ -1,7 +1,23 @@
import { NextRequest, NextResponse } from 'next/server';
// Configure route timeout for long-running scraping operations
export const maxDuration = 900; // 15 minutes (900 seconds)
// Import progress tracking helper.
// Loads the progress route module lazily on every call so the two route
// modules never form a static circular dependency. Progress reporting is
// best-effort: failures are logged and swallowed.
async function sendProgressUpdate(sessionId: string, update: any) {
  try {
    // Dynamic import to avoid circular dependency
    const progressModule = await import('./progress/route');
    progressModule.sendProgressUpdate(sessionId, update);
  } catch (error) {
    console.warn('Failed to send progress update:', error);
  }
}
// Request body accepted by the bulk-import POST endpoint.
interface BulkImportRequest {
  urls: string[];           // Story URLs to scrape, one per line in the UI
  combineIntoOne?: boolean; // When true, merge all scraped content into a single story
  sessionId?: string; // For progress tracking
}
interface ImportResult {
@@ -22,52 +38,430 @@ interface BulkImportResponse {
skipped: number;
errors: number;
};
combinedStory?: {
title: string;
author: string;
content: string;
summary?: string;
sourceUrl: string;
tags?: string[];
};
}
export async function POST(request: NextRequest) {
// Background processing function for combined mode
/**
 * Scrapes every URL and merges the results into one combined story, streaming
 * progress to the client via the SSE progress store.
 *
 * All-or-nothing: if any URL fails, an 'error' progress event is sent and no
 * combined story is produced. On success a 'completed' event carries the
 * combined story payload; content over ~10 MB is truncated proportionally so
 * the SSE payload stays deliverable.
 *
 * @param urls          URLs to scrape, in the order they should be combined.
 * @param sessionId     Progress-tracking session key for SSE delivery.
 * @param authorization Bearer header value; unused here but kept for
 *                      signature parity with processIndividualMode.
 * @param scraper       Scraper instance exposing scrapeStory(url).
 */
async function processCombinedMode(
  urls: string[],
  sessionId: string,
  authorization: string,
  scraper: any
) {
  const results: ImportResult[] = [];
  let importedCount = 0;
  let errorCount = 0;

  // Accumulators for the combined story: first URL supplies base metadata.
  const combinedContent: string[] = [];
  let baseTitle = '';
  let baseAuthor = '';
  let baseSummary = '';
  let baseSourceUrl = '';
  const combinedTags = new Set<string>();
  let totalWordCount = 0;

  // Send initial progress update
  await sendProgressUpdate(sessionId, {
    type: 'progress',
    current: 0,
    total: urls.length,
    message: `Starting to scrape ${urls.length} URLs for combining...`,
    totalWordCount: 0
  });

  for (let i = 0; i < urls.length; i++) {
    const url = urls[i];
    console.log(`Scraping URL ${i + 1}/${urls.length} for combine: ${url}`);

    // Send progress update
    await sendProgressUpdate(sessionId, {
      type: 'progress',
      current: i,
      total: urls.length,
      message: `Scraping URL ${i + 1} of ${urls.length}...`,
      url: url,
      totalWordCount
    });

    try {
      const trimmedUrl = url.trim();
      if (!trimmedUrl) {
        results.push({
          url: url || 'Empty URL',
          status: 'error',
          error: 'Empty URL in combined mode'
        });
        errorCount++;
        continue;
      }

      const scrapedStory = await scraper.scrapeStory(trimmedUrl);

      // Check if we got content - this is required for combined mode
      if (!scrapedStory.content || scrapedStory.content.trim() === '') {
        results.push({
          url: trimmedUrl,
          status: 'error',
          error: 'No content found - required for combined mode'
        });
        errorCount++;
        continue;
      }

      // Use first URL for base metadata (title can be empty for combined mode)
      if (i === 0) {
        baseTitle = scrapedStory.title || 'Combined Story';
        baseAuthor = scrapedStory.author || 'Unknown Author';
        baseSummary = scrapedStory.summary || '';
        baseSourceUrl = trimmedUrl;
      }

      // Add content with URL separator
      combinedContent.push(`<!-- Content from: ${trimmedUrl} -->`);
      if (scrapedStory.title && i > 0) {
        // FIX: escape the scraped title before interpolating it into markup.
        // Titles are untrusted remote content and could otherwise inject raw
        // HTML into the combined story.
        const safeTitle = scrapedStory.title
          .replace(/&/g, '&amp;')
          .replace(/</g, '&lt;')
          .replace(/>/g, '&gt;');
        combinedContent.push(`<h2>${safeTitle}</h2>`);
      }
      combinedContent.push(scrapedStory.content);
      combinedContent.push('<hr/>'); // Visual separator between parts

      // Calculate word count for this story
      const textContent = scrapedStory.content.replace(/<[^>]*>/g, ''); // Strip HTML
      const wordCount = textContent.split(/\s+/).filter((word: string) => word.length > 0).length;
      totalWordCount += wordCount;

      // Collect tags from all stories
      if (scrapedStory.tags) {
        scrapedStory.tags.forEach((tag: string) => combinedTags.add(tag));
      }

      results.push({
        url: trimmedUrl,
        status: 'imported',
        title: scrapedStory.title,
        author: scrapedStory.author
      });
      importedCount++;

      // Send progress update with word count
      await sendProgressUpdate(sessionId, {
        type: 'progress',
        current: i + 1,
        total: urls.length,
        message: `Scraped "${scrapedStory.title}" (${wordCount.toLocaleString()} words)`,
        url: trimmedUrl,
        title: scrapedStory.title,
        author: scrapedStory.author,
        wordCount: wordCount,
        totalWordCount: totalWordCount
      });
    } catch (error) {
      console.error(`Error processing URL ${url} in combined mode:`, error);
      results.push({
        url: url,
        status: 'error',
        error: error instanceof Error ? error.message : 'Unknown error'
      });
      errorCount++;
    }
  }

  // If we have any errors, fail the entire combined operation
  if (errorCount > 0) {
    await sendProgressUpdate(sessionId, {
      type: 'error',
      current: urls.length,
      total: urls.length,
      message: 'Combined mode failed: some URLs could not be processed',
      error: `${errorCount} URLs failed to process`
    });
    return;
  }

  // Check content size to prevent response size issues
  const combinedContentString = combinedContent.join('\n');
  const contentSizeInMB = new Blob([combinedContentString]).size / (1024 * 1024);
  console.log(`Combined content size: ${contentSizeInMB.toFixed(2)} MB`);
  console.log(`Combined content character length: ${combinedContentString.length}`);
  console.log(`Combined content parts count: ${combinedContent.length}`);

  // Return the combined story data via progress update. Content above 10 MB
  // is cut proportionally and the summary annotated to say so.
  const combinedStory = {
    title: baseTitle,
    author: baseAuthor,
    content: contentSizeInMB > 10 ?
      combinedContentString.substring(0, Math.floor(combinedContentString.length * (10 / contentSizeInMB))) + '\n\n<!-- Content truncated due to size limit -->' :
      combinedContentString,
    summary: contentSizeInMB > 10 ? baseSummary + ' (Content truncated due to size limit)' : baseSummary,
    sourceUrl: baseSourceUrl,
    tags: Array.from(combinedTags)
  };

  // Send completion notification for combine mode
  await sendProgressUpdate(sessionId, {
    type: 'completed',
    current: urls.length,
    total: urls.length,
    message: `Combined scraping completed: ${totalWordCount.toLocaleString()} words from ${importedCount} stories`,
    totalWordCount: totalWordCount,
    combinedStory: combinedStory
  });

  console.log(`Combined scraping completed: ${importedCount} URLs combined into one story`);
}
// Background processing function for individual mode
/**
 * Scrapes each URL independently and creates one story per URL via the
 * backend API, streaming per-URL progress through the SSE store.
 *
 * Per-URL outcomes are 'imported', 'skipped' (missing fields or duplicate),
 * or 'error'; a failure on one URL never aborts the rest of the batch. A
 * 'completed' progress event with full results and summary counts is sent at
 * the end, followed by a best-effort Typesense reindex when anything was
 * imported.
 *
 * @param urls          URLs to scrape and import, processed sequentially.
 * @param sessionId     Progress-tracking session key for SSE delivery.
 * @param authorization Authorization header value forwarded to the backend.
 * @param scraper       Scraper instance exposing scrapeStory(url).
 */
async function processIndividualMode(
  urls: string[],
  sessionId: string,
  authorization: string,
  scraper: any
) {
  const results: ImportResult[] = [];
  let importedCount = 0;
  let skippedCount = 0;
  let errorCount = 0;

  await sendProgressUpdate(sessionId, {
    type: 'progress',
    current: 0,
    total: urls.length,
    message: `Starting to import ${urls.length} URLs individually...`
  });

  for (let i = 0; i < urls.length; i++) {
    const url = urls[i];
    console.log(`Processing URL ${i + 1}/${urls.length}: ${url}`);

    await sendProgressUpdate(sessionId, {
      type: 'progress',
      current: i,
      total: urls.length,
      message: `Processing URL ${i + 1} of ${urls.length}...`,
      url: url
    });

    try {
      // Validate URL format
      if (!url || typeof url !== 'string' || url.trim() === '') {
        results.push({
          url: url || 'Empty URL',
          status: 'error',
          error: 'Invalid URL format'
        });
        errorCount++;
        continue;
      }

      const trimmedUrl = url.trim();

      // Scrape the story
      const scrapedStory = await scraper.scrapeStory(trimmedUrl);

      // Validate required fields
      if (!scrapedStory.title || !scrapedStory.author || !scrapedStory.content) {
        const missingFields = [];
        if (!scrapedStory.title) missingFields.push('title');
        if (!scrapedStory.author) missingFields.push('author');
        if (!scrapedStory.content) missingFields.push('content');
        results.push({
          url: trimmedUrl,
          status: 'skipped',
          reason: `Missing required fields: ${missingFields.join(', ')}`,
          title: scrapedStory.title,
          author: scrapedStory.author
        });
        skippedCount++;
        continue;
      }

      // Check for duplicates using query parameters
      try {
        // NOTE(review): hardcoded container-to-container backend URL —
        // assumes this runs inside the Docker network where 'backend' resolves.
        const duplicateCheckUrl = `http://backend:8080/api/stories/check-duplicate`;
        const params = new URLSearchParams({
          title: scrapedStory.title,
          authorName: scrapedStory.author
        });
        const duplicateCheckResponse = await fetch(`${duplicateCheckUrl}?${params.toString()}`, {
          method: 'GET',
          headers: {
            'Authorization': authorization,
            'Content-Type': 'application/json',
          },
        });
        if (duplicateCheckResponse.ok) {
          const duplicateResult = await duplicateCheckResponse.json();
          if (duplicateResult.hasDuplicates) {
            results.push({
              url: trimmedUrl,
              status: 'skipped',
              reason: `Duplicate story found (${duplicateResult.count} existing)`,
              title: scrapedStory.title,
              author: scrapedStory.author
            });
            skippedCount++;
            continue;
          }
        }
      } catch (error) {
        console.warn('Duplicate check failed:', error);
        // Continue with import if duplicate check fails
      }

      // Create the story
      try {
        const storyData = {
          title: scrapedStory.title,
          summary: scrapedStory.summary || undefined,
          contentHtml: scrapedStory.content,
          sourceUrl: scrapedStory.sourceUrl || trimmedUrl,
          authorName: scrapedStory.author,
          tagNames: scrapedStory.tags && scrapedStory.tags.length > 0 ? scrapedStory.tags : undefined,
        };
        const createUrl = `http://backend:8080/api/stories`;
        const createResponse = await fetch(createUrl, {
          method: 'POST',
          headers: {
            'Authorization': authorization,
            'Content-Type': 'application/json',
          },
          body: JSON.stringify(storyData),
        });
        if (!createResponse.ok) {
          // Surface the backend's message when available
          const errorData = await createResponse.json();
          throw new Error(errorData.message || 'Failed to create story');
        }
        const createdStory = await createResponse.json();
        results.push({
          url: trimmedUrl,
          status: 'imported',
          title: scrapedStory.title,
          author: scrapedStory.author,
          storyId: createdStory.id
        });
        importedCount++;
        console.log(`Successfully imported: ${scrapedStory.title} by ${scrapedStory.author} (ID: ${createdStory.id})`);

        // Send progress update for successful import
        await sendProgressUpdate(sessionId, {
          type: 'progress',
          current: i + 1,
          total: urls.length,
          message: `Imported "${scrapedStory.title}" by ${scrapedStory.author}`,
          url: trimmedUrl,
          title: scrapedStory.title,
          author: scrapedStory.author
        });
      } catch (error) {
        console.error(`Failed to create story for ${trimmedUrl}:`, error);
        let errorMessage = 'Failed to create story';
        if (error instanceof Error) {
          errorMessage = error.message;
        }
        results.push({
          url: trimmedUrl,
          status: 'error',
          error: errorMessage,
          title: scrapedStory.title,
          author: scrapedStory.author
        });
        errorCount++;
      }
    } catch (error) {
      // Scrape-level failure for this URL; record it and keep going
      console.error(`Error processing URL ${url}:`, error);
      let errorMessage = 'Unknown error';
      if (error instanceof Error) {
        errorMessage = error.message;
      }
      results.push({
        url: url,
        status: 'error',
        error: errorMessage
      });
      errorCount++;
    }
  }

  // Send completion notification
  await sendProgressUpdate(sessionId, {
    type: 'completed',
    current: urls.length,
    total: urls.length,
    message: `Bulk import completed: ${importedCount} imported, ${skippedCount} skipped, ${errorCount} errors`,
    results: results,
    summary: {
      total: urls.length,
      imported: importedCount,
      skipped: skippedCount,
      errors: errorCount
    }
  });

  console.log(`Bulk import completed: ${importedCount} imported, ${skippedCount} skipped, ${errorCount} errors`);

  // Trigger Typesense reindex if any stories were imported
  if (importedCount > 0) {
    try {
      console.log('Triggering Typesense reindex after bulk import...');
      const reindexUrl = `http://backend:8080/api/stories/reindex-typesense`;
      const reindexResponse = await fetch(reindexUrl, {
        method: 'POST',
        headers: {
          'Authorization': authorization,
          'Content-Type': 'application/json',
        },
      });
      if (reindexResponse.ok) {
        const reindexResult = await reindexResponse.json();
        console.log('Typesense reindex completed:', reindexResult);
      } else {
        console.warn('Typesense reindex failed:', reindexResponse.status);
      }
    } catch (error) {
      console.warn('Failed to trigger Typesense reindex:', error);
      // Don't fail the whole request if reindex fails
    }
  }
}
// Background processing function
async function processBulkImport(
urls: string[],
combineIntoOne: boolean,
sessionId: string,
authorization: string
) {
try {
// Check for authentication
const authorization = request.headers.get('authorization');
if (!authorization) {
return NextResponse.json(
{ error: 'Authentication required for bulk import' },
{ status: 401 }
);
}
const body = await request.json();
const { urls } = body as BulkImportRequest;
if (!urls || !Array.isArray(urls) || urls.length === 0) {
return NextResponse.json(
{ error: 'URLs array is required and must not be empty' },
{ status: 400 }
);
}
if (urls.length > 50) {
return NextResponse.json(
{ error: 'Maximum 50 URLs allowed per bulk import' },
{ status: 400 }
);
}
// Dynamic imports to prevent client-side bundling
const { StoryScraper } = await import('@/lib/scraper/scraper');
const scraper = new StoryScraper();
const results: ImportResult[] = [];
let importedCount = 0;
let skippedCount = 0;
let errorCount = 0;
console.log(`Starting bulk scraping for ${urls.length} URLs`);
console.log(`Environment NEXT_PUBLIC_API_URL: ${process.env.NEXT_PUBLIC_API_URL}`);
// For server-side API calls in Docker, use direct backend container URL
// Client-side calls use NEXT_PUBLIC_API_URL through nginx, but server-side needs direct container access
const serverSideApiBaseUrl = 'http://backend:8080/api';
console.log(`DEBUG: serverSideApiBaseUrl variable is: ${serverSideApiBaseUrl}`);
console.log(`Starting bulk scraping for ${urls.length} URLs${combineIntoOne ? ' (combine mode)' : ''}`);
console.log(`Session ID: ${sessionId}`);
// Quick test to verify backend connectivity
try {
@@ -84,208 +478,86 @@ export async function POST(request: NextRequest) {
console.error(`Backend connectivity test failed:`, error);
}
for (const url of urls) {
console.log(`Processing URL: ${url}`);
try {
// Validate URL format
if (!url || typeof url !== 'string' || url.trim() === '') {
results.push({
url: url || 'Empty URL',
status: 'error',
error: 'Invalid URL format'
});
errorCount++;
continue;
}
const trimmedUrl = url.trim();
// Scrape the story
const scrapedStory = await scraper.scrapeStory(trimmedUrl);
// Validate required fields
if (!scrapedStory.title || !scrapedStory.author || !scrapedStory.content) {
const missingFields = [];
if (!scrapedStory.title) missingFields.push('title');
if (!scrapedStory.author) missingFields.push('author');
if (!scrapedStory.content) missingFields.push('content');
results.push({
url: trimmedUrl,
status: 'skipped',
reason: `Missing required fields: ${missingFields.join(', ')}`,
title: scrapedStory.title,
author: scrapedStory.author
});
skippedCount++;
continue;
}
// Check for duplicates using query parameters
try {
// Use hardcoded backend URL for container-to-container communication
const duplicateCheckUrl = `http://backend:8080/api/stories/check-duplicate`;
console.log(`Duplicate check URL: ${duplicateCheckUrl}`);
const params = new URLSearchParams({
title: scrapedStory.title,
authorName: scrapedStory.author
});
const duplicateCheckResponse = await fetch(`${duplicateCheckUrl}?${params.toString()}`, {
method: 'GET',
headers: {
'Authorization': authorization,
'Content-Type': 'application/json',
},
});
if (duplicateCheckResponse.ok) {
const duplicateResult = await duplicateCheckResponse.json();
if (duplicateResult.hasDuplicates) {
results.push({
url: trimmedUrl,
status: 'skipped',
reason: `Duplicate story found (${duplicateResult.count} existing)`,
title: scrapedStory.title,
author: scrapedStory.author
});
skippedCount++;
continue;
}
}
} catch (error) {
console.warn('Duplicate check failed:', error);
// Continue with import if duplicate check fails
}
// Create the story
try {
const storyData = {
title: scrapedStory.title,
summary: scrapedStory.summary || undefined,
contentHtml: scrapedStory.content,
sourceUrl: scrapedStory.sourceUrl || trimmedUrl,
authorName: scrapedStory.author,
tagNames: scrapedStory.tags && scrapedStory.tags.length > 0 ? scrapedStory.tags : undefined,
};
// Use hardcoded backend URL for container-to-container communication
const createUrl = `http://backend:8080/api/stories`;
console.log(`Create story URL: ${createUrl}`);
const createResponse = await fetch(createUrl, {
method: 'POST',
headers: {
'Authorization': authorization,
'Content-Type': 'application/json',
},
body: JSON.stringify(storyData),
});
if (!createResponse.ok) {
const errorData = await createResponse.json();
throw new Error(errorData.message || 'Failed to create story');
}
const createdStory = await createResponse.json();
results.push({
url: trimmedUrl,
status: 'imported',
title: scrapedStory.title,
author: scrapedStory.author,
storyId: createdStory.id
});
importedCount++;
console.log(`Successfully imported: ${scrapedStory.title} by ${scrapedStory.author} (ID: ${createdStory.id})`);
} catch (error) {
console.error(`Failed to create story for ${trimmedUrl}:`, error);
let errorMessage = 'Failed to create story';
if (error instanceof Error) {
errorMessage = error.message;
}
results.push({
url: trimmedUrl,
status: 'error',
error: errorMessage,
title: scrapedStory.title,
author: scrapedStory.author
});
errorCount++;
}
} catch (error) {
console.error(`Error processing URL ${url}:`, error);
let errorMessage = 'Unknown error';
if (error instanceof Error) {
errorMessage = error.message;
}
results.push({
url: url,
status: 'error',
error: errorMessage
});
errorCount++;
}
// Handle combined mode
if (combineIntoOne) {
await processCombinedMode(urls, sessionId, authorization, scraper);
} else {
// Normal individual processing mode
await processIndividualMode(urls, sessionId, authorization, scraper);
}
const response: BulkImportResponse = {
results,
summary: {
total: urls.length,
imported: importedCount,
skipped: skippedCount,
errors: errorCount
}
};
console.log(`Bulk import completed:`, response.summary);
// Trigger Typesense reindex if any stories were imported
if (importedCount > 0) {
try {
console.log('Triggering Typesense reindex after bulk import...');
const reindexUrl = `http://backend:8080/api/stories/reindex-typesense`;
const reindexResponse = await fetch(reindexUrl, {
method: 'POST',
headers: {
'Authorization': authorization,
'Content-Type': 'application/json',
},
});
if (reindexResponse.ok) {
const reindexResult = await reindexResponse.json();
console.log('Typesense reindex completed:', reindexResult);
} else {
console.warn('Typesense reindex failed:', reindexResponse.status);
}
} catch (error) {
console.warn('Failed to trigger Typesense reindex:', error);
// Don't fail the whole request if reindex fails
}
}
return NextResponse.json(response);
} catch (error) {
console.error('Bulk import error:', error);
console.error('Background bulk import error:', error);
await sendProgressUpdate(sessionId, {
type: 'error',
current: 0,
total: urls.length,
message: 'Bulk import failed due to an error',
error: error instanceof Error ? error.message : 'Unknown error'
});
}
}
export async function POST(request: NextRequest) {
try {
// Check for authentication
const authorization = request.headers.get('authorization');
if (!authorization) {
return NextResponse.json(
{ error: 'Authentication required for bulk import' },
{ status: 401 }
);
}
const body = await request.json();
const { urls, combineIntoOne = false, sessionId } = body as BulkImportRequest;
if (!urls || !Array.isArray(urls) || urls.length === 0) {
return NextResponse.json(
{ error: 'URLs array is required and must not be empty' },
{ status: 400 }
);
}
if (urls.length > 200) {
return NextResponse.json(
{ error: 'Maximum 200 URLs allowed per bulk import' },
{ status: 400 }
);
}
if (!sessionId) {
return NextResponse.json(
{ error: 'Session ID is required for progress tracking' },
{ status: 400 }
);
}
// Start the background processing
processBulkImport(urls, combineIntoOne, sessionId, authorization).catch(error => {
console.error('Failed to start background processing:', error);
});
// Return immediately with session info
return NextResponse.json({
message: 'Bulk import started',
sessionId: sessionId,
totalUrls: urls.length,
combineMode: combineIntoOne
});
} catch (error) {
console.error('Bulk import initialization error:', error);
if (error instanceof Error) {
return NextResponse.json(
{ error: `Bulk import failed: ${error.message}` },
{ error: `Bulk import failed to start: ${error.message}` },
{ status: 500 }
);
}
return NextResponse.json(
{ error: 'Bulk import failed due to an unknown error' },
{ error: 'Bulk import failed to start due to an unknown error' },
{ status: 500 }
);
}

View File

@@ -21,6 +21,7 @@ export default function StoryDetailPage() {
const [collections, setCollections] = useState<Collection[]>([]);
const [loading, setLoading] = useState(true);
const [updating, setUpdating] = useState(false);
const [isExporting, setIsExporting] = useState(false);
useEffect(() => {
const loadStoryData = async () => {
@@ -65,6 +66,53 @@ export default function StoryDetailPage() {
}
};
// Downloads the current story as an EPUB via the export API and triggers a
// browser download. Uses the server-provided Content-Disposition filename
// when present, falling back to "<title>.epub".
const handleEPUBExport = async () => {
  // Nothing to export until the story has loaded
  if (!story) return;

  setIsExporting(true);
  try {
    const token = localStorage.getItem('auth-token');
    const response = await fetch(`/api/stories/${story.id}/epub`, {
      method: 'GET',
      headers: {
        'Authorization': token ? `Bearer ${token}` : '',
      },
    });

    if (response.ok) {
      // Stream the EPUB into a blob and download it via a temporary link
      const blob = await response.blob();
      const url = window.URL.createObjectURL(blob);
      const link = document.createElement('a');
      link.href = url;

      // Get filename from Content-Disposition header or create default
      const contentDisposition = response.headers.get('Content-Disposition');
      let filename = `${story.title}.epub`;
      if (contentDisposition) {
        // Matches both quoted and unquoted filename= values
        const match = contentDisposition.match(/filename[^;=\n]*=((['"]).*?\2|[^;\n]*)/);
        if (match && match[1]) {
          filename = match[1].replace(/['"]/g, '');
        }
      }

      link.download = filename;
      document.body.appendChild(link);
      link.click();
      window.URL.revokeObjectURL(url);
      document.body.removeChild(link);
    } else if (response.status === 401 || response.status === 403) {
      alert('Authentication required. Please log in.');
    } else {
      throw new Error('Failed to export EPUB');
    }
  } catch (error) {
    console.error('Error exporting EPUB:', error);
    alert('Failed to export EPUB. Please try again.');
  } finally {
    // Always clear the busy flag so the export button re-enables
    setIsExporting(false);
  }
};
const formatDate = (dateString: string) => {
return new Date(dateString).toLocaleDateString('en-US', {
year: 'numeric',
@@ -358,6 +406,14 @@ export default function StoryDetailPage() {
>
📚 Start Reading
</Button>
<Button
onClick={handleEPUBExport}
variant="ghost"
size="lg"
disabled={isExporting}
>
{isExporting ? 'Exporting...' : '📖 Export EPUB'}
</Button>
<Button
href={`/stories/${story.id}/edit`}
variant="ghost"

View File

@@ -252,7 +252,7 @@ export default function EditStoryPage() {
</label>
<ImageUpload
onImageSelect={setCoverImage}
accept="image/jpeg,image/png,image/webp"
accept="image/jpeg,image/png"
maxSizeMB={5}
aspectRatio="3:4"
placeholder="Drop a new cover image here or click to select"

View File

@@ -201,6 +201,7 @@ export default function StoryReadingPage() {
}
};
const findNextStory = (): Story | null => {
if (!story?.seriesId || seriesStories.length <= 1) return null;

View File

@@ -4,6 +4,7 @@ import { useState } from 'react';
import { useRouter } from 'next/navigation';
import Link from 'next/link';
import { ArrowLeftIcon } from '@heroicons/react/24/outline';
import BulkImportProgress from '@/components/BulkImportProgress';
interface ImportResult {
url: string;
@@ -23,14 +24,25 @@ interface BulkImportResponse {
skipped: number;
errors: number;
};
combinedStory?: {
title: string;
author: string;
content: string;
summary?: string;
sourceUrl: string;
tags?: string[];
};
}
export default function BulkImportPage() {
const router = useRouter();
const [urls, setUrls] = useState('');
const [combineIntoOne, setCombineIntoOne] = useState(false);
const [isLoading, setIsLoading] = useState(false);
const [results, setResults] = useState<BulkImportResponse | null>(null);
const [error, setError] = useState<string | null>(null);
const [sessionId, setSessionId] = useState<string | null>(null);
const [showProgress, setShowProgress] = useState(false);
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
@@ -57,12 +69,17 @@ export default function BulkImportPage() {
return;
}
if (urlList.length > 50) {
setError('Maximum 50 URLs allowed per bulk import');
if (urlList.length > 200) {
setError('Maximum 200 URLs allowed per bulk import');
setIsLoading(false);
return;
}
// Generate session ID for progress tracking
const newSessionId = `bulk-import-${Date.now()}-${Math.random().toString(36).substr(2, 9)}`;
setSessionId(newSessionId);
setShowProgress(true);
// Get auth token for server-side API calls
const token = localStorage.getItem('auth-token');
@@ -72,16 +89,18 @@ export default function BulkImportPage() {
'Content-Type': 'application/json',
'Authorization': token ? `Bearer ${token}` : '',
},
body: JSON.stringify({ urls: urlList }),
body: JSON.stringify({ urls: urlList, combineIntoOne, sessionId: newSessionId }),
});
if (!response.ok) {
const errorData = await response.json();
throw new Error(errorData.error || 'Bulk import failed');
throw new Error(errorData.error || 'Failed to start bulk import');
}
const data: BulkImportResponse = await response.json();
setResults(data);
const startData = await response.json();
console.log('Bulk import started:', startData);
// The progress component will handle the rest via SSE
} catch (err) {
console.error('Bulk import error:', err);
@@ -93,8 +112,43 @@ export default function BulkImportPage() {
const handleReset = () => {
setUrls('');
setCombineIntoOne(false);
setResults(null);
setError(null);
setSessionId(null);
setShowProgress(false);
};
const handleProgressComplete = (data?: any) => {
// Progress component will handle this when the operation completes
setShowProgress(false);
setIsLoading(false);
// Handle completion data
if (data) {
if (data.combinedStory && combineIntoOne) {
// For combine mode, redirect to add story page with the combined content
localStorage.setItem('pendingStory', JSON.stringify(data.combinedStory));
router.push('/add-story?from=bulk-combine');
return;
} else if (data.results && data.summary) {
// For individual mode, show the results
setResults({
results: data.results,
summary: data.summary
});
return;
}
}
// Fallback: just hide progress and let user know it completed
console.log('Import completed successfully');
};
const handleProgressError = (errorMessage: string) => {
setError(errorMessage);
setIsLoading(false);
setShowProgress(false);
};
const getStatusColor = (status: string) => {
@@ -145,7 +199,7 @@ export default function BulkImportPage() {
Story URLs
</label>
<p className="text-sm text-gray-500 mb-3">
Enter one URL per line. Maximum 50 URLs per import.
Enter one URL per line. Maximum 200 URLs per import.
</p>
<textarea
id="urls"
@@ -160,6 +214,37 @@ export default function BulkImportPage() {
</p>
</div>
<div className="flex items-center">
<input
id="combine-into-one"
type="checkbox"
checked={combineIntoOne}
onChange={(e) => setCombineIntoOne(e.target.checked)}
className="h-4 w-4 text-blue-600 focus:ring-blue-500 border-gray-300 rounded"
disabled={isLoading}
/>
<label htmlFor="combine-into-one" className="ml-2 block text-sm text-gray-700">
Combine all URL content into a single story
</label>
</div>
{combineIntoOne && (
<div className="bg-blue-50 border border-blue-200 rounded-md p-4">
<div className="text-sm text-blue-800">
<p className="font-medium mb-2">Combined Story Mode:</p>
<ul className="list-disc list-inside space-y-1 text-blue-700">
<li>All URLs will be scraped and their content combined into one story</li>
<li>Story title and author will be taken from the first URL</li>
<li>Import will fail if any URL has no content (title/author can be empty)</li>
<li>You'll be redirected to the story creation page to review and edit</li>
{urls.split('\n').filter(url => url.trim().length > 0).length > 50 && (
<li className="text-yellow-700 font-medium">⚠️ Large imports (50+ URLs) may take several minutes and could be truncated if too large</li>
)}
</ul>
</div>
</div>
)}
{error && (
<div className="bg-red-50 border border-red-200 rounded-md p-4">
<div className="flex">
@@ -192,14 +277,25 @@ export default function BulkImportPage() {
</button>
</div>
{isLoading && (
{/* Progress Component */}
{showProgress && sessionId && (
<BulkImportProgress
sessionId={sessionId}
onComplete={handleProgressComplete}
onError={handleProgressError}
combineMode={combineIntoOne}
/>
)}
{/* Fallback loading indicator if progress isn't shown yet */}
{isLoading && !showProgress && (
<div className="bg-blue-50 border border-blue-200 rounded-md p-4">
<div className="flex items-center">
<div className="animate-spin rounded-full h-5 w-5 border-b-2 border-blue-600 mr-3"></div>
<div>
<p className="text-sm font-medium text-blue-800">Processing URLs...</p>
<p className="text-sm font-medium text-blue-800">Starting import...</p>
<p className="text-sm text-blue-600">
This may take a few minutes depending on the number of URLs and response times of the source websites.
Preparing to process {urls.split('\n').filter(url => url.trim().length > 0).length} URLs.
</p>
</div>
</div>

View File

@@ -0,0 +1,432 @@
'use client';
import { useState } from 'react';
import { useRouter } from 'next/navigation';
import Link from 'next/link';
import { ArrowLeftIcon, DocumentArrowUpIcon } from '@heroicons/react/24/outline';
import Button from '@/components/ui/Button';
import { Input } from '@/components/ui/Input';
/**
 * Shape of the JSON body returned by POST /api/stories/epub/import.
 * Only `success` and `message` are always present; the remaining fields
 * are populated by the server on a successful import.
 */
interface EPUBImportResponse {
  success: boolean;
  // Human-readable status; shown to the user when the import fails.
  message: string;
  // ID of the created story; used to link to /stories/{storyId}.
  storyId?: string;
  storyTitle?: string;
  totalChapters?: number;
  wordCount?: number;
  // Reading position restored from EPUB metadata; schema is defined by the
  // backend and not visible here — TODO confirm before relying on it.
  readingPosition?: any;
  // Non-fatal issues encountered during import (rendered as a warning list).
  warnings?: string[];
  errors?: string[];
}
export default function EPUBImportPage() {
const router = useRouter();
const [selectedFile, setSelectedFile] = useState<File | null>(null);
const [isLoading, setIsLoading] = useState(false);
const [isValidating, setIsValidating] = useState(false);
const [validationResult, setValidationResult] = useState<any>(null);
const [importResult, setImportResult] = useState<EPUBImportResponse | null>(null);
const [error, setError] = useState<string | null>(null);
// Import options
const [authorName, setAuthorName] = useState<string>('');
const [seriesName, setSeriesName] = useState<string>('');
const [seriesVolume, setSeriesVolume] = useState<string>('');
const [tags, setTags] = useState<string>('');
const [preserveReadingPosition, setPreserveReadingPosition] = useState(true);
const [overwriteExisting, setOverwriteExisting] = useState(false);
const [createMissingAuthor, setCreateMissingAuthor] = useState(true);
const [createMissingSeries, setCreateMissingSeries] = useState(true);
const handleFileChange = async (e: React.ChangeEvent<HTMLInputElement>) => {
const file = e.target.files?.[0];
if (file) {
setSelectedFile(file);
setValidationResult(null);
setImportResult(null);
setError(null);
if (file.name.toLowerCase().endsWith('.epub')) {
await validateFile(file);
} else {
setError('Please select a valid EPUB file (.epub extension)');
}
}
};
const validateFile = async (file: File) => {
setIsValidating(true);
try {
const token = localStorage.getItem('auth-token');
const formData = new FormData();
formData.append('file', file);
const response = await fetch('/api/stories/epub/validate', {
method: 'POST',
headers: {
'Authorization': token ? `Bearer ${token}` : '',
},
body: formData,
});
if (response.ok) {
const result = await response.json();
setValidationResult(result);
if (!result.valid) {
setError('EPUB file validation failed: ' + result.errors.join(', '));
}
} else if (response.status === 401 || response.status === 403) {
setError('Authentication required. Please log in.');
} else {
setError('Failed to validate EPUB file');
}
} catch (err) {
setError('Error validating EPUB file: ' + (err as Error).message);
} finally {
setIsValidating(false);
}
};
const handleSubmit = async (e: React.FormEvent) => {
e.preventDefault();
if (!selectedFile) {
setError('Please select an EPUB file');
return;
}
if (validationResult && !validationResult.valid) {
setError('Cannot import invalid EPUB file');
return;
}
setIsLoading(true);
setError(null);
try {
const token = localStorage.getItem('auth-token');
const formData = new FormData();
formData.append('file', selectedFile);
if (authorName) formData.append('authorName', authorName);
if (seriesName) formData.append('seriesName', seriesName);
if (seriesVolume) formData.append('seriesVolume', seriesVolume);
if (tags) {
const tagList = tags.split(',').map(t => t.trim()).filter(t => t.length > 0);
tagList.forEach(tag => formData.append('tags', tag));
}
formData.append('preserveReadingPosition', preserveReadingPosition.toString());
formData.append('overwriteExisting', overwriteExisting.toString());
formData.append('createMissingAuthor', createMissingAuthor.toString());
formData.append('createMissingSeries', createMissingSeries.toString());
const response = await fetch('/api/stories/epub/import', {
method: 'POST',
headers: {
'Authorization': token ? `Bearer ${token}` : '',
},
body: formData,
});
const result = await response.json();
if (response.ok && result.success) {
setImportResult(result);
} else if (response.status === 401 || response.status === 403) {
setError('Authentication required. Please log in.');
} else {
setError(result.message || 'Failed to import EPUB');
}
} catch (err) {
setError('Error importing EPUB: ' + (err as Error).message);
} finally {
setIsLoading(false);
}
};
const resetForm = () => {
setSelectedFile(null);
setValidationResult(null);
setImportResult(null);
setError(null);
setAuthorName('');
setSeriesName('');
setSeriesVolume('');
setTags('');
};
if (importResult?.success) {
return (
<div className="container mx-auto px-4 py-8">
<div className="mb-6">
<Link
href="/stories"
className="inline-flex items-center text-blue-600 hover:text-blue-800 mb-4"
>
<ArrowLeftIcon className="h-4 w-4 mr-2" />
Back to Stories
</Link>
<h1 className="text-3xl font-bold text-gray-900 dark:text-white">
EPUB Import Successful
</h1>
</div>
<div className="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6">
<div className="mb-6">
<h2 className="text-xl font-semibold text-green-600 mb-2">Import Completed</h2>
<p className="text-gray-600 dark:text-gray-300">
Your EPUB has been successfully imported into StoryCove.
</p>
</div>
<div>
<div className="space-y-4">
<div>
<span className="font-semibold text-gray-700 dark:text-gray-300">Story Title:</span>
<p className="text-gray-900 dark:text-white">{importResult.storyTitle}</p>
</div>
{importResult.wordCount && (
<div>
<span className="font-semibold text-gray-700 dark:text-gray-300">Word Count:</span>
<p className="text-gray-900 dark:text-white">{importResult.wordCount.toLocaleString()} words</p>
</div>
)}
{importResult.totalChapters && (
<div>
<span className="font-semibold text-gray-700 dark:text-gray-300">Chapters:</span>
<p className="text-gray-900 dark:text-white">{importResult.totalChapters}</p>
</div>
)}
{importResult.warnings && importResult.warnings.length > 0 && (
<div className="bg-yellow-50 border border-yellow-200 rounded-md p-4">
<strong className="text-yellow-800">Warnings:</strong>
<ul className="list-disc list-inside mt-2 text-yellow-700">
{importResult.warnings.map((warning, index) => (
<li key={index}>{warning}</li>
))}
</ul>
</div>
)}
<div className="flex gap-4 mt-6">
<Button
onClick={() => router.push(`/stories/${importResult.storyId}`)}
className="bg-blue-600 hover:bg-blue-700 text-white"
>
View Story
</Button>
<Button
onClick={resetForm}
variant="secondary"
>
Import Another EPUB
</Button>
</div>
</div>
</div>
</div>
</div>
);
}
return (
<div className="container mx-auto px-4 py-8">
<div className="mb-6">
<Link
href="/stories"
className="inline-flex items-center text-blue-600 hover:text-blue-800 mb-4"
>
<ArrowLeftIcon className="h-4 w-4 mr-2" />
Back to Stories
</Link>
<h1 className="text-3xl font-bold text-gray-900 dark:text-white">
Import EPUB
</h1>
<p className="text-gray-600 dark:text-gray-300 mt-2">
Upload an EPUB file to import it as a story into your library.
</p>
</div>
{error && (
<div className="bg-red-50 border border-red-200 rounded-md p-4 mb-6">
<p className="text-red-700">{error}</p>
</div>
)}
<form onSubmit={handleSubmit} className="space-y-6">
{/* File Upload */}
<div className="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6">
<div className="mb-4">
<h3 className="text-lg font-semibold mb-2">Select EPUB File</h3>
<p className="text-gray-600 dark:text-gray-300">
Choose an EPUB file from your device to import.
</p>
</div>
<div className="space-y-4">
<div>
<label htmlFor="epub-file" className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">EPUB File</label>
<Input
id="epub-file"
type="file"
accept=".epub,application/epub+zip"
onChange={handleFileChange}
disabled={isLoading || isValidating}
/>
</div>
{selectedFile && (
<div className="flex items-center gap-2">
<DocumentArrowUpIcon className="h-5 w-5" />
<span className="text-sm text-gray-600">
{selectedFile.name} ({(selectedFile.size / 1024 / 1024).toFixed(2)} MB)
</span>
</div>
)}
{isValidating && (
<div className="text-sm text-blue-600">
Validating EPUB file...
</div>
)}
{validationResult && (
<div className="text-sm">
{validationResult.valid ? (
<span className="inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-green-100 text-green-800">
Valid EPUB
</span>
) : (
<span className="inline-flex items-center px-2 py-1 rounded text-xs font-medium bg-red-100 text-red-800">
Invalid EPUB
</span>
)}
</div>
)}
</div>
</div>
{/* Import Options */}
<div className="bg-white dark:bg-gray-800 border border-gray-200 dark:border-gray-700 rounded-lg p-6">
<div className="mb-4">
<h3 className="text-lg font-semibold mb-2">Import Options</h3>
<p className="text-gray-600 dark:text-gray-300">
Configure how the EPUB should be imported.
</p>
</div>
<div className="space-y-4">
<div>
<label htmlFor="author-name" className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Author Name (Override)</label>
<Input
id="author-name"
value={authorName}
onChange={(e) => setAuthorName(e.target.value)}
placeholder="Leave empty to use EPUB metadata"
/>
</div>
<div>
<label htmlFor="series-name" className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Series Name</label>
<Input
id="series-name"
value={seriesName}
onChange={(e) => setSeriesName(e.target.value)}
placeholder="Optional: Add to a series"
/>
</div>
{seriesName && (
<div>
<label htmlFor="series-volume" className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Series Volume</label>
<Input
id="series-volume"
type="number"
value={seriesVolume}
onChange={(e) => setSeriesVolume(e.target.value)}
placeholder="Volume number in series"
/>
</div>
)}
<div>
<label htmlFor="tags" className="block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1">Tags</label>
<Input
id="tags"
value={tags}
onChange={(e) => setTags(e.target.value)}
placeholder="Comma-separated tags (e.g., fantasy, adventure, romance)"
/>
</div>
<div className="space-y-3">
<div className="flex items-center">
<input
type="checkbox"
id="preserve-reading-position"
checked={preserveReadingPosition}
onChange={(e) => setPreserveReadingPosition(e.target.checked)}
className="mr-2"
/>
<label htmlFor="preserve-reading-position" className="text-sm text-gray-700 dark:text-gray-300">
Preserve reading position from EPUB metadata
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
id="create-missing-author"
checked={createMissingAuthor}
onChange={(e) => setCreateMissingAuthor(e.target.checked)}
className="mr-2"
/>
<label htmlFor="create-missing-author" className="text-sm text-gray-700 dark:text-gray-300">
Create author if not found
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
id="create-missing-series"
checked={createMissingSeries}
onChange={(e) => setCreateMissingSeries(e.target.checked)}
className="mr-2"
/>
<label htmlFor="create-missing-series" className="text-sm text-gray-700 dark:text-gray-300">
Create series if not found
</label>
</div>
<div className="flex items-center">
<input
type="checkbox"
id="overwrite-existing"
checked={overwriteExisting}
onChange={(e) => setOverwriteExisting(e.target.checked)}
className="mr-2"
/>
<label htmlFor="overwrite-existing" className="text-sm text-gray-700 dark:text-gray-300">
Overwrite existing story with same title and author
</label>
</div>
</div>
</div>
</div>
{/* Submit Button */}
<div className="flex justify-end">
<Button
type="submit"
disabled={!selectedFile || isLoading || isValidating || (validationResult && !validationResult.valid)}
className="bg-blue-600 hover:bg-blue-700 text-white"
>
{isLoading ? 'Importing...' : 'Import EPUB'}
</Button>
</div>
</form>
</div>
);
}