12 Commits

Author  SHA1  Message  Date
Stefan Hardegger  924ae12b5b  statistics  2025-10-21 10:53:33 +02:00
Stefan Hardegger  16983fd871  Merge branch 'main' into statistics  2025-10-21 07:58:25 +02:00
Stefan Hardegger  ff49589f32  Automatic backup  2025-10-20 14:51:27 +02:00
Stefan Hardegger  4abb442c50  fix async  2025-10-20 14:34:26 +02:00
Stefan Hardegger  1c004eb7d6  fix backup async  2025-10-20 14:25:12 +02:00
Stefan Hardegger  32544d4f4a  different approach to migration  2025-10-20 14:13:45 +02:00
Stefan Hardegger  1ee9af8f28  deployment fix?  2025-10-20 12:55:56 +02:00
Stefan Hardegger  70599083b8  db migration  2025-10-20 12:43:58 +02:00
Stefan Hardegger  6a38189ef0  fix images  2025-10-20 12:30:28 +02:00
Stefan Hardegger  c9d58173f3  improved backup creation  2025-10-20 09:23:34 +02:00
Stefan Hardegger  3dd2ff50d8  Fix for memory issue during backup  2025-10-20 08:58:09 +02:00
Stefan Hardegger  378265c3a3  initial statistics implementation  2025-10-20 08:50:12 +02:00
33 changed files with 3526 additions and 79 deletions

apply_migration_production.sh (new executable file)

@@ -0,0 +1,45 @@
#!/bin/bash
# Run this script on your production server to apply the backup_jobs table migration
# to all library databases
echo "Applying backup_jobs table migration to all databases..."
echo ""
# Apply to each database
for DB in storycove storycove_afterdark storycove_clas storycove_secret; do
echo "Applying to $DB..."
docker-compose exec -T postgres psql -U storycove -d "$DB" <<'SQL'
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
SQL
echo "✓ Done with $DB"
echo ""
done
echo "Migration complete! Verifying..."
echo ""
# Verify tables exist
for DB in storycove storycove_afterdark storycove_clas storycove_secret; do
echo "Checking $DB:"
docker-compose exec -T postgres psql -U storycove -d "$DB" -c "\d backup_jobs" 2>&1 | grep -E "Table|does not exist" || echo " ✓ Table exists"
echo ""
done

View File

@@ -0,0 +1,54 @@
#!/bin/bash
# Script to apply backup_jobs table migration to all library databases
# This should be run from the backend directory
set -e
# Use full docker path
DOCKER="/usr/local/bin/docker"
echo "Applying backup_jobs table migration..."
# Get database connection details from environment or use defaults
DB_HOST="${POSTGRES_HOST:-postgres}"
DB_PORT="${POSTGRES_PORT:-5432}"
DB_USER="${POSTGRES_USER:-storycove}"
DB_PASSWORD="${POSTGRES_PASSWORD:-password}"
# List of databases to update
DATABASES=("storycove" "storycove_afterdark")
for DB_NAME in "${DATABASES[@]}"; do
echo ""
echo "Applying migration to database: $DB_NAME"
# Check if database exists
if $DOCKER exec storycove-postgres-1 psql -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
echo "Database $DB_NAME exists, applying migration..."
# Apply migration
$DOCKER exec -i storycove-postgres-1 psql -U "$DB_USER" -d "$DB_NAME" < create_backup_jobs_table.sql
if [ $? -eq 0 ]; then
echo "✓ Migration applied successfully to $DB_NAME"
else
echo "✗ Failed to apply migration to $DB_NAME"
exit 1
fi
else
echo "⚠ Database $DB_NAME does not exist, skipping..."
fi
done
echo ""
echo "Migration complete!"
echo ""
echo "Verifying table creation..."
for DB_NAME in "${DATABASES[@]}"; do
if $DOCKER exec storycove-postgres-1 psql -U "$DB_USER" -lqt | cut -d \| -f 1 | grep -qw "$DB_NAME"; then
echo ""
echo "Checking $DB_NAME:"
$DOCKER exec storycove-postgres-1 psql -U "$DB_USER" -d "$DB_NAME" -c "\d backup_jobs" 2>/dev/null || echo " Table not found in $DB_NAME"
fi
done

create_backup_jobs_table.sql

@@ -0,0 +1,29 @@
-- Create backup_jobs table for async backup job tracking
-- This should be run on all library databases (default and afterdark)
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
-- Create index on library_id for faster lookups
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
-- Create index on status for cleanup queries
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
-- Create index on expires_at for cleanup queries
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
-- Create index on created_at for ordering
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);

DatabaseMigrationRunner.java

@@ -0,0 +1,111 @@
package com.storycove.config;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.CommandLineRunner;
import org.springframework.core.annotation.Order;
import org.springframework.stereotype.Component;
import javax.sql.DataSource;
import java.sql.Connection;
import java.sql.Statement;
import java.util.Arrays;
import java.util.List;
/**
* Runs database migrations on application startup.
* This ensures all library databases have the required schema,
* particularly for tables like backup_jobs that were added after initial deployment.
*/
@Component
@Order(1) // Run early in startup sequence
public class DatabaseMigrationRunner implements CommandLineRunner {
private static final Logger logger = LoggerFactory.getLogger(DatabaseMigrationRunner.class);
@Autowired
private DataSource dataSource;
@Value("${spring.datasource.username}")
private String dbUsername;
@Value("${spring.datasource.password}")
private String dbPassword;
// List of all library databases that need migrations
private static final List<String> LIBRARY_DATABASES = Arrays.asList(
"storycove", // default database
"storycove_afterdark",
"storycove_clas",
"storycove_secret"
);
// SQL for backup_jobs table migration (idempotent)
private static final String BACKUP_JOBS_MIGRATION = """
CREATE TABLE IF NOT EXISTS backup_jobs (
id UUID PRIMARY KEY,
library_id VARCHAR(255) NOT NULL,
type VARCHAR(50) NOT NULL CHECK (type IN ('DATABASE_ONLY', 'COMPLETE')),
status VARCHAR(50) NOT NULL CHECK (status IN ('PENDING', 'IN_PROGRESS', 'COMPLETED', 'FAILED', 'EXPIRED')),
file_path VARCHAR(1000),
file_size_bytes BIGINT,
progress_percent INTEGER,
error_message VARCHAR(1000),
created_at TIMESTAMP NOT NULL,
started_at TIMESTAMP,
completed_at TIMESTAMP,
expires_at TIMESTAMP
);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_library_id ON backup_jobs(library_id);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_status ON backup_jobs(status);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_expires_at ON backup_jobs(expires_at);
CREATE INDEX IF NOT EXISTS idx_backup_jobs_created_at ON backup_jobs(created_at DESC);
""";
@Override
public void run(String... args) throws Exception {
logger.info("🗄️ Starting database migrations...");
for (String database : LIBRARY_DATABASES) {
try {
applyMigrations(database);
logger.info("✅ Successfully applied migrations to database: {}", database);
} catch (Exception e) {
// Log error but don't fail startup if database doesn't exist yet
if (e.getMessage() != null && e.getMessage().contains("does not exist")) {
logger.warn("⚠️ Database {} does not exist yet, skipping migrations", database);
} else {
logger.error("❌ Failed to apply migrations to database: {}", database, e);
// Don't throw - allow application to start even if some migrations fail
}
}
}
logger.info("✅ Database migrations completed");
}
private void applyMigrations(String database) throws Exception {
// We need to connect directly to each database, not through SmartRoutingDataSource
// Build connection URL from the default datasource URL
String originalUrl = dataSource.getConnection().getMetaData().getURL();
String baseUrl = originalUrl.substring(0, originalUrl.lastIndexOf('/'));
String targetUrl = baseUrl + "/" + database;
// Connect directly to target database using credentials from application properties
try (Connection conn = java.sql.DriverManager.getConnection(
targetUrl,
dbUsername,
dbPassword
)) {
// Apply backup_jobs migration
try (Statement stmt = conn.createStatement()) {
stmt.execute(BACKUP_JOBS_MIGRATION);
}
logger.debug("Applied backup_jobs migration to {}", database);
}
}
}

DatabaseController.java

@@ -1,6 +1,8 @@
package com.storycove.controller;

import com.storycove.service.AsyncBackupService;
import com.storycove.service.DatabaseManagementService;
import com.storycove.service.LibraryService;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
@@ -12,6 +14,7 @@ import org.springframework.web.multipart.MultipartFile;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.Map;

@RestController
@@ -21,6 +24,12 @@ public class DatabaseController {
    @Autowired
    private DatabaseManagementService databaseManagementService;

    @Autowired
    private AsyncBackupService asyncBackupService;

    @Autowired
    private LibraryService libraryService;

    @PostMapping("/backup")
    public ResponseEntity<Resource> backupDatabase() {
        try {
@@ -83,19 +92,141 @@ public class DatabaseController {
    }

    @PostMapping("/backup-complete")
    public ResponseEntity<Map<String, Object>> backupCompleteAsync() {
        try {
            String libraryId = libraryService.getCurrentLibraryId();
            if (libraryId == null) {
                return ResponseEntity.badRequest()
                        .body(Map.of("success", false, "message", "No library selected"));
            }

            // Start backup job asynchronously
            com.storycove.entity.BackupJob job = asyncBackupService.startBackupJob(
                    libraryId,
                    com.storycove.entity.BackupJob.BackupType.COMPLETE
            );

            return ResponseEntity.ok(Map.of(
                    "success", true,
                    "message", "Backup started",
                    "jobId", job.getId().toString(),
                    "status", job.getStatus().toString()
            ));
        } catch (Exception e) {
            return ResponseEntity.internalServerError()
                    .body(Map.of("success", false, "message", "Failed to start backup: " + e.getMessage()));
        }
    }

    @GetMapping("/backup-status/{jobId}")
    public ResponseEntity<Map<String, Object>> getBackupStatus(@PathVariable String jobId) {
        try {
            java.util.UUID uuid = java.util.UUID.fromString(jobId);
            java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);

            if (jobOpt.isEmpty()) {
                return ResponseEntity.notFound().build();
            }

            com.storycove.entity.BackupJob job = jobOpt.get();
            return ResponseEntity.ok(Map.of(
                    "success", true,
                    "jobId", job.getId().toString(),
                    "status", job.getStatus().toString(),
                    "progress", job.getProgressPercent(),
                    "fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0,
                    "createdAt", job.getCreatedAt().toString(),
                    "completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "",
                    "errorMessage", job.getErrorMessage() != null ? job.getErrorMessage() : ""
            ));
        } catch (IllegalArgumentException e) {
            return ResponseEntity.badRequest()
                    .body(Map.of("success", false, "message", "Invalid job ID"));
        }
    }

    @GetMapping("/backup-download/{jobId}")
    public ResponseEntity<Resource> downloadBackup(@PathVariable String jobId) {
        try {
            java.util.UUID uuid = java.util.UUID.fromString(jobId);
            Resource backup = asyncBackupService.getBackupFile(uuid);

            java.util.Optional<com.storycove.entity.BackupJob> jobOpt = asyncBackupService.getJobStatus(uuid);
            if (jobOpt.isEmpty()) {
                return ResponseEntity.notFound().build();
            }

            com.storycove.entity.BackupJob job = jobOpt.get();
            String timestamp = job.getCreatedAt().format(DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss"));
            String extension = job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE ? "zip" : "sql";
            String filename = "storycove_backup_" + timestamp + "." + extension;

            return ResponseEntity.ok()
                    .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"" + filename + "\"")
                    .header(HttpHeaders.CONTENT_TYPE,
                            job.getType() == com.storycove.entity.BackupJob.BackupType.COMPLETE
                                    ? "application/zip"
                                    : "application/sql")
                    .body(backup);
        } catch (IllegalArgumentException e) {
            return ResponseEntity.badRequest().build();
        } catch (Exception e) {
            throw new RuntimeException("Failed to download backup: " + e.getMessage(), e);
        }
    }

    @GetMapping("/backup-list")
    public ResponseEntity<Map<String, Object>> listBackups() {
        try {
            String libraryId = libraryService.getCurrentLibraryId();
            if (libraryId == null) {
                return ResponseEntity.badRequest()
                        .body(Map.of("success", false, "message", "No library selected"));
            }

            List<com.storycove.entity.BackupJob> jobs = asyncBackupService.listBackupJobs(libraryId);
            List<Map<String, Object>> jobsList = jobs.stream()
                    .map(job -> {
                        Map<String, Object> jobMap = new java.util.HashMap<>();
                        jobMap.put("jobId", job.getId().toString());
                        jobMap.put("type", job.getType().toString());
                        jobMap.put("status", job.getStatus().toString());
                        jobMap.put("progress", job.getProgressPercent());
                        jobMap.put("fileSizeBytes", job.getFileSizeBytes() != null ? job.getFileSizeBytes() : 0L);
                        jobMap.put("createdAt", job.getCreatedAt().toString());
                        jobMap.put("completedAt", job.getCompletedAt() != null ? job.getCompletedAt().toString() : "");
                        return jobMap;
                    })
                    .collect(java.util.stream.Collectors.toList());

            return ResponseEntity.ok(Map.of(
                    "success", true,
                    "backups", jobsList
            ));
        } catch (Exception e) {
            return ResponseEntity.internalServerError()
                    .body(Map.of("success", false, "message", "Failed to list backups: " + e.getMessage()));
        }
    }

    @DeleteMapping("/backup/{jobId}")
    public ResponseEntity<Map<String, Object>> deleteBackup(@PathVariable String jobId) {
        try {
            java.util.UUID uuid = java.util.UUID.fromString(jobId);
            asyncBackupService.deleteBackupJob(uuid);

            return ResponseEntity.ok(Map.of(
                    "success", true,
                    "message", "Backup deleted successfully"
            ));
        } catch (IllegalArgumentException e) {
            return ResponseEntity.badRequest()
                    .body(Map.of("success", false, "message", "Invalid job ID"));
        } catch (Exception e) {
            return ResponseEntity.internalServerError()
                    .body(Map.of("success", false, "message", "Failed to delete backup: " + e.getMessage()));
        }
    }

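For reference, the new asynchronous backup flow is driven start, poll, then download. The client sketch below is illustrative only and is not part of this change set: it assumes the controller is mounted at /api/database (the class-level @RequestMapping is not visible in this diff), that the server is reachable at localhost:8080, and it ignores authentication; the jobId is pulled out of the JSON with a naive regex to keep the example short.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Path;

public class BackupClientSketch {
    public static void main(String[] args) throws Exception {
        // Assumed base path; adjust to the real deployment.
        String base = "http://localhost:8080/api/database";
        HttpClient client = HttpClient.newHttpClient();

        // 1. Start an asynchronous complete backup; the response carries the jobId.
        HttpResponse<String> start = client.send(
                HttpRequest.newBuilder(URI.create(base + "/backup-complete"))
                        .POST(HttpRequest.BodyPublishers.noBody()).build(),
                HttpResponse.BodyHandlers.ofString());
        String jobId = start.body().replaceAll(".*\"jobId\"\\s*:\\s*\"([^\"]+)\".*", "$1");

        // 2. Poll the status endpoint until the job reaches COMPLETED or FAILED.
        String status;
        do {
            Thread.sleep(2000);
            status = client.send(
                    HttpRequest.newBuilder(URI.create(base + "/backup-status/" + jobId)).GET().build(),
                    HttpResponse.BodyHandlers.ofString()).body();
            System.out.println(status);
        } while (!status.contains("\"COMPLETED\"") && !status.contains("\"FAILED\""));

        // 3. Download the finished archive to a local file.
        if (status.contains("\"COMPLETED\"")) {
            client.send(
                    HttpRequest.newBuilder(URI.create(base + "/backup-download/" + jobId)).GET().build(),
                    HttpResponse.BodyHandlers.ofFile(Path.of("storycove_backup.zip")));
        }
    }
}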
LibraryStatisticsController.java

@@ -0,0 +1,183 @@
package com.storycove.controller;
import com.storycove.dto.LibraryOverviewStatsDto;
import com.storycove.service.LibraryService;
import com.storycove.service.LibraryStatisticsService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
@RestController
@RequestMapping("/api/libraries/{libraryId}/statistics")
public class LibraryStatisticsController {
private static final Logger logger = LoggerFactory.getLogger(LibraryStatisticsController.class);
@Autowired
private LibraryStatisticsService statisticsService;
@Autowired
private LibraryService libraryService;
/**
* Get overview statistics for a library
*/
@GetMapping("/overview")
public ResponseEntity<?> getOverviewStatistics(@PathVariable String libraryId) {
try {
// Verify library exists
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
LibraryOverviewStatsDto stats = statisticsService.getOverviewStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get overview statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
* Get top tags statistics
*/
@GetMapping("/top-tags")
public ResponseEntity<?> getTopTagsStatistics(
@PathVariable String libraryId,
@RequestParam(defaultValue = "20") int limit) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getTopTagsStatistics(libraryId, limit);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get top tags statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
* Get top authors statistics
*/
@GetMapping("/top-authors")
public ResponseEntity<?> getTopAuthorsStatistics(
@PathVariable String libraryId,
@RequestParam(defaultValue = "10") int limit) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getTopAuthorsStatistics(libraryId, limit);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get top authors statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
* Get rating statistics
*/
@GetMapping("/ratings")
public ResponseEntity<?> getRatingStatistics(@PathVariable String libraryId) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getRatingStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get rating statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
* Get source domain statistics
*/
@GetMapping("/source-domains")
public ResponseEntity<?> getSourceDomainStatistics(
@PathVariable String libraryId,
@RequestParam(defaultValue = "10") int limit) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getSourceDomainStatistics(libraryId, limit);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get source domain statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
* Get reading progress statistics
*/
@GetMapping("/reading-progress")
public ResponseEntity<?> getReadingProgressStatistics(@PathVariable String libraryId) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getReadingProgressStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get reading progress statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
/**
* Get reading activity statistics (last week)
*/
@GetMapping("/reading-activity")
public ResponseEntity<?> getReadingActivityStatistics(@PathVariable String libraryId) {
try {
if (libraryService.getLibraryById(libraryId) == null) {
return ResponseEntity.notFound().build();
}
var stats = statisticsService.getReadingActivityStatistics(libraryId);
return ResponseEntity.ok(stats);
} catch (Exception e) {
logger.error("Failed to get reading activity statistics for library: {}", libraryId, e);
return ResponseEntity.internalServerError()
.body(new ErrorResponse("Failed to retrieve statistics: " + e.getMessage()));
}
}
// Error response DTO
private static class ErrorResponse {
private String error;
public ErrorResponse(String error) {
this.error = error;
}
public String getError() {
return error;
}
}
}

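The statistics endpoints above are plain GET resources keyed by library id under /api/libraries/{libraryId}/statistics. A minimal illustrative client follows; it is not part of this change set, the host and library id are placeholders, and authentication is omitted.

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class StatisticsClientSketch {
    public static void main(String[] args) throws Exception {
        String libraryId = "default";   // placeholder library id
        String base = "http://localhost:8080/api/libraries/" + libraryId + "/statistics";
        HttpClient client = HttpClient.newHttpClient();

        // Overview: totals, word counts and derived reading time.
        System.out.println(get(client, base + "/overview"));

        // Top tags and top authors accept an optional "limit" query parameter.
        System.out.println(get(client, base + "/top-tags?limit=5"));
        System.out.println(get(client, base + "/top-authors?limit=3"));
    }

    private static String get(HttpClient client, String url) throws Exception {
        return client.send(HttpRequest.newBuilder(URI.create(url)).GET().build(),
                HttpResponse.BodyHandlers.ofString()).body();
    }
}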
LibraryOverviewStatsDto.java

@@ -0,0 +1,183 @@
package com.storycove.dto;
public class LibraryOverviewStatsDto {
// Collection Overview
private long totalStories;
private long totalAuthors;
private long totalSeries;
private long totalTags;
private long totalCollections;
private long uniqueSourceDomains;
// Content Metrics
private long totalWordCount;
private double averageWordsPerStory;
private StoryWordCountDto longestStory;
private StoryWordCountDto shortestStory;
// Reading Time (based on 250 words/minute)
private long totalReadingTimeMinutes;
private double averageReadingTimeMinutes;
// Constructor
public LibraryOverviewStatsDto() {
}
// Getters and Setters
public long getTotalStories() {
return totalStories;
}
public void setTotalStories(long totalStories) {
this.totalStories = totalStories;
}
public long getTotalAuthors() {
return totalAuthors;
}
public void setTotalAuthors(long totalAuthors) {
this.totalAuthors = totalAuthors;
}
public long getTotalSeries() {
return totalSeries;
}
public void setTotalSeries(long totalSeries) {
this.totalSeries = totalSeries;
}
public long getTotalTags() {
return totalTags;
}
public void setTotalTags(long totalTags) {
this.totalTags = totalTags;
}
public long getTotalCollections() {
return totalCollections;
}
public void setTotalCollections(long totalCollections) {
this.totalCollections = totalCollections;
}
public long getUniqueSourceDomains() {
return uniqueSourceDomains;
}
public void setUniqueSourceDomains(long uniqueSourceDomains) {
this.uniqueSourceDomains = uniqueSourceDomains;
}
public long getTotalWordCount() {
return totalWordCount;
}
public void setTotalWordCount(long totalWordCount) {
this.totalWordCount = totalWordCount;
}
public double getAverageWordsPerStory() {
return averageWordsPerStory;
}
public void setAverageWordsPerStory(double averageWordsPerStory) {
this.averageWordsPerStory = averageWordsPerStory;
}
public StoryWordCountDto getLongestStory() {
return longestStory;
}
public void setLongestStory(StoryWordCountDto longestStory) {
this.longestStory = longestStory;
}
public StoryWordCountDto getShortestStory() {
return shortestStory;
}
public void setShortestStory(StoryWordCountDto shortestStory) {
this.shortestStory = shortestStory;
}
public long getTotalReadingTimeMinutes() {
return totalReadingTimeMinutes;
}
public void setTotalReadingTimeMinutes(long totalReadingTimeMinutes) {
this.totalReadingTimeMinutes = totalReadingTimeMinutes;
}
public double getAverageReadingTimeMinutes() {
return averageReadingTimeMinutes;
}
public void setAverageReadingTimeMinutes(double averageReadingTimeMinutes) {
this.averageReadingTimeMinutes = averageReadingTimeMinutes;
}
// Nested DTO for story word count info
public static class StoryWordCountDto {
private String id;
private String title;
private String authorName;
private int wordCount;
private long readingTimeMinutes;
public StoryWordCountDto() {
}
public StoryWordCountDto(String id, String title, String authorName, int wordCount, long readingTimeMinutes) {
this.id = id;
this.title = title;
this.authorName = authorName;
this.wordCount = wordCount;
this.readingTimeMinutes = readingTimeMinutes;
}
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public int getWordCount() {
return wordCount;
}
public void setWordCount(int wordCount) {
this.wordCount = wordCount;
}
public long getReadingTimeMinutes() {
return readingTimeMinutes;
}
public void setReadingTimeMinutes(long readingTimeMinutes) {
this.readingTimeMinutes = readingTimeMinutes;
}
}
}

RatingStatsDto.java

@@ -0,0 +1,45 @@
package com.storycove.dto;
import java.util.Map;
public class RatingStatsDto {
private double averageRating;
private long totalRatedStories;
private long totalUnratedStories;
private Map<Integer, Long> ratingDistribution; // rating (1-5) -> count
public RatingStatsDto() {
}
public double getAverageRating() {
return averageRating;
}
public void setAverageRating(double averageRating) {
this.averageRating = averageRating;
}
public long getTotalRatedStories() {
return totalRatedStories;
}
public void setTotalRatedStories(long totalRatedStories) {
this.totalRatedStories = totalRatedStories;
}
public long getTotalUnratedStories() {
return totalUnratedStories;
}
public void setTotalUnratedStories(long totalUnratedStories) {
this.totalUnratedStories = totalUnratedStories;
}
public Map<Integer, Long> getRatingDistribution() {
return ratingDistribution;
}
public void setRatingDistribution(Map<Integer, Long> ratingDistribution) {
this.ratingDistribution = ratingDistribution;
}
}

ReadingActivityStatsDto.java

@@ -0,0 +1,84 @@
package com.storycove.dto;
import java.util.List;
public class ReadingActivityStatsDto {
private long storiesReadLastWeek;
private long wordsReadLastWeek;
private long readingTimeMinutesLastWeek;
private List<DailyActivityDto> dailyActivity;
public ReadingActivityStatsDto() {
}
public long getStoriesReadLastWeek() {
return storiesReadLastWeek;
}
public void setStoriesReadLastWeek(long storiesReadLastWeek) {
this.storiesReadLastWeek = storiesReadLastWeek;
}
public long getWordsReadLastWeek() {
return wordsReadLastWeek;
}
public void setWordsReadLastWeek(long wordsReadLastWeek) {
this.wordsReadLastWeek = wordsReadLastWeek;
}
public long getReadingTimeMinutesLastWeek() {
return readingTimeMinutesLastWeek;
}
public void setReadingTimeMinutesLastWeek(long readingTimeMinutesLastWeek) {
this.readingTimeMinutesLastWeek = readingTimeMinutesLastWeek;
}
public List<DailyActivityDto> getDailyActivity() {
return dailyActivity;
}
public void setDailyActivity(List<DailyActivityDto> dailyActivity) {
this.dailyActivity = dailyActivity;
}
public static class DailyActivityDto {
private String date; // YYYY-MM-DD format
private long storiesRead;
private long wordsRead;
public DailyActivityDto() {
}
public DailyActivityDto(String date, long storiesRead, long wordsRead) {
this.date = date;
this.storiesRead = storiesRead;
this.wordsRead = wordsRead;
}
public String getDate() {
return date;
}
public void setDate(String date) {
this.date = date;
}
public long getStoriesRead() {
return storiesRead;
}
public void setStoriesRead(long storiesRead) {
this.storiesRead = storiesRead;
}
public long getWordsRead() {
return wordsRead;
}
public void setWordsRead(long wordsRead) {
this.wordsRead = wordsRead;
}
}
}

ReadingProgressStatsDto.java

@@ -0,0 +1,61 @@
package com.storycove.dto;
public class ReadingProgressStatsDto {
private long totalStories;
private long readStories;
private long unreadStories;
private double percentageRead;
private long totalWordsRead;
private long totalWordsUnread;
public ReadingProgressStatsDto() {
}
public long getTotalStories() {
return totalStories;
}
public void setTotalStories(long totalStories) {
this.totalStories = totalStories;
}
public long getReadStories() {
return readStories;
}
public void setReadStories(long readStories) {
this.readStories = readStories;
}
public long getUnreadStories() {
return unreadStories;
}
public void setUnreadStories(long unreadStories) {
this.unreadStories = unreadStories;
}
public double getPercentageRead() {
return percentageRead;
}
public void setPercentageRead(double percentageRead) {
this.percentageRead = percentageRead;
}
public long getTotalWordsRead() {
return totalWordsRead;
}
public void setTotalWordsRead(long totalWordsRead) {
this.totalWordsRead = totalWordsRead;
}
public long getTotalWordsUnread() {
return totalWordsUnread;
}
public void setTotalWordsUnread(long totalWordsUnread) {
this.totalWordsUnread = totalWordsUnread;
}
}

SourceDomainStatsDto.java

@@ -0,0 +1,65 @@
package com.storycove.dto;
import java.util.List;
public class SourceDomainStatsDto {
private List<DomainStatsDto> topDomains;
private long storiesWithSource;
private long storiesWithoutSource;
public SourceDomainStatsDto() {
}
public List<DomainStatsDto> getTopDomains() {
return topDomains;
}
public void setTopDomains(List<DomainStatsDto> topDomains) {
this.topDomains = topDomains;
}
public long getStoriesWithSource() {
return storiesWithSource;
}
public void setStoriesWithSource(long storiesWithSource) {
this.storiesWithSource = storiesWithSource;
}
public long getStoriesWithoutSource() {
return storiesWithoutSource;
}
public void setStoriesWithoutSource(long storiesWithoutSource) {
this.storiesWithoutSource = storiesWithoutSource;
}
public static class DomainStatsDto {
private String domain;
private long storyCount;
public DomainStatsDto() {
}
public DomainStatsDto(String domain, long storyCount) {
this.domain = domain;
this.storyCount = storyCount;
}
public String getDomain() {
return domain;
}
public void setDomain(String domain) {
this.domain = domain;
}
public long getStoryCount() {
return storyCount;
}
public void setStoryCount(long storyCount) {
this.storyCount = storyCount;
}
}
}

TopAuthorsStatsDto.java

@@ -0,0 +1,76 @@
package com.storycove.dto;
import java.util.List;
public class TopAuthorsStatsDto {
private List<AuthorStatsDto> topAuthorsByStories;
private List<AuthorStatsDto> topAuthorsByWords;
public TopAuthorsStatsDto() {
}
public List<AuthorStatsDto> getTopAuthorsByStories() {
return topAuthorsByStories;
}
public void setTopAuthorsByStories(List<AuthorStatsDto> topAuthorsByStories) {
this.topAuthorsByStories = topAuthorsByStories;
}
public List<AuthorStatsDto> getTopAuthorsByWords() {
return topAuthorsByWords;
}
public void setTopAuthorsByWords(List<AuthorStatsDto> topAuthorsByWords) {
this.topAuthorsByWords = topAuthorsByWords;
}
public static class AuthorStatsDto {
private String authorId;
private String authorName;
private long storyCount;
private long totalWords;
public AuthorStatsDto() {
}
public AuthorStatsDto(String authorId, String authorName, long storyCount, long totalWords) {
this.authorId = authorId;
this.authorName = authorName;
this.storyCount = storyCount;
this.totalWords = totalWords;
}
public String getAuthorId() {
return authorId;
}
public void setAuthorId(String authorId) {
this.authorId = authorId;
}
public String getAuthorName() {
return authorName;
}
public void setAuthorName(String authorName) {
this.authorName = authorName;
}
public long getStoryCount() {
return storyCount;
}
public void setStoryCount(long storyCount) {
this.storyCount = storyCount;
}
public long getTotalWords() {
return totalWords;
}
public void setTotalWords(long totalWords) {
this.totalWords = totalWords;
}
}
}

TopTagsStatsDto.java

@@ -0,0 +1,51 @@
package com.storycove.dto;
import java.util.List;
public class TopTagsStatsDto {
private List<TagStatsDto> topTags;
public TopTagsStatsDto() {
}
public TopTagsStatsDto(List<TagStatsDto> topTags) {
this.topTags = topTags;
}
public List<TagStatsDto> getTopTags() {
return topTags;
}
public void setTopTags(List<TagStatsDto> topTags) {
this.topTags = topTags;
}
public static class TagStatsDto {
private String tagName;
private long storyCount;
public TagStatsDto() {
}
public TagStatsDto(String tagName, long storyCount) {
this.tagName = tagName;
this.storyCount = storyCount;
}
public String getTagName() {
return tagName;
}
public void setTagName(String tagName) {
this.tagName = tagName;
}
public long getStoryCount() {
return storyCount;
}
public void setStoryCount(long storyCount) {
this.storyCount = storyCount;
}
}
}

BackupJob.java

@@ -0,0 +1,195 @@
package com.storycove.entity;
import jakarta.persistence.*;
import java.time.LocalDateTime;
import java.util.UUID;
@Entity
@Table(name = "backup_jobs")
public class BackupJob {
@Id
@GeneratedValue(strategy = GenerationType.UUID)
private UUID id;
@Column(nullable = false)
private String libraryId;
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private BackupType type;
@Column(nullable = false)
@Enumerated(EnumType.STRING)
private BackupStatus status;
@Column
private String filePath;
@Column
private Long fileSizeBytes;
@Column
private Integer progressPercent;
@Column(length = 1000)
private String errorMessage;
@Column(nullable = false)
private LocalDateTime createdAt;
@Column
private LocalDateTime startedAt;
@Column
private LocalDateTime completedAt;
@Column
private LocalDateTime expiresAt;
@PrePersist
protected void onCreate() {
createdAt = LocalDateTime.now();
// Backups expire after 24 hours
expiresAt = LocalDateTime.now().plusDays(1);
}
// Enums
public enum BackupType {
DATABASE_ONLY,
COMPLETE
}
public enum BackupStatus {
PENDING,
IN_PROGRESS,
COMPLETED,
FAILED,
EXPIRED
}
// Constructors
public BackupJob() {
}
public BackupJob(String libraryId, BackupType type) {
this.libraryId = libraryId;
this.type = type;
this.status = BackupStatus.PENDING;
this.progressPercent = 0;
}
// Getters and Setters
public UUID getId() {
return id;
}
public void setId(UUID id) {
this.id = id;
}
public String getLibraryId() {
return libraryId;
}
public void setLibraryId(String libraryId) {
this.libraryId = libraryId;
}
public BackupType getType() {
return type;
}
public void setType(BackupType type) {
this.type = type;
}
public BackupStatus getStatus() {
return status;
}
public void setStatus(BackupStatus status) {
this.status = status;
}
public String getFilePath() {
return filePath;
}
public void setFilePath(String filePath) {
this.filePath = filePath;
}
public Long getFileSizeBytes() {
return fileSizeBytes;
}
public void setFileSizeBytes(Long fileSizeBytes) {
this.fileSizeBytes = fileSizeBytes;
}
public Integer getProgressPercent() {
return progressPercent;
}
public void setProgressPercent(Integer progressPercent) {
this.progressPercent = progressPercent;
}
public String getErrorMessage() {
return errorMessage;
}
public void setErrorMessage(String errorMessage) {
this.errorMessage = errorMessage;
}
public LocalDateTime getCreatedAt() {
return createdAt;
}
public void setCreatedAt(LocalDateTime createdAt) {
this.createdAt = createdAt;
}
public LocalDateTime getStartedAt() {
return startedAt;
}
public void setStartedAt(LocalDateTime startedAt) {
this.startedAt = startedAt;
}
public LocalDateTime getCompletedAt() {
return completedAt;
}
public void setCompletedAt(LocalDateTime completedAt) {
this.completedAt = completedAt;
}
public LocalDateTime getExpiresAt() {
return expiresAt;
}
public void setExpiresAt(LocalDateTime expiresAt) {
this.expiresAt = expiresAt;
}
// Helper methods
public boolean isExpired() {
return LocalDateTime.now().isAfter(expiresAt);
}
public boolean isCompleted() {
return status == BackupStatus.COMPLETED;
}
public boolean isFailed() {
return status == BackupStatus.FAILED;
}
public boolean isInProgress() {
return status == BackupStatus.IN_PROGRESS;
}
}

BackupJobRepository.java

@@ -0,0 +1,25 @@
package com.storycove.repository;
import com.storycove.entity.BackupJob;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.data.jpa.repository.Modifying;
import org.springframework.data.jpa.repository.Query;
import org.springframework.data.repository.query.Param;
import org.springframework.stereotype.Repository;
import java.time.LocalDateTime;
import java.util.List;
import java.util.UUID;
@Repository
public interface BackupJobRepository extends JpaRepository<BackupJob, UUID> {
List<BackupJob> findByLibraryIdOrderByCreatedAtDesc(String libraryId);
@Query("SELECT bj FROM BackupJob bj WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
List<BackupJob> findExpiredJobs(@Param("now") LocalDateTime now);
@Modifying
@Query("UPDATE BackupJob bj SET bj.status = 'EXPIRED' WHERE bj.expiresAt < :now AND bj.status = 'COMPLETED'")
int markExpiredJobs(@Param("now") LocalDateTime now);
}

StoryRepository.java

@@ -87,6 +87,9 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since") @Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since")
long countStoriesCreatedSince(@Param("since") LocalDateTime since); long countStoriesCreatedSince(@Param("since") LocalDateTime since);
@Query("SELECT COUNT(s) FROM Story s WHERE s.createdAt >= :since OR s.updatedAt >= :since")
long countStoriesModifiedAfter(@Param("since") LocalDateTime since);
@Query("SELECT AVG(s.wordCount) FROM Story s") @Query("SELECT AVG(s.wordCount) FROM Story s")
Double findAverageWordCount(); Double findAverageWordCount();

View File

@@ -0,0 +1,125 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Propagation;
import org.springframework.transaction.annotation.Transactional;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.Optional;
import java.util.UUID;
/**
* Separate service for async backup execution.
* This is needed because @Async doesn't work when called from within the same class.
*/
@Service
public class AsyncBackupExecutor {
private static final Logger logger = LoggerFactory.getLogger(AsyncBackupExecutor.class);
@Value("${storycove.upload.dir:/app/images}")
private String uploadDir;
@Autowired
private BackupJobRepository backupJobRepository;
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private LibraryService libraryService;
/**
* Execute backup asynchronously.
* This method MUST be in a separate service class for @Async to work properly.
*/
@Async
@Transactional(propagation = Propagation.REQUIRES_NEW)
public void executeBackupAsync(UUID jobId) {
logger.info("Async executor starting for job {}", jobId);
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
logger.error("Backup job not found: {}", jobId);
return;
}
BackupJob job = jobOpt.get();
job.setStatus(BackupJob.BackupStatus.IN_PROGRESS);
job.setStartedAt(LocalDateTime.now());
job.setProgressPercent(0);
backupJobRepository.save(job);
try {
logger.info("Starting backup job {} for library {}", job.getId(), job.getLibraryId());
// Switch to the correct library
if (!job.getLibraryId().equals(libraryService.getCurrentLibraryId())) {
libraryService.switchToLibraryAfterAuthentication(job.getLibraryId());
}
// Create backup file
Path backupDir = Paths.get(uploadDir, "backups", job.getLibraryId());
Files.createDirectories(backupDir);
String filename = String.format("backup_%s_%s.%s",
job.getId().toString(),
LocalDateTime.now().toString().replaceAll(":", "-"),
job.getType() == BackupJob.BackupType.COMPLETE ? "zip" : "sql");
Path backupFile = backupDir.resolve(filename);
job.setProgressPercent(10);
backupJobRepository.save(job);
// Create the backup
Resource backupResource;
if (job.getType() == BackupJob.BackupType.COMPLETE) {
backupResource = databaseManagementService.createCompleteBackup();
} else {
backupResource = databaseManagementService.createBackup();
}
job.setProgressPercent(80);
backupJobRepository.save(job);
// Copy resource to permanent file
try (var inputStream = backupResource.getInputStream();
var outputStream = Files.newOutputStream(backupFile)) {
inputStream.transferTo(outputStream);
}
job.setProgressPercent(95);
backupJobRepository.save(job);
// Set file info
job.setFilePath(backupFile.toString());
job.setFileSizeBytes(Files.size(backupFile));
job.setStatus(BackupJob.BackupStatus.COMPLETED);
job.setCompletedAt(LocalDateTime.now());
job.setProgressPercent(100);
logger.info("Backup job {} completed successfully. File size: {} bytes",
job.getId(), job.getFileSizeBytes());
} catch (Exception e) {
logger.error("Backup job {} failed", job.getId(), e);
job.setStatus(BackupJob.BackupStatus.FAILED);
job.setErrorMessage(e.getMessage());
job.setCompletedAt(LocalDateTime.now());
} finally {
backupJobRepository.save(job);
}
}
}

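To illustrate the comment above about @Async and self-invocation: Spring applies @Async through a proxy around the bean, so a call that stays inside the same class never crosses the proxy and runs synchronously. The hypothetical counter-example below is not part of this change set and assumes @EnableAsync is configured elsewhere in the application.

import java.util.UUID;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;

@Service
class SelfInvokingBackupService {

    @Async
    public void executeBackupAsync(UUID jobId) {
        // ... long-running backup work ...
    }

    public void startBackup(UUID jobId) {
        // Self-invocation: this call bypasses the Spring proxy that provides the
        // @Async behaviour, so the "async" method actually runs on the caller's
        // thread and startBackup() blocks until the backup finishes.
        executeBackupAsync(jobId);
    }
}

Because AsyncBackupService injects AsyncBackupExecutor and calls executeBackupAsync(...) on that separate bean, the call goes through the proxy and is handed to the async task executor, which is why startBackupJob(...) can return immediately after saving the job record.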
AsyncBackupService.java

@@ -0,0 +1,167 @@
package com.storycove.service;
import com.storycove.entity.BackupJob;
import com.storycove.repository.BackupJobRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.util.List;
import java.util.Optional;
import java.util.UUID;
@Service
public class AsyncBackupService {
private static final Logger logger = LoggerFactory.getLogger(AsyncBackupService.class);
@Value("${storycove.upload.dir:/app/images}")
private String uploadDir;
@Autowired
private BackupJobRepository backupJobRepository;
@Autowired
private AsyncBackupExecutor asyncBackupExecutor;
/**
* Start a backup job asynchronously.
* This method returns immediately after creating the job record.
*/
@Transactional
public BackupJob startBackupJob(String libraryId, BackupJob.BackupType type) {
logger.info("Creating backup job for library: {}, type: {}", libraryId, type);
BackupJob job = new BackupJob(libraryId, type);
job = backupJobRepository.save(job);
logger.info("Backup job created with ID: {}. Starting async execution...", job.getId());
// Start backup in background using separate service (ensures @Async works properly)
asyncBackupExecutor.executeBackupAsync(job.getId());
logger.info("Async backup execution triggered for job: {}", job.getId());
return job;
}
/**
* Get backup job status
*/
public Optional<BackupJob> getJobStatus(UUID jobId) {
return backupJobRepository.findById(jobId);
}
/**
* Get backup file for download
*/
public Resource getBackupFile(UUID jobId) throws IOException {
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
throw new IOException("Backup job not found");
}
BackupJob job = jobOpt.get();
if (!job.isCompleted()) {
throw new IOException("Backup is not completed yet");
}
if (job.isExpired()) {
throw new IOException("Backup has expired");
}
if (job.getFilePath() == null) {
throw new IOException("Backup file path not set");
}
Path backupPath = Paths.get(job.getFilePath());
if (!Files.exists(backupPath)) {
throw new IOException("Backup file not found");
}
return new FileSystemResource(backupPath);
}
/**
* List backup jobs for a library
*/
public List<BackupJob> listBackupJobs(String libraryId) {
return backupJobRepository.findByLibraryIdOrderByCreatedAtDesc(libraryId);
}
/**
* Clean up expired backup jobs and their files
* Runs daily at 2 AM
*/
@Scheduled(cron = "0 0 2 * * ?")
@Transactional
public void cleanupExpiredBackups() {
logger.info("Starting cleanup of expired backups");
LocalDateTime now = LocalDateTime.now();
// Mark expired jobs
int markedCount = backupJobRepository.markExpiredJobs(now);
logger.info("Marked {} jobs as expired", markedCount);
// Find all expired jobs to delete their files
List<BackupJob> expiredJobs = backupJobRepository.findExpiredJobs(now);
for (BackupJob job : expiredJobs) {
if (job.getFilePath() != null) {
try {
Path filePath = Paths.get(job.getFilePath());
if (Files.exists(filePath)) {
Files.delete(filePath);
logger.info("Deleted expired backup file: {}", filePath);
}
} catch (IOException e) {
logger.warn("Failed to delete expired backup file: {}", job.getFilePath(), e);
}
}
// Delete the job record
backupJobRepository.delete(job);
}
logger.info("Cleanup completed. Deleted {} expired backups", expiredJobs.size());
}
/**
* Delete a specific backup job and its file
*/
@Transactional
public void deleteBackupJob(UUID jobId) throws IOException {
Optional<BackupJob> jobOpt = backupJobRepository.findById(jobId);
if (jobOpt.isEmpty()) {
throw new IOException("Backup job not found");
}
BackupJob job = jobOpt.get();
// Delete file if it exists
if (job.getFilePath() != null) {
Path filePath = Paths.get(job.getFilePath());
if (Files.exists(filePath)) {
Files.delete(filePath);
logger.info("Deleted backup file: {}", filePath);
}
}
// Delete job record
backupJobRepository.delete(job);
logger.info("Deleted backup job: {}", jobId);
}
}

AutomaticBackupService.java

@@ -0,0 +1,262 @@
package com.storycove.service;
import com.storycove.repository.StoryRepository;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.Comparator;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
/**
* Service for automatic daily backups.
* Runs daily at 4 AM and creates a backup if content has changed since the last backup.
* Keeps a maximum of 5 backups, rotating old ones out.
*/
@Service
public class AutomaticBackupService {
private static final Logger logger = LoggerFactory.getLogger(AutomaticBackupService.class);
private static final int MAX_BACKUPS = 5;
private static final DateTimeFormatter FILENAME_FORMATTER = DateTimeFormatter.ofPattern("yyyy-MM-dd_HH-mm-ss");
@Value("${storycove.automatic-backup.dir:/app/automatic-backups}")
private String automaticBackupDir;
@Autowired
private StoryRepository storyRepository;
@Autowired
private DatabaseManagementService databaseManagementService;
@Autowired
private LibraryService libraryService;
private LocalDateTime lastBackupCheck = null;
/**
* Scheduled job that runs daily at 4 AM.
* Creates a backup if content has changed since last backup.
*/
@Scheduled(cron = "0 0 4 * * ?")
public void performAutomaticBackup() {
logger.info("========================================");
logger.info("Starting automatic backup check at 4 AM");
logger.info("========================================");
try {
// Get current library ID (or default)
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
libraryId = "default";
}
logger.info("Checking for content changes in library: {}", libraryId);
// Check if content has changed since last backup
if (!hasContentChanged()) {
logger.info("No content changes detected since last backup. Skipping backup.");
logger.info("========================================");
return;
}
logger.info("Content changes detected! Creating automatic backup...");
// Create backup directory for this library
Path backupPath = Paths.get(automaticBackupDir, libraryId);
Files.createDirectories(backupPath);
// Create the backup
String timestamp = LocalDateTime.now().format(FILENAME_FORMATTER);
String filename = String.format("auto_backup_%s.zip", timestamp);
Path backupFile = backupPath.resolve(filename);
logger.info("Creating complete backup to: {}", backupFile);
Resource backup = databaseManagementService.createCompleteBackup();
// Write backup to file
try (var inputStream = backup.getInputStream();
var outputStream = Files.newOutputStream(backupFile)) {
inputStream.transferTo(outputStream);
}
long fileSize = Files.size(backupFile);
logger.info("✅ Automatic backup created successfully");
logger.info(" File: {}", backupFile.getFileName());
logger.info(" Size: {} MB", fileSize / 1024 / 1024);
// Rotate old backups (keep only MAX_BACKUPS)
rotateBackups(backupPath);
// Update last backup check time
lastBackupCheck = LocalDateTime.now();
logger.info("========================================");
logger.info("Automatic backup completed successfully");
logger.info("========================================");
} catch (Exception e) {
logger.error("❌ Automatic backup failed", e);
logger.info("========================================");
}
}
/**
* Check if content has changed since last backup.
* Looks for stories created or updated after the last backup time.
*/
private boolean hasContentChanged() {
try {
if (lastBackupCheck == null) {
// First run - check if there are any stories at all
long storyCount = storyRepository.count();
logger.info("First backup check - found {} stories", storyCount);
return storyCount > 0;
}
// Check for stories created or updated since last backup
long changedCount = storyRepository.countStoriesModifiedAfter(lastBackupCheck);
logger.info("Found {} stories modified since last backup ({})", changedCount, lastBackupCheck);
return changedCount > 0;
} catch (Exception e) {
logger.error("Error checking for content changes", e);
// On error, create backup to be safe
return true;
}
}
/**
* Rotate backups - keep only MAX_BACKUPS most recent backups.
* Deletes older backups.
*/
private void rotateBackups(Path backupPath) throws IOException {
logger.info("Checking for old backups to rotate...");
// Find all backup files in the directory
List<Path> backupFiles;
try (Stream<Path> stream = Files.list(backupPath)) {
backupFiles = stream
.filter(Files::isRegularFile)
.filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
.filter(p -> p.getFileName().toString().endsWith(".zip"))
.sorted(Comparator.comparing((Path p) -> {
try {
return Files.getLastModifiedTime(p);
} catch (IOException e) {
return null;
}
}).reversed()) // Most recent first
.collect(Collectors.toList());
}
logger.info("Found {} automatic backups", backupFiles.size());
// Delete old backups if we exceed MAX_BACKUPS
if (backupFiles.size() > MAX_BACKUPS) {
List<Path> toDelete = backupFiles.subList(MAX_BACKUPS, backupFiles.size());
logger.info("Deleting {} old backups to maintain maximum of {}", toDelete.size(), MAX_BACKUPS);
for (Path oldBackup : toDelete) {
try {
Files.delete(oldBackup);
logger.info(" Deleted old backup: {}", oldBackup.getFileName());
} catch (IOException e) {
logger.warn("Failed to delete old backup: {}", oldBackup, e);
}
}
} else {
logger.info("Backup count within limit ({}), no rotation needed", MAX_BACKUPS);
}
}
/**
* Manual trigger for testing - creates backup immediately if content changed.
*/
public void triggerManualBackup() {
logger.info("Manual automatic backup triggered");
performAutomaticBackup();
}
/**
* Get list of automatic backups for the current library.
*/
public List<BackupInfo> listAutomaticBackups() throws IOException {
String libraryId = libraryService.getCurrentLibraryId();
if (libraryId == null) {
libraryId = "default";
}
Path backupPath = Paths.get(automaticBackupDir, libraryId);
if (!Files.exists(backupPath)) {
return List.of();
}
try (Stream<Path> stream = Files.list(backupPath)) {
return stream
.filter(Files::isRegularFile)
.filter(p -> p.getFileName().toString().startsWith("auto_backup_"))
.filter(p -> p.getFileName().toString().endsWith(".zip"))
.sorted(Comparator.comparing((Path p) -> {
try {
return Files.getLastModifiedTime(p);
} catch (IOException e) {
return null;
}
}).reversed())
.map(p -> {
try {
return new BackupInfo(
p.getFileName().toString(),
Files.size(p),
Files.getLastModifiedTime(p).toInstant().toString()
);
} catch (IOException e) {
return null;
}
})
.filter(info -> info != null)
.collect(Collectors.toList());
}
}
/**
* Simple backup info class.
*/
public static class BackupInfo {
private final String filename;
private final long sizeBytes;
private final String createdAt;
public BackupInfo(String filename, long sizeBytes, String createdAt) {
this.filename = filename;
this.sizeBytes = sizeBytes;
this.createdAt = createdAt;
}
public String getFilename() {
return filename;
}
public long getSizeBytes() {
return sizeBytes;
}
public String getCreatedAt() {
return createdAt;
}
}
}

DatabaseManagementService.java

@@ -7,7 +7,6 @@ import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value; import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.ApplicationContext; import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware; import org.springframework.context.ApplicationContextAware;
import org.springframework.core.io.ByteArrayResource;
import org.springframework.core.io.Resource; import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional; import org.springframework.transaction.annotation.Transactional;
@@ -141,9 +140,12 @@ public class DatabaseManagementService implements ApplicationContextAware {
/** /**
* Create a comprehensive backup including database and files in ZIP format * Create a comprehensive backup including database and files in ZIP format
* Returns a streaming resource to avoid loading large backups into memory
*/ */
public Resource createCompleteBackup() throws SQLException, IOException { public Resource createCompleteBackup() throws SQLException, IOException {
// Create temp file with deleteOnExit as safety net
Path tempZip = Files.createTempFile("storycove-backup", ".zip"); Path tempZip = Files.createTempFile("storycove-backup", ".zip");
tempZip.toFile().deleteOnExit();
try (ZipOutputStream zipOut = new ZipOutputStream(Files.newOutputStream(tempZip))) { try (ZipOutputStream zipOut = new ZipOutputStream(Files.newOutputStream(tempZip))) {
// 1. Add database dump // 1. Add database dump
@@ -156,11 +158,30 @@ public class DatabaseManagementService implements ApplicationContextAware {
addMetadataToZip(zipOut); addMetadataToZip(zipOut);
} }
// Return the ZIP file as a resource // Return the ZIP file as a FileSystemResource for streaming
byte[] zipData = Files.readAllBytes(tempZip); // This avoids loading the entire file into memory
Files.deleteIfExists(tempZip); return new org.springframework.core.io.FileSystemResource(tempZip.toFile()) {
@Override
return new ByteArrayResource(zipData); public InputStream getInputStream() throws IOException {
// Wrap the input stream to delete the temp file after it's fully read
return new java.io.FilterInputStream(super.getInputStream()) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Clean up temp file after streaming is complete
try {
Files.deleteIfExists(tempZip);
} catch (IOException e) {
// Log but don't fail - deleteOnExit will handle it
System.err.println("Warning: Could not delete temp backup file: " + e.getMessage());
}
}
}
};
}
};
} }
/** /**
@@ -289,20 +310,34 @@ public class DatabaseManagementService implements ApplicationContextAware {
System.err.println("PostgreSQL backup completed successfully"); System.err.println("PostgreSQL backup completed successfully");
// Read the backup file into memory // Return the backup file as a streaming resource to avoid memory issues with large databases
byte[] backupData = Files.readAllBytes(tempBackupFile); tempBackupFile.toFile().deleteOnExit();
return new ByteArrayResource(backupData); return new org.springframework.core.io.FileSystemResource(tempBackupFile.toFile()) {
@Override
public InputStream getInputStream() throws IOException {
// Wrap the input stream to delete the temp file after it's fully read
return new java.io.FilterInputStream(super.getInputStream()) {
@Override
public void close() throws IOException {
try {
super.close();
} finally {
// Clean up temp file after streaming is complete
try {
Files.deleteIfExists(tempBackupFile);
} catch (IOException e) {
// Log but don't fail - deleteOnExit will handle it
System.err.println("Warning: Could not delete temp backup file: " + e.getMessage());
}
}
}
};
}
};
} catch (InterruptedException e) { } catch (InterruptedException e) {
Thread.currentThread().interrupt(); Thread.currentThread().interrupt();
throw new RuntimeException("Backup process was interrupted", e); throw new RuntimeException("Backup process was interrupted", e);
} finally {
// Clean up temporary file
try {
Files.deleteIfExists(tempBackupFile);
} catch (IOException e) {
System.err.println("Warning: Could not delete temporary backup file: " + e.getMessage());
}
} }
} }
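Returning a FileSystemResource whose input stream deletes the temp file on close() only pays off if the caller streams the resource rather than buffering it. A minimal, hypothetical sketch of how a Spring MVC endpoint could hand such a resource to the client; the controllers that actually consume this service are not part of this diff, so the class and path below are assumptions:

import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;

@RestController
public class BackupDownloadControllerSketch {

    private final DatabaseManagementService databaseManagementService;

    public BackupDownloadControllerSketch(DatabaseManagementService databaseManagementService) {
        this.databaseManagementService = databaseManagementService;
    }

    @GetMapping("/api/database/backup-stream")  // illustrative path, not an endpoint from this diff
    public ResponseEntity<Resource> streamBackup() throws Exception {
        Resource backup = databaseManagementService.createCompleteBackup();
        // Spring copies the resource's InputStream to the response and closes it afterwards;
        // the close() override above then removes the temporary ZIP from disk.
        return ResponseEntity.ok()
                .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename=\"storycove_complete_backup.zip\"")
                .contentType(MediaType.APPLICATION_OCTET_STREAM)
                .body(backup);
    }
}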

View File

@@ -0,0 +1,643 @@
package com.storycove.service;
import com.storycove.config.SolrProperties;
import com.storycove.dto.*;
import com.storycove.dto.LibraryOverviewStatsDto.StoryWordCountDto;
import com.storycove.repository.CollectionRepository;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.FacetField;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.params.StatsParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.time.LocalDate;
import java.time.LocalDateTime;
import java.time.ZoneOffset;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
@Service
@ConditionalOnProperty(
value = "storycove.search.engine",
havingValue = "solr",
matchIfMissing = false
)
public class LibraryStatisticsService {
private static final Logger logger = LoggerFactory.getLogger(LibraryStatisticsService.class);
private static final int WORDS_PER_MINUTE = 250;
@Autowired(required = false)
private SolrClient solrClient;
@Autowired
private SolrProperties properties;
@Autowired
private LibraryService libraryService;
@Autowired
private CollectionRepository collectionRepository;
/**
* Get overview statistics for a library
*/
public LibraryOverviewStatsDto getOverviewStatistics(String libraryId) throws IOException, SolrServerException {
LibraryOverviewStatsDto stats = new LibraryOverviewStatsDto();
// Collection Overview
stats.setTotalStories(getTotalStories(libraryId));
stats.setTotalAuthors(getTotalAuthors(libraryId));
stats.setTotalSeries(getTotalSeries(libraryId));
stats.setTotalTags(getTotalTags(libraryId));
stats.setTotalCollections(getTotalCollections(libraryId));
stats.setUniqueSourceDomains(getUniqueSourceDomains(libraryId));
// Content Metrics - use Solr Stats Component
WordCountStats wordStats = getWordCountStatistics(libraryId);
stats.setTotalWordCount(wordStats.sum);
stats.setAverageWordsPerStory(wordStats.mean);
stats.setLongestStory(getLongestStory(libraryId));
stats.setShortestStory(getShortestStory(libraryId));
// Reading Time
stats.setTotalReadingTimeMinutes(wordStats.sum / WORDS_PER_MINUTE);
stats.setAverageReadingTimeMinutes(wordStats.mean / WORDS_PER_MINUTE);
return stats;
}
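All reading-time figures in this service are straight derivations from the Stats Component output at the fixed 250 words-per-minute rate. A quick worked example with made-up numbers:

// Illustrative values only - showing how the reading-time fields above are derived
long sum = 12_500_000L;      // total wordCount across the library (example)
double mean = 8_300.0;       // average wordCount per story (example)
int wordsPerMinute = 250;    // same constant as WORDS_PER_MINUTE

long totalReadingTimeMinutes = sum / wordsPerMinute;      // 50_000 minutes (~833 hours)
double averageReadingTimeMinutes = mean / wordsPerMinute; // 33.2 minutes per story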
/**
* Get total number of stories in library
*/
private long getTotalStories(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0); // We only want the count
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
return response.getResults().getNumFound();
}
/**
* Get total number of authors in library
*/
private long getTotalAuthors(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
QueryResponse response = solrClient.query(properties.getCores().getAuthors(), query);
return response.getResults().getNumFound();
}
/**
* Get total number of series using faceting on seriesId
*/
private long getTotalSeries(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("seriesId:[* TO *]"); // Only stories that have a series
query.setRows(0);
query.setFacet(true);
query.addFacetField("seriesId");
query.setFacetLimit(-1); // Get all unique series
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField seriesFacet = response.getFacetField("seriesId");
return (seriesFacet != null && seriesFacet.getValues() != null)
? seriesFacet.getValueCount()
: 0;
}
/**
* Get total number of unique tags using faceting
*/
private long getTotalTags(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setFacet(true);
query.addFacetField("tagNames");
query.setFacetLimit(-1); // Get all unique tags
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField tagsFacet = response.getFacetField("tagNames");
return (tagsFacet != null && tagsFacet.getValues() != null)
? tagsFacet.getValueCount()
: 0;
}
/**
* Get total number of collections
*/
private long getTotalCollections(String libraryId) {
// Collections are stored in the database, not indexed in Solr
return collectionRepository.countByIsArchivedFalse();
}
/**
* Get number of unique source domains using faceting
*/
private long getUniqueSourceDomains(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("sourceDomain:[* TO *]"); // Only stories with a source domain
query.setRows(0);
query.setFacet(true);
query.addFacetField("sourceDomain");
query.setFacetLimit(-1);
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField domainFacet = response.getFacetField("sourceDomain");
return (domainFacet != null && domainFacet.getValues() != null)
? domainFacet.getValueCount()
: 0;
}
/**
* Get word count statistics using Solr Stats Component
*/
private WordCountStats getWordCountStatistics(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setParam(StatsParams.STATS, true);
query.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
WordCountStats stats = new WordCountStats();
// Extract stats from response
var fieldStatsInfo = response.getFieldStatsInfo();
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
var fieldStat = fieldStatsInfo.get("wordCount");
Object sumObj = fieldStat.getSum();
Object meanObj = fieldStat.getMean();
stats.sum = (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
stats.mean = (meanObj != null) ? ((Number) meanObj).doubleValue() : 0.0;
}
return stats;
}
/**
* Get the longest story in the library
*/
private StoryWordCountDto getLongestStory(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("wordCount:[1 TO *]"); // Exclude stories with 0 words
query.setSort("wordCount", SolrQuery.ORDER.desc);
query.setRows(1);
query.setFields("id", "title", "authorName", "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
if (response.getResults().isEmpty()) {
return null;
}
SolrDocument doc = response.getResults().get(0);
return createStoryWordCountDto(doc);
}
/**
* Get the shortest story in the library (excluding 0 word count)
*/
private StoryWordCountDto getShortestStory(String libraryId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("wordCount:[1 TO *]"); // Exclude stories with 0 words
query.setSort("wordCount", SolrQuery.ORDER.asc);
query.setRows(1);
query.setFields("id", "title", "authorName", "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
if (response.getResults().isEmpty()) {
return null;
}
SolrDocument doc = response.getResults().get(0);
return createStoryWordCountDto(doc);
}
/**
* Helper method to create StoryWordCountDto from Solr document
*/
private StoryWordCountDto createStoryWordCountDto(SolrDocument doc) {
String id = (String) doc.getFieldValue("id");
String title = (String) doc.getFieldValue("title");
String authorName = (String) doc.getFieldValue("authorName");
Object wordCountObj = doc.getFieldValue("wordCount");
int wordCount = (wordCountObj != null) ? ((Number) wordCountObj).intValue() : 0;
long readingTime = wordCount / WORDS_PER_MINUTE;
return new StoryWordCountDto(id, title, authorName, wordCount, readingTime);
}
/**
* Helper class to hold word count statistics
*/
private static class WordCountStats {
long sum = 0;
double mean = 0.0;
}
/**
* Get top tags statistics
*/
public TopTagsStatsDto getTopTagsStatistics(String libraryId, int limit) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setFacet(true);
query.addFacetField("tagNames");
query.setFacetLimit(limit);
query.setFacetSort("count"); // Sort by count (most popular first)
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField tagsFacet = response.getFacetField("tagNames");
List<TopTagsStatsDto.TagStatsDto> topTags = new ArrayList<>();
if (tagsFacet != null && tagsFacet.getValues() != null) {
for (FacetField.Count count : tagsFacet.getValues()) {
topTags.add(new TopTagsStatsDto.TagStatsDto(count.getName(), count.getCount()));
}
}
return new TopTagsStatsDto(topTags);
}
/**
* Get top authors statistics
*/
public TopAuthorsStatsDto getTopAuthorsStatistics(String libraryId, int limit) throws IOException, SolrServerException {
TopAuthorsStatsDto stats = new TopAuthorsStatsDto();
// Top authors by story count
stats.setTopAuthorsByStories(getTopAuthorsByStoryCount(libraryId, limit));
// Top authors by total words
stats.setTopAuthorsByWords(getTopAuthorsByWordCount(libraryId, limit));
return stats;
}
private List<TopAuthorsStatsDto.AuthorStatsDto> getTopAuthorsByStoryCount(String libraryId, int limit)
throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setFacet(true);
query.addFacetField("authorId");
query.setFacetLimit(limit);
query.setFacetSort("count");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField authorFacet = response.getFacetField("authorId");
List<TopAuthorsStatsDto.AuthorStatsDto> topAuthors = new ArrayList<>();
if (authorFacet != null && authorFacet.getValues() != null) {
for (FacetField.Count count : authorFacet.getValues()) {
String authorId = count.getName();
long storyCount = count.getCount();
// Get author name and total words
SolrQuery authorQuery = new SolrQuery("authorId:" + authorId);
authorQuery.addFilterQuery("libraryId:" + libraryId);
authorQuery.setRows(1);
authorQuery.setFields("authorName");
QueryResponse authorResponse = solrClient.query(properties.getCores().getStories(), authorQuery);
String authorName = "";
if (!authorResponse.getResults().isEmpty()) {
authorName = (String) authorResponse.getResults().get(0).getFieldValue("authorName");
}
// Get total words for this author
long totalWords = getAuthorTotalWords(libraryId, authorId);
topAuthors.add(new TopAuthorsStatsDto.AuthorStatsDto(authorId, authorName, storyCount, totalWords));
}
}
return topAuthors;
}
private List<TopAuthorsStatsDto.AuthorStatsDto> getTopAuthorsByWordCount(String libraryId, int limit)
throws IOException, SolrServerException {
// First get all unique authors
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setFacet(true);
query.addFacetField("authorId");
query.setFacetLimit(-1); // Get all authors
query.setFacetSort("count");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
FacetField authorFacet = response.getFacetField("authorId");
List<TopAuthorsStatsDto.AuthorStatsDto> allAuthors = new ArrayList<>();
if (authorFacet != null && authorFacet.getValues() != null) {
for (FacetField.Count count : authorFacet.getValues()) {
String authorId = count.getName();
long storyCount = count.getCount();
// Get author name
SolrQuery authorQuery = new SolrQuery("authorId:" + authorId);
authorQuery.addFilterQuery("libraryId:" + libraryId);
authorQuery.setRows(1);
authorQuery.setFields("authorName");
QueryResponse authorResponse = solrClient.query(properties.getCores().getStories(), authorQuery);
String authorName = "";
if (!authorResponse.getResults().isEmpty()) {
authorName = (String) authorResponse.getResults().get(0).getFieldValue("authorName");
}
// Get total words for this author
long totalWords = getAuthorTotalWords(libraryId, authorId);
allAuthors.add(new TopAuthorsStatsDto.AuthorStatsDto(authorId, authorName, storyCount, totalWords));
}
}
// Sort by total words and return top N
return allAuthors.stream()
.sorted(Comparator.comparingLong(TopAuthorsStatsDto.AuthorStatsDto::getTotalWords).reversed())
.limit(limit)
.collect(Collectors.toList());
}
private long getAuthorTotalWords(String libraryId, String authorId) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("authorId:" + authorId);
query.addFilterQuery("libraryId:" + libraryId);
query.setRows(0);
query.setParam(StatsParams.STATS, true);
query.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
var fieldStatsInfo = response.getFieldStatsInfo();
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
var fieldStat = fieldStatsInfo.get("wordCount");
Object sumObj = fieldStat.getSum();
return (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
}
return 0L;
}
/**
* Get rating statistics
*/
public RatingStatsDto getRatingStatistics(String libraryId) throws IOException, SolrServerException {
RatingStatsDto stats = new RatingStatsDto();
// Get average rating using stats component
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("rating:[* TO *]"); // Only rated stories
query.setRows(0);
query.setParam(StatsParams.STATS, true);
query.setParam(StatsParams.STATS_FIELD, "rating");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
long totalRated = response.getResults().getNumFound();
var fieldStatsInfo = response.getFieldStatsInfo();
if (fieldStatsInfo != null && fieldStatsInfo.get("rating") != null) {
var fieldStat = fieldStatsInfo.get("rating");
Object meanObj = fieldStat.getMean();
stats.setAverageRating((meanObj != null) ? ((Number) meanObj).doubleValue() : 0.0);
}
stats.setTotalRatedStories(totalRated);
// Get total stories to calculate unrated
long totalStories = getTotalStories(libraryId);
stats.setTotalUnratedStories(totalStories - totalRated);
// Get rating distribution using faceting
SolrQuery distQuery = new SolrQuery("*:*");
distQuery.addFilterQuery("libraryId:" + libraryId);
distQuery.addFilterQuery("rating:[* TO *]");
distQuery.setRows(0);
distQuery.setFacet(true);
distQuery.addFacetField("rating");
distQuery.setFacetLimit(-1);
QueryResponse distResponse = solrClient.query(properties.getCores().getStories(), distQuery);
FacetField ratingFacet = distResponse.getFacetField("rating");
Map<Integer, Long> distribution = new HashMap<>();
if (ratingFacet != null && ratingFacet.getValues() != null) {
for (FacetField.Count count : ratingFacet.getValues()) {
try {
int rating = Integer.parseInt(count.getName());
distribution.put(rating, count.getCount());
} catch (NumberFormatException e) {
// Skip invalid ratings
}
}
}
stats.setRatingDistribution(distribution);
return stats;
}
/**
* Get source domain statistics
*/
public SourceDomainStatsDto getSourceDomainStatistics(String libraryId, int limit) throws IOException, SolrServerException {
SourceDomainStatsDto stats = new SourceDomainStatsDto();
// Get top domains using faceting
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("sourceDomain:[* TO *]"); // Only stories with source
query.setRows(0);
query.setFacet(true);
query.addFacetField("sourceDomain");
query.setFacetLimit(limit);
query.setFacetSort("count");
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
long storiesWithSource = response.getResults().getNumFound();
FacetField domainFacet = response.getFacetField("sourceDomain");
List<SourceDomainStatsDto.DomainStatsDto> topDomains = new ArrayList<>();
if (domainFacet != null && domainFacet.getValues() != null) {
for (FacetField.Count count : domainFacet.getValues()) {
topDomains.add(new SourceDomainStatsDto.DomainStatsDto(count.getName(), count.getCount()));
}
}
stats.setTopDomains(topDomains);
stats.setStoriesWithSource(storiesWithSource);
long totalStories = getTotalStories(libraryId);
stats.setStoriesWithoutSource(totalStories - storiesWithSource);
return stats;
}
/**
* Get reading progress statistics
*/
public ReadingProgressStatsDto getReadingProgressStatistics(String libraryId) throws IOException, SolrServerException {
ReadingProgressStatsDto stats = new ReadingProgressStatsDto();
long totalStories = getTotalStories(libraryId);
stats.setTotalStories(totalStories);
// Get read stories count
SolrQuery readQuery = new SolrQuery("*:*");
readQuery.addFilterQuery("libraryId:" + libraryId);
readQuery.addFilterQuery("isRead:true");
readQuery.setRows(0);
QueryResponse readResponse = solrClient.query(properties.getCores().getStories(), readQuery);
long readStories = readResponse.getResults().getNumFound();
stats.setReadStories(readStories);
stats.setUnreadStories(totalStories - readStories);
if (totalStories > 0) {
stats.setPercentageRead((readStories * 100.0) / totalStories);
}
// Get total words read
SolrQuery readWordsQuery = new SolrQuery("*:*");
readWordsQuery.addFilterQuery("libraryId:" + libraryId);
readWordsQuery.addFilterQuery("isRead:true");
readWordsQuery.setRows(0);
readWordsQuery.setParam(StatsParams.STATS, true);
readWordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse readWordsResponse = solrClient.query(properties.getCores().getStories(), readWordsQuery);
var readFieldStats = readWordsResponse.getFieldStatsInfo();
if (readFieldStats != null && readFieldStats.get("wordCount") != null) {
var fieldStat = readFieldStats.get("wordCount");
Object sumObj = fieldStat.getSum();
stats.setTotalWordsRead((sumObj != null) ? ((Number) sumObj).longValue() : 0L);
}
// Get total words unread
SolrQuery unreadWordsQuery = new SolrQuery("*:*");
unreadWordsQuery.addFilterQuery("libraryId:" + libraryId);
unreadWordsQuery.addFilterQuery("isRead:false");
unreadWordsQuery.setRows(0);
unreadWordsQuery.setParam(StatsParams.STATS, true);
unreadWordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse unreadWordsResponse = solrClient.query(properties.getCores().getStories(), unreadWordsQuery);
var unreadFieldStats = unreadWordsResponse.getFieldStatsInfo();
if (unreadFieldStats != null && unreadFieldStats.get("wordCount") != null) {
var fieldStat = unreadFieldStats.get("wordCount");
Object sumObj = fieldStat.getSum();
stats.setTotalWordsUnread((sumObj != null) ? ((Number) sumObj).longValue() : 0L);
}
return stats;
}
/**
* Get reading activity statistics for the last week
*/
public ReadingActivityStatsDto getReadingActivityStatistics(String libraryId) throws IOException, SolrServerException {
ReadingActivityStatsDto stats = new ReadingActivityStatsDto();
LocalDateTime oneWeekAgo = LocalDateTime.now().minusWeeks(1);
String oneWeekAgoStr = oneWeekAgo.toInstant(ZoneOffset.UTC).toString();
// Get stories read in last week
SolrQuery query = new SolrQuery("*:*");
query.addFilterQuery("libraryId:" + libraryId);
query.addFilterQuery("lastReadAt:[" + oneWeekAgoStr + " TO *]");
query.setRows(0);
QueryResponse response = solrClient.query(properties.getCores().getStories(), query);
long storiesReadLastWeek = response.getResults().getNumFound();
stats.setStoriesReadLastWeek(storiesReadLastWeek);
// Get words read in last week
SolrQuery wordsQuery = new SolrQuery("*:*");
wordsQuery.addFilterQuery("libraryId:" + libraryId);
wordsQuery.addFilterQuery("lastReadAt:[" + oneWeekAgoStr + " TO *]");
wordsQuery.setRows(0);
wordsQuery.setParam(StatsParams.STATS, true);
wordsQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse wordsResponse = solrClient.query(properties.getCores().getStories(), wordsQuery);
var fieldStatsInfo = wordsResponse.getFieldStatsInfo();
long wordsReadLastWeek = 0L;
if (fieldStatsInfo != null && fieldStatsInfo.get("wordCount") != null) {
var fieldStat = fieldStatsInfo.get("wordCount");
Object sumObj = fieldStat.getSum();
wordsReadLastWeek = (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
}
stats.setWordsReadLastWeek(wordsReadLastWeek);
stats.setReadingTimeMinutesLastWeek(wordsReadLastWeek / WORDS_PER_MINUTE);
// Get daily activity (last 7 days)
List<ReadingActivityStatsDto.DailyActivityDto> dailyActivity = new ArrayList<>();
for (int i = 6; i >= 0; i--) {
LocalDate date = LocalDate.now().minusDays(i);
LocalDateTime dayStart = date.atStartOfDay();
LocalDateTime dayEnd = date.atTime(23, 59, 59);
String dayStartStr = dayStart.toInstant(ZoneOffset.UTC).toString();
String dayEndStr = dayEnd.toInstant(ZoneOffset.UTC).toString();
SolrQuery dayQuery = new SolrQuery("*:*");
dayQuery.addFilterQuery("libraryId:" + libraryId);
dayQuery.addFilterQuery("lastReadAt:[" + dayStartStr + " TO " + dayEndStr + "]");
dayQuery.setRows(0);
dayQuery.setParam(StatsParams.STATS, true);
dayQuery.setParam(StatsParams.STATS_FIELD, "wordCount");
QueryResponse dayResponse = solrClient.query(properties.getCores().getStories(), dayQuery);
long storiesRead = dayResponse.getResults().getNumFound();
long wordsRead = 0L;
var dayFieldStats = dayResponse.getFieldStatsInfo();
if (dayFieldStats != null && dayFieldStats.get("wordCount") != null) {
var fieldStat = dayFieldStats.get("wordCount");
Object sumObj = fieldStat.getSum();
wordsRead = (sumObj != null) ? ((Number) sumObj).longValue() : 0L;
}
dailyActivity.add(new ReadingActivityStatsDto.DailyActivityDto(
date.format(DateTimeFormatter.ISO_LOCAL_DATE),
storiesRead,
wordsRead
));
}
stats.setDailyActivity(dailyActivity);
return stats;
}
}
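The frontend added later in this diff calls endpoints under /libraries/{libraryId}/statistics/...; the REST controller exposing them is not included in the visible part of this diff. A minimal sketch of how such a controller could delegate to this service (illustrative only; the /api prefix and class name are assumptions):

import org.apache.solr.client.solrj.SolrServerException;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import java.io.IOException;

@RestController
@RequestMapping("/api/libraries/{libraryId}/statistics")
public class LibraryStatisticsControllerSketch {

    private final LibraryStatisticsService statisticsService;

    public LibraryStatisticsControllerSketch(LibraryStatisticsService statisticsService) {
        this.statisticsService = statisticsService;
    }

    @GetMapping("/overview")
    public LibraryOverviewStatsDto overview(@PathVariable String libraryId)
            throws IOException, SolrServerException {
        return statisticsService.getOverviewStatistics(libraryId);
    }

    @GetMapping("/top-tags")
    public TopTagsStatsDto topTags(@PathVariable String libraryId,
                                   @RequestParam(defaultValue = "20") int limit)
            throws IOException, SolrServerException {
        return statisticsService.getTopTagsStatistics(libraryId, limit);
    }
}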

View File

@@ -385,9 +385,69 @@ public class SolrService {
logger.warn("Could not add libraryId field to document (field may not exist in schema): {}", e.getMessage()); logger.warn("Could not add libraryId field to document (field may not exist in schema): {}", e.getMessage());
} }
// Add derived fields for statistics (Phase 1)
addDerivedStatisticsFields(doc, story);
return doc; return doc;
} }
/**
* Add derived fields to support statistics queries
*/
private void addDerivedStatisticsFields(SolrInputDocument doc, Story story) {
try {
// Boolean flags for filtering
doc.addField("hasDescription", story.getDescription() != null && !story.getDescription().trim().isEmpty());
doc.addField("hasCoverImage", story.getCoverPath() != null && !story.getCoverPath().trim().isEmpty());
doc.addField("hasRating", story.getRating() != null && story.getRating() > 0);
// Extract source domain from URL
if (story.getSourceUrl() != null && !story.getSourceUrl().trim().isEmpty()) {
String domain = extractDomain(story.getSourceUrl());
if (domain != null) {
doc.addField("sourceDomain", domain);
}
}
// Tag count for statistics
int tagCount = (story.getTags() != null) ? story.getTags().size() : 0;
doc.addField("tagCount", tagCount);
} catch (Exception e) {
// Don't fail indexing if derived fields can't be added
logger.debug("Could not add some derived statistics fields: {}", e.getMessage());
}
}
/**
* Extract domain from URL for source statistics
*/
private String extractDomain(String url) {
try {
if (url == null || url.trim().isEmpty()) {
return null;
}
// Handle URLs without protocol
if (!url.startsWith("http://") && !url.startsWith("https://")) {
url = "https://" + url;
}
java.net.URL parsedUrl = new java.net.URL(url);
String host = parsedUrl.getHost();
// Remove www. prefix if present
if (host.startsWith("www.")) {
host = host.substring(4);
}
return host;
} catch (Exception e) {
logger.debug("Failed to extract domain from URL: {}", url);
return null;
}
}
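For illustration, the normalisation rules above (default to https, strip a leading "www.") behave as follows; this is a standalone snippet mirroring the logic, not part of the diff:

// Standalone illustration of the domain-extraction rules (mirrors extractDomain above)
public class DomainExtractionExample {
    public static void main(String[] args) throws Exception {
        for (String raw : new String[]{"https://www.example.com/stories/42", "example.org/s/7"}) {
            String url = raw;
            if (!url.startsWith("http://") && !url.startsWith("https://")) {
                url = "https://" + url;                 // protocol added when missing
            }
            String host = new java.net.URL(url).getHost();
            if (host.startsWith("www.")) {
                host = host.substring(4);               // "www." prefix stripped
            }
            System.out.println(raw + " -> " + host);    // example.com / example.org
        }
    }
}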
private SolrInputDocument createAuthorDocument(Author author) {
    SolrInputDocument doc = new SolrInputDocument();

View File

@@ -89,6 +89,8 @@ storycove:
      enable-metrics: ${SOLR_ENABLE_METRICS:true}
  images:
    storage-path: ${IMAGE_STORAGE_PATH:/app/images}
+  automatic-backup:
+    dir: ${AUTOMATIC_BACKUP_DIR:/app/automatic-backups}

management:
  endpoints:

View File

@@ -55,6 +55,11 @@ if [ $RETRY_COUNT -eq $MAX_RETRIES ]; then
    exit 1
fi

+# Apply database migrations
+echo -e "${YELLOW}🗄️  Applying database migrations...${NC}"
+docker-compose run --rm migrations
+echo -e "${GREEN}✅ Database migrations applied${NC}"
+
# Check if Solr is ready
echo -e "${YELLOW}🔍 Checking Solr health...${NC}"
RETRY_COUNT=0

View File

@@ -44,9 +44,10 @@ services:
    volumes:
      - images_data:/app/images
      - library_config:/app/config
+     - automatic_backups:/app/automatic-backups
    depends_on:
      postgres:
-       condition: service_started
+       condition: service_healthy
      solr:
        condition: service_started
    networks:
@@ -65,6 +66,11 @@ services:
      - postgres_data:/var/lib/postgresql/data
    networks:
      - storycove-network
+   healthcheck:
+     test: ["CMD-SHELL", "pg_isready -U storycove -d storycove"]
+     interval: 5s
+     timeout: 5s
+     retries: 5

  solr:
@@ -101,6 +107,7 @@ volumes:
  solr_data:
  images_data:
  library_config:
+ automatic_backups:

configs:
  nginx_config:

View File

@@ -0,0 +1,491 @@
'use client';
import { useState, useEffect } from 'react';
import { useRouter } from 'next/navigation';
import AppLayout from '@/components/layout/AppLayout';
import { statisticsApi, getCurrentLibraryId } from '@/lib/api';
import {
LibraryOverviewStats,
TopTagsStats,
TopAuthorsStats,
RatingStats,
SourceDomainStats,
ReadingProgressStats,
ReadingActivityStats
} from '@/types/api';
function StatisticsContent() {
const router = useRouter();
const [loading, setLoading] = useState(true);
const [error, setError] = useState<string | null>(null);
// Statistics state
const [overviewStats, setOverviewStats] = useState<LibraryOverviewStats | null>(null);
const [topTags, setTopTags] = useState<TopTagsStats | null>(null);
const [topAuthors, setTopAuthors] = useState<TopAuthorsStats | null>(null);
const [ratingStats, setRatingStats] = useState<RatingStats | null>(null);
const [sourceDomains, setSourceDomains] = useState<SourceDomainStats | null>(null);
const [readingProgress, setReadingProgress] = useState<ReadingProgressStats | null>(null);
const [readingActivity, setReadingActivity] = useState<ReadingActivityStats | null>(null);
useEffect(() => {
loadStatistics();
}, []);
const loadStatistics = async () => {
try {
setLoading(true);
setError(null);
const libraryId = getCurrentLibraryId();
if (!libraryId) {
router.push('/library');
return;
}
// Load all statistics in parallel
const [overview, tags, authors, ratings, domains, progress, activity] = await Promise.all([
statisticsApi.getOverviewStatistics(libraryId),
statisticsApi.getTopTags(libraryId, 20),
statisticsApi.getTopAuthors(libraryId, 10),
statisticsApi.getRatingStats(libraryId),
statisticsApi.getSourceDomainStats(libraryId, 10),
statisticsApi.getReadingProgress(libraryId),
statisticsApi.getReadingActivity(libraryId),
]);
setOverviewStats(overview);
setTopTags(tags);
setTopAuthors(authors);
setRatingStats(ratings);
setSourceDomains(domains);
setReadingProgress(progress);
setReadingActivity(activity);
} catch (err) {
console.error('Failed to load statistics:', err);
setError('Failed to load statistics. Please try again.');
} finally {
setLoading(false);
}
};
const formatNumber = (num: number): string => {
return num.toLocaleString();
};
const formatTime = (minutes: number): string => {
const hours = Math.floor(minutes / 60);
const mins = Math.round(minutes % 60);
if (hours > 24) {
const days = Math.floor(hours / 24);
const remainingHours = hours % 24;
return `${days}d ${remainingHours}h`;
}
if (hours > 0) {
return `${hours}h ${mins}m`;
}
return `${mins}m`;
};
if (loading) {
return (
<div className="container mx-auto px-4 py-8">
<div className="flex items-center justify-center min-h-[400px]">
<div className="text-center">
<div className="animate-spin rounded-full h-12 w-12 border-b-2 border-blue-600 mx-auto mb-4"></div>
<p className="text-gray-600 dark:text-gray-400">Loading statistics...</p>
</div>
</div>
</div>
);
}
if (error) {
return (
<div className="container mx-auto px-4 py-8">
<div className="bg-red-50 dark:bg-red-900/20 border border-red-200 dark:border-red-800 rounded-lg p-6">
<h3 className="text-lg font-semibold text-red-800 dark:text-red-200 mb-2">Error</h3>
<p className="text-red-600 dark:text-red-400">{error}</p>
<button
onClick={loadStatistics}
className="mt-4 px-4 py-2 bg-red-600 text-white rounded hover:bg-red-700 transition-colors"
>
Try Again
</button>
</div>
</div>
);
}
return (
<div className="container mx-auto px-4 py-8">
<div className="mb-8">
<h1 className="text-3xl font-bold text-gray-900 dark:text-white mb-2">Library Statistics</h1>
<p className="text-gray-600 dark:text-gray-400">
Insights and analytics for your story collection
</p>
</div>
{/* Collection Overview */}
{overviewStats && (
<section className="mb-8">
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Collection Overview</h2>
<div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
<StatCard title="Total Stories" value={formatNumber(overviewStats.totalStories)} />
<StatCard title="Total Authors" value={formatNumber(overviewStats.totalAuthors)} />
<StatCard title="Total Series" value={formatNumber(overviewStats.totalSeries)} />
<StatCard title="Total Tags" value={formatNumber(overviewStats.totalTags)} />
<StatCard title="Total Collections" value={formatNumber(overviewStats.totalCollections)} />
<StatCard title="Source Domains" value={formatNumber(overviewStats.uniqueSourceDomains)} />
</div>
</section>
)}
{/* Content Metrics */}
{overviewStats && (
<section className="mb-8">
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Content Metrics</h2>
<div className="grid grid-cols-1 md:grid-cols-2 gap-4">
<StatCard
title="Total Words"
value={formatNumber(overviewStats.totalWordCount)}
subtitle={`${formatTime(overviewStats.totalReadingTimeMinutes)} reading time`}
/>
<StatCard
title="Average Words per Story"
value={formatNumber(Math.round(overviewStats.averageWordsPerStory))}
subtitle={`${formatTime(overviewStats.averageReadingTimeMinutes)} avg reading time`}
/>
{overviewStats.longestStory && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">Longest Story</h3>
<p className="text-2xl font-bold text-gray-900 dark:text-white mb-1">
{formatNumber(overviewStats.longestStory.wordCount)} words
</p>
<p className="text-sm text-gray-600 dark:text-gray-400 truncate" title={overviewStats.longestStory.title}>
{overviewStats.longestStory.title}
</p>
<p className="text-xs text-gray-500 dark:text-gray-500">
by {overviewStats.longestStory.authorName}
</p>
</div>
)}
{overviewStats.shortestStory && (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">Shortest Story</h3>
<p className="text-2xl font-bold text-gray-900 dark:text-white mb-1">
{formatNumber(overviewStats.shortestStory.wordCount)} words
</p>
<p className="text-sm text-gray-600 dark:text-gray-400 truncate" title={overviewStats.shortestStory.title}>
{overviewStats.shortestStory.title}
</p>
<p className="text-xs text-gray-500 dark:text-gray-500">
by {overviewStats.shortestStory.authorName}
</p>
</div>
)}
</div>
</section>
)}
{/* Reading Progress & Activity - Side by side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
{/* Reading Progress */}
{readingProgress && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Reading Progress</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="mb-6">
<div className="flex justify-between items-center mb-2">
<span className="text-sm font-medium text-gray-600 dark:text-gray-400">
{formatNumber(readingProgress.readStories)} of {formatNumber(readingProgress.totalStories)} stories read
</span>
<span className="text-sm font-semibold text-blue-600 dark:text-blue-400">
{readingProgress.percentageRead.toFixed(1)}%
</span>
</div>
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-3">
<div
className="bg-blue-600 h-3 rounded-full transition-all duration-500"
style={{ width: `${readingProgress.percentageRead}%` }}
></div>
</div>
</div>
<div className="grid grid-cols-2 gap-4">
<div>
<p className="text-sm text-gray-500 dark:text-gray-400">Words Read</p>
<p className="text-xl font-semibold text-green-600 dark:text-green-400">
{formatNumber(readingProgress.totalWordsRead)}
</p>
</div>
<div>
<p className="text-sm text-gray-500 dark:text-gray-400">Words Remaining</p>
<p className="text-xl font-semibold text-orange-600 dark:text-orange-400">
{formatNumber(readingProgress.totalWordsUnread)}
</p>
</div>
</div>
</div>
</section>
)}
{/* Reading Activity - Last Week */}
{readingActivity && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Last Week Activity</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="grid grid-cols-3 gap-4 mb-6">
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">Stories</p>
<p className="text-2xl font-bold text-gray-900 dark:text-white">
{formatNumber(readingActivity.storiesReadLastWeek)}
</p>
</div>
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">Words</p>
<p className="text-2xl font-bold text-gray-900 dark:text-white">
{formatNumber(readingActivity.wordsReadLastWeek)}
</p>
</div>
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">Time</p>
<p className="text-2xl font-bold text-gray-900 dark:text-white">
{formatTime(readingActivity.readingTimeMinutesLastWeek)}
</p>
</div>
</div>
{/* Daily Activity Chart */}
<div className="space-y-2">
<p className="text-sm font-medium text-gray-600 dark:text-gray-400 mb-3">Daily Breakdown</p>
{readingActivity.dailyActivity.map((day) => {
const maxWords = Math.max(...readingActivity.dailyActivity.map(d => d.wordsRead), 1);
const percentage = (day.wordsRead / maxWords) * 100;
return (
<div key={day.date} className="flex items-center gap-3">
<span className="text-xs text-gray-500 dark:text-gray-400 w-20">
{new Date(day.date).toLocaleDateString('en-US', { month: 'short', day: 'numeric' })}
</span>
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-6 relative">
<div
className="bg-blue-500 h-6 rounded-full transition-all duration-300"
style={{ width: `${percentage}%` }}
></div>
{day.storiesRead > 0 && (
<span className="absolute inset-0 flex items-center justify-center text-xs font-medium text-gray-700 dark:text-gray-300">
{day.storiesRead} {day.storiesRead === 1 ? 'story' : 'stories'}
</span>
)}
</div>
</div>
);
})}
</div>
</div>
</section>
)}
</div>
{/* Ratings & Source Domains - Side by side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8 mb-8">
{/* Rating Statistics */}
{ratingStats && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Rating Statistics</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="text-center mb-6">
<p className="text-sm text-gray-500 dark:text-gray-400 mb-1">Average Rating</p>
<p className="text-4xl font-bold text-yellow-500">
{ratingStats.averageRating.toFixed(1)}
</p>
<p className="text-sm text-gray-600 dark:text-gray-400 mt-2">
{formatNumber(ratingStats.totalRatedStories)} rated {formatNumber(ratingStats.totalUnratedStories)} unrated
</p>
</div>
{/* Rating Distribution */}
<div className="space-y-2">
{[5, 4, 3, 2, 1].map(rating => {
const count = ratingStats.ratingDistribution[rating] || 0;
const percentage = ratingStats.totalRatedStories > 0
? (count / ratingStats.totalRatedStories) * 100
: 0;
return (
<div key={rating} className="flex items-center gap-2">
<span className="text-sm font-medium text-gray-600 dark:text-gray-400 w-12">
{rating}
</span>
<div className="flex-1 bg-gray-200 dark:bg-gray-700 rounded-full h-4">
<div
className="bg-yellow-500 h-4 rounded-full transition-all duration-300"
style={{ width: `${percentage}%` }}
></div>
</div>
<span className="text-sm text-gray-600 dark:text-gray-400 w-16 text-right">
{formatNumber(count)}
</span>
</div>
);
})}
</div>
</div>
</section>
)}
{/* Source Domains */}
{sourceDomains && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Source Domains</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="grid grid-cols-2 gap-4 mb-6">
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">With Source</p>
<p className="text-2xl font-bold text-green-600 dark:text-green-400">
{formatNumber(sourceDomains.storiesWithSource)}
</p>
</div>
<div className="text-center">
<p className="text-sm text-gray-500 dark:text-gray-400">No Source</p>
<p className="text-2xl font-bold text-gray-500 dark:text-gray-400">
{formatNumber(sourceDomains.storiesWithoutSource)}
</p>
</div>
</div>
<div className="space-y-3">
<p className="text-sm font-medium text-gray-600 dark:text-gray-400">Top Domains</p>
{sourceDomains.topDomains.slice(0, 5).map((domain, index) => (
<div key={domain.domain} className="flex items-center justify-between">
<div className="flex items-center gap-2 flex-1 min-w-0">
<span className="text-sm font-medium text-gray-500 dark:text-gray-400 w-5">
{index + 1}.
</span>
<span className="text-sm text-gray-700 dark:text-gray-300 truncate" title={domain.domain}>
{domain.domain}
</span>
</div>
<span className="text-sm font-semibold text-blue-600 dark:text-blue-400 ml-2">
{formatNumber(domain.storyCount)}
</span>
</div>
))}
</div>
</div>
</section>
)}
</div>
{/* Top Tags & Top Authors - Side by side */}
<div className="grid grid-cols-1 lg:grid-cols-2 gap-8">
{/* Top Tags */}
{topTags && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Most Used Tags</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<div className="space-y-3">
{topTags.topTags.slice(0, 10).map((tag, index) => {
const maxCount = topTags.topTags[0]?.storyCount || 1;
const percentage = (tag.storyCount / maxCount) * 100;
return (
<div key={tag.tagName} className="flex items-center gap-3">
<span className="text-sm font-medium text-gray-500 dark:text-gray-400 w-6">
{index + 1}
</span>
<div className="flex-1">
<div className="flex items-center justify-between mb-1">
<span className="text-sm font-medium text-gray-700 dark:text-gray-300">
{tag.tagName}
</span>
<span className="text-sm text-gray-600 dark:text-gray-400">
{formatNumber(tag.storyCount)}
</span>
</div>
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-2">
<div
className="bg-purple-500 h-2 rounded-full transition-all duration-300"
style={{ width: `${percentage}%` }}
></div>
</div>
</div>
</div>
);
})}
</div>
</div>
</section>
)}
{/* Top Authors */}
{topAuthors && (
<section>
<h2 className="text-2xl font-semibold text-gray-800 dark:text-gray-200 mb-4">Top Authors</h2>
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
{/* Tab switcher */}
<div className="flex gap-2 mb-4">
<button
onClick={() => {/* Could add tab switching if needed */}}
className="flex-1 px-4 py-2 text-sm font-medium bg-blue-100 dark:bg-blue-900/30 text-blue-700 dark:text-blue-300 rounded-lg"
>
By Stories
</button>
<button
onClick={() => {/* Could add tab switching if needed */}}
className="flex-1 px-4 py-2 text-sm font-medium text-gray-600 dark:text-gray-400 hover:bg-gray-100 dark:hover:bg-gray-700 rounded-lg"
>
By Words
</button>
</div>
<div className="space-y-3">
{topAuthors.topAuthorsByStories.slice(0, 5).map((author, index) => (
<div key={author.authorId} className="flex items-center justify-between p-3 bg-gray-50 dark:bg-gray-700/50 rounded-lg">
<div className="flex items-center gap-3 flex-1 min-w-0">
<span className="text-lg font-bold text-gray-400 dark:text-gray-500 w-6">
{index + 1}
</span>
<div className="flex-1 min-w-0">
<p className="text-sm font-medium text-gray-900 dark:text-white truncate" title={author.authorName}>
{author.authorName}
</p>
<p className="text-xs text-gray-500 dark:text-gray-400">
{formatNumber(author.storyCount)} stories {formatNumber(author.totalWords)} words
</p>
</div>
</div>
</div>
))}
</div>
</div>
</section>
)}
</div>
</div>
);
}
export default function StatisticsPage() {
return (
<AppLayout>
<StatisticsContent />
</AppLayout>
);
}
// Reusable stat card component
function StatCard({ title, value, subtitle }: { title: string; value: string; subtitle?: string }) {
return (
<div className="bg-white dark:bg-gray-800 rounded-lg shadow p-6">
<h3 className="text-sm font-medium text-gray-500 dark:text-gray-400 mb-2">{title}</h3>
<p className="text-2xl font-bold text-gray-900 dark:text-white">{value}</p>
{subtitle && (
<p className="text-sm text-gray-600 dark:text-gray-400 mt-1">{subtitle}</p>
)}
</div>
);
}

View File

@@ -81,6 +81,12 @@ export default function Header() {
        >
          Authors
        </Link>
+       <Link
+         href="/statistics"
+         className="theme-text hover:theme-accent transition-colors font-medium"
+       >
+         Statistics
+       </Link>
        <Dropdown
          trigger="Add Story"
          items={addStoryItems}
@@ -153,6 +159,13 @@ export default function Header() {
        >
          Authors
        </Link>
+       <Link
+         href="/statistics"
+         className="theme-text hover:theme-accent transition-colors font-medium px-2 py-1"
+         onClick={() => setIsMenuOpen(false)}
+       >
+         Statistics
+       </Link>
        <div className="px-2 py-1">
          <div className="font-medium theme-text mb-1">Add Story</div>
          <div className="pl-4 space-y-1">

View File

@@ -33,11 +33,18 @@ export default function SystemSettings({}: SystemSettingsProps) {
  });

  const [databaseStatus, setDatabaseStatus] = useState<{
-   completeBackup: { loading: boolean; message: string; success?: boolean };
+   completeBackup: {
+     loading: boolean;
+     message: string;
+     success?: boolean;
+     jobId?: string;
+     progress?: number;
+     downloadReady?: boolean;
+   };
    completeRestore: { loading: boolean; message: string; success?: boolean };
    completeClear: { loading: boolean; message: string; success?: boolean };
  }>({
-   completeBackup: { loading: false, message: '' },
+   completeBackup: { loading: false, message: '', progress: 0 },
    completeRestore: { loading: false, message: '' },
    completeClear: { loading: false, message: '' }
  });
@@ -73,43 +80,117 @@ export default function SystemSettings({}: SystemSettingsProps) {
  const handleCompleteBackup = async () => {
    setDatabaseStatus(prev => ({
      ...prev,
-     completeBackup: { loading: true, message: 'Creating complete backup...', success: undefined }
+     completeBackup: { loading: true, message: 'Starting backup...', success: undefined, progress: 0, downloadReady: false }
    }));

    try {
-     const backupBlob = await databaseApi.backupComplete();
-
-     // Create download link
-     const url = window.URL.createObjectURL(backupBlob);
-     const link = document.createElement('a');
-     link.href = url;
-     const timestamp = new Date().toISOString().replace(/[:.]/g, '-').slice(0, 19);
-     link.download = `storycove_complete_backup_${timestamp}.zip`;
-     document.body.appendChild(link);
-     link.click();
-     document.body.removeChild(link);
-     window.URL.revokeObjectURL(url);
+     // Start the async backup job
+     const startResponse = await databaseApi.backupComplete();
+     const jobId = startResponse.jobId;

      setDatabaseStatus(prev => ({
        ...prev,
-       completeBackup: { loading: false, message: 'Complete backup downloaded successfully', success: true }
+       completeBackup: { ...prev.completeBackup, jobId, message: 'Backup in progress...' }
      }));

+     // Poll for progress
+     const pollInterval = setInterval(async () => {
+       try {
+         const status = await databaseApi.getBackupStatus(jobId);
+
+         if (status.status === 'COMPLETED') {
+           clearInterval(pollInterval);
+           setDatabaseStatus(prev => ({
+             ...prev,
+             completeBackup: {
+               loading: false,
+               message: 'Backup completed! Ready to download.',
+               success: true,
+               jobId,
+               progress: 100,
+               downloadReady: true
+             }
+           }));
+
+           // Clear message after 30 seconds (keep download button visible)
+           setTimeout(() => {
+             setDatabaseStatus(prev => ({
+               ...prev,
+               completeBackup: { ...prev.completeBackup, message: '' }
+             }));
+           }, 30000);
+         } else if (status.status === 'FAILED') {
+           clearInterval(pollInterval);
+           setDatabaseStatus(prev => ({
+             ...prev,
+             completeBackup: {
+               loading: false,
+               message: `Backup failed: ${status.errorMessage}`,
+               success: false,
+               progress: 0,
+               downloadReady: false
+             }
+           }));
+         } else {
+           // Update progress
+           setDatabaseStatus(prev => ({
+             ...prev,
+             completeBackup: {
+               ...prev.completeBackup,
+               progress: status.progress,
+               message: `Creating backup... ${status.progress}%`
+             }
+           }));
+         }
+       } catch (pollError: any) {
+         clearInterval(pollInterval);
+         setDatabaseStatus(prev => ({
+           ...prev,
+           completeBackup: {
+             loading: false,
+             message: `Failed to check backup status: ${pollError.message}`,
+             success: false,
+             progress: 0,
+             downloadReady: false
+           }
+         }));
+       }
+     }, 2000); // Poll every 2 seconds
    } catch (error: any) {
      setDatabaseStatus(prev => ({
        ...prev,
-       completeBackup: { loading: false, message: error.message || 'Complete backup failed', success: false }
+       completeBackup: {
+         loading: false,
+         message: error.message || 'Failed to start backup',
+         success: false,
+         progress: 0,
+         downloadReady: false
+       }
      }));
    }
+ };

-   // Clear message after 5 seconds
-   setTimeout(() => {
-     setDatabaseStatus(prev => ({
-       ...prev,
-       completeBackup: { loading: false, message: '', success: undefined }
-     }));
-   }, 5000);
+ const handleDownloadBackup = (jobId: string) => {
+   const downloadUrl = databaseApi.downloadBackup(jobId);
+   const link = document.createElement('a');
+   link.href = downloadUrl;
+   link.download = ''; // Filename will be set by server
+   document.body.appendChild(link);
+   link.click();
+   document.body.removeChild(link);
+
+   // Clear the download ready state after download
+   setDatabaseStatus(prev => ({
+     ...prev,
+     completeBackup: {
+       loading: false,
+       message: 'Backup downloaded successfully',
+       success: true,
+       progress: 100,
+       downloadReady: false
+     }
+   }));
  };
  const handleCompleteRestore = async (event: React.ChangeEvent<HTMLInputElement>) => {
@@ -792,20 +873,50 @@ export default function SystemSettings({}: SystemSettingsProps) {
<p className="text-sm theme-text mb-3"> <p className="text-sm theme-text mb-3">
Download a complete backup as a ZIP file. This includes your database AND all uploaded files (cover images, avatars). This is a comprehensive backup of your entire StoryCove installation. Download a complete backup as a ZIP file. This includes your database AND all uploaded files (cover images, avatars). This is a comprehensive backup of your entire StoryCove installation.
</p> </p>
<Button <div className="space-y-3">
onClick={handleCompleteBackup} <Button
disabled={databaseStatus.completeBackup.loading} onClick={handleCompleteBackup}
loading={databaseStatus.completeBackup.loading} disabled={databaseStatus.completeBackup.loading || databaseStatus.completeBackup.downloadReady}
variant="primary" loading={databaseStatus.completeBackup.loading}
className="w-full sm:w-auto" variant="primary"
> className="w-full sm:w-auto"
{databaseStatus.completeBackup.loading ? 'Creating Backup...' : 'Download Backup'} >
</Button> {databaseStatus.completeBackup.loading ? 'Creating Backup...' : 'Create Backup'}
</Button>
{databaseStatus.completeBackup.downloadReady && databaseStatus.completeBackup.jobId && (
<Button
onClick={() => handleDownloadBackup(databaseStatus.completeBackup.jobId!)}
variant="primary"
className="w-full sm:w-auto ml-0 sm:ml-3 bg-green-600 hover:bg-green-700"
>
Download Backup
</Button>
)}
</div>
{databaseStatus.completeBackup.loading && databaseStatus.completeBackup.progress !== undefined && (
<div className="mt-3">
<div className="flex justify-between text-sm theme-text mb-1">
<span>Progress</span>
<span>{databaseStatus.completeBackup.progress}%</span>
</div>
<div className="w-full bg-gray-200 dark:bg-gray-700 rounded-full h-2.5">
<div
className="bg-blue-600 dark:bg-blue-500 h-2.5 rounded-full transition-all duration-300"
style={{ width: `${databaseStatus.completeBackup.progress}%` }}
></div>
</div>
</div>
)}
{databaseStatus.completeBackup.message && ( {databaseStatus.completeBackup.message && (
<div className={`text-sm p-2 rounded mt-3 ${ <div className={`text-sm p-2 rounded mt-3 ${
databaseStatus.completeBackup.success databaseStatus.completeBackup.success
? 'bg-green-50 dark:bg-green-900/20 text-green-800 dark:text-green-200' ? 'bg-green-50 dark:bg-green-900/20 text-green-800 dark:text-green-200'
: 'bg-red-50 dark:bg-red-900/20 text-red-800 dark:text-red-200' : databaseStatus.completeBackup.success === false
? 'bg-red-50 dark:bg-red-900/20 text-red-800 dark:text-red-200'
: 'bg-blue-50 dark:bg-blue-900/20 text-blue-800 dark:text-blue-200'
}`}> }`}>
{databaseStatus.completeBackup.message} {databaseStatus.completeBackup.message}
</div> </div>

View File

@@ -114,9 +114,10 @@ const htmlToSlate = (html: string): Descendant[] => {
const img = element as HTMLImageElement;
results.push({
  type: 'image',
- src: img.src || img.getAttribute('src') || '',
- alt: img.alt || img.getAttribute('alt') || '',
- caption: img.title || img.getAttribute('title') || '',
+ // Use getAttribute to preserve relative URLs instead of .src which converts to absolute
+ src: img.getAttribute('src') || '',
+ alt: img.getAttribute('alt') || '',
+ caption: img.getAttribute('title') || '',
  children: [{ text: '' }] // Images need children in Slate
});
break;

View File

@@ -1013,10 +1013,47 @@ export const databaseApi = {
    return response.data;
  },

- backupComplete: async (): Promise<Blob> => {
-   const response = await api.post('/database/backup-complete', {}, {
-     responseType: 'blob'
-   });
+ backupComplete: async (): Promise<{ success: boolean; jobId: string; status: string; message: string }> => {
+   const response = await api.post('/database/backup-complete');
+   return response.data;
+ },
+
+ getBackupStatus: async (jobId: string): Promise<{
+   success: boolean;
+   jobId: string;
+   status: string;
+   progress: number;
+   fileSizeBytes: number;
+   createdAt: string;
+   completedAt: string;
+   errorMessage: string;
+ }> => {
+   const response = await api.get(`/database/backup-status/${jobId}`);
+   return response.data;
+ },
+
+ downloadBackup: (jobId: string): string => {
+   return `/api/database/backup-download/${jobId}`;
+ },
+
+ listBackups: async (): Promise<{
+   success: boolean;
+   backups: Array<{
+     jobId: string;
+     type: string;
+     status: string;
+     progress: number;
+     fileSizeBytes: number;
+     createdAt: string;
+     completedAt: string;
+   }>;
+ }> => {
+   const response = await api.get('/database/backup-list');
+   return response.data;
+ },
+
+ deleteBackup: async (jobId: string): Promise<{ success: boolean; message: string }> => {
+   const response = await api.delete(`/database/backup/${jobId}`);
    return response.data;
  },
@@ -1053,6 +1090,50 @@ export const clearLibraryCache = (): void => {
  currentLibraryId = null;
};
// Library statistics endpoints
export const statisticsApi = {
getOverviewStatistics: async (libraryId: string): Promise<import('../types/api').LibraryOverviewStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/overview`);
return response.data;
},
getTopTags: async (libraryId: string, limit: number = 20): Promise<import('../types/api').TopTagsStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/top-tags`, {
params: { limit }
});
return response.data;
},
getTopAuthors: async (libraryId: string, limit: number = 10): Promise<import('../types/api').TopAuthorsStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/top-authors`, {
params: { limit }
});
return response.data;
},
getRatingStats: async (libraryId: string): Promise<import('../types/api').RatingStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/ratings`);
return response.data;
},
getSourceDomainStats: async (libraryId: string, limit: number = 10): Promise<import('../types/api').SourceDomainStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/source-domains`, {
params: { limit }
});
return response.data;
},
getReadingProgress: async (libraryId: string): Promise<import('../types/api').ReadingProgressStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/reading-progress`);
return response.data;
},
getReadingActivity: async (libraryId: string): Promise<import('../types/api').ReadingActivityStats> => {
const response = await api.get(`/libraries/${libraryId}/statistics/reading-activity`);
return response.data;
},
};
// Image utility - now library-aware
export const getImageUrl = (path: string): string => {
  if (!path) return '';

View File

@@ -205,3 +205,99 @@ export interface FilterPreset {
  filters: Partial<AdvancedFilters>;
  category: 'length' | 'date' | 'rating' | 'reading' | 'content' | 'organization';
}
// Library Statistics
export interface LibraryOverviewStats {
// Collection Overview
totalStories: number;
totalAuthors: number;
totalSeries: number;
totalTags: number;
totalCollections: number;
uniqueSourceDomains: number;
// Content Metrics
totalWordCount: number;
averageWordsPerStory: number;
longestStory: StoryWordCount | null;
shortestStory: StoryWordCount | null;
// Reading Time
totalReadingTimeMinutes: number;
averageReadingTimeMinutes: number;
}
export interface StoryWordCount {
id: string;
title: string;
authorName: string;
wordCount: number;
readingTimeMinutes: number;
}
// Top Tags Statistics
export interface TopTagsStats {
topTags: TagStats[];
}
export interface TagStats {
tagName: string;
storyCount: number;
}
// Top Authors Statistics
export interface TopAuthorsStats {
topAuthorsByStories: AuthorStats[];
topAuthorsByWords: AuthorStats[];
}
export interface AuthorStats {
authorId: string;
authorName: string;
storyCount: number;
totalWords: number;
}
// Rating Statistics
export interface RatingStats {
averageRating: number;
totalRatedStories: number;
totalUnratedStories: number;
ratingDistribution: Record<number, number>; // rating -> count
}
// Source Domain Statistics
export interface SourceDomainStats {
topDomains: DomainStats[];
storiesWithSource: number;
storiesWithoutSource: number;
}
export interface DomainStats {
domain: string;
storyCount: number;
}
// Reading Progress Statistics
export interface ReadingProgressStats {
totalStories: number;
readStories: number;
unreadStories: number;
percentageRead: number;
totalWordsRead: number;
totalWordsUnread: number;
}
// Reading Activity Statistics
export interface ReadingActivityStats {
storiesReadLastWeek: number;
wordsReadLastWeek: number;
readingTimeMinutesLastWeek: number;
dailyActivity: DailyActivity[];
}
export interface DailyActivity {
date: string; // YYYY-MM-DD
storiesRead: number;
wordsRead: number;
}

File diff suppressed because one or more lines are too long

View File

@@ -112,6 +112,13 @@
<field name="searchScore" type="pdouble" indexed="false" stored="true"/> <field name="searchScore" type="pdouble" indexed="false" stored="true"/>
<field name="highlights" type="strings" indexed="false" stored="true"/> <field name="highlights" type="strings" indexed="false" stored="true"/>
<!-- Statistics-specific Fields -->
<field name="hasDescription" type="boolean" indexed="true" stored="true"/>
<field name="hasCoverImage" type="boolean" indexed="true" stored="true"/>
<field name="hasRating" type="boolean" indexed="true" stored="true"/>
<field name="sourceDomain" type="string" indexed="true" stored="true"/>
<field name="tagCount" type="pint" indexed="true" stored="true"/>
<!-- Combined search field for general queries -->
<field name="text" type="text_general" indexed="true" stored="false" multiValued="true"/>