Compare commits
6 Commits
feature/co... (c08082c0d6)
| SHA1 |
|---|
| c08082c0d6 |
| 860bf02d56 |
| a501b27169 |
| fcad028959 |
| f95d7aa8bb |
| 5e8164c6a4 |
Binary file not shown. (Image; size before: 37 KiB)
backend/backend.log (new file, 1 line)
@@ -0,0 +1 @@
(eval):1: no such file or directory: ./mvnw
@@ -65,10 +65,12 @@ public class AuthorController {
|
||||
|
||||
@PostMapping
|
||||
public ResponseEntity<AuthorDto> createAuthor(@Valid @RequestBody CreateAuthorRequest request) {
|
||||
logger.info("Creating new author: {}", request.getName());
|
||||
Author author = new Author();
|
||||
updateAuthorFromRequest(author, request);
|
||||
|
||||
Author savedAuthor = authorService.create(author);
|
||||
logger.info("Successfully created author: {} (ID: {})", savedAuthor.getName(), savedAuthor.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedAuthor));
|
||||
}
|
||||
|
||||
@@ -81,13 +83,7 @@ public class AuthorController {
|
||||
@RequestParam(required = false, name = "authorRating") Integer rating,
|
||||
@RequestParam(required = false, name = "avatar") MultipartFile avatarFile) {
|
||||
|
||||
System.out.println("DEBUG: MULTIPART PUT called with:");
|
||||
System.out.println(" - name: " + name);
|
||||
System.out.println(" - notes: " + notes);
|
||||
System.out.println(" - urls: " + urls);
|
||||
System.out.println(" - rating: " + rating);
|
||||
System.out.println(" - avatar: " + (avatarFile != null ? avatarFile.getOriginalFilename() : "null"));
|
||||
|
||||
logger.info("Updating author with multipart data (ID: {})", id);
|
||||
try {
|
||||
Author existingAuthor = authorService.findById(id);
|
||||
|
||||
@@ -104,7 +100,6 @@ public class AuthorController {
|
||||
|
||||
// Handle rating update
|
||||
if (rating != null) {
|
||||
System.out.println("DEBUG: Setting author rating via PUT: " + rating);
|
||||
existingAuthor.setAuthorRating(rating);
|
||||
}
|
||||
|
||||
@@ -115,6 +110,7 @@ public class AuthorController {
|
||||
}
|
||||
|
||||
Author updatedAuthor = authorService.update(id, existingAuthor);
|
||||
logger.info("Successfully updated author: {} via multipart", updatedAuthor.getName());
|
||||
return ResponseEntity.ok(convertToDto(updatedAuthor));
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -125,31 +121,27 @@ public class AuthorController {
|
||||
@PutMapping(value = "/{id}", consumes = "application/json")
|
||||
public ResponseEntity<AuthorDto> updateAuthorJson(@PathVariable UUID id,
|
||||
@Valid @RequestBody UpdateAuthorRequest request) {
|
||||
System.out.println("DEBUG: JSON PUT called with:");
|
||||
System.out.println(" - name: " + request.getName());
|
||||
System.out.println(" - notes: " + request.getNotes());
|
||||
System.out.println(" - urls: " + request.getUrls());
|
||||
System.out.println(" - rating: " + request.getRating());
|
||||
logger.info("Updating author with JSON data: {} (ID: {})", request.getName(), id);
|
||||
|
||||
Author existingAuthor = authorService.findById(id);
|
||||
updateAuthorFromRequest(existingAuthor, request);
|
||||
|
||||
Author updatedAuthor = authorService.update(id, existingAuthor);
|
||||
logger.info("Successfully updated author: {} via JSON", updatedAuthor.getName());
|
||||
return ResponseEntity.ok(convertToDto(updatedAuthor));
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
public ResponseEntity<String> updateAuthorGeneric(@PathVariable UUID id, HttpServletRequest request) {
|
||||
System.out.println("DEBUG: GENERIC PUT called!");
|
||||
System.out.println(" - Content-Type: " + request.getContentType());
|
||||
System.out.println(" - Method: " + request.getMethod());
|
||||
|
||||
return ResponseEntity.status(415).body("Unsupported Media Type. Expected multipart/form-data or application/json");
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<?> deleteAuthor(@PathVariable UUID id) {
|
||||
logger.info("Deleting author with ID: {}", id);
|
||||
authorService.delete(id);
|
||||
logger.info("Successfully deleted author with ID: {}", id);
|
||||
return ResponseEntity.ok(Map.of("message", "Author deleted successfully"));
|
||||
}
|
||||
|
||||
@@ -177,11 +169,8 @@ public class AuthorController {
|
||||
|
||||
@PostMapping("/{id}/rating")
|
||||
public ResponseEntity<AuthorDto> rateAuthor(@PathVariable UUID id, @RequestBody RatingRequest request) {
|
||||
System.out.println("DEBUG: Rating author " + id + " with rating " + request.getRating());
|
||||
Author author = authorService.setRating(id, request.getRating());
|
||||
System.out.println("DEBUG: After setRating, author rating is: " + author.getAuthorRating());
|
||||
AuthorDto dto = convertToDto(author);
|
||||
System.out.println("DEBUG: Final DTO rating is: " + dto.getAuthorRating());
|
||||
return ResponseEntity.ok(dto);
|
||||
}
|
||||
|
||||
@@ -211,9 +200,7 @@ public class AuthorController {
|
||||
@PostMapping("/{id}/test-rating/{rating}")
|
||||
public ResponseEntity<Map<String, Object>> testSetRating(@PathVariable UUID id, @PathVariable Integer rating) {
|
||||
try {
|
||||
System.out.println("DEBUG: Test setting rating " + rating + " for author " + id);
|
||||
Author author = authorService.setRating(id, rating);
|
||||
System.out.println("DEBUG: After test setRating, got: " + author.getAuthorRating());
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
@@ -231,13 +218,11 @@ public class AuthorController {
|
||||
@PostMapping("/{id}/test-put-rating")
|
||||
public ResponseEntity<Map<String, Object>> testPutWithRating(@PathVariable UUID id, @RequestParam Integer rating) {
|
||||
try {
|
||||
System.out.println("DEBUG: Test PUT with rating " + rating + " for author " + id);
|
||||
|
||||
Author existingAuthor = authorService.findById(id);
|
||||
existingAuthor.setAuthorRating(rating);
|
||||
Author updatedAuthor = authorService.update(id, existingAuthor);
|
||||
|
||||
System.out.println("DEBUG: After PUT update, rating is: " + updatedAuthor.getAuthorRating());
|
||||
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
@@ -389,7 +374,6 @@ public class AuthorController {
|
||||
author.setUrls(updateReq.getUrls());
|
||||
}
|
||||
if (updateReq.getRating() != null) {
|
||||
System.out.println("DEBUG: Setting author rating via JSON: " + updateReq.getRating());
|
||||
author.setAuthorRating(updateReq.getRating());
|
||||
}
|
||||
}
|
||||
@@ -402,9 +386,6 @@ public class AuthorController {
|
||||
dto.setNotes(author.getNotes());
|
||||
dto.setAvatarImagePath(author.getAvatarImagePath());
|
||||
|
||||
// Debug logging for author rating
|
||||
System.out.println("DEBUG: Converting author " + author.getName() +
|
||||
" with rating: " + author.getAuthorRating());
|
||||
|
||||
dto.setAuthorRating(author.getAuthorRating());
|
||||
dto.setUrls(author.getUrls());
|
||||
@@ -415,7 +396,6 @@ public class AuthorController {
|
||||
// Calculate and set average story rating
|
||||
dto.setAverageStoryRating(authorService.calculateAverageStoryRating(author.getId()));
|
||||
|
||||
System.out.println("DEBUG: DTO authorRating set to: " + dto.getAuthorRating());
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
@@ -7,6 +7,8 @@ import com.storycove.entity.Story;
|
||||
import com.storycove.entity.Tag;
|
||||
import com.storycove.service.CollectionService;
|
||||
import com.storycove.service.ImageService;
|
||||
import com.storycove.service.ReadingTimeService;
|
||||
import com.storycove.service.TypesenseService;
|
||||
import jakarta.validation.Valid;
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
@@ -28,12 +30,18 @@ public class CollectionController {
|
||||
|
||||
private final CollectionService collectionService;
|
||||
private final ImageService imageService;
|
||||
private final TypesenseService typesenseService;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
|
||||
@Autowired
|
||||
public CollectionController(CollectionService collectionService,
|
||||
ImageService imageService) {
|
||||
ImageService imageService,
|
||||
@Autowired(required = false) TypesenseService typesenseService,
|
||||
ReadingTimeService readingTimeService) {
|
||||
this.collectionService = collectionService;
|
||||
this.imageService = imageService;
|
||||
this.typesenseService = typesenseService;
|
||||
this.readingTimeService = readingTimeService;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -48,8 +56,6 @@ public class CollectionController {
|
||||
@RequestParam(required = false) List<String> tags,
|
||||
@RequestParam(defaultValue = "false") boolean archived) {
|
||||
|
||||
logger.info("COLLECTIONS: Search request - search='{}', tags={}, archived={}, page={}, limit={}",
|
||||
search, tags, archived, page, limit);
|
||||
|
||||
// MANDATORY: Use Typesense for all search/filter operations
|
||||
SearchResultDto<Collection> results = collectionService.searchCollections(search, tags, archived, page, limit);
|
||||
@@ -86,13 +92,14 @@ public class CollectionController {
|
||||
*/
|
||||
@PostMapping
|
||||
public ResponseEntity<Collection> createCollection(@Valid @RequestBody CreateCollectionRequest request) {
|
||||
logger.info("Creating new collection: {}", request.getName());
|
||||
Collection collection = collectionService.createCollection(
|
||||
request.getName(),
|
||||
request.getDescription(),
|
||||
request.getTagNames(),
|
||||
request.getStoryIds()
|
||||
);
|
||||
|
||||
logger.info("Successfully created collection: {} (ID: {})", collection.getName(), collection.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(collection);
|
||||
}
|
||||
|
||||
@@ -107,6 +114,7 @@ public class CollectionController {
|
||||
@RequestParam(required = false) List<UUID> storyIds,
|
||||
@RequestParam(required = false, name = "coverImage") MultipartFile coverImage) {
|
||||
|
||||
logger.info("Creating new collection with image: {}", name);
|
||||
try {
|
||||
// Create collection first
|
||||
Collection collection = collectionService.createCollection(name, description, tags, storyIds);
|
||||
@@ -120,6 +128,7 @@ public class CollectionController {
|
||||
);
|
||||
}
|
||||
|
||||
logger.info("Successfully created collection with image: {} (ID: {})", collection.getName(), collection.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(collection);
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -152,7 +161,9 @@ public class CollectionController {
|
||||
*/
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<Map<String, String>> deleteCollection(@PathVariable UUID id) {
|
||||
logger.info("Deleting collection with ID: {}", id);
|
||||
collectionService.deleteCollection(id);
|
||||
logger.info("Successfully deleted collection with ID: {}", id);
|
||||
return ResponseEntity.ok(Map.of("message", "Collection deleted successfully"));
|
||||
}
|
||||
|
||||
@@ -270,6 +281,35 @@ public class CollectionController {
|
||||
return ResponseEntity.ok(Map.of("message", "Cover removed successfully"));
|
||||
}
|
||||
|
||||
/**
|
||||
* POST /api/collections/reindex-typesense - Reindex all collections in Typesense
|
||||
*/
|
||||
@PostMapping("/reindex-typesense")
|
||||
public ResponseEntity<Map<String, Object>> reindexCollectionsTypesense() {
|
||||
try {
|
||||
List<Collection> allCollections = collectionService.findAllWithTags();
|
||||
if (typesenseService != null) {
|
||||
typesenseService.reindexAllCollections(allCollections);
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", true,
|
||||
"message", "Successfully reindexed all collections",
|
||||
"count", allCollections.size()
|
||||
));
|
||||
} else {
|
||||
return ResponseEntity.ok(Map.of(
|
||||
"success", false,
|
||||
"message", "Typesense service not available"
|
||||
));
|
||||
}
|
||||
} catch (Exception e) {
|
||||
logger.error("Failed to reindex collections", e);
|
||||
return ResponseEntity.badRequest().body(Map.of(
|
||||
"success", false,
|
||||
"error", e.getMessage()
|
||||
));
|
||||
}
|
||||
}
|
||||
|
||||
// Mapper methods
|
||||
|
||||
private CollectionDto mapToCollectionDto(Collection collection) {
|
||||
@@ -290,6 +330,11 @@ public class CollectionController {
|
||||
.toList());
|
||||
}
|
||||
|
||||
// Map tag names for search results
|
||||
if (collection.getTagNames() != null) {
|
||||
dto.setTagNames(collection.getTagNames());
|
||||
}
|
||||
|
||||
// Map collection stories (lightweight)
|
||||
if (collection.getCollectionStories() != null) {
|
||||
dto.setCollectionStories(collection.getCollectionStories().stream()
|
||||
@@ -300,7 +345,7 @@ public class CollectionController {
|
||||
// Set calculated properties
|
||||
dto.setStoryCount(collection.getStoryCount());
|
||||
dto.setTotalWordCount(collection.getTotalWordCount());
|
||||
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime());
|
||||
dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
|
||||
dto.setAverageStoryRating(collection.getAverageStoryRating());
|
||||
|
||||
return dto;
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.dto.HtmlSanitizationConfigDto;
|
||||
import com.storycove.service.HtmlSanitizationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.*;
|
||||
|
||||
import java.util.Map;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/config")
|
||||
public class ConfigController {
|
||||
|
||||
private final HtmlSanitizationService htmlSanitizationService;
|
||||
|
||||
@Value("${app.reading.speed.default:200}")
|
||||
private int defaultReadingSpeed;
|
||||
|
||||
@Autowired
|
||||
public ConfigController(HtmlSanitizationService htmlSanitizationService) {
|
||||
this.htmlSanitizationService = htmlSanitizationService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HTML sanitization configuration for frontend use
|
||||
* This allows the frontend to use the same sanitization rules as the backend
|
||||
*/
|
||||
@GetMapping("/html-sanitization")
|
||||
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
|
||||
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
|
||||
return ResponseEntity.ok(config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get application settings configuration
|
||||
*/
|
||||
@GetMapping("/settings")
|
||||
public ResponseEntity<Map<String, Object>> getSettings() {
|
||||
Map<String, Object> settings = Map.of(
|
||||
"defaultReadingSpeed", defaultReadingSpeed
|
||||
);
|
||||
return ResponseEntity.ok(settings);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get reading speed for calculation purposes
|
||||
*/
|
||||
@GetMapping("/reading-speed")
|
||||
public ResponseEntity<Map<String, Integer>> getReadingSpeed() {
|
||||
return ResponseEntity.ok(Map.of("wordsPerMinute", defaultReadingSpeed));
|
||||
}
|
||||
}
|
||||
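The new ConfigController only exposes read-only configuration. As a rough sketch of how a client might call it (host, port, and the printed JSON shape are assumptions, not part of this change):

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ConfigClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();

        // GET /api/config/reading-speed -> e.g. {"wordsPerMinute":200} (assumed default from app.reading.speed.default)
        HttpRequest readingSpeed = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/config/reading-speed")) // host/port assumed
                .GET()
                .build();
        System.out.println(client.send(readingSpeed, HttpResponse.BodyHandlers.ofString()).body());

        // GET /api/config/settings -> e.g. {"defaultReadingSpeed":200}
        HttpRequest settings = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/api/config/settings"))
                .GET()
                .build();
        System.out.println(client.send(settings, HttpResponse.BodyHandlers.ofString()).body());
    }
}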
@@ -1,31 +0,0 @@
|
||||
package com.storycove.controller;
|
||||
|
||||
import com.storycove.dto.HtmlSanitizationConfigDto;
|
||||
import com.storycove.service.HtmlSanitizationService;
|
||||
import org.springframework.beans.factory.annotation.Autowired;
|
||||
import org.springframework.http.ResponseEntity;
|
||||
import org.springframework.web.bind.annotation.GetMapping;
|
||||
import org.springframework.web.bind.annotation.RequestMapping;
|
||||
import org.springframework.web.bind.annotation.RestController;
|
||||
|
||||
@RestController
|
||||
@RequestMapping("/api/config")
|
||||
public class HtmlSanitizationController {
|
||||
|
||||
private final HtmlSanitizationService htmlSanitizationService;
|
||||
|
||||
@Autowired
|
||||
public HtmlSanitizationController(HtmlSanitizationService htmlSanitizationService) {
|
||||
this.htmlSanitizationService = htmlSanitizationService;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the HTML sanitization configuration for frontend use
|
||||
* This allows the frontend to use the same sanitization rules as the backend
|
||||
*/
|
||||
@GetMapping("/html-sanitization")
|
||||
public ResponseEntity<HtmlSanitizationConfigDto> getHtmlSanitizationConfig() {
|
||||
HtmlSanitizationConfigDto config = htmlSanitizationService.getConfiguration();
|
||||
return ResponseEntity.ok(config);
|
||||
}
|
||||
}
|
||||
@@ -41,6 +41,7 @@ public class StoryController {
|
||||
private final ImageService imageService;
|
||||
private final TypesenseService typesenseService;
|
||||
private final CollectionService collectionService;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
|
||||
public StoryController(StoryService storyService,
|
||||
AuthorService authorService,
|
||||
@@ -48,7 +49,8 @@ public class StoryController {
|
||||
HtmlSanitizationService sanitizationService,
|
||||
ImageService imageService,
|
||||
CollectionService collectionService,
|
||||
@Autowired(required = false) TypesenseService typesenseService) {
|
||||
@Autowired(required = false) TypesenseService typesenseService,
|
||||
ReadingTimeService readingTimeService) {
|
||||
this.storyService = storyService;
|
||||
this.authorService = authorService;
|
||||
this.seriesService = seriesService;
|
||||
@@ -56,6 +58,7 @@ public class StoryController {
|
||||
this.imageService = imageService;
|
||||
this.collectionService = collectionService;
|
||||
this.typesenseService = typesenseService;
|
||||
this.readingTimeService = readingTimeService;
|
||||
}
|
||||
|
||||
@GetMapping
|
||||
@@ -83,23 +86,29 @@ public class StoryController {
|
||||
|
||||
@PostMapping
|
||||
public ResponseEntity<StoryDto> createStory(@Valid @RequestBody CreateStoryRequest request) {
|
||||
logger.info("Creating new story: {}", request.getTitle());
|
||||
Story story = new Story();
|
||||
updateStoryFromRequest(story, request);
|
||||
|
||||
Story savedStory = storyService.createWithTagNames(story, request.getTagNames());
|
||||
logger.info("Successfully created story: {} (ID: {})", savedStory.getTitle(), savedStory.getId());
|
||||
return ResponseEntity.status(HttpStatus.CREATED).body(convertToDto(savedStory));
|
||||
}
|
||||
|
||||
@PutMapping("/{id}")
|
||||
public ResponseEntity<StoryDto> updateStory(@PathVariable UUID id,
|
||||
@Valid @RequestBody UpdateStoryRequest request) {
|
||||
logger.info("Updating story: {} (ID: {})", request.getTitle(), id);
|
||||
Story updatedStory = storyService.updateWithTagNames(id, request);
|
||||
logger.info("Successfully updated story: {}", updatedStory.getTitle());
|
||||
return ResponseEntity.ok(convertToDto(updatedStory));
|
||||
}
|
||||
|
||||
@DeleteMapping("/{id}")
|
||||
public ResponseEntity<?> deleteStory(@PathVariable UUID id) {
|
||||
logger.info("Deleting story with ID: {}", id);
|
||||
storyService.delete(id);
|
||||
logger.info("Successfully deleted story with ID: {}", id);
|
||||
return ResponseEntity.ok(Map.of("message", "Story deleted successfully"));
|
||||
}
|
||||
|
||||
@@ -143,6 +152,20 @@ public class StoryController {
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
@PostMapping("/{id}/reading-progress")
|
||||
public ResponseEntity<StoryDto> updateReadingProgress(@PathVariable UUID id, @RequestBody ReadingProgressRequest request) {
|
||||
logger.info("Updating reading progress for story {} to position {}", id, request.getPosition());
|
||||
Story story = storyService.updateReadingProgress(id, request.getPosition());
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
@PostMapping("/{id}/reading-status")
|
||||
public ResponseEntity<StoryDto> updateReadingStatus(@PathVariable UUID id, @RequestBody ReadingStatusRequest request) {
|
||||
logger.info("Updating reading status for story {} to {}", id, request.getIsRead() ? "read" : "unread");
|
||||
Story story = storyService.updateReadingStatus(id, request.getIsRead());
|
||||
return ResponseEntity.ok(convertToDto(story));
|
||||
}
|
||||
|
||||
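The reading-progress and reading-status endpoints take the small JSON bodies defined by ReadingProgressRequest and ReadingStatusRequest further down in this diff. A minimal client sketch, with the story ID, host, and port as placeholders:

import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;

public class ReadingProgressClientSketch {
    public static void main(String[] args) throws Exception {
        HttpClient client = HttpClient.newHttpClient();
        String storyId = "00000000-0000-0000-0000-000000000000"; // placeholder UUID
        String base = "http://localhost:8080/api/stories/" + storyId; // host/port assumed

        // POST /api/stories/{id}/reading-progress with {"position": 1234}
        HttpRequest progress = HttpRequest.newBuilder()
                .uri(URI.create(base + "/reading-progress"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("{\"position\": 1234}"))
                .build();
        client.send(progress, HttpResponse.BodyHandlers.ofString());

        // POST /api/stories/{id}/reading-status with {"isRead": true}
        HttpRequest status = HttpRequest.newBuilder()
                .uri(URI.create(base + "/reading-status"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString("{\"isRead\": true}"))
                .build();
        client.send(status, HttpResponse.BodyHandlers.ofString());
    }
}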
@PostMapping("/reindex")
|
||||
public ResponseEntity<String> manualReindex() {
|
||||
if (typesenseService == null) {
|
||||
@@ -209,7 +232,6 @@ public class StoryController {
|
||||
@RequestParam(required = false) String sortBy,
|
||||
@RequestParam(required = false) String sortDir) {
|
||||
|
||||
logger.info("CONTROLLER DEBUG: Search request - query='{}', tags={}, authors={}", query, tags, authors);
|
||||
|
||||
if (typesenseService != null) {
|
||||
SearchResultDto<StorySearchDto> results = typesenseService.searchStories(
|
||||
@@ -394,6 +416,11 @@ public class StoryController {
|
||||
dto.setCreatedAt(story.getCreatedAt());
|
||||
dto.setUpdatedAt(story.getUpdatedAt());
|
||||
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
dto.setAuthorId(story.getAuthor().getId());
|
||||
dto.setAuthorName(story.getAuthor().getName());
|
||||
@@ -426,6 +453,11 @@ public class StoryController {
|
||||
dto.setUpdatedAt(story.getUpdatedAt());
|
||||
dto.setPartOfSeries(story.isPartOfSeries());
|
||||
|
||||
// Reading progress fields
|
||||
dto.setIsRead(story.getIsRead());
|
||||
dto.setReadingPosition(story.getReadingPosition());
|
||||
dto.setLastReadAt(story.getLastReadAt());
|
||||
|
||||
if (story.getAuthor() != null) {
|
||||
dto.setAuthorId(story.getAuthor().getId());
|
||||
dto.setAuthorName(story.getAuthor().getName());
|
||||
@@ -467,12 +499,40 @@ public class StoryController {
|
||||
// to avoid circular references and keep it lightweight
|
||||
dto.setStoryCount(collection.getStoryCount());
|
||||
dto.setTotalWordCount(collection.getTotalWordCount());
|
||||
dto.setEstimatedReadingTime(collection.getEstimatedReadingTime());
|
||||
dto.setEstimatedReadingTime(readingTimeService.calculateReadingTime(collection.getTotalWordCount()));
|
||||
dto.setAverageStoryRating(collection.getAverageStoryRating());
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
@GetMapping("/check-duplicate")
|
||||
public ResponseEntity<Map<String, Object>> checkDuplicate(
|
||||
@RequestParam String title,
|
||||
@RequestParam String authorName) {
|
||||
try {
|
||||
List<Story> duplicates = storyService.findPotentialDuplicates(title, authorName);
|
||||
|
||||
Map<String, Object> response = Map.of(
|
||||
"hasDuplicates", !duplicates.isEmpty(),
|
||||
"count", duplicates.size(),
|
||||
"duplicates", duplicates.stream()
|
||||
.map(story -> Map.of(
|
||||
"id", story.getId(),
|
||||
"title", story.getTitle(),
|
||||
"authorName", story.getAuthor() != null ? story.getAuthor().getName() : "",
|
||||
"createdAt", story.getCreatedAt()
|
||||
))
|
||||
.collect(Collectors.toList())
|
||||
);
|
||||
|
||||
return ResponseEntity.ok(response);
|
||||
} catch (Exception e) {
|
||||
logger.error("Error checking for duplicates", e);
|
||||
return ResponseEntity.status(HttpStatus.INTERNAL_SERVER_ERROR)
|
||||
.body(Map.of("error", "Failed to check for duplicates"));
|
||||
}
|
||||
}
|
||||
|
||||
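The duplicate check is a plain GET with two query parameters and a summary map in response. A sketch of the request URL and the response shape, with all values illustrative:

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class DuplicateCheckSketch {
    public static void main(String[] args) {
        // GET /api/stories/check-duplicate?title=...&authorName=... (values illustrative)
        String url = "http://localhost:8080/api/stories/check-duplicate"
                + "?title=" + URLEncoder.encode("My Story", StandardCharsets.UTF_8)
                + "&authorName=" + URLEncoder.encode("Jane Doe", StandardCharsets.UTF_8);
        System.out.println(url);
        // Response shape: {"hasDuplicates": true, "count": 1,
        //   "duplicates": [{"id": "<uuid>", "title": "My Story",
        //                   "authorName": "Jane Doe", "createdAt": "<timestamp>"}]}
    }
}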
// Request DTOs
|
||||
public static class CreateStoryRequest {
|
||||
private String title;
|
||||
|
||||
@@ -132,17 +132,39 @@ public class TagController {
|
||||
return ResponseEntity.ok(stats);
|
||||
}
|
||||
|
||||
@GetMapping("/collections")
|
||||
public ResponseEntity<List<TagDto>> getTagsUsedByCollections() {
|
||||
List<Tag> tags = tagService.findTagsUsedByCollections();
|
||||
List<TagDto> tagDtos = tags.stream()
|
||||
.map(this::convertToDtoWithCollectionCount)
|
||||
.collect(Collectors.toList());
|
||||
|
||||
return ResponseEntity.ok(tagDtos);
|
||||
}
|
||||
|
||||
private TagDto convertToDto(Tag tag) {
|
||||
TagDto dto = new TagDto();
|
||||
dto.setId(tag.getId());
|
||||
dto.setName(tag.getName());
|
||||
dto.setStoryCount(tag.getStories() != null ? tag.getStories().size() : 0);
|
||||
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
|
||||
dto.setCreatedAt(tag.getCreatedAt());
|
||||
// updatedAt field not present in Tag entity per spec
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
private TagDto convertToDtoWithCollectionCount(Tag tag) {
|
||||
TagDto dto = new TagDto();
|
||||
dto.setId(tag.getId());
|
||||
dto.setName(tag.getName());
|
||||
dto.setCollectionCount(tag.getCollections() != null ? tag.getCollections().size() : 0);
|
||||
dto.setCreatedAt(tag.getCreatedAt());
|
||||
// Don't set storyCount for collection-focused endpoint
|
||||
|
||||
return dto;
|
||||
}
|
||||
|
||||
// Request DTOs
|
||||
public static class CreateTagRequest {
|
||||
private String name;
|
||||
|
||||
@@ -16,6 +16,7 @@ public class CollectionDto {
|
||||
private String coverImagePath;
|
||||
private Boolean isArchived;
|
||||
private List<TagDto> tags;
|
||||
private List<String> tagNames; // For search results
|
||||
private List<CollectionStoryDto> collectionStories;
|
||||
private Integer storyCount;
|
||||
private Integer totalWordCount;
|
||||
@@ -83,6 +84,14 @@ public class CollectionDto {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public List<String> getTagNames() {
|
||||
return tagNames;
|
||||
}
|
||||
|
||||
public void setTagNames(List<String> tagNames) {
|
||||
this.tagNames = tagNames;
|
||||
}
|
||||
|
||||
public List<CollectionStoryDto> getCollectionStories() {
|
||||
return collectionStories;
|
||||
}
|
||||
|
||||
backend/src/main/java/com/storycove/dto/FacetCountDto.java (new file, 31 lines)
@@ -0,0 +1,31 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
public class FacetCountDto {
|
||||
|
||||
private String value;
|
||||
private int count;
|
||||
|
||||
public FacetCountDto() {}
|
||||
|
||||
public FacetCountDto(String value, int count) {
|
||||
this.value = value;
|
||||
this.count = count;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public String getValue() {
|
||||
return value;
|
||||
}
|
||||
|
||||
public void setValue(String value) {
|
||||
this.value = value;
|
||||
}
|
||||
|
||||
public int getCount() {
|
||||
return count;
|
||||
}
|
||||
|
||||
public void setCount(int count) {
|
||||
this.count = count;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.Min;
|
||||
|
||||
public class ReadingProgressRequest {
|
||||
|
||||
@Min(value = 0, message = "Reading position must be non-negative")
|
||||
private Integer position;
|
||||
|
||||
public ReadingProgressRequest() {}
|
||||
|
||||
public ReadingProgressRequest(Integer position) {
|
||||
this.position = position;
|
||||
}
|
||||
|
||||
public Integer getPosition() {
|
||||
return position;
|
||||
}
|
||||
|
||||
public void setPosition(Integer position) {
|
||||
this.position = position;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,23 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import jakarta.validation.constraints.NotNull;
|
||||
|
||||
public class ReadingStatusRequest {
|
||||
|
||||
@NotNull(message = "Reading status is required")
|
||||
private Boolean isRead;
|
||||
|
||||
public ReadingStatusRequest() {}
|
||||
|
||||
public ReadingStatusRequest(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
}
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.storycove.dto;
|
||||
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
|
||||
public class SearchResultDto<T> {
|
||||
|
||||
@@ -10,6 +11,7 @@ public class SearchResultDto<T> {
|
||||
private int perPage;
|
||||
private String query;
|
||||
private long searchTimeMs;
|
||||
private Map<String, List<FacetCountDto>> facets;
|
||||
|
||||
public SearchResultDto() {}
|
||||
|
||||
@@ -22,6 +24,16 @@ public class SearchResultDto<T> {
|
||||
this.searchTimeMs = searchTimeMs;
|
||||
}
|
||||
|
||||
public SearchResultDto(List<T> results, long totalHits, int page, int perPage, String query, long searchTimeMs, Map<String, List<FacetCountDto>> facets) {
|
||||
this.results = results;
|
||||
this.totalHits = totalHits;
|
||||
this.page = page;
|
||||
this.perPage = perPage;
|
||||
this.query = query;
|
||||
this.searchTimeMs = searchTimeMs;
|
||||
this.facets = facets;
|
||||
}
|
||||
|
||||
// Getters and Setters
|
||||
public List<T> getResults() {
|
||||
return results;
|
||||
@@ -70,4 +82,12 @@ public class SearchResultDto<T> {
|
||||
public void setSearchTimeMs(long searchTimeMs) {
|
||||
this.searchTimeMs = searchTimeMs;
|
||||
}
|
||||
|
||||
public Map<String, List<FacetCountDto>> getFacets() {
|
||||
return facets;
|
||||
}
|
||||
|
||||
public void setFacets(Map<String, List<FacetCountDto>> facets) {
|
||||
this.facets = facets;
|
||||
}
|
||||
}
|
||||
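The new facets field groups FacetCountDto entries by facet field name, mirroring what processFacetCounts builds later in TypesenseService. A sketch of a populated map, with made-up tag and author values:

import java.util.List;
import java.util.Map;
import com.storycove.dto.FacetCountDto;

public class FacetShapeSketch {
    public static void main(String[] args) {
        // facets as carried by SearchResultDto: facet field -> value/count pairs
        Map<String, List<FacetCountDto>> facets = Map.of(
                "tagNames", List.of(new FacetCountDto("fantasy", 12), new FacetCountDto("romance", 7)),
                "authorName", List.of(new FacetCountDto("Jane Doe", 5)),
                "rating", List.of(new FacetCountDto("5", 3)));
        facets.forEach((field, counts) -> counts.forEach(
                c -> System.out.println(field + ": " + c.getValue() + " (" + c.getCount() + ")")));
    }
}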
@@ -28,6 +28,11 @@ public class StoryDto {
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
@@ -133,6 +138,30 @@ public class StoryDto {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
@@ -16,6 +16,9 @@ public class StorySearchDto {
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading status
|
||||
private Boolean isRead;
|
||||
|
||||
// Author info
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
@@ -109,6 +112,14 @@ public class StorySearchDto {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
@@ -20,6 +20,11 @@ public class StorySummaryDto {
|
||||
private Integer rating;
|
||||
private Integer volume;
|
||||
|
||||
// Reading progress fields
|
||||
private Boolean isRead;
|
||||
private Integer readingPosition;
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
// Related entities as simple references
|
||||
private UUID authorId;
|
||||
private String authorName;
|
||||
@@ -106,6 +111,30 @@ public class StorySummaryDto {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public UUID getAuthorId() {
|
||||
return authorId;
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ public class TagDto {
|
||||
private String name;
|
||||
|
||||
private Integer storyCount;
|
||||
private Integer collectionCount;
|
||||
private LocalDateTime createdAt;
|
||||
private LocalDateTime updatedAt;
|
||||
|
||||
@@ -49,6 +50,14 @@ public class TagDto {
|
||||
this.storyCount = storyCount;
|
||||
}
|
||||
|
||||
public Integer getCollectionCount() {
|
||||
return collectionCount;
|
||||
}
|
||||
|
||||
public void setCollectionCount(Integer collectionCount) {
|
||||
this.collectionCount = collectionCount;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
@@ -52,6 +52,10 @@ public class Collection {
|
||||
)
|
||||
private Set<Tag> tags = new HashSet<>();
|
||||
|
||||
// Transient field for search results - tag names only to avoid lazy loading issues
|
||||
@Transient
|
||||
private List<String> tagNames;
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
@@ -192,6 +196,14 @@ public class Collection {
|
||||
this.tags = tags;
|
||||
}
|
||||
|
||||
public List<String> getTagNames() {
|
||||
return tagNames;
|
||||
}
|
||||
|
||||
public void setTagNames(List<String> tagNames) {
|
||||
this.tagNames = tagNames;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
@@ -55,6 +55,15 @@ public class Story {
|
||||
@Column(name = "volume")
|
||||
private Integer volume;
|
||||
|
||||
@Column(name = "is_read")
|
||||
private Boolean isRead = false;
|
||||
|
||||
@Column(name = "reading_position")
|
||||
private Integer readingPosition = 0;
|
||||
|
||||
@Column(name = "last_read_at")
|
||||
private LocalDateTime lastReadAt;
|
||||
|
||||
@ManyToOne(fetch = FetchType.LAZY)
|
||||
@JoinColumn(name = "author_id")
|
||||
@JsonBackReference("author-stories")
|
||||
@@ -212,6 +221,30 @@ public class Story {
|
||||
this.volume = volume;
|
||||
}
|
||||
|
||||
public Boolean getIsRead() {
|
||||
return isRead;
|
||||
}
|
||||
|
||||
public void setIsRead(Boolean isRead) {
|
||||
this.isRead = isRead;
|
||||
}
|
||||
|
||||
public Integer getReadingPosition() {
|
||||
return readingPosition;
|
||||
}
|
||||
|
||||
public void setReadingPosition(Integer readingPosition) {
|
||||
this.readingPosition = readingPosition;
|
||||
}
|
||||
|
||||
public LocalDateTime getLastReadAt() {
|
||||
return lastReadAt;
|
||||
}
|
||||
|
||||
public void setLastReadAt(LocalDateTime lastReadAt) {
|
||||
this.lastReadAt = lastReadAt;
|
||||
}
|
||||
|
||||
public Author getAuthor() {
|
||||
return author;
|
||||
}
|
||||
@@ -252,6 +285,37 @@ public class Story {
|
||||
this.updatedAt = updatedAt;
|
||||
}
|
||||
|
||||
/**
|
||||
* Updates the reading progress and timestamp
|
||||
*/
|
||||
public void updateReadingProgress(Integer position) {
|
||||
this.readingPosition = position;
|
||||
this.lastReadAt = LocalDateTime.now();
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the story as read and updates the reading position to the end
|
||||
*/
|
||||
public void markAsRead() {
|
||||
this.isRead = true;
|
||||
this.lastReadAt = LocalDateTime.now();
|
||||
// Set reading position to the end of content if available
|
||||
if (contentPlain != null) {
|
||||
this.readingPosition = contentPlain.length();
|
||||
} else if (contentHtml != null) {
|
||||
this.readingPosition = contentHtml.length();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Marks the story as unread and resets reading position
|
||||
*/
|
||||
public void markAsUnread() {
|
||||
this.isRead = false;
|
||||
this.readingPosition = 0;
|
||||
this.lastReadAt = null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public boolean equals(Object o) {
|
||||
if (this == o) return true;
|
||||
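The new reading helpers keep isRead, readingPosition, and lastReadAt consistent with each other. A tiny sketch of the transitions they encode (the position value is illustrative):

import com.storycove.entity.Story;

public class ReadingStateSketch {
    public static void main(String[] args) {
        Story story = new Story();
        story.updateReadingProgress(1200); // readingPosition = 1200, lastReadAt = now
        story.markAsRead();                // isRead = true; position jumps to end of content if present
        story.markAsUnread();              // isRead = false, readingPosition = 0, lastReadAt = null
        System.out.println(story.getIsRead() + " / " + story.getReadingPosition());
    }
}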
@@ -272,6 +336,8 @@ public class Story {
|
||||
", title='" + title + '\'' +
|
||||
", wordCount=" + wordCount +
|
||||
", rating=" + rating +
|
||||
", isRead=" + isRead +
|
||||
", readingPosition=" + readingPosition +
|
||||
'}';
|
||||
}
|
||||
}
|
||||
@@ -29,6 +29,10 @@ public class Tag {
|
||||
@JsonBackReference("story-tags")
|
||||
private Set<Story> stories = new HashSet<>();
|
||||
|
||||
@ManyToMany(mappedBy = "tags")
|
||||
@JsonBackReference("collection-tags")
|
||||
private Set<Collection> collections = new HashSet<>();
|
||||
|
||||
@CreationTimestamp
|
||||
@Column(name = "created_at", nullable = false, updatable = false)
|
||||
private LocalDateTime createdAt;
|
||||
@@ -67,6 +71,14 @@ public class Tag {
|
||||
this.stories = stories;
|
||||
}
|
||||
|
||||
public Set<Collection> getCollections() {
|
||||
return collections;
|
||||
}
|
||||
|
||||
public void setCollections(Set<Collection> collections) {
|
||||
this.collections = collections;
|
||||
}
|
||||
|
||||
public LocalDateTime getCreatedAt() {
|
||||
return createdAt;
|
||||
}
|
||||
|
||||
@@ -45,4 +45,10 @@ public interface CollectionRepository extends JpaRepository<Collection, UUID> {
|
||||
*/
|
||||
@Query("SELECT c FROM Collection c WHERE c.isArchived = false ORDER BY c.updatedAt DESC")
|
||||
List<Collection> findAllActiveCollections();
|
||||
|
||||
/**
|
||||
* Find all collections with tags for reindexing operations
|
||||
*/
|
||||
@Query("SELECT c FROM Collection c LEFT JOIN FETCH c.tags ORDER BY c.updatedAt DESC")
|
||||
List<Collection> findAllWithTags();
|
||||
}
|
||||
@@ -114,4 +114,7 @@ public interface StoryRepository extends JpaRepository<Story, UUID> {
|
||||
"LEFT JOIN FETCH s.series " +
|
||||
"LEFT JOIN FETCH s.tags")
|
||||
List<Story> findAllWithAssociations();
|
||||
|
||||
@Query("SELECT s FROM Story s WHERE UPPER(s.title) = UPPER(:title) AND UPPER(s.author.name) = UPPER(:authorName)")
|
||||
List<Story> findByTitleAndAuthorNameIgnoreCase(@Param("title") String title, @Param("authorName") String authorName);
|
||||
}
|
||||
@@ -54,4 +54,7 @@ public interface TagRepository extends JpaRepository<Tag, UUID> {
|
||||
|
||||
@Query("SELECT COUNT(t) FROM Tag t WHERE SIZE(t.stories) > 0")
|
||||
long countUsedTags();
|
||||
|
||||
@Query("SELECT t FROM Tag t WHERE SIZE(t.collections) > 0 ORDER BY SIZE(t.collections) DESC, t.name ASC")
|
||||
List<Tag> findTagsUsedByCollections();
|
||||
}
|
||||
@@ -31,7 +31,7 @@ public class AuthorService {
|
||||
private final TypesenseService typesenseService;
|
||||
|
||||
@Autowired
|
||||
public AuthorService(AuthorRepository authorRepository, TypesenseService typesenseService) {
|
||||
public AuthorService(AuthorRepository authorRepository, @Autowired(required = false) TypesenseService typesenseService) {
|
||||
this.authorRepository = authorRepository;
|
||||
this.typesenseService = typesenseService;
|
||||
}
|
||||
@@ -133,11 +133,13 @@ public class AuthorService {
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Index in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.indexAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to index author in Typesense: " + savedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -155,11 +157,13 @@ public class AuthorService {
|
||||
Author savedAuthor = authorRepository.save(existingAuthor);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense: " + savedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -175,12 +179,14 @@ public class AuthorService {
|
||||
authorRepository.delete(author);
|
||||
|
||||
// Remove from Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.deleteAuthor(id.toString());
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to delete author from Typesense: " + author.getName(), e);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public Author addUrl(UUID id, String url) {
|
||||
Author author = findById(id);
|
||||
@@ -188,11 +194,13 @@ public class AuthorService {
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after adding URL: " + savedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -203,11 +211,13 @@ public class AuthorService {
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after removing URL: " + savedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -242,11 +252,13 @@ public class AuthorService {
|
||||
refreshedAuthor.getAuthorRating(), refreshedAuthor.getName());
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.updateAuthor(refreshedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after rating: " + refreshedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return refreshedAuthor;
|
||||
}
|
||||
@@ -290,11 +302,13 @@ public class AuthorService {
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after setting avatar: " + savedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
@@ -305,11 +319,13 @@ public class AuthorService {
|
||||
Author savedAuthor = authorRepository.save(author);
|
||||
|
||||
// Update in Typesense
|
||||
if (typesenseService != null) {
|
||||
try {
|
||||
typesenseService.updateAuthor(savedAuthor);
|
||||
} catch (Exception e) {
|
||||
logger.warn("Failed to update author in Typesense after removing avatar: " + savedAuthor.getName(), e);
|
||||
}
|
||||
}
|
||||
|
||||
return savedAuthor;
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ public class CollectionSearchResult extends Collection {
|
||||
|
||||
private Integer storedStoryCount;
|
||||
private Integer storedTotalWordCount;
|
||||
private int wordsPerMinute = 200; // Default, can be overridden
|
||||
|
||||
public CollectionSearchResult(Collection collection) {
|
||||
this.setId(collection.getId());
|
||||
@@ -20,6 +21,7 @@ public class CollectionSearchResult extends Collection {
|
||||
this.setCreatedAt(collection.getCreatedAt());
|
||||
this.setUpdatedAt(collection.getUpdatedAt());
|
||||
this.setCoverImagePath(collection.getCoverImagePath());
|
||||
this.setTagNames(collection.getTagNames()); // Copy tag names for search results
|
||||
// Note: don't copy collectionStories or tags to avoid lazy loading issues
|
||||
}
|
||||
|
||||
@@ -31,6 +33,10 @@ public class CollectionSearchResult extends Collection {
|
||||
this.storedTotalWordCount = totalWordCount;
|
||||
}
|
||||
|
||||
public void setWordsPerMinute(int wordsPerMinute) {
|
||||
this.wordsPerMinute = wordsPerMinute;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int getStoryCount() {
|
||||
return storedStoryCount != null ? storedStoryCount : 0;
|
||||
@@ -43,8 +49,7 @@ public class CollectionSearchResult extends Collection {
|
||||
|
||||
@Override
|
||||
public int getEstimatedReadingTime() {
|
||||
// Assuming 200 words per minute reading speed
|
||||
return Math.max(1, getTotalWordCount() / 200);
|
||||
return Math.max(1, getTotalWordCount() / wordsPerMinute);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
||||
@@ -34,18 +34,21 @@ public class CollectionService {
|
||||
private final StoryRepository storyRepository;
|
||||
private final TagRepository tagRepository;
|
||||
private final TypesenseService typesenseService;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
|
||||
@Autowired
|
||||
public CollectionService(CollectionRepository collectionRepository,
|
||||
CollectionStoryRepository collectionStoryRepository,
|
||||
StoryRepository storyRepository,
|
||||
TagRepository tagRepository,
|
||||
@Autowired(required = false) TypesenseService typesenseService) {
|
||||
@Autowired(required = false) TypesenseService typesenseService,
|
||||
ReadingTimeService readingTimeService) {
|
||||
this.collectionRepository = collectionRepository;
|
||||
this.collectionStoryRepository = collectionStoryRepository;
|
||||
this.storyRepository = storyRepository;
|
||||
this.tagRepository = tagRepository;
|
||||
this.typesenseService = typesenseService;
|
||||
this.readingTimeService = readingTimeService;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -78,6 +81,13 @@ public class CollectionService {
|
||||
.orElseThrow(() -> new ResourceNotFoundException("Collection not found with id: " + id));
|
||||
}
|
||||
|
||||
/**
|
||||
* Find all collections with tags for reindexing
|
||||
*/
|
||||
public List<Collection> findAllWithTags() {
|
||||
return collectionRepository.findAllWithTags();
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new collection with optional initial stories
|
||||
*/
|
||||
@@ -344,7 +354,7 @@ public class CollectionService {
|
||||
int totalWordCount = collectionStories.stream()
|
||||
.mapToInt(cs -> cs.getStory().getWordCount() != null ? cs.getStory().getWordCount() : 0)
|
||||
.sum();
|
||||
int estimatedReadingTime = Math.max(1, totalWordCount / 200); // 200 words per minute
|
||||
int estimatedReadingTime = readingTimeService.calculateReadingTime(totalWordCount);
|
||||
|
||||
double averageStoryRating = collectionStories.stream()
|
||||
.filter(cs -> cs.getStory().getRating() != null)
|
||||
|
||||
@@ -0,0 +1,28 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import org.springframework.beans.factory.annotation.Value;
|
||||
import org.springframework.stereotype.Service;
|
||||
|
||||
@Service
|
||||
public class ReadingTimeService {
|
||||
|
||||
@Value("${app.reading.speed.default:200}")
|
||||
private int defaultWordsPerMinute;
|
||||
|
||||
/**
|
||||
* Calculate estimated reading time in minutes for the given word count
|
||||
* @param wordCount the number of words to read
|
||||
* @return estimated reading time in minutes (minimum 1 minute)
|
||||
*/
|
||||
public int calculateReadingTime(int wordCount) {
|
||||
return Math.max(1, wordCount / defaultWordsPerMinute);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current words per minute setting
|
||||
* @return words per minute reading speed
|
||||
*/
|
||||
public int getWordsPerMinute() {
|
||||
return defaultWordsPerMinute;
|
||||
}
|
||||
}
|
||||
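With the default of 200 words per minute from app.reading.speed.default, calculateReadingTime is integer division floored at one minute: 4,500 words come out as 22 minutes, and 50 words hit the 1-minute floor. A standalone sketch of the same arithmetic:

public class ReadingTimeSketch {
    static int calculateReadingTime(int wordCount, int wordsPerMinute) {
        return Math.max(1, wordCount / wordsPerMinute); // same formula as ReadingTimeService
    }

    public static void main(String[] args) {
        System.out.println(calculateReadingTime(4500, 200)); // 22
        System.out.println(calculateReadingTime(50, 200));   // 1 (minimum)
    }
}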
@@ -271,6 +271,45 @@ public class StoryService {
|
||||
return savedStory;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Story updateReadingProgress(UUID id, Integer position) {
|
||||
if (position != null && position < 0) {
|
||||
throw new IllegalArgumentException("Reading position must be non-negative");
|
||||
}
|
||||
|
||||
Story story = findById(id);
|
||||
story.updateReadingProgress(position);
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update Typesense index with new reading progress
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(savedStory);
|
||||
}
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
|
||||
@Transactional
|
||||
public Story updateReadingStatus(UUID id, Boolean isRead) {
|
||||
Story story = findById(id);
|
||||
|
||||
if (Boolean.TRUE.equals(isRead)) {
|
||||
story.markAsRead();
|
||||
} else {
|
||||
story.setIsRead(false);
|
||||
story.setLastReadAt(LocalDateTime.now());
|
||||
}
|
||||
|
||||
Story savedStory = storyRepository.save(story);
|
||||
|
||||
// Update Typesense index with new reading status
|
||||
if (typesenseService != null) {
|
||||
typesenseService.updateStory(savedStory);
|
||||
}
|
||||
|
||||
return savedStory;
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Story> findBySeriesOrderByVolume(UUID seriesId) {
|
||||
return storyRepository.findBySeriesOrderByVolume(seriesId);
|
||||
@@ -593,4 +632,12 @@ public class StoryService {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Story> findPotentialDuplicates(String title, String authorName) {
|
||||
if (title == null || title.trim().isEmpty() || authorName == null || authorName.trim().isEmpty()) {
|
||||
return List.of();
|
||||
}
|
||||
return storyRepository.findByTitleAndAuthorNameIgnoreCase(title.trim(), authorName.trim());
|
||||
}
|
||||
}
|
||||
@@ -192,6 +192,11 @@ public class TagService {
|
||||
return tagRepository.countUsedTags();
|
||||
}
|
||||
|
||||
@Transactional(readOnly = true)
|
||||
public List<Tag> findTagsUsedByCollections() {
|
||||
return tagRepository.findTagsUsedByCollections();
|
||||
}
|
||||
|
||||
private void validateTagForCreate(Tag tag) {
|
||||
if (existsByName(tag.getName())) {
|
||||
throw new DuplicateResourceException("Tag", tag.getName());
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.dto.AuthorSearchDto;
|
||||
import com.storycove.dto.FacetCountDto;
|
||||
import com.storycove.dto.SearchResultDto;
|
||||
import com.storycove.dto.StorySearchDto;
|
||||
import com.storycove.entity.Author;
|
||||
@@ -32,12 +33,15 @@ public class TypesenseService {
|
||||
|
||||
private final Client typesenseClient;
|
||||
private final CollectionStoryRepository collectionStoryRepository;
|
||||
private final ReadingTimeService readingTimeService;
|
||||
|
||||
@Autowired
|
||||
public TypesenseService(Client typesenseClient,
|
||||
@Autowired(required = false) CollectionStoryRepository collectionStoryRepository) {
|
||||
@Autowired(required = false) CollectionStoryRepository collectionStoryRepository,
|
||||
ReadingTimeService readingTimeService) {
|
||||
this.typesenseClient = typesenseClient;
|
||||
this.collectionStoryRepository = collectionStoryRepository;
|
||||
this.readingTimeService = readingTimeService;
|
||||
}
|
||||
|
||||
@PostConstruct
|
||||
@@ -65,19 +69,19 @@ public class TypesenseService {
|
||||
private void createStoriesCollection() throws Exception {
|
||||
List<Field> fields = Arrays.asList(
|
||||
new Field().name("id").type("string").facet(false),
|
||||
new Field().name("title").type("string").facet(false),
|
||||
new Field().name("title").type("string").facet(false).sort(true),
|
||||
new Field().name("summary").type("string").facet(false).optional(true),
|
||||
new Field().name("description").type("string").facet(false),
|
||||
new Field().name("contentPlain").type("string").facet(false),
|
||||
new Field().name("authorId").type("string").facet(true),
|
||||
new Field().name("authorName").type("string").facet(true),
|
||||
new Field().name("authorName").type("string").facet(true).sort(true),
|
||||
new Field().name("seriesId").type("string").facet(true).optional(true),
|
||||
new Field().name("seriesName").type("string").facet(true).optional(true),
|
||||
new Field().name("seriesName").type("string").facet(true).sort(true).optional(true),
|
||||
new Field().name("tagNames").type("string[]").facet(true).optional(true),
|
||||
new Field().name("rating").type("int32").facet(true).optional(true),
|
||||
new Field().name("wordCount").type("int32").facet(true).optional(true),
|
||||
new Field().name("volume").type("int32").facet(true).optional(true),
|
||||
new Field().name("createdAt").type("int64").facet(false),
|
||||
new Field().name("rating").type("int32").facet(true).sort(true).optional(true),
|
||||
new Field().name("wordCount").type("int32").facet(true).sort(true).optional(true),
|
||||
new Field().name("volume").type("int32").facet(true).sort(true).optional(true),
|
||||
new Field().name("createdAt").type("int64").facet(false).sort(true),
|
||||
new Field().name("sourceUrl").type("string").facet(false).optional(true),
|
||||
new Field().name("coverPath").type("string").facet(false).optional(true)
|
||||
);
|
||||
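Declaring title, authorName, seriesName, rating, wordCount, volume, and createdAt with sort(true) is what allows the sort expressions used further down in searchStories and searchAuthors. A small sketch of the expression format, with illustrative field choices:

public class SortStringSketch {
    public static void main(String[] args) {
        // Typesense sort expressions are "field:direction"; only fields declared
        // with .sort(true) above may be used, e.g. createdAt, wordCount, title.
        String sortBy = "createdAt" + ":" + "desc";     // mirrors sortField + ":" + sortDirection below
        String withTieBreaker = sortBy + ",title:asc";  // comma-separated secondary sort
        System.out.println(withTieBreaker);             // createdAt:desc,title:asc
    }
}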
@@ -101,6 +105,26 @@ public class TypesenseService {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Force recreate the stories collection, deleting it first if it exists
|
||||
*/
|
||||
public void recreateStoriesCollection() throws Exception {
|
||||
try {
|
||||
logger.info("Force deleting stories collection for recreation...");
|
||||
typesenseClient.collections(STORIES_COLLECTION).delete();
|
||||
logger.info("Successfully deleted stories collection");
|
||||
} catch (Exception e) {
|
||||
logger.debug("Stories collection didn't exist for deletion: {}", e.getMessage());
|
||||
}
|
||||
|
||||
// Wait a brief moment to ensure deletion is complete
|
||||
Thread.sleep(100);
|
||||
|
||||
logger.info("Creating stories collection with fresh schema...");
|
||||
createStoriesCollection();
|
||||
logger.info("Successfully created stories collection");
|
||||
}
|
||||
|
||||
/**
|
||||
* Force recreate the authors collection, deleting it first if it exists
|
||||
*/
|
||||
@@ -186,8 +210,6 @@ public class TypesenseService {
|
||||
try {
|
||||
long startTime = System.currentTimeMillis();
|
||||
|
||||
logger.info("SEARCH DEBUG: searchStories called with query='{}', tagFilters={}, authorFilters={}",
|
||||
query, tagFilters, authorFilters);
|
||||
|
||||
// Convert 0-based page (frontend/backend) to 1-based page (Typesense)
|
||||
int typesensePage = page + 1;
|
||||
@@ -206,6 +228,8 @@ public class TypesenseService {
|
||||
.highlightFields("title,description")
|
||||
.highlightStartTag("<mark>")
|
||||
.highlightEndTag("</mark>")
|
||||
.facetBy("tagNames,authorName,rating")
|
||||
.maxFacetValues(100)
|
||||
.sortBy(buildSortParameter(normalizedQuery, sortBy, sortDir));
|
||||
|
||||
// Add filters
|
||||
@@ -219,17 +243,12 @@ public class TypesenseService {
|
||||
}
|
||||
|
||||
if (tagFilters != null && !tagFilters.isEmpty()) {
|
||||
logger.info("SEARCH DEBUG: Processing {} tag filters: {}", tagFilters.size(), tagFilters);
|
||||
String tagFilter = tagFilters.stream()
|
||||
.map(tag -> {
|
||||
// Use AND logic for multiple tags - items must have ALL selected tags
|
||||
for (String tag : tagFilters) {
|
||||
String escaped = escapeTypesenseValue(tag);
|
||||
String condition = "tagNames:=" + escaped;
|
||||
logger.info("SEARCH DEBUG: Tag '{}' -> escaped '{}' -> condition '{}'", tag, escaped, condition);
|
||||
return condition;
|
||||
})
|
||||
.collect(Collectors.joining(" || "));
|
||||
logger.info("SEARCH DEBUG: Final tag filter condition: '{}'", tagFilter);
|
||||
filterConditions.add("(" + tagFilter + ")");
|
||||
filterConditions.add(condition);
|
||||
}
|
||||
}
|
||||
|
||||
if (minRating != null) {
|
||||
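The rewritten tag handling adds one filter condition per selected tag, and the conditions are later joined with " && ", so a story must carry every selected tag (the old code joined per-tag conditions with " || "). A sketch of the resulting filter string, with illustrative tag values and without the real escapeTypesenseValue escaping:

import java.util.ArrayList;
import java.util.List;

public class TagFilterSketch {
    public static void main(String[] args) {
        List<String> tagFilters = List.of("fantasy", "slow burn"); // example selection
        List<String> filterConditions = new ArrayList<>();
        for (String tag : tagFilters) {
            filterConditions.add("tagNames:=" + tag); // real code escapes via escapeTypesenseValue
        }
        // Joined with " && " further down, so results must match ALL tags:
        System.out.println(String.join(" && ", filterConditions));
        // tagNames:=fantasy && tagNames:=slow burn
    }
}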
@@ -242,19 +261,17 @@ public class TypesenseService {
|
||||
|
||||
if (!filterConditions.isEmpty()) {
|
||||
String finalFilter = String.join(" && ", filterConditions);
|
||||
logger.info("SEARCH DEBUG: Final filter condition: '{}'", finalFilter);
|
||||
searchParameters.filterBy(finalFilter);
|
||||
} else {
|
||||
logger.info("SEARCH DEBUG: No filter conditions applied");
|
||||
}
|
||||
|
||||
SearchResult searchResult = typesenseClient.collections(STORIES_COLLECTION)
|
||||
.documents()
|
||||
.search(searchParameters);
|
||||
|
||||
logger.info("SEARCH DEBUG: Typesense returned {} results", searchResult.getFound());
|
||||
|
||||
List<StorySearchDto> results = convertSearchResult(searchResult);
|
||||
Map<String, List<FacetCountDto>> facets = processFacetCounts(searchResult);
|
||||
long searchTime = System.currentTimeMillis() - startTime;
|
||||
|
||||
return new SearchResultDto<>(
|
||||
@@ -263,7 +280,8 @@ public class TypesenseService {
|
||||
page,
|
||||
perPage,
|
||||
query,
|
||||
searchTime
|
||||
searchTime,
|
||||
facets
|
||||
);
|
||||
|
||||
} catch (Exception e) {
|
||||
@@ -294,15 +312,8 @@ public class TypesenseService {
|
||||
|
||||
public void reindexAllStories(List<Story> stories) {
|
||||
try {
|
||||
// Clear existing collection
|
||||
try {
|
||||
typesenseClient.collections(STORIES_COLLECTION).delete();
|
||||
} catch (Exception e) {
|
||||
logger.debug("Collection didn't exist for deletion: {}", e.getMessage());
|
||||
}
|
||||
|
||||
// Recreate collection
|
||||
createStoriesCollection();
|
||||
// Force recreate collection with proper schema
|
||||
recreateStoriesCollection();
|
||||
|
||||
// Bulk index all stories
|
||||
bulkIndexStories(stories);
|
||||
@@ -363,10 +374,8 @@ public class TypesenseService {
|
||||
List<String> tagNames = story.getTags().stream()
|
||||
.map(tag -> tag.getName())
|
||||
.collect(Collectors.toList());
|
||||
logger.debug("INDEXING DEBUG: Story '{}' has tags: {}", story.getTitle(), tagNames);
|
||||
document.put("tagNames", tagNames);
|
||||
} else {
|
||||
logger.debug("INDEXING DEBUG: Story '{}' has no tags", story.getTitle());
|
||||
}
|
||||
|
||||
document.put("rating", story.getRating() != null ? story.getRating() : 0);
|
||||
@@ -387,6 +396,45 @@ public class TypesenseService {
|
||||
return document;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private Map<String, List<FacetCountDto>> processFacetCounts(SearchResult searchResult) {
|
||||
Map<String, List<FacetCountDto>> facetMap = new HashMap<>();
|
||||
|
||||
if (searchResult.getFacetCounts() != null) {
|
||||
for (FacetCounts facetCounts : searchResult.getFacetCounts()) {
|
||||
String fieldName = facetCounts.getFieldName();
|
||||
List<FacetCountDto> facetValues = new ArrayList<>();
|
||||
|
||||
if (facetCounts.getCounts() != null) {
|
||||
for (Object countObj : facetCounts.getCounts()) {
|
||||
if (countObj instanceof Map) {
|
||||
Map<String, Object> countMap = (Map<String, Object>) countObj;
|
||||
String value = (String) countMap.get("value");
|
||||
Integer count = (Integer) countMap.get("count");
|
||||
|
||||
if (value != null && count != null && count > 0) {
|
||||
facetValues.add(new FacetCountDto(value, count));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!facetValues.isEmpty()) {
|
||||
// Sort by count descending, then by value ascending
|
||||
facetValues.sort((a, b) -> {
|
||||
int countCompare = Integer.compare(b.getCount(), a.getCount());
|
||||
if (countCompare != 0) return countCompare;
|
||||
return a.getValue().compareToIgnoreCase(b.getValue());
|
||||
});
|
||||
|
||||
facetMap.put(fieldName, facetValues);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return facetMap;
|
||||
}
|
||||
|
||||
@SuppressWarnings("unchecked")
|
||||
private List<StorySearchDto> convertSearchResult(SearchResult searchResult) {
|
||||
return searchResult.getHits().stream()
|
||||
@@ -732,8 +780,6 @@ public class TypesenseService {
|
||||
|
||||
public SearchResultDto<AuthorSearchDto> searchAuthors(String query, int page, int perPage, String sortBy, String sortOrder) {
|
||||
try {
|
||||
logger.info("AUTHORS SEARCH DEBUG: Searching collection '{}' with query='{}', sortBy='{}', sortOrder='{}'",
|
||||
AUTHORS_COLLECTION, query, sortBy, sortOrder);
|
||||
SearchParameters searchParameters = new SearchParameters()
|
||||
.q(query != null && !query.trim().isEmpty() ? query : "*")
|
||||
.queryBy("name,notes")
|
||||
@@ -745,8 +791,6 @@ public class TypesenseService {
|
||||
String sortDirection = "desc".equalsIgnoreCase(sortOrder) ? "desc" : "asc";
|
||||
String sortField = mapAuthorSortField(sortBy);
|
||||
String sortString = sortField + ":" + sortDirection;
|
||||
logger.info("AUTHORS SEARCH DEBUG: Original sortBy='{}', mapped to='{}', full sort string='{}'",
|
||||
sortBy, sortField, sortString);
|
||||
searchParameters.sortBy(sortString);
|
||||
}
|
||||
|
||||
@@ -757,17 +801,12 @@ public class TypesenseService {
|
||||
.search(searchParameters);
|
||||
} catch (Exception sortException) {
|
||||
// If sorting fails (likely due to schema issues), retry without sorting
|
||||
logger.error("SORTING ERROR DEBUG: Full exception details", sortException);
|
||||
logger.warn("Sorting failed for authors search, retrying without sort: " + sortException.getMessage());
|
||||
|
||||
// Try to get collection info for debugging
|
||||
try {
|
||||
CollectionResponse collection = typesenseClient.collections(AUTHORS_COLLECTION).retrieve();
|
||||
logger.error("COLLECTION DEBUG: Collection '{}' exists with {} documents and {} fields",
|
||||
collection.getName(), collection.getNumDocuments(), collection.getFields().size());
|
||||
logger.error("COLLECTION DEBUG: Fields: {}", collection.getFields());
|
||||
} catch (Exception debugException) {
|
||||
logger.error("COLLECTION DEBUG: Failed to retrieve collection info", debugException);
|
||||
}
|
||||
|
||||
searchParameters = new SearchParameters()
|
||||
@@ -1007,10 +1046,11 @@ public class TypesenseService {
         }
 
         if (tags != null && !tags.isEmpty()) {
-            String tagFilter = tags.stream()
-                .map(tag -> "tags:=" + escapeTypesenseValue(tag))
-                .collect(Collectors.joining(" || "));
-            filterConditions.add("(" + tagFilter + ")");
+            // Use AND logic for multiple tags - collections must have ALL selected tags
+            for (String tag : tags) {
+                String condition = "tags:=" + escapeTypesenseValue(tag);
+                filterConditions.add(condition);
+            }
         }
 
         if (!filterConditions.isEmpty()) {
@@ -1197,6 +1237,15 @@ public class TypesenseService {
|
||||
collection.setCoverImagePath((String) doc.get("cover_image_path"));
|
||||
collection.setIsArchived((Boolean) doc.get("is_archived"));
|
||||
|
||||
// Set tags from Typesense document
|
||||
if (doc.get("tags") != null) {
|
||||
@SuppressWarnings("unchecked")
|
||||
List<String> tagNames = (List<String>) doc.get("tags");
|
||||
// For search results, we'll store tag names in a special field for frontend
|
||||
// since we don't want to load full Tag entities for performance
|
||||
collection.setTagNames(tagNames);
|
||||
}
|
||||
|
||||
// Set timestamps
|
||||
if (doc.get("created_at") != null) {
|
||||
long createdAtSeconds = ((Number) doc.get("created_at")).longValue();
|
||||
@@ -1210,6 +1259,7 @@ public class TypesenseService {
|
||||
// For list/search views, we create a special lightweight collection that stores
|
||||
// the calculated values directly to avoid lazy loading issues
|
||||
CollectionSearchResult searchCollection = new CollectionSearchResult(collection);
|
||||
searchCollection.setWordsPerMinute(readingTimeService.getWordsPerMinute());
|
||||
|
||||
// Set the calculated statistics from the Typesense document
|
||||
if (doc.get("story_count") != null) {
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.entity.Author;
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.repository.AuthorRepository;
|
||||
import com.storycove.service.exception.DuplicateResourceException;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
@@ -24,6 +25,7 @@ import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.ArgumentMatchers.anyString;
|
||||
import static org.mockito.Mockito.*;
|
||||
import static org.mockito.Mockito.times;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
@DisplayName("Author Service Unit Tests")
|
||||
@@ -32,7 +34,6 @@ class AuthorServiceTest {
|
||||
@Mock
|
||||
private AuthorRepository authorRepository;
|
||||
|
||||
@InjectMocks
|
||||
private AuthorService authorService;
|
||||
|
||||
private Author testAuthor;
|
||||
@@ -44,6 +45,9 @@ class AuthorServiceTest {
|
||||
testAuthor = new Author("Test Author");
|
||||
testAuthor.setId(testId);
|
||||
testAuthor.setNotes("Test notes");
|
||||
|
||||
// Initialize service with null TypesenseService (which is allowed)
|
||||
authorService = new AuthorService(authorRepository, null);
|
||||
}
|
||||
|
||||
@Test
|
||||
@@ -307,4 +311,133 @@ class AuthorServiceTest {
|
||||
assertEquals(5L, count);
|
||||
verify(authorRepository).countRecentAuthors(any(java.time.LocalDateTime.class));
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should set author rating with validation")
|
||||
void shouldSetAuthorRating() {
|
||||
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
|
||||
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
|
||||
|
||||
Author result = authorService.setRating(testId, 4);
|
||||
|
||||
assertEquals(4, testAuthor.getAuthorRating());
|
||||
verify(authorRepository, times(2)).findById(testId); // Called twice: once initially, once after flush
|
||||
verify(authorRepository).save(testAuthor);
|
||||
verify(authorRepository).flush();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception for invalid rating range")
|
||||
void shouldThrowExceptionForInvalidRating() {
|
||||
assertThrows(IllegalArgumentException.class, () -> authorService.setRating(testId, 0));
|
||||
assertThrows(IllegalArgumentException.class, () -> authorService.setRating(testId, 6));
|
||||
|
||||
verify(authorRepository, never()).findById(any());
|
||||
verify(authorRepository, never()).save(any());
|
||||
}
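Taken together, the two tests above pin down the rating contract: values outside 1-5 are rejected before any repository access, null clears the rating, and the service re-reads the author after a flush. A minimal sketch of the AuthorService.setRating implementation these assertions imply (the real method is not part of this diff):

    // Sketch inferred from the tests; anything beyond the verified interactions is an assumption.
    public Author setRating(UUID id, Integer rating) {
        if (rating != null && (rating < 1 || rating > 5)) {
            // Fails fast, which is why the tests verify findById/save are never called for 0 or 6.
            throw new IllegalArgumentException("Author rating must be between 1 and 5");
        }
        Author author = findById(id);
        author.setAuthorRating(rating);   // null simply clears the rating
        authorRepository.save(author);
        authorRepository.flush();
        return findById(id);              // second lookup, matching verify(..., times(2)).findById(testId)
    }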
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle null rating")
|
||||
void shouldHandleNullRating() {
|
||||
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
|
||||
when(authorRepository.save(any(Author.class))).thenReturn(testAuthor);
|
||||
|
||||
Author result = authorService.setRating(testId, null);
|
||||
|
||||
assertNull(testAuthor.getAuthorRating());
|
||||
verify(authorRepository, times(2)).findById(testId); // Called twice: once initially, once after flush
|
||||
verify(authorRepository).save(testAuthor);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find all authors with stories")
|
||||
void shouldFindAllAuthorsWithStories() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findAll()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findAllWithStories();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findAll();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should get author rating from database")
|
||||
void shouldGetAuthorRatingFromDb() {
|
||||
when(authorRepository.findAuthorRatingById(testId)).thenReturn(4);
|
||||
|
||||
Integer rating = authorService.getAuthorRatingFromDb(testId);
|
||||
|
||||
assertEquals(4, rating);
|
||||
verify(authorRepository).findAuthorRatingById(testId);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should calculate average story rating")
|
||||
void shouldCalculateAverageStoryRating() {
|
||||
// Setup test author with stories
|
||||
Story story1 = new Story("Story 1");
|
||||
story1.setRating(4);
|
||||
Story story2 = new Story("Story 2");
|
||||
story2.setRating(5);
|
||||
|
||||
testAuthor.getStories().add(story1);
|
||||
testAuthor.getStories().add(story2);
|
||||
|
||||
when(authorRepository.findById(testId)).thenReturn(Optional.of(testAuthor));
|
||||
|
||||
Double avgRating = authorService.calculateAverageStoryRating(testId);
|
||||
|
||||
assertEquals(4.5, avgRating);
|
||||
verify(authorRepository).findById(testId);
|
||||
}
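The expected 4.5 here is just the mean of the two story ratings (4 and 5). A hedged sketch of calculateAverageStoryRating consistent with this test; how an author with no rated stories is handled is an assumption:

    // Sketch only - the real implementation is not shown in this diff.
    public Double calculateAverageStoryRating(UUID authorId) {
        Author author = findById(authorId);
        return author.getStories().stream()
                .map(Story::getRating)
                .filter(java.util.Objects::nonNull)
                .mapToInt(Integer::intValue)
                .average()
                .orElse(0.0);   // assumption: no rated stories yields 0.0 rather than null
    }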
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find authors with stories using repository method")
|
||||
void shouldFindAuthorsWithStoriesFromRepository() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findAuthorsWithStories()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findAuthorsWithStories();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findAuthorsWithStories();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find top rated authors")
|
||||
void shouldFindTopRatedAuthors() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findTopRatedAuthors()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findTopRatedAuthors();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findTopRatedAuthors();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find most prolific authors")
|
||||
void shouldFindMostProlificAuthors() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findMostProlificAuthors()).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findMostProlificAuthors();
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findMostProlificAuthors();
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should find authors by URL domain")
|
||||
void shouldFindAuthorsByUrlDomain() {
|
||||
List<Author> authors = List.of(testAuthor);
|
||||
when(authorRepository.findByUrlDomain("example.com")).thenReturn(authors);
|
||||
|
||||
List<Author> result = authorService.findByUrlDomain("example.com");
|
||||
|
||||
assertEquals(1, result.size());
|
||||
verify(authorRepository).findByUrlDomain("example.com");
|
||||
}
|
||||
|
||||
}
|
||||
@@ -0,0 +1,216 @@
|
||||
package com.storycove.service;
|
||||
|
||||
import com.storycove.entity.Story;
|
||||
import com.storycove.repository.StoryRepository;
|
||||
import com.storycove.repository.TagRepository;
|
||||
import com.storycove.service.exception.ResourceNotFoundException;
|
||||
import org.junit.jupiter.api.BeforeEach;
|
||||
import org.junit.jupiter.api.DisplayName;
|
||||
import org.junit.jupiter.api.Test;
|
||||
import org.junit.jupiter.api.extension.ExtendWith;
|
||||
import org.mockito.Mock;
|
||||
import org.mockito.junit.jupiter.MockitoExtension;
|
||||
|
||||
import java.time.LocalDateTime;
|
||||
import java.util.Optional;
|
||||
import java.util.UUID;
|
||||
|
||||
import static org.junit.jupiter.api.Assertions.*;
|
||||
import static org.mockito.ArgumentMatchers.any;
|
||||
import static org.mockito.Mockito.*;
|
||||
|
||||
@ExtendWith(MockitoExtension.class)
|
||||
@DisplayName("Story Service Unit Tests - Reading Progress")
|
||||
class StoryServiceTest {
|
||||
|
||||
@Mock
|
||||
private StoryRepository storyRepository;
|
||||
|
||||
@Mock
|
||||
private TagRepository tagRepository;
|
||||
|
||||
private StoryService storyService;
|
||||
private Story testStory;
|
||||
private UUID testId;
|
||||
|
||||
@BeforeEach
|
||||
void setUp() {
|
||||
testId = UUID.randomUUID();
|
||||
testStory = new Story("Test Story");
|
||||
testStory.setId(testId);
|
||||
testStory.setContentHtml("<p>Test content for reading progress tracking</p>");
|
||||
|
||||
// Create StoryService with only required repositories, all services can be null for these tests
|
||||
storyService = new StoryService(
|
||||
storyRepository,
|
||||
tagRepository,
|
||||
null, // authorService - not needed for reading progress tests
|
||||
null, // tagService - not needed for reading progress tests
|
||||
null, // seriesService - not needed for reading progress tests
|
||||
null, // sanitizationService - not needed for reading progress tests
|
||||
null // typesenseService - will test both with and without
|
||||
);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update reading progress successfully")
|
||||
void shouldUpdateReadingProgress() {
|
||||
Integer position = 150;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertEquals(position, result.getReadingPosition());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update reading progress with zero position")
|
||||
void shouldUpdateReadingProgressWithZeroPosition() {
|
||||
Integer position = 0;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertEquals(0, result.getReadingPosition());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception for negative reading position")
|
||||
void shouldThrowExceptionForNegativeReadingPosition() {
|
||||
Integer position = -1;
|
||||
|
||||
assertThrows(IllegalArgumentException.class,
|
||||
() -> storyService.updateReadingProgress(testId, position));
|
||||
|
||||
verify(storyRepository, never()).findById(any());
|
||||
verify(storyRepository, never()).save(any());
|
||||
}
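These first three tests fix the progress contract: negative positions are rejected up front, zero and null are accepted, and every successful update also touches lastReadAt. A minimal sketch of the StoryService.updateReadingProgress behaviour they imply (the method itself is not part of this diff):

    // Sketch inferred from the tests above; anything not asserted there is an assumption.
    public Story updateReadingProgress(UUID id, Integer position) {
        if (position != null && position < 0) {
            // Rejected before any repository call, matching the never() verifications.
            throw new IllegalArgumentException("Reading position cannot be negative");
        }
        Story story = storyRepository.findById(id)
                .orElseThrow(() -> new ResourceNotFoundException("Story not found: " + id));
        story.setReadingPosition(position);        // null clears the saved position
        story.setLastReadAt(LocalDateTime.now());
        return storyRepository.save(story);
    }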
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle null reading position")
|
||||
void shouldHandleNullReadingPosition() {
|
||||
Integer position = null;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertNull(result.getReadingPosition());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception when story not found for reading progress update")
|
||||
void shouldThrowExceptionWhenStoryNotFoundForReadingProgress() {
|
||||
Integer position = 100;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.empty());
|
||||
|
||||
assertThrows(ResourceNotFoundException.class,
|
||||
() -> storyService.updateReadingProgress(testId, position));
|
||||
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository, never()).save(any());
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should mark story as read")
|
||||
void shouldMarkStoryAsRead() {
|
||||
Boolean isRead = true;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertTrue(result.getIsRead());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
// When marked as read, position should be set to content length
|
||||
assertTrue(result.getReadingPosition() > 0);
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository).save(testStory);
|
||||
}
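The read/unread tests add one more wrinkle: marking a story as read also pushes the reading position to the end of the content (the test only checks that it is greater than zero), while a null status is treated like false. A hedged sketch of updateReadingStatus, assuming the entity's markAsRead() helper (which the unread test calls directly) is what sets that position:

    // Sketch only; the exact entity helpers used are assumptions apart from markAsRead().
    public Story updateReadingStatus(UUID id, Boolean isRead) {
        Story story = storyRepository.findById(id)
                .orElseThrow(() -> new ResourceNotFoundException("Story not found: " + id));
        if (Boolean.TRUE.equals(isRead)) {
            story.markAsRead();              // also moves readingPosition to the content length
        } else {
            story.setIsRead(false);          // null reading status is treated as unread
        }
        story.setLastReadAt(LocalDateTime.now());
        return storyRepository.save(story);
    }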
|
||||
|
||||
@Test
|
||||
@DisplayName("Should mark story as unread")
|
||||
void shouldMarkStoryAsUnread() {
|
||||
Boolean isRead = false;
|
||||
// First mark story as read to test transition
|
||||
testStory.markAsRead();
|
||||
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertFalse(result.getIsRead());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should handle null reading status")
|
||||
void shouldHandleNullReadingStatus() {
|
||||
Boolean isRead = null;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertFalse(result.getIsRead());
|
||||
assertNotNull(result.getLastReadAt());
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should throw exception when story not found for reading status update")
|
||||
void shouldThrowExceptionWhenStoryNotFoundForReadingStatus() {
|
||||
Boolean isRead = true;
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.empty());
|
||||
|
||||
assertThrows(ResourceNotFoundException.class,
|
||||
() -> storyService.updateReadingStatus(testId, isRead));
|
||||
|
||||
verify(storyRepository).findById(testId);
|
||||
verify(storyRepository, never()).save(any());
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update lastReadAt timestamp when updating progress")
|
||||
void shouldUpdateLastReadAtWhenUpdatingProgress() {
|
||||
Integer position = 50;
|
||||
LocalDateTime beforeUpdate = LocalDateTime.now().minusMinutes(1);
|
||||
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingProgress(testId, position);
|
||||
|
||||
assertNotNull(result.getLastReadAt());
|
||||
assertTrue(result.getLastReadAt().isAfter(beforeUpdate));
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
|
||||
@Test
|
||||
@DisplayName("Should update lastReadAt timestamp when updating status")
|
||||
void shouldUpdateLastReadAtWhenUpdatingStatus() {
|
||||
Boolean isRead = true;
|
||||
LocalDateTime beforeUpdate = LocalDateTime.now().minusMinutes(1);
|
||||
|
||||
when(storyRepository.findById(testId)).thenReturn(Optional.of(testStory));
|
||||
when(storyRepository.save(any(Story.class))).thenReturn(testStory);
|
||||
|
||||
Story result = storyService.updateReadingStatus(testId, isRead);
|
||||
|
||||
assertNotNull(result.getLastReadAt());
|
||||
assertTrue(result.getLastReadAt().isAfter(beforeUpdate));
|
||||
verify(storyRepository).save(testStory);
|
||||
}
|
||||
}
|
||||
@@ -1,13 +1,40 @@
|
||||
# Use node 18 alpine for smaller image size
|
||||
FROM node:18-alpine
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
COPY package*.json ./
|
||||
RUN npm ci --omit=dev
|
||||
# Install dumb-init for proper signal handling
|
||||
RUN apk add --no-cache dumb-init
|
||||
|
||||
# Copy package files
|
||||
COPY package*.json ./
|
||||
|
||||
# Install all dependencies (including devDependencies needed for build)
|
||||
# Set npm config for better CI performance
|
||||
RUN npm ci --prefer-offline --no-audit
|
||||
|
||||
# Copy source code
|
||||
COPY . .
|
||||
|
||||
# Set Node.js memory limit for build (helpful in constrained environments)
|
||||
ENV NODE_OPTIONS="--max-old-space-size=1024"
|
||||
|
||||
# Build the application
|
||||
RUN npm run build
|
||||
|
||||
# Remove devDependencies after build to reduce image size
|
||||
RUN npm prune --omit=dev
|
||||
|
||||
# Create non-root user for security
|
||||
RUN addgroup -g 1001 -S nodejs
|
||||
RUN adduser -S nextjs -u 1001
|
||||
|
||||
# Change ownership of the app directory
|
||||
RUN chown -R nextjs:nodejs /app
|
||||
USER nextjs
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
# Use dumb-init to handle signals properly
|
||||
ENTRYPOINT ["dumb-init", "--"]
|
||||
CMD ["npm", "start"]
|
||||
42
frontend/Dockerfile.alternative
Normal file
@@ -0,0 +1,42 @@
|
||||
# Multi-stage build for better caching and smaller final image
|
||||
FROM node:18-alpine AS dependencies
|
||||
|
||||
WORKDIR /app
|
||||
COPY package*.json ./
|
||||
RUN npm ci
|
||||
|
||||
FROM node:18-alpine AS builder
|
||||
|
||||
WORKDIR /app
|
||||
COPY --from=dependencies /app/node_modules ./node_modules
|
||||
COPY . .
|
||||
|
||||
# Increase memory limit for build
|
||||
ENV NODE_OPTIONS="--max-old-space-size=2048"
|
||||
|
||||
RUN npm run build
|
||||
|
||||
FROM node:18-alpine AS runner
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
# Install dumb-init
|
||||
RUN apk add --no-cache dumb-init
|
||||
|
||||
# Create non-root user
|
||||
RUN addgroup -g 1001 -S nodejs
|
||||
RUN adduser -S nextjs -u 1001
|
||||
|
||||
# Copy necessary files
|
||||
COPY --from=builder /app/public ./public
|
||||
COPY --from=builder /app/.next/standalone ./
|
||||
COPY --from=builder /app/.next/static ./.next/static
|
||||
|
||||
# Set correct permissions
|
||||
RUN chown -R nextjs:nodejs /app
|
||||
USER nextjs
|
||||
|
||||
EXPOSE 3000
|
||||
|
||||
ENTRYPOINT ["dumb-init", "--"]
|
||||
CMD ["node", "server.js"]
|
||||
@@ -1,12 +1,19 @@
 /** @type {import('next').NextConfig} */
 const nextConfig = {
-  async rewrites() {
-    return [
-      {
-        source: '/api/:path*',
-        destination: 'http://backend:8080/api/:path*',
-      },
-    ];
-  },
+  // Removed Next.js rewrites since nginx handles all API routing
+  webpack: (config, { isServer }) => {
+    // Exclude cheerio and its dependencies from client-side bundling
+    if (!isServer) {
+      config.resolve.fallback = {
+        ...config.resolve.fallback,
+        fs: false,
+        net: false,
+        tls: false,
+        'undici': false,
+      };
+      config.externals.push('cheerio', 'server-only');
+    }
+    return config;
+  },
   images: {
     domains: ['localhost'],
225
frontend/package-lock.json
generated
@@ -8,14 +8,17 @@
|
||||
"name": "storycove-frontend",
|
||||
"version": "0.1.0",
|
||||
"dependencies": {
|
||||
"@heroicons/react": "^2.2.0",
|
||||
"autoprefixer": "^10.4.16",
|
||||
"axios": "^1.6.0",
|
||||
"cheerio": "^1.0.0-rc.12",
|
||||
"dompurify": "^3.0.5",
|
||||
"next": "14.0.0",
|
||||
"postcss": "^8.4.31",
|
||||
"react": "^18",
|
||||
"react-dom": "^18",
|
||||
"react-dropzone": "^14.2.3",
|
||||
"server-only": "^0.0.1",
|
||||
"tailwindcss": "^3.3.0"
|
||||
},
|
||||
"devDependencies": {
|
||||
@@ -137,6 +140,15 @@
|
||||
"node": "^12.22.0 || ^14.17.0 || >=16.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@heroicons/react": {
|
||||
"version": "2.2.0",
|
||||
"resolved": "https://registry.npmjs.org/@heroicons/react/-/react-2.2.0.tgz",
|
||||
"integrity": "sha512-LMcepvRaS9LYHJGsF0zzmgKCUim/X3N/DQKc4jepAXJ7l8QxJ1PmxJzqplF2Z3FE4PqBAIGyJAQ/w4B5dsqbtQ==",
|
||||
"license": "MIT",
|
||||
"peerDependencies": {
|
||||
"react": ">= 16 || ^19.0.0-rc"
|
||||
}
|
||||
},
|
||||
"node_modules/@humanwhocodes/config-array": {
|
||||
"version": "0.13.0",
|
||||
"resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz",
|
||||
@@ -1398,6 +1410,12 @@
|
||||
"url": "https://github.com/sponsors/sindresorhus"
|
||||
}
|
||||
},
|
||||
"node_modules/boolbase": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
|
||||
"integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/brace-expansion": {
|
||||
"version": "1.1.12",
|
||||
"resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz",
|
||||
@@ -1569,6 +1587,44 @@
|
||||
"url": "https://github.com/chalk/chalk?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/cheerio": {
|
||||
"version": "1.0.0-rc.12",
|
||||
"resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.0.0-rc.12.tgz",
|
||||
"integrity": "sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cheerio-select": "^2.1.0",
|
||||
"dom-serializer": "^2.0.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.0.1",
|
||||
"htmlparser2": "^8.0.1",
|
||||
"parse5": "^7.0.0",
|
||||
"parse5-htmlparser2-tree-adapter": "^7.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/cheeriojs/cheerio?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/cheerio-select": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz",
|
||||
"integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"boolbase": "^1.0.0",
|
||||
"css-select": "^5.1.0",
|
||||
"css-what": "^6.1.0",
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.0.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
},
|
||||
"node_modules/chokidar": {
|
||||
"version": "3.6.0",
|
||||
"resolved": "https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz",
|
||||
@@ -1671,6 +1727,34 @@
|
||||
"node": ">= 8"
|
||||
}
|
||||
},
|
||||
"node_modules/css-select": {
|
||||
"version": "5.2.2",
|
||||
"resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz",
|
||||
"integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"boolbase": "^1.0.0",
|
||||
"css-what": "^6.1.0",
|
||||
"domhandler": "^5.0.2",
|
||||
"domutils": "^3.0.1",
|
||||
"nth-check": "^2.0.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
},
|
||||
"node_modules/css-what": {
|
||||
"version": "6.2.2",
|
||||
"resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz",
|
||||
"integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
},
|
||||
"node_modules/cssesc": {
|
||||
"version": "3.0.0",
|
||||
"resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz",
|
||||
@@ -1859,6 +1943,47 @@
|
||||
"node": ">=6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/dom-serializer": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
|
||||
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.2",
|
||||
"entities": "^4.2.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/domelementtype": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
|
||||
"integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
],
|
||||
"license": "BSD-2-Clause"
|
||||
},
|
||||
"node_modules/domhandler": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
|
||||
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/domhandler?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/dompurify": {
|
||||
"version": "3.2.6",
|
||||
"resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.2.6.tgz",
|
||||
@@ -1868,6 +1993,20 @@
|
||||
"@types/trusted-types": "^2.0.7"
|
||||
}
|
||||
},
|
||||
"node_modules/domutils": {
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
|
||||
"integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"dom-serializer": "^2.0.0",
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/domutils?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/dunder-proto": {
|
||||
"version": "1.0.1",
|
||||
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
|
||||
@@ -1900,6 +2039,18 @@
|
||||
"integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/entities": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
|
||||
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/es-abstract": {
|
||||
"version": "1.24.0",
|
||||
"resolved": "https://registry.npmjs.org/es-abstract/-/es-abstract-1.24.0.tgz",
|
||||
@@ -3096,6 +3247,25 @@
|
||||
"node": ">= 0.4"
|
||||
}
|
||||
},
|
||||
"node_modules/htmlparser2": {
|
||||
"version": "8.0.2",
|
||||
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-8.0.2.tgz",
|
||||
"integrity": "sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==",
|
||||
"funding": [
|
||||
"https://github.com/fb55/htmlparser2?sponsor=1",
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.0.1",
|
||||
"entities": "^4.4.0"
|
||||
}
|
||||
},
|
||||
"node_modules/ignore": {
|
||||
"version": "5.3.2",
|
||||
"resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz",
|
||||
@@ -4063,6 +4233,18 @@
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/nth-check": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
|
||||
"integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"boolbase": "^1.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/nth-check?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/object-assign": {
|
||||
"version": "4.1.1",
|
||||
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
|
||||
@@ -4291,6 +4473,43 @@
|
||||
"node": ">=6"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5": {
|
||||
"version": "7.3.0",
|
||||
"resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
|
||||
"integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"entities": "^6.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/inikulin/parse5?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5-htmlparser2-tree-adapter": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.1.0.tgz",
|
||||
"integrity": "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"domhandler": "^5.0.3",
|
||||
"parse5": "^7.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/inikulin/parse5?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5/node_modules/entities": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
|
||||
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/path-exists": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
|
||||
@@ -4843,6 +5062,12 @@
|
||||
"node": ">=10"
|
||||
}
|
||||
},
|
||||
"node_modules/server-only": {
|
||||
"version": "0.0.1",
|
||||
"resolved": "https://registry.npmjs.org/server-only/-/server-only-0.0.1.tgz",
|
||||
"integrity": "sha512-qepMx2JxAa5jjfzxG79yPPq+8BuFToHd1hm7kI+Z4zAq1ftQiP7HcxMhDDItrbtwVeLg/cY2JnKnrcFkmiswNA==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/set-function-length": {
|
||||
"version": "1.2.2",
|
||||
"resolved": "https://registry.npmjs.org/set-function-length/-/set-function-length-1.2.2.tgz",
|
||||
|
||||
@@ -10,23 +10,26 @@
     "type-check": "tsc --noEmit"
   },
   "dependencies": {
-    "next": "14.0.0",
-    "react": "^18",
-    "react-dom": "^18",
-    "axios": "^1.6.0",
-    "dompurify": "^3.0.5",
-    "react-dropzone": "^14.2.3",
-    "tailwindcss": "^3.3.0",
-    "autoprefixer": "^10.4.16",
-    "postcss": "^8.4.31"
+    "@heroicons/react": "^2.2.0",
+    "autoprefixer": "^10.4.16",
+    "axios": "^1.6.0",
+    "cheerio": "^1.0.0-rc.12",
+    "dompurify": "^3.0.5",
+    "next": "14.0.0",
+    "postcss": "^8.4.31",
+    "react": "^18",
+    "react-dom": "^18",
+    "react-dropzone": "^14.2.3",
+    "server-only": "^0.0.1",
+    "tailwindcss": "^3.3.0"
   },
   "devDependencies": {
-    "typescript": "^5",
-    "@types/node": "^20",
-    "@types/react": "^18",
-    "@types/react-dom": "^18",
-    "@types/dompurify": "^3.0.5",
-    "eslint": "^8",
-    "eslint-config-next": "14.0.0"
+    "@types/dompurify": "^3.0.5",
+    "@types/node": "^20",
+    "@types/react": "^18",
+    "@types/react-dom": "^18",
+    "eslint": "^8",
+    "eslint-config-next": "14.0.0",
+    "typescript": "^5"
   }
 }
@@ -1,16 +1,20 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useRef } from 'react';
|
||||
import { useRouter } from 'next/navigation';
|
||||
import { useState, useRef, useEffect } from 'react';
|
||||
import { useRouter, useSearchParams } from 'next/navigation';
|
||||
import { useAuth } from '../../contexts/AuthContext';
|
||||
import AppLayout from '../../components/layout/AppLayout';
|
||||
import { Input, Textarea } from '../../components/ui/Input';
|
||||
import Button from '../../components/ui/Button';
|
||||
import TagInput from '../../components/stories/TagInput';
|
||||
import RichTextEditor from '../../components/stories/RichTextEditor';
|
||||
import ImageUpload from '../../components/ui/ImageUpload';
|
||||
import { storyApi } from '../../lib/api';
|
||||
import { storyApi, authorApi } from '../../lib/api';
|
||||
|
||||
export default function AddStoryPage() {
|
||||
const [importMode, setImportMode] = useState<'manual' | 'url'>('manual');
|
||||
const [importUrl, setImportUrl] = useState('');
|
||||
const [scraping, setScraping] = useState(false);
|
||||
const [formData, setFormData] = useState({
|
||||
title: '',
|
||||
summary: '',
|
||||
@@ -25,8 +29,84 @@ export default function AddStoryPage() {
|
||||
const [coverImage, setCoverImage] = useState<File | null>(null);
|
||||
const [loading, setLoading] = useState(false);
|
||||
const [errors, setErrors] = useState<Record<string, string>>({});
|
||||
const [duplicateWarning, setDuplicateWarning] = useState<{
|
||||
show: boolean;
|
||||
count: number;
|
||||
duplicates: Array<{
|
||||
id: string;
|
||||
title: string;
|
||||
authorName: string;
|
||||
createdAt: string;
|
||||
}>;
|
||||
}>({ show: false, count: 0, duplicates: [] });
|
||||
const [checkingDuplicates, setCheckingDuplicates] = useState(false);
|
||||
|
||||
const router = useRouter();
|
||||
const searchParams = useSearchParams();
|
||||
const { isAuthenticated } = useAuth();
|
||||
|
||||
// Pre-fill author if authorId is provided in URL
|
||||
useEffect(() => {
|
||||
const authorId = searchParams.get('authorId');
|
||||
if (authorId) {
|
||||
const loadAuthor = async () => {
|
||||
try {
|
||||
const author = await authorApi.getAuthor(authorId);
|
||||
setFormData(prev => ({
|
||||
...prev,
|
||||
authorName: author.name
|
||||
}));
|
||||
} catch (error) {
|
||||
console.error('Failed to load author:', error);
|
||||
}
|
||||
};
|
||||
loadAuthor();
|
||||
}
|
||||
}, [searchParams]);
|
||||
|
||||
// Check for duplicates when title and author are both present
|
||||
useEffect(() => {
|
||||
const checkDuplicates = async () => {
|
||||
const title = formData.title.trim();
|
||||
const authorName = formData.authorName.trim();
|
||||
|
||||
// Don't check if user isn't authenticated or if title/author are empty
|
||||
if (!isAuthenticated || !title || !authorName) {
|
||||
setDuplicateWarning({ show: false, count: 0, duplicates: [] });
|
||||
return;
|
||||
}
|
||||
|
||||
// Debounce the check to avoid too many API calls
|
||||
const timeoutId = setTimeout(async () => {
|
||||
try {
|
||||
setCheckingDuplicates(true);
|
||||
const result = await storyApi.checkDuplicate(title, authorName);
|
||||
|
||||
if (result.hasDuplicates) {
|
||||
setDuplicateWarning({
|
||||
show: true,
|
||||
count: result.count,
|
||||
duplicates: result.duplicates
|
||||
});
|
||||
} else {
|
||||
setDuplicateWarning({ show: false, count: 0, duplicates: [] });
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to check for duplicates:', error);
|
||||
// Clear any existing duplicate warnings on error
|
||||
setDuplicateWarning({ show: false, count: 0, duplicates: [] });
|
||||
// Don't show error to user as this is just a helpful warning
|
||||
// Authentication errors will be handled by the API interceptor
|
||||
} finally {
|
||||
setCheckingDuplicates(false);
|
||||
}
|
||||
}, 500); // 500ms debounce
|
||||
|
||||
return () => clearTimeout(timeoutId);
|
||||
};
|
||||
|
||||
checkDuplicates();
|
||||
}, [formData.title, formData.authorName, isAuthenticated]);
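storyApi.checkDuplicate is the client wrapper around the backend's GET /api/stories/check-duplicate endpoint (the bulk import route further down calls the same URL directly with title and authorName query parameters). A hedged sketch of the backend side this warning relies on; the mapping, DTO shape and service call shown here are assumptions, not part of this diff:

    // Sketch of the endpoint shape implied by the frontend usage; names are assumptions.
    @GetMapping("/check-duplicate")
    public ResponseEntity<Map<String, Object>> checkDuplicate(@RequestParam String title,
                                                              @RequestParam String authorName) {
        List<Story> matches = storyService.findByTitleAndAuthorName(title, authorName);
        Map<String, Object> body = new HashMap<>();
        body.put("hasDuplicates", !matches.isEmpty());
        body.put("count", matches.size());
        body.put("duplicates", matches.stream()
                .map(s -> Map.of(
                        "id", s.getId(),
                        "title", s.getTitle(),
                        "authorName", s.getAuthor() != null ? s.getAuthor().getName() : "",
                        "createdAt", s.getCreatedAt()))
                .collect(Collectors.toList()));
        return ResponseEntity.ok(body);
    }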
|
||||
|
||||
const handleInputChange = (field: string) => (
|
||||
e: React.ChangeEvent<HTMLInputElement | HTMLTextAreaElement>
|
||||
@@ -53,6 +133,57 @@ export default function AddStoryPage() {
|
||||
setFormData(prev => ({ ...prev, tags }));
|
||||
};
|
||||
|
||||
const handleImportFromUrl = async () => {
|
||||
if (!importUrl.trim()) {
|
||||
setErrors({ importUrl: 'URL is required' });
|
||||
return;
|
||||
}
|
||||
|
||||
setScraping(true);
|
||||
setErrors({});
|
||||
|
||||
try {
|
||||
const response = await fetch('/scrape/story', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ url: importUrl }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json();
|
||||
throw new Error(errorData.error || 'Failed to scrape story');
|
||||
}
|
||||
|
||||
const scrapedStory = await response.json();
|
||||
|
||||
// Pre-fill the form with scraped data
|
||||
setFormData({
|
||||
title: scrapedStory.title || '',
|
||||
summary: scrapedStory.summary || '',
|
||||
authorName: scrapedStory.author || '',
|
||||
contentHtml: scrapedStory.content || '',
|
||||
sourceUrl: scrapedStory.sourceUrl || importUrl,
|
||||
tags: scrapedStory.tags || [],
|
||||
seriesName: '',
|
||||
volume: '',
|
||||
});
|
||||
|
||||
// Switch to manual mode so user can edit the pre-filled data
|
||||
setImportMode('manual');
|
||||
setImportUrl('');
|
||||
|
||||
// Show success message
|
||||
setErrors({ success: 'Story data imported successfully! Review and edit as needed before saving.' });
|
||||
} catch (error: any) {
|
||||
console.error('Failed to import story:', error);
|
||||
setErrors({ importUrl: error.message });
|
||||
} finally {
|
||||
setScraping(false);
|
||||
}
|
||||
};
|
||||
|
||||
const validateForm = () => {
|
||||
const newErrors: Record<string, string> = {};
|
||||
|
||||
@@ -129,6 +260,104 @@ export default function AddStoryPage() {
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{/* Import Mode Toggle */}
|
||||
<div className="mb-8">
|
||||
<div className="flex border-b border-gray-200 dark:border-gray-700">
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setImportMode('manual')}
|
||||
className={`px-6 py-3 text-sm font-medium border-b-2 transition-colors ${
|
||||
importMode === 'manual'
|
||||
? 'border-theme-accent text-theme-accent'
|
||||
: 'border-transparent theme-text hover:text-theme-accent'
|
||||
}`}
|
||||
>
|
||||
Manual Entry
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
onClick={() => setImportMode('url')}
|
||||
className={`px-6 py-3 text-sm font-medium border-b-2 transition-colors ${
|
||||
importMode === 'url'
|
||||
? 'border-theme-accent text-theme-accent'
|
||||
: 'border-transparent theme-text hover:text-theme-accent'
|
||||
}`}
|
||||
>
|
||||
Import from URL
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* URL Import Section */}
|
||||
{importMode === 'url' && (
|
||||
<div className="bg-gray-50 dark:bg-gray-800/50 rounded-lg p-6 mb-8">
|
||||
<h3 className="text-lg font-medium theme-header mb-4">Import Story from URL</h3>
|
||||
<p className="theme-text text-sm mb-4">
|
||||
Enter a URL from a supported story site to automatically extract the story content, title, author, and other metadata.
|
||||
</p>
|
||||
|
||||
<div className="space-y-4">
|
||||
<Input
|
||||
label="Story URL"
|
||||
type="url"
|
||||
value={importUrl}
|
||||
onChange={(e) => setImportUrl(e.target.value)}
|
||||
placeholder="https://example.com/story-url"
|
||||
error={errors.importUrl}
|
||||
disabled={scraping}
|
||||
/>
|
||||
|
||||
<div className="flex gap-3">
|
||||
<Button
|
||||
type="button"
|
||||
onClick={handleImportFromUrl}
|
||||
loading={scraping}
|
||||
disabled={!importUrl.trim() || scraping}
|
||||
>
|
||||
{scraping ? 'Importing...' : 'Import Story'}
|
||||
</Button>
|
||||
|
||||
<Button
|
||||
type="button"
|
||||
variant="ghost"
|
||||
onClick={() => setImportMode('manual')}
|
||||
disabled={scraping}
|
||||
>
|
||||
Enter Manually Instead
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="border-t pt-4 mt-4">
|
||||
<p className="text-sm theme-text mb-2">
|
||||
Need to import multiple stories at once?
|
||||
</p>
|
||||
<Button
|
||||
type="button"
|
||||
variant="secondary"
|
||||
onClick={() => router.push('/stories/import/bulk')}
|
||||
disabled={scraping}
|
||||
size="sm"
|
||||
>
|
||||
Bulk Import Multiple URLs
|
||||
</Button>
|
||||
</div>
|
||||
|
||||
<div className="text-xs theme-text">
|
||||
<p className="font-medium mb-1">Supported Sites:</p>
|
||||
<p>Archive of Our Own, DeviantArt, FanFiction.Net, Literotica, Royal Road, Wattpad, and more</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Success Message */}
|
||||
{errors.success && (
|
||||
<div className="p-4 bg-green-50 dark:bg-green-900/20 border border-green-200 dark:border-green-800 rounded-lg mb-6">
|
||||
<p className="text-green-800 dark:text-green-200">{errors.success}</p>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{importMode === 'manual' && (
|
||||
<form onSubmit={handleSubmit} className="space-y-6">
|
||||
{/* Title */}
|
||||
<Input
|
||||
@@ -150,6 +379,46 @@ export default function AddStoryPage() {
|
||||
required
|
||||
/>
|
||||
|
||||
{/* Duplicate Warning */}
|
||||
{duplicateWarning.show && (
|
||||
<div className="p-4 bg-yellow-50 dark:bg-yellow-900/20 border border-yellow-200 dark:border-yellow-800 rounded-lg">
|
||||
<div className="flex items-start gap-3">
|
||||
<div className="text-yellow-600 dark:text-yellow-400 mt-0.5">
|
||||
⚠️
|
||||
</div>
|
||||
<div>
|
||||
<h4 className="font-medium text-yellow-800 dark:text-yellow-200">
|
||||
Potential Duplicate Detected
|
||||
</h4>
|
||||
<p className="text-sm text-yellow-700 dark:text-yellow-300 mt-1">
|
||||
Found {duplicateWarning.count} existing {duplicateWarning.count === 1 ? 'story' : 'stories'} with the same title and author:
|
||||
</p>
|
||||
<ul className="mt-2 space-y-1">
|
||||
{duplicateWarning.duplicates.map((duplicate, index) => (
|
||||
<li key={duplicate.id} className="text-sm text-yellow-700 dark:text-yellow-300">
|
||||
• <span className="font-medium">{duplicate.title}</span> by {duplicate.authorName}
|
||||
<span className="text-xs ml-2">
|
||||
(added {new Date(duplicate.createdAt).toLocaleDateString()})
|
||||
</span>
|
||||
</li>
|
||||
))}
|
||||
</ul>
|
||||
<p className="text-xs text-yellow-600 dark:text-yellow-400 mt-2">
|
||||
You can still create this story if it's different from the existing ones.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Checking indicator */}
|
||||
{checkingDuplicates && (
|
||||
<div className="flex items-center gap-2 text-sm theme-text">
|
||||
<div className="animate-spin w-4 h-4 border-2 border-theme-accent border-t-transparent rounded-full"></div>
|
||||
Checking for duplicates...
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Summary */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium theme-header mb-2">
|
||||
@@ -262,6 +531,7 @@ export default function AddStoryPage() {
|
||||
</Button>
|
||||
</div>
|
||||
</form>
|
||||
)}
|
||||
</div>
|
||||
</AppLayout>
|
||||
);
|
||||
|
||||
@@ -207,9 +207,14 @@ export default function AuthorDetailPage() {
|
||||
<div className="lg:col-span-2 space-y-6">
|
||||
<div className="flex items-center justify-between">
|
||||
<h2 className="text-2xl font-semibold theme-header">Stories</h2>
|
||||
<div className="flex items-center gap-4">
|
||||
<p className="theme-text">
|
||||
{stories.length} {stories.length === 1 ? 'story' : 'stories'}
|
||||
</p>
|
||||
<Button href={`/add-story?authorId=${authorId}`}>
|
||||
Add Story
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{stories.length === 0 ? (
|
||||
|
||||
@@ -26,19 +26,27 @@ export default function CollectionsPage() {
|
||||
const [totalCollections, setTotalCollections] = useState(0);
|
||||
const [refreshTrigger, setRefreshTrigger] = useState(0);
|
||||
|
||||
// Load tags for filtering
|
||||
useEffect(() => {
|
||||
const loadTags = async () => {
|
||||
try {
|
||||
const tagsResult = await tagApi.getTags({ page: 0, size: 1000 });
|
||||
setTags(tagsResult?.content || []);
|
||||
} catch (error) {
|
||||
console.error('Failed to load tags:', error);
|
||||
}
|
||||
};
|
||||
|
||||
loadTags();
|
||||
}, []);
|
||||
// Extract tags from current collection results with counts
|
||||
const extractTagsFromResults = (collections: Collection[]): Tag[] => {
|
||||
const tagCounts: { [key: string]: number } = {};
|
||||
|
||||
collections.forEach(collection => {
|
||||
collection.tagNames?.forEach(tagName => {
|
||||
if (tagCounts[tagName]) {
|
||||
tagCounts[tagName]++;
|
||||
} else {
|
||||
tagCounts[tagName] = 1;
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return Object.entries(tagCounts).map(([tagName, count]) => ({
|
||||
id: tagName, // Use tag name as ID since we don't have actual IDs from search results
|
||||
name: tagName,
|
||||
collectionCount: count
|
||||
}));
|
||||
};
|
||||
|
||||
// Load collections with search and filters
|
||||
useEffect(() => {
|
||||
@@ -55,9 +63,14 @@ export default function CollectionsPage() {
|
||||
archived: showArchived,
|
||||
});
|
||||
|
||||
setCollections(result?.results || []);
|
||||
const currentCollections = result?.results || [];
|
||||
setCollections(currentCollections);
|
||||
setTotalPages(Math.ceil((result?.totalHits || 0) / pageSize));
|
||||
setTotalCollections(result?.totalHits || 0);
|
||||
|
||||
// Always update tags based on current search results (including initial wildcard search)
|
||||
const resultTags = extractTagsFromResults(currentCollections);
|
||||
setTags(resultTags);
|
||||
} catch (error) {
|
||||
console.error('Failed to load collections:', error);
|
||||
setCollections([]);
|
||||
@@ -223,6 +236,7 @@ export default function CollectionsPage() {
|
||||
tags={tags}
|
||||
selectedTags={selectedTags}
|
||||
onTagToggle={handleTagToggle}
|
||||
showCollectionCount={true}
|
||||
/>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useEffect } from 'react';
|
||||
import { searchApi, tagApi } from '../../lib/api';
|
||||
import { Story, Tag } from '../../types/api';
|
||||
import { searchApi } from '../../lib/api';
|
||||
import { Story, Tag, FacetCount } from '../../types/api';
|
||||
import AppLayout from '../../components/layout/AppLayout';
|
||||
import { Input } from '../../components/ui/Input';
|
||||
import Button from '../../components/ui/Button';
|
||||
@@ -11,7 +11,7 @@ import TagFilter from '../../components/stories/TagFilter';
|
||||
import LoadingSpinner from '../../components/ui/LoadingSpinner';
|
||||
|
||||
type ViewMode = 'grid' | 'list';
|
||||
type SortOption = 'createdAt' | 'title' | 'authorName' | 'rating';
|
||||
type SortOption = 'createdAt' | 'title' | 'authorName' | 'rating' | 'wordCount';
|
||||
|
||||
export default function LibraryPage() {
|
||||
const [stories, setStories] = useState<Story[]>([]);
|
||||
@@ -28,19 +28,19 @@ export default function LibraryPage() {
|
||||
const [refreshTrigger, setRefreshTrigger] = useState(0);
|
||||
|
||||
|
||||
// Load tags for filtering
|
||||
useEffect(() => {
|
||||
const loadTags = async () => {
|
||||
try {
|
||||
const tagsResult = await tagApi.getTags({ page: 0, size: 1000 });
|
||||
setTags(tagsResult?.content || []);
|
||||
} catch (error) {
|
||||
console.error('Failed to load tags:', error);
|
||||
}
|
||||
};
|
||||
|
||||
loadTags();
|
||||
}, []);
|
||||
// Convert facet counts to Tag objects for the UI
|
||||
const convertFacetsToTags = (facets?: Record<string, FacetCount[]>): Tag[] => {
|
||||
if (!facets || !facets.tagNames) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return facets.tagNames.map(facet => ({
|
||||
id: facet.value, // Use tag name as ID since we don't have actual IDs from search results
|
||||
name: facet.value,
|
||||
storyCount: facet.count
|
||||
}));
|
||||
};
|
||||
|
||||
// Debounce search to avoid too many API calls
|
||||
useEffect(() => {
|
||||
@@ -59,9 +59,14 @@ export default function LibraryPage() {
|
||||
sortDir: sortDirection,
|
||||
});
|
||||
|
||||
setStories(result?.results || []);
|
||||
const currentStories = result?.results || [];
|
||||
setStories(currentStories);
|
||||
setTotalPages(Math.ceil((result?.totalHits || 0) / 20));
|
||||
setTotalElements(result?.totalHits || 0);
|
||||
|
||||
// Update tags from facets - these represent all matching stories, not just current page
|
||||
const resultTags = convertFacetsToTags(result?.facets);
|
||||
setTags(resultTags);
|
||||
} catch (error) {
|
||||
console.error('Failed to load stories:', error);
|
||||
setStories([]);
|
||||
@@ -99,16 +104,21 @@ export default function LibraryPage() {
|
||||
};
|
||||
|
||||
const handleSortChange = (newSortOption: SortOption) => {
|
||||
if (newSortOption === sortOption) {
|
||||
// Toggle direction if same option
|
||||
setSortDirection(prev => prev === 'asc' ? 'desc' : 'asc');
|
||||
} else {
|
||||
setSortOption(newSortOption);
|
||||
setSortDirection('desc'); // Default to desc for new sort option
|
||||
// Set appropriate default direction for the sort option
|
||||
if (newSortOption === 'title' || newSortOption === 'authorName') {
|
||||
setSortDirection('asc'); // Alphabetical fields default to ascending
|
||||
} else {
|
||||
setSortDirection('desc'); // Numeric/date fields default to descending
|
||||
}
|
||||
resetPage();
|
||||
};
|
||||
|
||||
const toggleSortDirection = () => {
|
||||
setSortDirection(prev => prev === 'asc' ? 'desc' : 'asc');
|
||||
resetPage();
|
||||
};
|
||||
|
||||
const clearFilters = () => {
|
||||
setSearchQuery('');
|
||||
setSelectedTags([]);
|
||||
@@ -203,7 +213,18 @@ export default function LibraryPage() {
|
||||
<option value="title">Title</option>
|
||||
<option value="authorName">Author</option>
|
||||
<option value="rating">Rating</option>
|
||||
<option value="wordCount">Word Count</option>
|
||||
</select>
|
||||
|
||||
{/* Sort Direction Toggle */}
|
||||
<button
|
||||
onClick={toggleSortDirection}
|
||||
className="p-2 rounded-lg theme-card theme-text hover:bg-opacity-80 transition-colors border theme-border"
|
||||
title={`Sort ${sortDirection === 'asc' ? 'Ascending' : 'Descending'}`}
|
||||
aria-label={`Toggle sort direction - currently ${sortDirection === 'asc' ? 'ascending' : 'descending'}`}
|
||||
>
|
||||
{sortDirection === 'asc' ? '↑' : '↓'}
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{/* Clear Filters */}
|
||||
|
||||
72
frontend/src/app/scrape/author/route.ts
Normal file
@@ -0,0 +1,72 @@
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const { url } = body;
|
||||
|
||||
if (!url || typeof url !== 'string') {
|
||||
return NextResponse.json(
|
||||
{ error: 'URL is required and must be a string' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Dynamic import to prevent client-side bundling
|
||||
const { StoryScraper } = await import('@/lib/scraper/scraper');
|
||||
|
||||
const scraper = new StoryScraper();
|
||||
const stories = await scraper.scrapeAuthorPage(url);
|
||||
|
||||
return NextResponse.json({ stories });
|
||||
} catch (error) {
|
||||
console.error('Author page scraping error:', error);
|
||||
|
||||
// Check if it's a ScraperError without importing at module level
|
||||
if (error && typeof error === 'object' && error.constructor.name === 'ScraperError') {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: (error as any).message,
|
||||
url: (error as any).url
|
||||
},
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
// Handle specific error types
|
||||
if (error.message.includes('Invalid URL')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid URL provided' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error.message.includes('not supported')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Author page scraping is not supported for this website' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error.message.includes('HTTP 404')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Author page not found at the provided URL' },
|
||||
{ status: 404 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error.message.includes('timeout')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Request timed out while fetching content' },
|
||||
{ status: 408 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to scrape author page. Please try again.' },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
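
The route above imports StoryScraper dynamically at request time so that cheerio and the scraping code never end up in the client bundle, and it answers either with { stories } or with an { error, url } payload and an appropriate status code. A minimal sketch of a client-side caller, assuming the route is mounted at /scrape/author as above (the helper name and the AuthorStory alias are illustrative, not part of this change):

// Illustrative client helper for the POST /scrape/author route above.
interface AuthorStory {
  url: string;
  title: string;
  author: string;
  summary?: string;
}

export async function fetchAuthorStories(authorPageUrl: string): Promise<AuthorStory[]> {
  const res = await fetch('/scrape/author', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: authorPageUrl }),
  });
  const data = await res.json();
  if (!res.ok) {
    // The route responds with { error } (and sometimes { url }) on failure.
    throw new Error(data.error ?? `Scraping failed with status ${res.status}`);
  }
  return data.stories as AuthorStory[];
}
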
292
frontend/src/app/scrape/bulk/route.ts
Normal file
@@ -0,0 +1,292 @@
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
interface BulkImportRequest {
|
||||
urls: string[];
|
||||
}
|
||||
|
||||
interface ImportResult {
|
||||
url: string;
|
||||
status: 'imported' | 'skipped' | 'error';
|
||||
reason?: string;
|
||||
title?: string;
|
||||
author?: string;
|
||||
error?: string;
|
||||
storyId?: string;
|
||||
}
|
||||
|
||||
interface BulkImportResponse {
|
||||
results: ImportResult[];
|
||||
summary: {
|
||||
total: number;
|
||||
imported: number;
|
||||
skipped: number;
|
||||
errors: number;
|
||||
};
|
||||
}
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
// Check for authentication
|
||||
const authorization = request.headers.get('authorization');
|
||||
if (!authorization) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Authentication required for bulk import' },
|
||||
{ status: 401 }
|
||||
);
|
||||
}
|
||||
|
||||
const body = await request.json();
|
||||
const { urls } = body as BulkImportRequest;
|
||||
|
||||
if (!urls || !Array.isArray(urls) || urls.length === 0) {
|
||||
return NextResponse.json(
|
||||
{ error: 'URLs array is required and must not be empty' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (urls.length > 50) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Maximum 50 URLs allowed per bulk import' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Dynamic imports to prevent client-side bundling
|
||||
const { StoryScraper } = await import('@/lib/scraper/scraper');
|
||||
|
||||
const scraper = new StoryScraper();
|
||||
const results: ImportResult[] = [];
|
||||
let importedCount = 0;
|
||||
let skippedCount = 0;
|
||||
let errorCount = 0;
|
||||
|
||||
console.log(`Starting bulk scraping for ${urls.length} URLs`);
|
||||
console.log(`Environment NEXT_PUBLIC_API_URL: ${process.env.NEXT_PUBLIC_API_URL}`);
|
||||
|
||||
// For server-side API calls in Docker, use direct backend container URL
|
||||
// Client-side calls use NEXT_PUBLIC_API_URL through nginx, but server-side needs direct container access
|
||||
const serverSideApiBaseUrl = 'http://backend:8080/api';
|
||||
console.log(`DEBUG: serverSideApiBaseUrl variable is: ${serverSideApiBaseUrl}`);
|
||||
|
||||
// Quick test to verify backend connectivity
|
||||
try {
|
||||
console.log(`Testing backend connectivity at: http://backend:8080/api/stories/check-duplicate`);
|
||||
const testResponse = await fetch(`http://backend:8080/api/stories/check-duplicate?title=test&authorName=test`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': authorization,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
console.log(`Backend test response status: ${testResponse.status}`);
|
||||
} catch (error) {
|
||||
console.error(`Backend connectivity test failed:`, error);
|
||||
}
|
||||
|
||||
for (const url of urls) {
|
||||
console.log(`Processing URL: ${url}`);
|
||||
|
||||
try {
|
||||
// Validate URL format
|
||||
if (!url || typeof url !== 'string' || url.trim() === '') {
|
||||
results.push({
|
||||
url: url || 'Empty URL',
|
||||
status: 'error',
|
||||
error: 'Invalid URL format'
|
||||
});
|
||||
errorCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
const trimmedUrl = url.trim();
|
||||
|
||||
// Scrape the story
|
||||
const scrapedStory = await scraper.scrapeStory(trimmedUrl);
|
||||
|
||||
// Validate required fields
|
||||
if (!scrapedStory.title || !scrapedStory.author || !scrapedStory.content) {
|
||||
const missingFields = [];
|
||||
if (!scrapedStory.title) missingFields.push('title');
|
||||
if (!scrapedStory.author) missingFields.push('author');
|
||||
if (!scrapedStory.content) missingFields.push('content');
|
||||
|
||||
results.push({
|
||||
url: trimmedUrl,
|
||||
status: 'skipped',
|
||||
reason: `Missing required fields: ${missingFields.join(', ')}`,
|
||||
title: scrapedStory.title,
|
||||
author: scrapedStory.author
|
||||
});
|
||||
skippedCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for duplicates using query parameters
|
||||
try {
|
||||
// Use hardcoded backend URL for container-to-container communication
|
||||
const duplicateCheckUrl = `http://backend:8080/api/stories/check-duplicate`;
|
||||
console.log(`Duplicate check URL: ${duplicateCheckUrl}`);
|
||||
const params = new URLSearchParams({
|
||||
title: scrapedStory.title,
|
||||
authorName: scrapedStory.author
|
||||
});
|
||||
|
||||
const duplicateCheckResponse = await fetch(`${duplicateCheckUrl}?${params.toString()}`, {
|
||||
method: 'GET',
|
||||
headers: {
|
||||
'Authorization': authorization,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (duplicateCheckResponse.ok) {
|
||||
const duplicateResult = await duplicateCheckResponse.json();
|
||||
if (duplicateResult.hasDuplicates) {
|
||||
results.push({
|
||||
url: trimmedUrl,
|
||||
status: 'skipped',
|
||||
reason: `Duplicate story found (${duplicateResult.count} existing)`,
|
||||
title: scrapedStory.title,
|
||||
author: scrapedStory.author
|
||||
});
|
||||
skippedCount++;
|
||||
continue;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Duplicate check failed:', error);
|
||||
// Continue with import if duplicate check fails
|
||||
}
|
||||
|
||||
// Create the story
|
||||
try {
|
||||
const storyData = {
|
||||
title: scrapedStory.title,
|
||||
summary: scrapedStory.summary || undefined,
|
||||
contentHtml: scrapedStory.content,
|
||||
sourceUrl: scrapedStory.sourceUrl || trimmedUrl,
|
||||
authorName: scrapedStory.author,
|
||||
tagNames: scrapedStory.tags && scrapedStory.tags.length > 0 ? scrapedStory.tags : undefined,
|
||||
};
|
||||
|
||||
// Use hardcoded backend URL for container-to-container communication
|
||||
const createUrl = `http://backend:8080/api/stories`;
|
||||
console.log(`Create story URL: ${createUrl}`);
|
||||
const createResponse = await fetch(createUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': authorization,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify(storyData),
|
||||
});
|
||||
|
||||
if (!createResponse.ok) {
|
||||
const errorData = await createResponse.json();
|
||||
throw new Error(errorData.message || 'Failed to create story');
|
||||
}
|
||||
|
||||
const createdStory = await createResponse.json();
|
||||
|
||||
results.push({
|
||||
url: trimmedUrl,
|
||||
status: 'imported',
|
||||
title: scrapedStory.title,
|
||||
author: scrapedStory.author,
|
||||
storyId: createdStory.id
|
||||
});
|
||||
importedCount++;
|
||||
|
||||
console.log(`Successfully imported: ${scrapedStory.title} by ${scrapedStory.author} (ID: ${createdStory.id})`);
|
||||
|
||||
} catch (error) {
|
||||
console.error(`Failed to create story for ${trimmedUrl}:`, error);
|
||||
|
||||
let errorMessage = 'Failed to create story';
|
||||
if (error instanceof Error) {
|
||||
errorMessage = error.message;
|
||||
}
|
||||
|
||||
results.push({
|
||||
url: trimmedUrl,
|
||||
status: 'error',
|
||||
error: errorMessage,
|
||||
title: scrapedStory.title,
|
||||
author: scrapedStory.author
|
||||
});
|
||||
errorCount++;
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
console.error(`Error processing URL ${url}:`, error);
|
||||
|
||||
let errorMessage = 'Unknown error';
|
||||
if (error instanceof Error) {
|
||||
errorMessage = error.message;
|
||||
}
|
||||
|
||||
results.push({
|
||||
url: url,
|
||||
status: 'error',
|
||||
error: errorMessage
|
||||
});
|
||||
errorCount++;
|
||||
}
|
||||
}
|
||||
|
||||
const response: BulkImportResponse = {
|
||||
results,
|
||||
summary: {
|
||||
total: urls.length,
|
||||
imported: importedCount,
|
||||
skipped: skippedCount,
|
||||
errors: errorCount
|
||||
}
|
||||
};
|
||||
|
||||
console.log(`Bulk import completed:`, response.summary);
|
||||
|
||||
// Trigger Typesense reindex if any stories were imported
|
||||
if (importedCount > 0) {
|
||||
try {
|
||||
console.log('Triggering Typesense reindex after bulk import...');
|
||||
const reindexUrl = `http://backend:8080/api/stories/reindex-typesense`;
|
||||
const reindexResponse = await fetch(reindexUrl, {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Authorization': authorization,
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
});
|
||||
|
||||
if (reindexResponse.ok) {
|
||||
const reindexResult = await reindexResponse.json();
|
||||
console.log('Typesense reindex completed:', reindexResult);
|
||||
} else {
|
||||
console.warn('Typesense reindex failed:', reindexResponse.status);
|
||||
}
|
||||
} catch (error) {
|
||||
console.warn('Failed to trigger Typesense reindex:', error);
|
||||
// Don't fail the whole request if reindex fails
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json(response);
|
||||
|
||||
} catch (error) {
|
||||
console.error('Bulk import error:', error);
|
||||
|
||||
if (error instanceof Error) {
|
||||
return NextResponse.json(
|
||||
{ error: `Bulk import failed: ${error.message}` },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: 'Bulk import failed due to an unknown error' },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
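
The bulk route above talks to the backend through a hardcoded http://backend:8080/api base URL because server-side code running inside the Next.js container cannot use the nginx-proxied NEXT_PUBLIC_API_URL that the browser uses. A minimal sketch of expressing that split in one place (the constant name and the '/api' fallback are assumptions, not part of this change):

// Hypothetical helper; the route above hardcodes the Docker-internal URL instead of deriving it.
const apiBaseUrl =
  typeof window === 'undefined'
    ? 'http://backend:8080/api'                  // server side: direct container-to-container call
    : process.env.NEXT_PUBLIC_API_URL ?? '/api'; // client side: goes through nginx
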
85
frontend/src/app/scrape/story/route.ts
Normal file
@@ -0,0 +1,85 @@
|
||||
import { NextRequest, NextResponse } from 'next/server';
|
||||
|
||||
export async function POST(request: NextRequest) {
|
||||
try {
|
||||
const body = await request.json();
|
||||
const { url } = body;
|
||||
|
||||
if (!url || typeof url !== 'string') {
|
||||
return NextResponse.json(
|
||||
{ error: 'URL is required and must be a string' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
// Dynamic import to prevent client-side bundling
|
||||
const { StoryScraper } = await import('@/lib/scraper/scraper');
|
||||
const { ScraperError } = await import('@/lib/scraper/types');
|
||||
|
||||
const scraper = new StoryScraper();
|
||||
const story = await scraper.scrapeStory(url);
|
||||
|
||||
// Debug logging
|
||||
console.log('Scraped story data:', {
|
||||
url: url,
|
||||
title: story.title,
|
||||
author: story.author,
|
||||
summary: story.summary,
|
||||
contentLength: story.content?.length || 0,
|
||||
contentPreview: story.content?.substring(0, 200) + '...',
|
||||
tags: story.tags,
|
||||
coverImage: story.coverImage
|
||||
});
|
||||
|
||||
return NextResponse.json(story);
|
||||
} catch (error) {
|
||||
console.error('Story scraping error:', error);
|
||||
|
||||
// Check if it's a ScraperError without importing at module level
|
||||
if (error && typeof error === 'object' && error.constructor.name === 'ScraperError') {
|
||||
return NextResponse.json(
|
||||
{
|
||||
error: (error as any).message,
|
||||
url: (error as any).url
|
||||
},
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error instanceof Error) {
|
||||
// Handle specific error types
|
||||
if (error.message.includes('Invalid URL')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Invalid URL provided' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error.message.includes('Unsupported site')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'This website is not supported for scraping' },
|
||||
{ status: 400 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error.message.includes('HTTP 404')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Story not found at the provided URL' },
|
||||
{ status: 404 }
|
||||
);
|
||||
}
|
||||
|
||||
if (error.message.includes('timeout')) {
|
||||
return NextResponse.json(
|
||||
{ error: 'Request timed out while fetching content' },
|
||||
{ status: 408 }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return NextResponse.json(
|
||||
{ error: 'Failed to scrape story. Please try again.' },
|
||||
{ status: 500 }
|
||||
);
|
||||
}
|
||||
}
|
||||
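
Like the author route, the story route above maps scraper failures onto specific HTTP status codes (400 for invalid URLs and unsupported sites, 404 when the page is missing, 408 on timeouts) and otherwise returns the scraped story as JSON. A hedged sketch of how an import form might consume it (the function name and error handling are illustrative):

// Illustrative client call to the POST /scrape/story route above.
async function scrapeStoryFromUrl(url: string) {
  const res = await fetch('/scrape/story', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url }),
  });
  const data = await res.json();
  if (!res.ok) {
    throw new Error(data.error ?? `Scraping failed with status ${res.status}`);
  }
  // Matches the ScrapedStory shape: title, author, content, sourceUrl, plus optional summary/tags/coverImage.
  return data;
}
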
@@ -15,6 +15,7 @@ interface Settings {
  fontFamily: FontFamily;
  fontSize: FontSize;
  readingWidth: ReadingWidth;
  readingSpeed: number; // words per minute
}

const defaultSettings: Settings = {
@@ -22,6 +23,7 @@ const defaultSettings: Settings = {
  fontFamily: 'serif',
  fontSize: 'medium',
  readingWidth: 'medium',
  readingSpeed: 200,
};

export default function SettingsPage() {
@@ -288,6 +290,33 @@ export default function SettingsPage() {
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Reading Speed */}
|
||||
<div>
|
||||
<label className="block text-sm font-medium theme-header mb-2">
|
||||
Reading Speed (words per minute)
|
||||
</label>
|
||||
<div className="flex items-center gap-4">
|
||||
<input
|
||||
type="range"
|
||||
min="100"
|
||||
max="400"
|
||||
step="25"
|
||||
value={settings.readingSpeed}
|
||||
onChange={(e) => updateSetting('readingSpeed', parseInt(e.target.value))}
|
||||
className="flex-1 h-2 bg-gray-200 rounded-lg appearance-none cursor-pointer dark:bg-gray-700"
|
||||
/>
|
||||
<div className="min-w-[80px] text-center">
|
||||
<span className="text-lg font-medium theme-header">{settings.readingSpeed}</span>
|
||||
<div className="text-xs theme-text">WPM</div>
|
||||
</div>
|
||||
</div>
|
||||
<div className="flex justify-between text-xs theme-text mt-1">
|
||||
<span>Slow (100)</span>
|
||||
<span>Average (200)</span>
|
||||
<span>Fast (400)</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
@@ -9,6 +9,7 @@ import { Story, Collection } from '../../../../types/api';
import AppLayout from '../../../../components/layout/AppLayout';
import Button from '../../../../components/ui/Button';
import LoadingSpinner from '../../../../components/ui/LoadingSpinner';
import { calculateReadingTime } from '../../../../lib/settings';

export default function StoryDetailPage() {
  const params = useParams();
@@ -73,9 +74,7 @@ export default function StoryDetailPage() {
  };

  const estimateReadingTime = (wordCount: number) => {
    const wordsPerMinute = 200; // Average reading speed
    const minutes = Math.ceil(wordCount / wordsPerMinute);
    return minutes;
    return calculateReadingTime(wordCount);
  };

  if (loading) {

300
frontend/src/app/stories/import/bulk/page.tsx
Normal file
@@ -0,0 +1,300 @@
|
||||
'use client';
|
||||
|
||||
import { useState } from 'react';
|
||||
import { useRouter } from 'next/navigation';
|
||||
import Link from 'next/link';
|
||||
import { ArrowLeftIcon } from '@heroicons/react/24/outline';
|
||||
|
||||
interface ImportResult {
|
||||
url: string;
|
||||
status: 'imported' | 'skipped' | 'error';
|
||||
reason?: string;
|
||||
title?: string;
|
||||
author?: string;
|
||||
error?: string;
|
||||
storyId?: string;
|
||||
}
|
||||
|
||||
interface BulkImportResponse {
|
||||
results: ImportResult[];
|
||||
summary: {
|
||||
total: number;
|
||||
imported: number;
|
||||
skipped: number;
|
||||
errors: number;
|
||||
};
|
||||
}
|
||||
|
||||
export default function BulkImportPage() {
|
||||
const router = useRouter();
|
||||
const [urls, setUrls] = useState('');
|
||||
const [isLoading, setIsLoading] = useState(false);
|
||||
const [results, setResults] = useState<BulkImportResponse | null>(null);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
|
||||
const handleSubmit = async (e: React.FormEvent) => {
|
||||
e.preventDefault();
|
||||
|
||||
if (!urls.trim()) {
|
||||
setError('Please enter at least one URL');
|
||||
return;
|
||||
}
|
||||
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
setResults(null);
|
||||
|
||||
try {
|
||||
// Parse URLs from textarea (one per line)
|
||||
const urlList = urls
|
||||
.split('\n')
|
||||
.map(url => url.trim())
|
||||
.filter(url => url.length > 0);
|
||||
|
||||
if (urlList.length === 0) {
|
||||
setError('Please enter at least one valid URL');
|
||||
setIsLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
if (urlList.length > 50) {
|
||||
setError('Maximum 50 URLs allowed per bulk import');
|
||||
setIsLoading(false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Get auth token for server-side API calls
|
||||
const token = localStorage.getItem('auth-token');
|
||||
|
||||
const response = await fetch('/scrape/bulk', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
'Authorization': token ? `Bearer ${token}` : '',
|
||||
},
|
||||
body: JSON.stringify({ urls: urlList }),
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
const errorData = await response.json();
|
||||
throw new Error(errorData.error || 'Bulk import failed');
|
||||
}
|
||||
|
||||
const data: BulkImportResponse = await response.json();
|
||||
setResults(data);
|
||||
|
||||
} catch (err) {
|
||||
console.error('Bulk import error:', err);
|
||||
setError(err instanceof Error ? err.message : 'Failed to import stories');
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
const handleReset = () => {
|
||||
setUrls('');
|
||||
setResults(null);
|
||||
setError(null);
|
||||
};
|
||||
|
||||
const getStatusColor = (status: string) => {
|
||||
switch (status) {
|
||||
case 'imported': return 'text-green-700 bg-green-50 border-green-200';
|
||||
case 'skipped': return 'text-yellow-700 bg-yellow-50 border-yellow-200';
|
||||
case 'error': return 'text-red-700 bg-red-50 border-red-200';
|
||||
default: return 'text-gray-700 bg-gray-50 border-gray-200';
|
||||
}
|
||||
};
|
||||
|
||||
const getStatusIcon = (status: string) => {
|
||||
switch (status) {
|
||||
case 'imported': return '✓';
|
||||
case 'skipped': return '⚠';
|
||||
case 'error': return '✗';
|
||||
default: return '';
|
||||
}
|
||||
};
|
||||
|
||||
return (
|
||||
<div className="container mx-auto px-4 py-6">
|
||||
<div className="max-w-4xl mx-auto">
|
||||
{/* Header */}
|
||||
<div className="mb-6">
|
||||
<div className="flex items-center gap-4 mb-4">
|
||||
<Link
|
||||
href="/library"
|
||||
className="inline-flex items-center text-blue-600 hover:text-blue-800"
|
||||
>
|
||||
<ArrowLeftIcon className="h-4 w-4 mr-1" />
|
||||
Back to Library
|
||||
</Link>
|
||||
</div>
|
||||
|
||||
<h1 className="text-3xl font-bold text-gray-900 mb-2">Bulk Import Stories</h1>
|
||||
<p className="text-gray-600">
|
||||
Import multiple stories at once by providing a list of URLs. Each URL will be scraped
|
||||
and automatically added to your story collection.
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{!results ? (
|
||||
// Import Form
|
||||
<form onSubmit={handleSubmit} className="space-y-6">
|
||||
<div>
|
||||
<label htmlFor="urls" className="block text-sm font-medium text-gray-700 mb-2">
|
||||
Story URLs
|
||||
</label>
|
||||
<p className="text-sm text-gray-500 mb-3">
|
||||
Enter one URL per line. Maximum 50 URLs per import.
|
||||
</p>
|
||||
<textarea
|
||||
id="urls"
|
||||
value={urls}
|
||||
onChange={(e) => setUrls(e.target.value)}
|
||||
placeholder="https://example.com/story1 https://example.com/story2 https://example.com/story3"
|
||||
className="w-full h-64 px-3 py-2 border border-gray-300 rounded-md shadow-sm focus:outline-none focus:ring-2 focus:ring-blue-500 focus:border-transparent"
|
||||
disabled={isLoading}
|
||||
/>
|
||||
<p className="mt-2 text-sm text-gray-500">
|
||||
URLs: {urls.split('\n').filter(url => url.trim().length > 0).length}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
{error && (
|
||||
<div className="bg-red-50 border border-red-200 rounded-md p-4">
|
||||
<div className="flex">
|
||||
<div className="ml-3">
|
||||
<h3 className="text-sm font-medium text-red-800">Error</h3>
|
||||
<div className="mt-2 text-sm text-red-700">
|
||||
{error}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
|
||||
<div className="flex gap-4">
|
||||
<button
|
||||
type="submit"
|
||||
disabled={isLoading || !urls.trim()}
|
||||
className="px-6 py-2 bg-blue-600 text-white font-medium rounded-md hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
{isLoading ? 'Importing...' : 'Start Import'}
|
||||
</button>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
onClick={handleReset}
|
||||
disabled={isLoading}
|
||||
className="px-6 py-2 bg-gray-600 text-white font-medium rounded-md hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2 disabled:opacity-50 disabled:cursor-not-allowed"
|
||||
>
|
||||
Clear
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{isLoading && (
|
||||
<div className="bg-blue-50 border border-blue-200 rounded-md p-4">
|
||||
<div className="flex items-center">
|
||||
<div className="animate-spin rounded-full h-5 w-5 border-b-2 border-blue-600 mr-3"></div>
|
||||
<div>
|
||||
<p className="text-sm font-medium text-blue-800">Processing URLs...</p>
|
||||
<p className="text-sm text-blue-600">
|
||||
This may take a few minutes depending on the number of URLs and response times of the source websites.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</form>
|
||||
) : (
|
||||
// Results
|
||||
<div className="space-y-6">
|
||||
{/* Summary */}
|
||||
<div className="bg-white border border-gray-200 rounded-lg p-6">
|
||||
<h2 className="text-xl font-semibold text-gray-900 mb-4">Import Summary</h2>
|
||||
<div className="grid grid-cols-2 md:grid-cols-4 gap-4">
|
||||
<div className="text-center">
|
||||
<div className="text-2xl font-bold text-gray-900">{results.summary.total}</div>
|
||||
<div className="text-sm text-gray-600">Total URLs</div>
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<div className="text-2xl font-bold text-green-600">{results.summary.imported}</div>
|
||||
<div className="text-sm text-gray-600">Imported</div>
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<div className="text-2xl font-bold text-yellow-600">{results.summary.skipped}</div>
|
||||
<div className="text-sm text-gray-600">Skipped</div>
|
||||
</div>
|
||||
<div className="text-center">
|
||||
<div className="text-2xl font-bold text-red-600">{results.summary.errors}</div>
|
||||
<div className="text-sm text-gray-600">Errors</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Detailed Results */}
|
||||
<div className="bg-white border border-gray-200 rounded-lg">
|
||||
<div className="px-6 py-4 border-b border-gray-200">
|
||||
<h3 className="text-lg font-medium text-gray-900">Detailed Results</h3>
|
||||
</div>
|
||||
<div className="divide-y divide-gray-200">
|
||||
{results.results.map((result, index) => (
|
||||
<div key={index} className="p-6">
|
||||
<div className="flex items-start justify-between">
|
||||
<div className="flex-1 min-w-0">
|
||||
<div className="flex items-center gap-2 mb-2">
|
||||
<span className={`inline-flex items-center px-2.5 py-0.5 rounded-full text-xs font-medium border ${getStatusColor(result.status)}`}>
|
||||
{getStatusIcon(result.status)} {result.status.charAt(0).toUpperCase() + result.status.slice(1)}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<p className="text-sm text-gray-900 font-medium truncate mb-1">
|
||||
{result.url}
|
||||
</p>
|
||||
|
||||
{result.title && result.author && (
|
||||
<p className="text-sm text-gray-600 mb-1">
|
||||
"{result.title}" by {result.author}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{result.reason && (
|
||||
<p className="text-sm text-gray-500">
|
||||
{result.reason}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{result.error && (
|
||||
<p className="text-sm text-red-600">
|
||||
Error: {result.error}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Actions */}
|
||||
<div className="flex gap-4">
|
||||
<button
|
||||
onClick={handleReset}
|
||||
className="px-6 py-2 bg-blue-600 text-white font-medium rounded-md hover:bg-blue-700 focus:outline-none focus:ring-2 focus:ring-blue-500 focus:ring-offset-2"
|
||||
>
|
||||
Import More URLs
|
||||
</button>
|
||||
|
||||
<Link
|
||||
href="/stories"
|
||||
className="px-6 py-2 bg-gray-600 text-white font-medium rounded-md hover:bg-gray-700 focus:outline-none focus:ring-2 focus:ring-gray-500 focus:ring-offset-2"
|
||||
>
|
||||
View Stories
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
);
|
||||
}
|
||||
@@ -7,6 +7,7 @@ import { useRouter } from 'next/navigation';
|
||||
import { useAuth } from '../../contexts/AuthContext';
|
||||
import { useTheme } from '../../lib/theme';
|
||||
import Button from '../ui/Button';
|
||||
import Dropdown from '../ui/Dropdown';
|
||||
|
||||
export default function Header() {
|
||||
const [isMenuOpen, setIsMenuOpen] = useState(false);
|
||||
@@ -14,6 +15,24 @@ export default function Header() {
|
||||
const { theme, toggleTheme } = useTheme();
|
||||
const router = useRouter();
|
||||
|
||||
const addStoryItems = [
|
||||
{
|
||||
href: '/add-story',
|
||||
label: 'Manual Entry',
|
||||
description: 'Add a story by manually entering details'
|
||||
},
|
||||
{
|
||||
href: '/stories/import',
|
||||
label: 'Import from URL',
|
||||
description: 'Import a single story from a website'
|
||||
},
|
||||
{
|
||||
href: '/stories/import/bulk',
|
||||
label: 'Bulk Import',
|
||||
description: 'Import multiple stories from a list of URLs'
|
||||
}
|
||||
];
|
||||
|
||||
const handleLogout = () => {
|
||||
logout();
|
||||
router.push('/login');
|
||||
@@ -57,12 +76,10 @@ export default function Header() {
|
||||
>
|
||||
Authors
|
||||
</Link>
|
||||
<Link
|
||||
href="/add-story"
|
||||
className="theme-text hover:theme-accent transition-colors font-medium"
|
||||
>
|
||||
Add Story
|
||||
</Link>
|
||||
<Dropdown
|
||||
trigger="Add Story"
|
||||
items={addStoryItems}
|
||||
/>
|
||||
</nav>
|
||||
|
||||
{/* Right side actions */}
|
||||
@@ -131,13 +148,32 @@ export default function Header() {
|
||||
>
|
||||
Authors
|
||||
</Link>
|
||||
<div className="px-2 py-1">
|
||||
<div className="font-medium theme-text mb-1">Add Story</div>
|
||||
<div className="pl-4 space-y-1">
|
||||
<Link
|
||||
href="/add-story"
|
||||
className="theme-text hover:theme-accent transition-colors font-medium px-2 py-1"
|
||||
className="block theme-text hover:theme-accent transition-colors text-sm py-1"
|
||||
onClick={() => setIsMenuOpen(false)}
|
||||
>
|
||||
Add Story
|
||||
Manual Entry
|
||||
</Link>
|
||||
<Link
|
||||
href="/stories/import"
|
||||
className="block theme-text hover:theme-accent transition-colors text-sm py-1"
|
||||
onClick={() => setIsMenuOpen(false)}
|
||||
>
|
||||
Import from URL
|
||||
</Link>
|
||||
<Link
|
||||
href="/stories/import/bulk"
|
||||
className="block theme-text hover:theme-accent transition-colors text-sm py-1"
|
||||
onClick={() => setIsMenuOpen(false)}
|
||||
>
|
||||
Bulk Import
|
||||
</Link>
|
||||
</div>
|
||||
</div>
|
||||
<Link
|
||||
href="/settings"
|
||||
className="theme-text hover:theme-accent transition-colors font-medium px-2 py-1"
|
||||
|
||||
@@ -23,6 +23,62 @@ export default function RichTextEditor({
|
||||
const previewRef = useRef<HTMLDivElement>(null);
|
||||
const visualTextareaRef = useRef<HTMLTextAreaElement>(null);
|
||||
const visualDivRef = useRef<HTMLDivElement>(null);
|
||||
const [isUserTyping, setIsUserTyping] = useState(false);
|
||||
|
||||
// Utility functions for cursor position preservation
|
||||
const saveCursorPosition = () => {
|
||||
const selection = window.getSelection();
|
||||
if (!selection || selection.rangeCount === 0) return null;
|
||||
|
||||
const range = selection.getRangeAt(0);
|
||||
const div = visualDivRef.current;
|
||||
if (!div) return null;
|
||||
|
||||
return {
|
||||
startContainer: range.startContainer,
|
||||
startOffset: range.startOffset,
|
||||
endContainer: range.endContainer,
|
||||
endOffset: range.endOffset
|
||||
};
|
||||
};
|
||||
|
||||
const restoreCursorPosition = (position: any) => {
|
||||
if (!position) return;
|
||||
|
||||
try {
|
||||
const selection = window.getSelection();
|
||||
if (!selection) return;
|
||||
|
||||
const range = document.createRange();
|
||||
range.setStart(position.startContainer, position.startOffset);
|
||||
range.setEnd(position.endContainer, position.endOffset);
|
||||
|
||||
selection.removeAllRanges();
|
||||
selection.addRange(range);
|
||||
} catch (e) {
|
||||
console.warn('Could not restore cursor position:', e);
|
||||
}
|
||||
};
|
||||
|
||||
// Set initial content when component mounts
|
||||
useEffect(() => {
|
||||
const div = visualDivRef.current;
|
||||
if (div && div.innerHTML !== value) {
|
||||
div.innerHTML = value || '';
|
||||
}
|
||||
}, []);
|
||||
|
||||
// Update div content when value changes externally (not from user typing)
|
||||
useEffect(() => {
|
||||
const div = visualDivRef.current;
|
||||
if (div && !isUserTyping && div.innerHTML !== value) {
|
||||
const cursorPosition = saveCursorPosition();
|
||||
div.innerHTML = value || '';
|
||||
if (cursorPosition) {
|
||||
setTimeout(() => restoreCursorPosition(cursorPosition), 0);
|
||||
}
|
||||
}
|
||||
}, [value, isUserTyping]);
|
||||
|
||||
// Preload sanitization config
|
||||
useEffect(() => {
|
||||
@@ -38,9 +94,17 @@ export default function RichTextEditor({
|
||||
const div = visualDivRef.current;
|
||||
if (div) {
|
||||
const newHtml = div.innerHTML;
|
||||
setIsUserTyping(true);
|
||||
|
||||
// Only call onChange if content actually changed
|
||||
if (newHtml !== value) {
|
||||
onChange(newHtml);
|
||||
setHtmlValue(newHtml);
|
||||
}
|
||||
|
||||
// Reset typing state after a short delay
|
||||
setTimeout(() => setIsUserTyping(false), 100);
|
||||
}
|
||||
};
|
||||
|
||||
const handlePaste = async (e: React.ClipboardEvent<HTMLTextAreaElement | HTMLDivElement>) => {
|
||||
@@ -155,8 +219,10 @@ export default function RichTextEditor({
|
||||
}
|
||||
|
||||
// Update the state
|
||||
setIsUserTyping(true);
|
||||
onChange(visualDiv.innerHTML);
|
||||
setHtmlValue(visualDiv.innerHTML);
|
||||
setTimeout(() => setIsUserTyping(false), 100);
|
||||
} else if (textarea) {
|
||||
// Fallback for textarea mode (shouldn't happen in visual mode but good to have)
|
||||
const start = textarea.selectionStart;
|
||||
@@ -213,8 +279,10 @@ export default function RichTextEditor({
|
||||
visualDiv.innerHTML += textAsHtml;
|
||||
}
|
||||
|
||||
setIsUserTyping(true);
|
||||
onChange(visualDiv.innerHTML);
|
||||
setHtmlValue(visualDiv.innerHTML);
|
||||
setTimeout(() => setIsUserTyping(false), 100);
|
||||
}
|
||||
} else {
|
||||
console.log('No usable clipboard content found');
|
||||
@@ -229,8 +297,10 @@ export default function RichTextEditor({
|
||||
.filter(paragraph => paragraph.trim())
|
||||
.map(paragraph => `<p>${paragraph.replace(/\n/g, '<br>')}</p>`)
|
||||
.join('\n');
|
||||
setIsUserTyping(true);
|
||||
onChange(value + textAsHtml);
|
||||
setHtmlValue(value + textAsHtml);
|
||||
setTimeout(() => setIsUserTyping(false), 100);
|
||||
}
|
||||
}
|
||||
};
|
||||
@@ -293,8 +363,10 @@ export default function RichTextEditor({
|
||||
}
|
||||
|
||||
// Update the state
|
||||
setIsUserTyping(true);
|
||||
onChange(visualDiv.innerHTML);
|
||||
setHtmlValue(visualDiv.innerHTML);
|
||||
setTimeout(() => setIsUserTyping(false), 100);
|
||||
}
|
||||
} else {
|
||||
// HTML mode - existing logic with improvements
|
||||
@@ -434,6 +506,7 @@ export default function RichTextEditor({
|
||||
{/* Editor */}
|
||||
<div className="border theme-border rounded-b-lg overflow-hidden">
|
||||
{viewMode === 'visual' ? (
|
||||
<div className="relative">
|
||||
<div
|
||||
ref={visualDivRef}
|
||||
contentEditable
|
||||
@@ -441,9 +514,17 @@ export default function RichTextEditor({
|
||||
onPaste={handlePaste}
|
||||
className="p-3 min-h-[300px] focus:outline-none focus:ring-0 whitespace-pre-wrap"
|
||||
style={{ minHeight: '300px' }}
|
||||
dangerouslySetInnerHTML={{ __html: value || `<p>${placeholder}</p>` }}
|
||||
suppressContentEditableWarning={true}
|
||||
/>
|
||||
{!value && (
|
||||
<div
|
||||
className="absolute top-3 left-3 text-gray-500 dark:text-gray-400 pointer-events-none select-none"
|
||||
style={{ minHeight: '300px' }}
|
||||
>
|
||||
{placeholder}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
) : (
|
||||
<Textarea
|
||||
value={htmlValue}
|
||||
|
||||
@@ -6,17 +6,21 @@ interface TagFilterProps {
|
||||
tags: Tag[];
|
||||
selectedTags: string[];
|
||||
onTagToggle: (tagName: string) => void;
|
||||
showCollectionCount?: boolean;
|
||||
}
|
||||
|
||||
export default function TagFilter({ tags, selectedTags, onTagToggle }: TagFilterProps) {
|
||||
export default function TagFilter({ tags, selectedTags, onTagToggle, showCollectionCount = false }: TagFilterProps) {
|
||||
if (!Array.isArray(tags) || tags.length === 0) return null;
|
||||
|
||||
// Filter out tags with no stories, then sort by usage count (descending) and then alphabetically
|
||||
// Filter out tags with no count, then sort by usage count (descending) and then alphabetically
|
||||
const sortedTags = [...tags]
|
||||
.filter(tag => (tag.storyCount || 0) > 0)
|
||||
.filter(tag => {
|
||||
const count = showCollectionCount ? (tag.collectionCount || 0) : (tag.storyCount || 0);
|
||||
return count > 0;
|
||||
})
|
||||
.sort((a, b) => {
|
||||
const aCount = a.storyCount || 0;
|
||||
const bCount = b.storyCount || 0;
|
||||
const aCount = showCollectionCount ? (a.collectionCount || 0) : (a.storyCount || 0);
|
||||
const bCount = showCollectionCount ? (b.collectionCount || 0) : (b.storyCount || 0);
|
||||
if (bCount !== aCount) {
|
||||
return bCount - aCount;
|
||||
}
|
||||
@@ -40,7 +44,7 @@ export default function TagFilter({ tags, selectedTags, onTagToggle }: TagFilter
|
||||
: 'theme-card theme-text theme-border hover:border-gray-400'
|
||||
}`}
|
||||
>
|
||||
{tag.name} ({tag.storyCount || 0})
|
||||
{tag.name} ({showCollectionCount ? (tag.collectionCount || 0) : (tag.storyCount || 0)})
|
||||
</button>
|
||||
);
|
||||
})}
|
||||
|
||||
98
frontend/src/components/ui/Dropdown.tsx
Normal file
@@ -0,0 +1,98 @@
|
||||
'use client';
|
||||
|
||||
import { useState, useRef, useEffect } from 'react';
|
||||
import Link from 'next/link';
|
||||
import { ChevronDownIcon } from '@heroicons/react/24/outline';
|
||||
|
||||
interface DropdownItem {
|
||||
href: string;
|
||||
label: string;
|
||||
description?: string;
|
||||
}
|
||||
|
||||
interface DropdownProps {
|
||||
trigger: string;
|
||||
items: DropdownItem[];
|
||||
className?: string;
|
||||
onItemClick?: () => void;
|
||||
}
|
||||
|
||||
export default function Dropdown({ trigger, items, className = '', onItemClick }: DropdownProps) {
|
||||
const [isOpen, setIsOpen] = useState(false);
|
||||
const dropdownRef = useRef<HTMLDivElement>(null);
|
||||
const timeoutRef = useRef<NodeJS.Timeout>();
|
||||
|
||||
useEffect(() => {
|
||||
const handleClickOutside = (event: MouseEvent) => {
|
||||
if (dropdownRef.current && !dropdownRef.current.contains(event.target as Node)) {
|
||||
setIsOpen(false);
|
||||
}
|
||||
};
|
||||
|
||||
if (isOpen) {
|
||||
document.addEventListener('mousedown', handleClickOutside);
|
||||
}
|
||||
|
||||
return () => {
|
||||
document.removeEventListener('mousedown', handleClickOutside);
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current);
|
||||
}
|
||||
};
|
||||
}, [isOpen]);
|
||||
|
||||
const handleMouseEnter = () => {
|
||||
if (timeoutRef.current) {
|
||||
clearTimeout(timeoutRef.current);
|
||||
}
|
||||
setIsOpen(true);
|
||||
};
|
||||
|
||||
const handleMouseLeave = () => {
|
||||
timeoutRef.current = setTimeout(() => {
|
||||
setIsOpen(false);
|
||||
}, 150);
|
||||
};
|
||||
|
||||
const handleItemClick = () => {
|
||||
setIsOpen(false);
|
||||
onItemClick?.();
|
||||
};
|
||||
|
||||
return (
|
||||
<div
|
||||
className={`relative ${className}`}
|
||||
ref={dropdownRef}
|
||||
onMouseEnter={handleMouseEnter}
|
||||
onMouseLeave={handleMouseLeave}
|
||||
>
|
||||
<button
|
||||
onClick={() => setIsOpen(!isOpen)}
|
||||
className="theme-text hover:theme-accent transition-colors font-medium flex items-center gap-1"
|
||||
>
|
||||
{trigger}
|
||||
<ChevronDownIcon
|
||||
className={`h-4 w-4 transition-transform duration-200 ${isOpen ? 'rotate-180' : ''}`}
|
||||
/>
|
||||
</button>
|
||||
|
||||
{isOpen && (
|
||||
<div className="absolute top-full left-0 mt-1 w-64 theme-card theme-shadow border theme-border rounded-lg py-2 z-50">
|
||||
{items.map((item, index) => (
|
||||
<Link
|
||||
key={index}
|
||||
href={item.href}
|
||||
onClick={handleItemClick}
|
||||
className="block px-4 py-2 theme-text hover:theme-accent transition-colors"
|
||||
>
|
||||
<div className="font-medium">{item.label}</div>
|
||||
{item.description && (
|
||||
<div className="text-sm theme-text-secondary mt-1">{item.description}</div>
|
||||
)}
|
||||
</Link>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
);
|
||||
}
|
||||
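
The Dropdown above closes on outside clicks, opens on hover with a short leave delay, and renders each item as a link with an optional description; Header.tsx uses it for the Add Story menu. A minimal self-contained usage sketch (the '@/components' import alias is an assumption):

// Minimal usage sketch for the Dropdown component above.
import Dropdown from '@/components/ui/Dropdown';

const importItems = [
  { href: '/stories/import', label: 'Import from URL', description: 'Import a single story from a website' },
  { href: '/stories/import/bulk', label: 'Bulk Import', description: 'Import multiple stories from a list of URLs' },
];

export function ImportMenu() {
  return <Dropdown trigger="Import" items={importItems} />;
}
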
@@ -1,7 +1,8 @@
|
||||
'use client';
|
||||
|
||||
import { createContext, useContext, useEffect, useState } from 'react';
|
||||
import { authApi } from '../lib/api';
|
||||
import { useRouter } from 'next/navigation';
|
||||
import { authApi, setGlobalAuthFailureHandler } from '../lib/api';
|
||||
import { preloadSanitizationConfig } from '../lib/sanitization';
|
||||
|
||||
interface AuthContextType {
|
||||
@@ -16,8 +17,18 @@ const AuthContext = createContext<AuthContextType | undefined>(undefined);
|
||||
export function AuthProvider({ children }: { children: React.ReactNode }) {
|
||||
const [isAuthenticated, setIsAuthenticated] = useState(false);
|
||||
const [loading, setLoading] = useState(true);
|
||||
const router = useRouter();
|
||||
|
||||
// Handle authentication failures from API calls
|
||||
const handleAuthFailure = () => {
|
||||
console.log('Authentication token expired, logging out user');
|
||||
setIsAuthenticated(false);
|
||||
router.push('/login');
|
||||
};
|
||||
|
||||
useEffect(() => {
|
||||
// Register the auth failure handler for API interceptor
|
||||
setGlobalAuthFailureHandler(handleAuthFailure);
|
||||
// Check if user is already authenticated on app load
|
||||
const checkAuth = async () => {
|
||||
try {
|
||||
@@ -42,7 +53,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
|
||||
|
||||
checkAuth();
|
||||
loadSanitizationConfig();
|
||||
}, []);
|
||||
}, [router]);
|
||||
|
||||
const login = async (password: string) => {
|
||||
try {
|
||||
@@ -57,6 +68,7 @@ export function AuthProvider({ children }: { children: React.ReactNode }) {
|
||||
const logout = () => {
|
||||
authApi.logout();
|
||||
setIsAuthenticated(false);
|
||||
router.push('/login');
|
||||
};
|
||||
|
||||
return (
|
||||
|
||||
@@ -21,15 +21,36 @@ api.interceptors.request.use((config) => {
|
||||
return config;
|
||||
});
|
||||
|
||||
// Global auth failure handler - can be set by AuthContext
|
||||
let globalAuthFailureHandler: (() => void) | null = null;
|
||||
|
||||
export const setGlobalAuthFailureHandler = (handler: () => void) => {
|
||||
globalAuthFailureHandler = handler;
|
||||
};
|
||||
|
||||
// Response interceptor to handle auth errors
|
||||
api.interceptors.response.use(
|
||||
(response) => response,
|
||||
(error) => {
|
||||
if (error.response?.status === 401) {
|
||||
// Clear invalid token and redirect to login
|
||||
// Handle authentication failures
|
||||
if (error.response?.status === 401 || error.response?.status === 403) {
|
||||
console.warn('Authentication failed, token may be expired or invalid');
|
||||
|
||||
// Clear invalid token
|
||||
localStorage.removeItem('auth-token');
|
||||
|
||||
// Use global handler if available (from AuthContext), otherwise fallback to direct redirect
|
||||
if (globalAuthFailureHandler) {
|
||||
globalAuthFailureHandler();
|
||||
} else {
|
||||
// Fallback for cases where AuthContext isn't available
|
||||
window.location.href = '/login';
|
||||
}
|
||||
|
||||
// Return a more specific error for components to handle gracefully
|
||||
return Promise.reject(new Error('Authentication required'));
|
||||
}
|
||||
|
||||
return Promise.reject(error);
|
||||
}
|
||||
);
|
||||
@@ -150,6 +171,22 @@ export const storyApi = {
|
||||
const response = await api.post('/stories/recreate-typesense-collection');
|
||||
return response.data;
|
||||
},
|
||||
|
||||
checkDuplicate: async (title: string, authorName: string): Promise<{
|
||||
hasDuplicates: boolean;
|
||||
count: number;
|
||||
duplicates: Array<{
|
||||
id: string;
|
||||
title: string;
|
||||
authorName: string;
|
||||
createdAt: string;
|
||||
}>;
|
||||
}> => {
|
||||
const response = await api.get('/stories/check-duplicate', {
|
||||
params: { title, authorName }
|
||||
});
|
||||
return response.data;
|
||||
},
|
||||
};
|
||||
|
||||
// Author endpoints
|
||||
@@ -240,6 +277,11 @@ export const tagApi = {
|
||||
// Backend returns TagDto[], extract just the names
|
||||
return response.data.map((tag: Tag) => tag.name);
|
||||
},
|
||||
|
||||
getCollectionTags: async (): Promise<Tag[]> => {
|
||||
const response = await api.get('/tags/collections');
|
||||
return response.data;
|
||||
},
|
||||
};
|
||||
|
||||
// Series endpoints
|
||||
|
||||
334
frontend/src/lib/scraper/config/sites.json
Normal file
@@ -0,0 +1,334 @@
|
||||
{
|
||||
"sites": {
|
||||
"deviantart.com": {
|
||||
"story": {
|
||||
"title": "h1",
|
||||
"titleFallback": "meta[property='og:title']",
|
||||
"titleFallbackAttribute": "content",
|
||||
"author": {
|
||||
"strategy": "text-pattern",
|
||||
"pattern": "by ([^\\s]+) on DeviantArt",
|
||||
"searchAfter": "<title>",
|
||||
"searchBefore": "</title>"
|
||||
},
|
||||
"content": {
|
||||
"strategy": "text-blocks",
|
||||
"minLength": 200,
|
||||
"containerHints": ["journal", "literature", "story", "text", "content"],
|
||||
"excludeSelectors": ["script", "style", "nav", "header", "footer", ".dev-page-sidebar"]
|
||||
},
|
||||
"summary": "meta[property='og:description']",
|
||||
"summaryAttribute": "content",
|
||||
"tags": "a[data-tagname]",
|
||||
"tagsAttribute": "data-tagname",
|
||||
"coverImage": "meta[property='og:image']",
|
||||
"coverImageAttribute": "content"
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": "a[data-hook='deviation_link']",
|
||||
"filterStrategy": "dom-check",
|
||||
"requiresChildElement": "div[class*='journal']"
|
||||
}
|
||||
},
|
||||
|
||||
"literotica.com": {
|
||||
"story": {
|
||||
"title": "h1",
|
||||
"titleFallback": "meta[property='og:title']",
|
||||
"titleFallbackAttribute": "content",
|
||||
"author": {
|
||||
"strategy": "link-with-path",
|
||||
"pathContains": "/authors/",
|
||||
"searchWithin": "header, .story-info, #story-meta"
|
||||
},
|
||||
"content": {
|
||||
"strategy": "text-blocks",
|
||||
"minLength": 500,
|
||||
"containerHints": ["story", "content", "text"],
|
||||
"excludeSelectors": ["script", "style", "nav", "header", "footer"]
|
||||
},
|
||||
"summary": "meta[name='description']",
|
||||
"summaryAttribute": "content",
|
||||
"multiPage": {
|
||||
"enabled": true,
|
||||
"strategy": "url-pattern",
|
||||
"pageParam": "page",
|
||||
"maxPages": 20
|
||||
}
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": {
|
||||
"strategy": "href-pattern",
|
||||
"pattern": "/s/[^/]+$",
|
||||
"searchWithin": "main, #content, .stories-list"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"mcstories.com": {
|
||||
"story": {
|
||||
"title": "title",
|
||||
"titleTransform": "remove-suffix: - MCStories.com",
|
||||
"author": "meta[name='dcterms.creator']",
|
||||
"authorAttribute": "content",
|
||||
"content": "article#mcstories",
|
||||
"summary": "meta[name='dcterms.description']",
|
||||
"summaryAttribute": "content"
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": "a[href$='.html']:not([href*='Authors'])",
|
||||
"linkPrefix": "https://mcstories.com/"
|
||||
}
|
||||
},
|
||||
|
||||
"docs-lab.com": {
|
||||
"story": {
|
||||
"title": "title",
|
||||
"titleTransform": "remove-suffix: - Doc's Lab",
|
||||
"author": "a[href*='/profiles/'] strong",
|
||||
"content": {
|
||||
"strategy": "html-between",
|
||||
"startMarker": "<h2>Story</h2>",
|
||||
"endMarker": "</div>",
|
||||
"includeStart": false
|
||||
},
|
||||
"tags": "span.label"
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": "a[href*='/submissions/']",
|
||||
"linkPrefix": "https://docs-lab.com"
|
||||
}
|
||||
},
|
||||
|
||||
"archiveofourown.org": {
|
||||
"story": {
|
||||
"title": "h2.title",
|
||||
"author": "a[rel='author']",
|
||||
"content": {
|
||||
"strategy": "chapters",
|
||||
"chapterSelector": "div.userstuff[role='article']",
|
||||
"chaptersWrapper": "#chapters",
|
||||
"singleChapter": "#workskin"
|
||||
},
|
||||
"summary": "div.summary blockquote.userstuff",
|
||||
"tags": {
|
||||
"strategy": "multiple-types",
|
||||
"selectors": {
|
||||
"fandom": "dd.fandom a.tag",
|
||||
"warning": "dd.warning a.tag",
|
||||
"category": "dd.category a.tag",
|
||||
"relationship": "dd.relationship a.tag",
|
||||
"character": "dd.character a.tag",
|
||||
"freeform": "dd.freeform a.tag"
|
||||
}
|
||||
},
|
||||
"multiPage": {
|
||||
"enabled": true,
|
||||
"strategy": "chapter-navigation",
|
||||
"chapterListSelector": "#chapter_index option",
|
||||
"urlPattern": "/chapters/{chapterId}"
|
||||
}
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": "h4.heading a[href*='/works/']",
|
||||
"pagination": {
|
||||
"enabled": true,
|
||||
"nextPageSelector": "li.next a[rel='next']"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"fanfiction.net": {
|
||||
"story": {
|
||||
"title": "#profile_top b.xcontrast_txt",
|
||||
"author": "#profile_top a[href*='/u/']",
|
||||
"content": "#storytext",
|
||||
"summary": "#profile_top div.xcontrast_txt",
|
||||
"coverImage": {
|
||||
"strategy": "lazy-loaded",
|
||||
"selector": "img.cimage",
|
||||
"attribute": "data-original"
|
||||
},
|
||||
"multiPage": {
|
||||
"enabled": true,
|
||||
"strategy": "chapter-dropdown",
|
||||
"chapterSelector": "select#chap_select option",
|
||||
"urlPattern": "{baseUrl}/{chapterNumber}"
|
||||
}
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": "div.z-list a.stitle",
|
||||
"metadata": {
|
||||
"strategy": "sibling-text",
|
||||
"metadataSelector": "div.z-padtop2",
|
||||
"parsePattern": "Rated: ([^-]+) - .+ - Chapters: (\\d+)"
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"royalroad.com": {
|
||||
"story": {
|
||||
"title": "h1[property='name']",
|
||||
"author": "h4[property='author'] a",
|
||||
"content": {
|
||||
"strategy": "chapter-content",
|
||||
"selector": "div.chapter-content",
|
||||
"cleanupSelectors": [".portlet", ".ads-holder", "div[style*='display:none']"]
|
||||
},
|
||||
"summary": "div.description div.hidden-content",
|
||||
"tags": "span.tags a.fiction-tag",
|
||||
"coverImage": "img.thumbnail",
|
||||
"coverImageAttribute": "src",
|
||||
"multiPage": {
|
||||
"enabled": true,
|
||||
"strategy": "table-of-contents",
|
||||
"tocSelector": "table#chapters tbody tr a[href*='/chapter/']",
|
||||
"requiresAuth": false
|
||||
}
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": "div.fiction-list-item h2.fiction-title a",
|
||||
"additionalInfo": {
|
||||
"strategy": "data-attributes",
|
||||
"statsSelector": "div.stats",
|
||||
"extractStats": ["pages", "followers", "views"]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"wattpad.com": {
|
||||
"story": {
|
||||
"title": "h1",
|
||||
"author": {
|
||||
"strategy": "schema-org",
|
||||
"schemaType": "Person",
|
||||
"property": "name",
|
||||
"fallbackSelector": "a[href*='/user/']"
|
||||
},
|
||||
"content": {
|
||||
"strategy": "react-content",
|
||||
"contentClass": "pre-wrap",
|
||||
"paragraphSelector": "p[data-p-id]",
|
||||
"requiresJavaScript": true
|
||||
},
|
||||
"summary": "h2.description",
|
||||
"tags": "div.tag-items a.tag",
|
||||
"coverImage": {
|
||||
"strategy": "responsive-image",
|
||||
"selector": "img[alt*='cover']",
|
||||
"srcsetAttribute": "srcset",
|
||||
"selectLargest": true
|
||||
},
|
||||
"multiPage": {
|
||||
"enabled": true,
|
||||
"strategy": "api-based",
|
||||
"apiPattern": "/v4/parts/{partId}/text",
|
||||
"tocApiPattern": "/v5/stories/{storyId}/parts",
|
||||
"requiresAuth": true
|
||||
}
|
||||
},
|
||||
"authorPage": {
|
||||
"storyLinks": {
|
||||
"strategy": "infinite-scroll",
|
||||
"initialSelector": "a[href*='/story/']",
|
||||
"apiEndpoint": "/v4/users/{userId}/stories",
|
||||
"requiresJavaScript": true
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
"strategies": {
|
||||
"text-blocks": {
|
||||
"description": "Find content by looking for large text blocks",
|
||||
"implementation": "Find all text nodes, group by parent, select parent with most text"
|
||||
},
|
||||
"link-with-path": {
|
||||
"description": "Find links containing specific path patterns",
|
||||
"implementation": "querySelector with href*= or iterate and check .href property"
|
||||
},
|
||||
"href-pattern": {
|
||||
"description": "Match links by regex pattern",
|
||||
"implementation": "Array.from(links).filter(a => pattern.test(a.href))"
|
||||
},
|
||||
"text-pattern": {
|
||||
"description": "Extract text using regex from raw HTML",
|
||||
"implementation": "Use regex on .html() with proper groups"
|
||||
},
|
||||
"html-between": {
|
||||
"description": "Extract HTML between markers",
|
||||
"implementation": "indexOf() to find positions, substring to extract"
|
||||
},
|
||||
"chapters": {
|
||||
"description": "Extract story content that may be in chapters",
|
||||
"implementation": "Check for multiple chapters or single chapter format"
|
||||
},
|
||||
"multiple-types": {
|
||||
"description": "Extract different categories of tags",
|
||||
"implementation": "Map over selector types and extract each category"
|
||||
},
|
||||
"chapter-navigation": {
|
||||
"description": "Navigate through chapters using chapter index",
|
||||
"implementation": "Extract chapter IDs and construct URLs"
|
||||
},
|
||||
"lazy-loaded": {
|
||||
"description": "Extract images that are lazy-loaded",
|
||||
"implementation": "Check data-* attributes for actual image source"
|
||||
},
|
||||
"chapter-dropdown": {
|
||||
"description": "Handle stories with chapter selection dropdown",
|
||||
"implementation": "Parse dropdown options and construct chapter URLs"
|
||||
},
|
||||
"table-of-contents": {
|
||||
"description": "Extract chapters from a table of contents",
|
||||
"implementation": "Find all chapter links in TOC structure"
|
||||
},
|
||||
"schema-org": {
|
||||
"description": "Extract data from schema.org structured data",
|
||||
"implementation": "Parse JSON-LD or microdata for specific properties"
|
||||
},
|
||||
"react-content": {
|
||||
"description": "Extract content from React-rendered pages",
|
||||
"implementation": "May require JavaScript execution or API access"
|
||||
},
|
||||
"responsive-image": {
|
||||
"description": "Select best quality from responsive images",
|
||||
"implementation": "Parse srcset and select highest resolution"
|
||||
},
|
||||
"api-based": {
|
||||
"description": "Use API endpoints instead of HTML scraping",
|
||||
"implementation": "Detect API patterns and make direct API calls"
|
||||
},
|
||||
"infinite-scroll": {
|
||||
"description": "Handle pages with infinite scroll",
|
||||
"implementation": "Detect scroll API endpoints or pagination"
|
||||
}
|
||||
},
|
||||
|
||||
"globalOptions": {
|
||||
"userAgent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36",
|
||||
"timeout": 30000,
|
||||
"retryAttempts": 3,
|
||||
"rateLimitMs": 1000,
|
||||
"cacheDuration": 300000,
|
||||
"javascriptTimeout": 10000
|
||||
},
|
||||
|
||||
"siteNotes": {
|
||||
"wattpad.com": {
|
||||
"warning": "Wattpad has aggressive anti-scraping measures. Consider using their API if available.",
|
||||
"requiresAuth": "Some stories may require login to access full content"
|
||||
},
|
||||
"royalroad.com": {
|
||||
"note": "Very scraper-friendly with good HTML structure"
|
||||
},
|
||||
"archiveofourown.org": {
|
||||
"note": "Respects robots.txt, has good semantic HTML",
|
||||
"rateLimit": "Be extra respectful of rate limits"
|
||||
},
|
||||
"fanfiction.net": {
|
||||
"note": "Older site with simpler HTML structure",
|
||||
"warning": "Known to block IPs for aggressive scraping"
|
||||
}
|
||||
}
|
||||
}
|
||||
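
sites.json above only declares per-site selectors, strategies, and global options; the StoryScraper below wires globalOptions.rateLimitMs and cacheDuration into RateLimiter and ScraperCache utilities whose implementations are not part of this diff. A minimal sketch of what such a rate limiter could look like (purely an assumption about utils/rateLimit, not the actual file):

// Hypothetical minimal rate limiter; the real utils/rateLimit implementation is not shown in this diff.
export class RateLimiter {
  private lastRequest = 0;

  constructor(private readonly minIntervalMs: number) {}

  // Resolves once at least minIntervalMs has passed since the previous request.
  async wait(): Promise<void> {
    const elapsed = Date.now() - this.lastRequest;
    if (elapsed < this.minIntervalMs) {
      await new Promise(resolve => setTimeout(resolve, this.minIntervalMs - elapsed));
    }
    this.lastRequest = Date.now();
  }
}
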
379
frontend/src/lib/scraper/scraper.ts
Normal file
@@ -0,0 +1,379 @@
|
||||
import 'server-only';
|
||||
|
||||
// Note: cheerio import is done dynamically to avoid client-side bundling issues
|
||||
// Using any type for CheerioAPI to prevent bundling issues
|
||||
import {
|
||||
SitesConfig,
|
||||
SiteConfig,
|
||||
ScrapedStory,
|
||||
ScrapedAuthorStory,
|
||||
SelectorStrategy,
|
||||
MultiPageConfig,
|
||||
ScraperError
|
||||
} from './types';
|
||||
import { RateLimiter } from './utils/rateLimit';
|
||||
import { ScraperCache } from './utils/cache';
|
||||
import { UrlParser } from './utils/urlParser';
|
||||
import {
|
||||
extractByTextPattern,
|
||||
extractTextBlocks,
|
||||
extractHtmlBetween,
|
||||
extractLinkText,
|
||||
extractLinkWithPath,
|
||||
extractHrefPattern,
|
||||
extractFirstImage,
|
||||
extractResponsiveImage,
|
||||
extractLazyLoadedImage,
|
||||
extractChapters,
|
||||
extractChapterContent,
|
||||
extractMultipleTypes,
|
||||
extractSchemaOrg,
|
||||
extractReactContent,
|
||||
cleanHtml,
|
||||
extractAttribute
|
||||
} from './strategies';
|
||||
import sitesConfig from './config/sites.json';
|
||||
|
||||
export class StoryScraper {
|
||||
private config: SitesConfig;
|
||||
private cache: ScraperCache;
|
||||
private rateLimiter: RateLimiter;
|
||||
|
||||
constructor() {
|
||||
this.config = sitesConfig as SitesConfig;
|
||||
this.cache = new ScraperCache(this.config.globalOptions.cacheDuration);
|
||||
this.rateLimiter = new RateLimiter(this.config.globalOptions.rateLimitMs);
|
||||
}
|
||||
|
||||
async scrapeStory(url: string): Promise<ScrapedStory> {
|
||||
try {
|
||||
if (!UrlParser.validateUrl(url)) {
|
||||
throw new Error(`Invalid URL: ${url}`);
|
||||
}
|
||||
|
||||
const domain = UrlParser.getDomain(url);
|
||||
const siteConfig = this.config.sites[domain];
|
||||
|
||||
if (!siteConfig) {
|
||||
throw new Error(`Unsupported site: ${domain}`);
|
||||
}
|
||||
|
||||
const html = await this.fetchWithCache(url);
|
||||
const cheerio = await import('cheerio');
|
||||
const $ = cheerio.load(html);
|
||||
|
||||
const story: ScrapedStory = {
|
||||
title: await this.extractFieldWithFallback($, siteConfig.story, 'title', html),
|
||||
author: await this.extractFieldWithFallback($, siteConfig.story, 'author', html),
|
||||
content: await this.extractContent($, siteConfig.story, url, html),
|
||||
sourceUrl: url
|
||||
};
|
||||
|
||||
// Extract optional fields
|
||||
if (siteConfig.story.summary) {
|
||||
story.summary = await this.extractField($, siteConfig.story.summary, html, siteConfig.story.summaryAttribute);
|
||||
}
|
||||
|
||||
if (siteConfig.story.coverImage) {
|
||||
story.coverImage = await this.extractField($, siteConfig.story.coverImage, html, siteConfig.story.coverImageAttribute);
|
||||
}
|
||||
|
||||
if (siteConfig.story.tags) {
|
||||
const tagsResult = await this.extractTags($, siteConfig.story.tags, html, siteConfig.story.tagsAttribute);
|
||||
if (Array.isArray(tagsResult)) {
|
||||
story.tags = tagsResult;
|
||||
} else if (typeof tagsResult === 'string' && tagsResult) {
|
||||
story.tags = [tagsResult];
|
||||
}
|
||||
}
|
||||
|
||||
// Apply post-processing
|
||||
story.title = this.applyTransforms(story.title, siteConfig.story.titleTransform);
|
||||
story.content = await cleanHtml(story.content);
|
||||
|
||||
return story;
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
throw new ScraperError(
|
||||
`Failed to scrape ${url}: ${error.message}`,
|
||||
url,
|
||||
error
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
async scrapeAuthorPage(url: string): Promise<ScrapedAuthorStory[]> {
|
||||
try {
|
||||
if (!UrlParser.validateUrl(url)) {
|
||||
throw new Error(`Invalid URL: ${url}`);
|
||||
}
|
||||
|
||||
const domain = UrlParser.getDomain(url);
|
||||
const siteConfig = this.config.sites[domain];
|
||||
|
||||
if (!siteConfig || !siteConfig.authorPage) {
|
||||
throw new Error(`Author page scraping not supported for: ${domain}`);
|
||||
}
|
||||
|
||||
const html = await this.fetchWithCache(url);
|
||||
const cheerio = await import('cheerio');
|
||||
const $ = cheerio.load(html);
|
||||
|
||||
const storyLinks = await this.extractField($, siteConfig.authorPage.storyLinks, html);
|
||||
const stories: ScrapedAuthorStory[] = [];
|
||||
|
||||
if (Array.isArray(storyLinks)) {
|
||||
for (const link of storyLinks) {
|
||||
const storyUrl = UrlParser.normalizeUrl(link, url);
|
||||
try {
|
||||
const scrapedStory = await this.scrapeStory(storyUrl);
|
||||
stories.push({
|
||||
url: storyUrl,
|
||||
title: scrapedStory.title,
|
||||
author: scrapedStory.author,
|
||||
summary: scrapedStory.summary
|
||||
});
|
||||
} catch (error) {
|
||||
console.warn(`Failed to scrape story ${storyUrl}:`, error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return stories;
|
||||
} catch (error) {
|
||||
if (error instanceof Error) {
|
||||
throw new ScraperError(
|
||||
`Failed to scrape author page ${url}: ${error.message}`,
|
||||
url,
|
||||
error
|
||||
);
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private async extractFieldWithFallback(
|
||||
$: any,
|
||||
config: any,
|
||||
fieldName: string,
|
||||
html: string
|
||||
): Promise<string> {
|
||||
const primarySelector = config[fieldName];
|
||||
const fallbackSelector = config[`${fieldName}Fallback`];
|
||||
const attribute = config[`${fieldName}Attribute`];
|
||||
const fallbackAttribute = config[`${fieldName}FallbackAttribute`];
|
||||
|
||||
// Try primary selector first
|
||||
if (primarySelector) {
|
||||
const result = await this.extractField($, primarySelector, html, attribute);
|
||||
if (result && result.trim()) {
|
||||
return result.trim();
|
||||
}
|
||||
}
|
||||
|
||||
// Try fallback selector if primary failed
|
||||
if (fallbackSelector) {
|
||||
const result = await this.extractField($, fallbackSelector, html, fallbackAttribute);
|
||||
if (result && result.trim()) {
|
||||
return result.trim();
|
||||
}
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
private async extractField(
|
||||
$: any,
|
||||
selector: string | SelectorStrategy,
|
||||
html: string,
|
||||
attribute?: string
|
||||
): Promise<any> {
|
||||
if (typeof selector === 'string') {
|
||||
// Simple CSS selector - always return single value (first element)
|
||||
const element = $(selector).first();
|
||||
if (attribute) {
|
||||
// Extract specific attribute instead of text
|
||||
return element.attr(attribute) || '';
|
||||
}
|
||||
return element.text().trim();
|
||||
}
|
||||
|
||||
// Strategy-based extraction
|
||||
return await this.executeStrategy($, selector, html);
|
||||
}
|
||||
|
||||
private async extractTags(
|
||||
$: any,
|
||||
selector: string | SelectorStrategy,
|
||||
html: string,
|
||||
attribute?: string
|
||||
): Promise<any> {
|
||||
if (typeof selector === 'string') {
|
||||
// Simple CSS selector - collect ALL matching elements for tags
|
||||
const elements = $(selector);
|
||||
|
||||
if (elements.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const results: string[] = [];
|
||||
elements.each((_: any, elem: any) => {
|
||||
const $elem = $(elem);
|
||||
const value = attribute ? $elem.attr(attribute) : $elem.text().trim();
|
||||
if (value) {
|
||||
results.push(value);
|
||||
}
|
||||
});
|
||||
|
||||
return results;
|
||||
}
|
||||
|
||||
// Strategy-based extraction for tags
|
||||
return await this.executeStrategy($, selector, html);
|
||||
}
|
||||
|
||||
private async executeStrategy(
|
||||
$: any,
|
||||
strategy: SelectorStrategy,
|
||||
html: string
|
||||
): Promise<any> {
|
||||
switch (strategy.strategy) {
|
||||
case 'text-pattern':
|
||||
return extractByTextPattern(html, strategy as any);
|
||||
case 'link-with-path':
|
||||
return extractLinkWithPath($, strategy as any);
|
||||
case 'text-blocks':
|
||||
return extractTextBlocks($, strategy as any);
|
||||
case 'href-pattern':
|
||||
return extractHrefPattern($, strategy as any);
|
||||
case 'html-between':
|
||||
return extractHtmlBetween(html, strategy as any);
|
||||
case 'link-text':
|
||||
return extractLinkText($, strategy as any);
|
||||
case 'first-image':
|
||||
return extractFirstImage($, strategy as any);
|
||||
case 'responsive-image':
|
||||
return extractResponsiveImage($, strategy as any);
|
||||
case 'lazy-loaded':
|
||||
return extractLazyLoadedImage($, strategy as any);
|
||||
case 'chapters':
|
||||
return extractChapters($, strategy as any);
|
||||
case 'chapter-content':
|
||||
return extractChapterContent($, strategy as any);
|
||||
case 'multiple-types':
|
||||
return extractMultipleTypes($, strategy as any);
|
||||
case 'schema-org':
|
||||
return extractSchemaOrg($, strategy as any);
|
||||
case 'react-content':
|
||||
return extractReactContent($, strategy as any);
|
||||
default:
|
||||
throw new Error(`Unknown strategy: ${strategy.strategy}`);
|
||||
}
|
||||
}
|
||||
|
||||
private async extractContent(
|
||||
$: any,
|
||||
storyConfig: any,
|
||||
url: string,
|
||||
html: string
|
||||
): Promise<string> {
|
||||
let content = await this.extractField($, storyConfig.content, html);
|
||||
|
||||
if (storyConfig.multiPage?.enabled) {
|
||||
const additionalPages = await this.fetchAdditionalPages(
|
||||
$,
|
||||
url,
|
||||
storyConfig.multiPage
|
||||
);
|
||||
|
||||
for (const pageHtml of additionalPages) {
|
||||
const cheerioPage = await import('cheerio');
|
||||
const $page = cheerioPage.load(pageHtml);
|
||||
const pageContent = await this.extractField(
|
||||
$page,
|
||||
storyConfig.content,
|
||||
pageHtml
|
||||
);
|
||||
content += '\n\n' + pageContent;
|
||||
}
|
||||
}
|
||||
|
||||
return content;
|
||||
}
|
||||
|
||||
private async fetchAdditionalPages(
|
||||
$: any,
|
||||
baseUrl: string,
|
||||
config: MultiPageConfig
|
||||
): Promise<string[]> {
|
||||
const pages: string[] = [];
|
||||
let currentUrl = baseUrl;
|
||||
let pageNum = 2;
|
||||
|
||||
while (pageNum <= (config.maxPages || 20)) {
|
||||
let nextUrl: string | null = null;
|
||||
|
||||
if (config.strategy === 'url-pattern') {
|
||||
nextUrl = UrlParser.buildPageUrl(baseUrl, pageNum, config);
|
||||
} else if (config.nextPageSelector) {
|
||||
const nextLink = $(config.nextPageSelector).attr('href');
|
||||
if (nextLink) {
|
||||
nextUrl = UrlParser.normalizeUrl(nextLink, currentUrl);
|
||||
}
|
||||
}
|
||||
|
||||
if (!nextUrl) break;
|
||||
|
||||
try {
|
||||
await this.rateLimiter.throttle();
|
||||
const html = await this.fetchWithCache(nextUrl);
|
||||
pages.push(html);
|
||||
currentUrl = nextUrl;
|
||||
pageNum++;
|
||||
} catch (error) {
|
||||
console.error(`Failed to fetch page ${pageNum}:`, error);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return pages;
|
||||
}
|
||||
|
||||
private async fetchWithCache(url: string): Promise<string> {
|
||||
const cached = this.cache.get(url);
|
||||
if (cached) {
|
||||
return cached;
|
||||
}
|
||||
|
||||
await this.rateLimiter.throttle();
|
||||
|
||||
const response = await fetch(url, {
|
||||
headers: {
|
||||
'User-Agent': this.config.globalOptions.userAgent,
|
||||
},
|
||||
signal: AbortSignal.timeout(this.config.globalOptions.timeout)
|
||||
});
|
||||
|
||||
if (!response.ok) {
|
||||
throw new Error(`HTTP ${response.status}: ${response.statusText}`);
|
||||
}
|
||||
|
||||
const html = await response.text();
|
||||
this.cache.set(url, html);
|
||||
|
||||
return html;
|
||||
}
|
||||
|
||||
private applyTransforms(text: string, transform?: string): string {
|
||||
if (!transform) return text;
|
||||
|
||||
if (transform.startsWith('remove-suffix:')) {
|
||||
const suffix = transform.substring('remove-suffix:'.length).trim();
|
||||
return text.replace(new RegExp(`${suffix}$`, 'i'), '').trim();
|
||||
}
|
||||
|
||||
return text;
|
||||
}
|
||||
}
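A quick, hedged illustration of the `remove-suffix:` form handled by `applyTransforms` above; the title and suffix are invented for the example and do not come from the shipped sites.json:

```typescript
// Hypothetical titleTransform: "remove-suffix: - Chapter Index"
const suffix = '- Chapter Index';
const scrapedTitle = 'The Long Road Home - Chapter Index';

// Same logic as applyTransforms: strip the suffix case-insensitively, then trim.
const cleaned = scrapedTitle.replace(new RegExp(`${suffix}$`, 'i'), '').trim();
// cleaned === 'The Long Road Home'
```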
frontend/src/lib/scraper/strategies/contentCleaner.ts (new file, 164 lines)
@@ -0,0 +1,164 @@
|
||||
// Dynamic cheerio import used to avoid client-side bundling issues
|
||||
// Using any type for CheerioAPI to prevent bundling issues
|
||||
import {
|
||||
ChaptersStrategy,
|
||||
ChapterContentStrategy,
|
||||
MultipleTypesStrategy,
|
||||
SchemaOrgStrategy,
|
||||
ReactContentStrategy
|
||||
} from '../types';
|
||||
|
||||
export function extractChapters(
|
||||
$: any,
|
||||
config: ChaptersStrategy
|
||||
): string {
|
||||
// Check for multiple chapters first
|
||||
if (config.chaptersWrapper) {
|
||||
const chaptersWrapper = $(config.chaptersWrapper);
|
||||
if (chaptersWrapper.length > 0) {
|
||||
const chapters = chaptersWrapper.find(config.chapterSelector);
|
||||
if (chapters.length > 1) {
|
||||
// Multiple chapters - combine them
|
||||
let content = '';
|
||||
chapters.each((_: any, elem: any) => {
|
||||
content += $(elem).html() + '\n\n';
|
||||
});
|
||||
return content.trim();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Single chapter fallback
|
||||
if (config.singleChapter) {
|
||||
const singleChapter = $(config.singleChapter);
|
||||
if (singleChapter.length > 0) {
|
||||
return singleChapter.html() || '';
|
||||
}
|
||||
}
|
||||
|
||||
// Direct chapter selector fallback
|
||||
const chapter = $(config.chapterSelector).first();
|
||||
return chapter.html() || '';
|
||||
}
|
||||
|
||||
export function extractChapterContent(
|
||||
$: any,
|
||||
config: ChapterContentStrategy
|
||||
): string {
|
||||
const content = $(config.selector);
|
||||
|
||||
// Remove cleanup selectors
|
||||
if (config.cleanupSelectors) {
|
||||
config.cleanupSelectors.forEach(selector => {
|
||||
content.find(selector).remove();
|
||||
});
|
||||
}
|
||||
|
||||
return content.html() || '';
|
||||
}
|
||||
|
||||
export function extractMultipleTypes(
|
||||
$: any,
|
||||
config: MultipleTypesStrategy
|
||||
): string[] {
|
||||
const tags: string[] = [];
|
||||
|
||||
Object.entries(config.selectors).forEach(([type, selector]) => {
|
||||
$(selector).each((_: any, elem: any) => {
|
||||
const tag = $(elem).text().trim();
|
||||
if (tag) {
|
||||
tags.push(`${type}: ${tag}`);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return tags;
|
||||
}
|
||||
|
||||
export function extractSchemaOrg(
  $: any,
  config: SchemaOrgStrategy
): string {
  // Look for JSON-LD first. Returning from inside .each() only exits the
  // callback, so collect the match into a variable and break out instead.
  let schemaValue = '';
  $('script[type="application/ld+json"]').each((_: any, elem: any) => {
    try {
      const data = JSON.parse($(elem).html() || '');
      const item = Array.isArray(data)
        ? data.find((entry: any) => entry['@type'] === config.schemaType)
        : (data['@type'] === config.schemaType ? data : null);
      if (item && item[config.property]) {
        schemaValue = item[config.property];
        return false; // stop iterating once a value is found
      }
    } catch (e) {
      // Invalid JSON, continue
    }
  });

  if (schemaValue) {
    return schemaValue;
  }

  // Fallback to selector
  if (config.fallbackSelector) {
    return $(config.fallbackSelector).first().text().trim();
  }

  return '';
}
|
||||
|
||||
export function extractReactContent(
|
||||
$: any,
|
||||
config: ReactContentStrategy
|
||||
): string {
|
||||
// This is a simplified version - full React content extraction
|
||||
// would require JavaScript execution or API access
|
||||
|
||||
const contentElements = $(config.paragraphSelector);
|
||||
let content = '';
|
||||
|
||||
contentElements.each((_: any, elem: any) => {
|
||||
const $elem = $(elem);
|
||||
if ($elem.hasClass(config.contentClass)) {
|
||||
content += $elem.html() + '\n\n';
|
||||
}
|
||||
});
|
||||
|
||||
return content.trim();
|
||||
}
|
||||
|
||||
export async function cleanHtml(html: string): Promise<string> {
|
||||
// Basic HTML cleaning - remove scripts, styles, and dangerous elements
|
||||
const cheerio = await import('cheerio');
|
||||
const $ = cheerio.load(html, {
|
||||
// Preserve self-closing tags like <br>
|
||||
xmlMode: false,
|
||||
decodeEntities: false
|
||||
});
|
||||
|
||||
// Remove dangerous elements
|
||||
$('script, style, iframe, embed, object').remove();
|
||||
|
||||
// Remove empty paragraphs and divs (but preserve <br> tags)
|
||||
$('p:empty, div:empty').not(':has(br)').remove();
|
||||
|
||||
// Clean up excessive whitespace in text nodes only, preserve <br> tags
|
||||
$('*').each((_, elem) => {
|
||||
const $elem = $(elem);
|
||||
if (elem.type === 'text') {
|
||||
const text = $elem.text();
|
||||
if (text && text.trim() !== text) {
|
||||
$elem.replaceWith(text.trim());
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Return HTML with proper self-closing tag format
|
||||
return $.html() || '';
|
||||
}
|
||||
|
||||
export function extractAttribute(
|
||||
$: any,
|
||||
selector: string,
|
||||
attribute: string
|
||||
): string {
|
||||
const element = $(selector).first();
|
||||
return element.attr(attribute) || '';
|
||||
}
|
||||
frontend/src/lib/scraper/strategies/index.ts (new file, 3 lines)
@@ -0,0 +1,3 @@
export * from './textExtractor';
export * from './linkExtractor';
export * from './contentCleaner';
frontend/src/lib/scraper/strategies/linkExtractor.ts (new file, 98 lines)
@@ -0,0 +1,98 @@
|
||||
// Dynamic cheerio import used to avoid client-side bundling issues
|
||||
// Using any type for CheerioAPI to prevent bundling issues
|
||||
import {
|
||||
LinkWithPathStrategy,
|
||||
HrefPatternStrategy,
|
||||
FirstImageStrategy,
|
||||
ResponsiveImageStrategy,
|
||||
LazyLoadedStrategy
|
||||
} from '../types';
|
||||
|
||||
export function extractLinkWithPath(
|
||||
$: any,
|
||||
config: LinkWithPathStrategy
|
||||
): string {
|
||||
let searchScope = config.searchWithin ? $(config.searchWithin) : $('body');
|
||||
|
||||
const links = searchScope.find('a');
|
||||
|
||||
for (let i = 0; i < links.length; i++) {
|
||||
const link = links.eq(i);
|
||||
const href = link.attr('href');
|
||||
|
||||
if (href && href.includes(config.pathContains)) {
|
||||
return link.text().trim();
|
||||
}
|
||||
}
|
||||
|
||||
return '';
|
||||
}
|
||||
|
||||
export function extractHrefPattern(
|
||||
$: any,
|
||||
config: HrefPatternStrategy
|
||||
): string[] {
|
||||
let searchScope = config.searchWithin ? $(config.searchWithin) : $('body');
|
||||
|
||||
const pattern = new RegExp(config.pattern);
|
||||
const links: string[] = [];
|
||||
|
||||
searchScope.find('a').each((_: any, elem: any) => {
|
||||
const href = $(elem).attr('href');
|
||||
if (href && pattern.test(href)) {
|
||||
links.push(href);
|
||||
}
|
||||
});
|
||||
|
||||
return links;
|
||||
}
|
||||
|
||||
export function extractFirstImage(
|
||||
$: any,
|
||||
config: FirstImageStrategy
|
||||
): string {
|
||||
let searchScope = config.searchWithin ? $(config.searchWithin) : $('body');
|
||||
|
||||
const img = searchScope.find('img').first();
|
||||
return img.attr(config.attribute) || '';
|
||||
}
|
||||
|
||||
export function extractResponsiveImage(
|
||||
$: any,
|
||||
config: ResponsiveImageStrategy
|
||||
): string {
|
||||
const img = $(config.selector).first();
|
||||
|
||||
if (config.selectLargest && config.srcsetAttribute) {
|
||||
const srcset = img.attr(config.srcsetAttribute);
|
||||
if (srcset) {
|
||||
// Parse srcset and return the largest image
|
||||
const sources = srcset.split(',').map((src: string) => {
|
||||
const parts = src.trim().split(' ');
|
||||
const url = parts[0];
|
||||
const descriptor = parts[1] || '1x';
|
||||
const width = descriptor.includes('w') ?
|
||||
parseInt(descriptor.replace('w', '')) :
|
||||
descriptor.includes('x') ?
|
||||
parseInt(descriptor.replace('x', '')) * 100 : 100;
|
||||
return { url, width };
|
||||
});
|
||||
|
||||
const largest = sources.reduce((prev: any, current: any) =>
|
||||
prev.width > current.width ? prev : current
|
||||
);
|
||||
|
||||
return largest.url;
|
||||
}
|
||||
}
|
||||
|
||||
return img.attr('src') || '';
|
||||
}
|
||||
|
||||
export function extractLazyLoadedImage(
|
||||
$: any,
|
||||
config: LazyLoadedStrategy
|
||||
): string {
|
||||
const img = $(config.selector).first();
|
||||
return img.attr(config.attribute) || img.attr('src') || '';
|
||||
}
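To illustrate the srcset handling in `extractResponsiveImage` above, a small sketch with an invented srcset value, showing why the 1200w candidate wins when `selectLargest` is set:

```typescript
// Invented srcset: 'Nw' descriptors are treated as widths, 'Nx' as density * 100.
const srcset = '/img/cover-s.jpg 400w, /img/cover-m.jpg 800w, /img/cover-l.jpg 1200w';

const sources = srcset.split(',').map((src) => {
  const [url, descriptor = '1x'] = src.trim().split(' ');
  const width = descriptor.includes('w')
    ? parseInt(descriptor.replace('w', ''))
    : parseInt(descriptor.replace('x', '')) * 100;
  return { url, width };
});

const largest = sources.reduce((prev, cur) => (prev.width > cur.width ? prev : cur));
// largest.url === '/img/cover-l.jpg'
```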
frontend/src/lib/scraper/strategies/textExtractor.ts (new file, 151 lines)
@@ -0,0 +1,151 @@
|
||||
import * as cheerio from 'cheerio';
|
||||
import 'server-only';
|
||||
|
||||
// Note: unlike the other strategy modules, this file imports cheerio statically
// and uses its CheerioAPI types; the 'server-only' import above keeps it out of
// the client bundle.
|
||||
import {
|
||||
TextPatternStrategy,
|
||||
TextBlockStrategy,
|
||||
HtmlBetweenStrategy,
|
||||
LinkTextStrategy
|
||||
} from '../types';
|
||||
|
||||
export function extractByTextPattern(
|
||||
html: string,
|
||||
config: TextPatternStrategy
|
||||
): string {
|
||||
let searchContent = html;
|
||||
|
||||
// Limit search scope if specified
|
||||
if (config.searchAfter) {
|
||||
const afterIndex = html.indexOf(config.searchAfter);
|
||||
if (afterIndex !== -1) {
|
||||
searchContent = html.substring(afterIndex);
|
||||
}
|
||||
}
|
||||
|
||||
if (config.searchBefore) {
|
||||
const beforeIndex = searchContent.indexOf(config.searchBefore);
|
||||
if (beforeIndex !== -1) {
|
||||
searchContent = searchContent.substring(0, beforeIndex);
|
||||
}
|
||||
}
|
||||
|
||||
const regex = new RegExp(config.pattern, 'i');
|
||||
const match = searchContent.match(regex);
|
||||
return match ? match[config.group || 1].trim() : '';
|
||||
}
|
||||
|
||||
export function extractTextBlocks(
|
||||
$: cheerio.CheerioAPI,
|
||||
config: TextBlockStrategy
|
||||
): string {
|
||||
const blocks: Array<{element: any, text: string}> = [];
|
||||
|
||||
// Remove excluded elements first
|
||||
if (config.excludeSelectors) {
|
||||
config.excludeSelectors.forEach(selector => {
|
||||
$(selector).remove();
|
||||
});
|
||||
}
|
||||
|
||||
$('*').each((_, elem) => {
|
||||
const $elem = $(elem);
|
||||
const text = $elem.clone().children().remove().end().text().trim();
|
||||
|
||||
if (text.length >= (config.minLength || 500)) {
|
||||
blocks.push({ element: elem, text });
|
||||
}
|
||||
});
|
||||
|
||||
// Find the block that likely contains story content
|
||||
const storyBlock = blocks.find(block => {
|
||||
if (config.containerHints && config.containerHints.length > 0) {
|
||||
const hasHints = config.containerHints.some(hint =>
|
||||
$(block.element).attr('class')?.includes(hint) ||
|
||||
$(block.element).attr('id')?.includes(hint)
|
||||
);
|
||||
return hasHints;
|
||||
}
|
||||
return blocks.length === 1;
|
||||
});
|
||||
|
||||
if (storyBlock) {
|
||||
return $(storyBlock.element).html() || '';
|
||||
}
|
||||
|
||||
  // Fall back to the largest block; guard against an empty list first,
  // since reduce() on an empty array would throw.
  if (blocks.length === 0) {
    return '';
  }

  const largestBlock = blocks.reduce((prev, current) =>
    prev.text.length > current.text.length ? prev : current
  );

  return $(largestBlock.element).html() || '';
}
|
||||
|
||||
export function extractHtmlBetween(
|
||||
html: string,
|
||||
config: HtmlBetweenStrategy
|
||||
): string {
|
||||
const startIndex = html.indexOf(config.startMarker);
|
||||
if (startIndex === -1) return '';
|
||||
|
||||
const contentStart = config.includeStart ?
|
||||
startIndex :
|
||||
startIndex + config.startMarker.length;
|
||||
|
||||
const endIndex = html.indexOf(config.endMarker, contentStart);
|
||||
if (endIndex === -1) {
|
||||
return html.substring(contentStart);
|
||||
}
|
||||
|
||||
return html.substring(contentStart, endIndex).trim();
|
||||
}
|
||||
|
||||
export function extractLinkText(
|
||||
$: cheerio.CheerioAPI,
|
||||
config: LinkTextStrategy
|
||||
): string {
|
||||
let searchScope: cheerio.Cheerio<cheerio.AnyNode>;
|
||||
|
||||
if (config.searchWithin) {
|
||||
searchScope = $(config.searchWithin);
|
||||
} else {
|
||||
searchScope = $('body').length ? $('body') : $('*');
|
||||
}
|
||||
|
||||
// Look for links near the specified text patterns
|
||||
let foundText = '';
|
||||
|
||||
config.nearText.forEach(text => {
|
||||
if (foundText) return; // Already found
|
||||
|
||||
searchScope.find('*').each((_, elem) => {
|
||||
const $elem = $(elem);
|
||||
const elemText = $elem.text().toLowerCase();
|
||||
|
||||
if (elemText.includes(text.toLowerCase())) {
|
||||
// Look for nearby links
|
||||
const $link = $elem.find('a').first();
|
||||
if ($link.length) {
|
||||
foundText = $link.text().trim();
|
||||
return false; // Break out of each
|
||||
}
|
||||
|
||||
// Check if the element itself is a link
|
||||
if ($elem.is('a')) {
|
||||
foundText = $elem.text().trim();
|
||||
return false;
|
||||
}
|
||||
|
||||
// Look for links in the next few siblings
|
||||
const $siblings = $elem.nextAll().slice(0, 3);
|
||||
$siblings.find('a').first().each((_, link) => {
|
||||
foundText = $(link).text().trim();
|
||||
return false;
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return foundText;
|
||||
}
|
||||
frontend/src/lib/scraper/types.ts (new file, 248 lines)
@@ -0,0 +1,248 @@
|
||||
export interface SiteConfig {
|
||||
story: StorySelectors;
|
||||
authorPage: AuthorPageSelectors;
|
||||
}
|
||||
|
||||
export interface StorySelectors {
|
||||
title: string | SelectorStrategy;
|
||||
author: string | SelectorStrategy;
|
||||
content: string | SelectorStrategy;
|
||||
summary?: string | SelectorStrategy;
|
||||
coverImage?: string | SelectorStrategy;
|
||||
tags?: string | SelectorStrategy;
|
||||
multiPage?: MultiPageConfig;
|
||||
titleFallback?: string;
|
||||
titleFallbackAttribute?: string;
|
||||
titleTransform?: string;
|
||||
summaryAttribute?: string;
|
||||
coverImageAttribute?: string;
|
||||
tagsAttribute?: string;
|
||||
}
|
||||
|
||||
export interface AuthorPageSelectors {
|
||||
storyLinks: string | SelectorStrategy;
|
||||
pagination?: PaginationConfig;
|
||||
linkPrefix?: string;
|
||||
filterStrategy?: string;
|
||||
requiresChildElement?: string;
|
||||
requiresNavigation?: NavigationConfig;
|
||||
metadata?: MetadataConfig;
|
||||
additionalInfo?: AdditionalInfoConfig;
|
||||
}
|
||||
|
||||
export interface SelectorStrategy {
|
||||
strategy: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
export interface MultiPageConfig {
|
||||
enabled: boolean;
|
||||
strategy: 'url-pattern' | 'next-link' | 'chapter-navigation' | 'chapter-dropdown' | 'table-of-contents' | 'api-based';
|
||||
nextPageSelector?: string;
|
||||
pageParam?: string;
|
||||
maxPages?: number;
|
||||
chapterListSelector?: string;
|
||||
chapterSelector?: string;
|
||||
urlPattern?: string;
|
||||
tocSelector?: string;
|
||||
requiresAuth?: boolean;
|
||||
apiPattern?: string;
|
||||
tocApiPattern?: string;
|
||||
}
|
||||
|
||||
export interface PaginationConfig {
|
||||
enabled: boolean;
|
||||
nextPageSelector: string;
|
||||
}
|
||||
|
||||
export interface NavigationConfig {
|
||||
enabled: boolean;
|
||||
clickText: string;
|
||||
waitMs: number;
|
||||
}
|
||||
|
||||
export interface MetadataConfig {
|
||||
strategy: string;
|
||||
metadataSelector: string;
|
||||
parsePattern: string;
|
||||
}
|
||||
|
||||
export interface AdditionalInfoConfig {
|
||||
strategy: string;
|
||||
statsSelector: string;
|
||||
extractStats: string[];
|
||||
}
|
||||
|
||||
export interface ScrapedStory {
|
||||
title: string;
|
||||
author: string;
|
||||
content: string;
|
||||
summary?: string;
|
||||
coverImage?: string;
|
||||
tags?: string[];
|
||||
sourceUrl: string;
|
||||
}
|
||||
|
||||
export interface ScrapedAuthorStory {
|
||||
url: string;
|
||||
title: string;
|
||||
author: string;
|
||||
summary?: string;
|
||||
}
|
||||
|
||||
export interface SitesConfig {
|
||||
sites: Record<string, SiteConfig>;
|
||||
strategies: Record<string, StrategyDescription>;
|
||||
globalOptions: GlobalOptions;
|
||||
siteNotes?: Record<string, SiteNotes>;
|
||||
}
|
||||
|
||||
export interface StrategyDescription {
|
||||
description: string;
|
||||
implementation: string;
|
||||
}
|
||||
|
||||
export interface GlobalOptions {
|
||||
userAgent: string;
|
||||
timeout: number;
|
||||
retryAttempts: number;
|
||||
rateLimitMs: number;
|
||||
cacheDuration?: number;
|
||||
javascriptTimeout?: number;
|
||||
}
|
||||
|
||||
export interface SiteNotes {
|
||||
warning?: string;
|
||||
note?: string;
|
||||
rateLimit?: string;
|
||||
requiresAuth?: string;
|
||||
}
|
||||
|
||||
// Strategy-specific interfaces
|
||||
export interface TextPatternStrategy extends SelectorStrategy {
|
||||
strategy: 'text-pattern';
|
||||
pattern: string;
|
||||
group?: number;
|
||||
searchAfter?: string;
|
||||
searchBefore?: string;
|
||||
}
|
||||
|
||||
export interface LinkWithPathStrategy extends SelectorStrategy {
|
||||
strategy: 'link-with-path';
|
||||
pathContains: string;
|
||||
searchWithin?: string;
|
||||
}
|
||||
|
||||
export interface TextBlockStrategy extends SelectorStrategy {
|
||||
strategy: 'text-blocks';
|
||||
minLength?: number;
|
||||
containerHints?: string[];
|
||||
excludeSelectors?: string[];
|
||||
}
|
||||
|
||||
export interface HrefPatternStrategy extends SelectorStrategy {
|
||||
strategy: 'href-pattern';
|
||||
pattern: string;
|
||||
searchWithin?: string;
|
||||
}
|
||||
|
||||
export interface HtmlBetweenStrategy extends SelectorStrategy {
|
||||
strategy: 'html-between';
|
||||
startMarker: string;
|
||||
endMarker: string;
|
||||
includeStart?: boolean;
|
||||
}
|
||||
|
||||
export interface ChaptersStrategy extends SelectorStrategy {
|
||||
strategy: 'chapters';
|
||||
chapterSelector: string;
|
||||
chaptersWrapper?: string;
|
||||
singleChapter?: string;
|
||||
}
|
||||
|
||||
export interface MultipleTypesStrategy extends SelectorStrategy {
|
||||
strategy: 'multiple-types';
|
||||
selectors: Record<string, string>;
|
||||
}
|
||||
|
||||
export interface LinkTextStrategy extends SelectorStrategy {
|
||||
strategy: 'link-text';
|
||||
nearText: string[];
|
||||
searchWithin?: string;
|
||||
}
|
||||
|
||||
export interface FirstImageStrategy extends SelectorStrategy {
|
||||
strategy: 'first-image';
|
||||
searchWithin: string;
|
||||
attribute: string;
|
||||
}
|
||||
|
||||
export interface SchemaOrgStrategy extends SelectorStrategy {
|
||||
strategy: 'schema-org';
|
||||
schemaType: string;
|
||||
property: string;
|
||||
fallbackSelector?: string;
|
||||
}
|
||||
|
||||
export interface ReactContentStrategy extends SelectorStrategy {
|
||||
strategy: 'react-content';
|
||||
contentClass: string;
|
||||
paragraphSelector: string;
|
||||
requiresJavaScript: boolean;
|
||||
}
|
||||
|
||||
export interface ResponsiveImageStrategy extends SelectorStrategy {
|
||||
strategy: 'responsive-image';
|
||||
selector: string;
|
||||
srcsetAttribute: string;
|
||||
selectLargest: boolean;
|
||||
}
|
||||
|
||||
export interface LazyLoadedStrategy extends SelectorStrategy {
|
||||
strategy: 'lazy-loaded';
|
||||
selector: string;
|
||||
attribute: string;
|
||||
}
|
||||
|
||||
export interface ChapterContentStrategy extends SelectorStrategy {
|
||||
strategy: 'chapter-content';
|
||||
selector: string;
|
||||
cleanupSelectors?: string[];
|
||||
}
|
||||
|
||||
export interface DataAttributesStrategy extends SelectorStrategy {
|
||||
strategy: 'data-attributes';
|
||||
statsSelector: string;
|
||||
extractStats: string[];
|
||||
}
|
||||
|
||||
export interface SiblingTextStrategy extends SelectorStrategy {
|
||||
strategy: 'sibling-text';
|
||||
metadataSelector: string;
|
||||
parsePattern: string;
|
||||
}
|
||||
|
||||
export interface ApiBasedStrategy extends SelectorStrategy {
|
||||
strategy: 'api-based';
|
||||
apiPattern: string;
|
||||
tocApiPattern?: string;
|
||||
requiresAuth: boolean;
|
||||
}
|
||||
|
||||
export interface InfiniteScrollStrategy extends SelectorStrategy {
|
||||
strategy: 'infinite-scroll';
|
||||
initialSelector: string;
|
||||
apiEndpoint: string;
|
||||
requiresJavaScript: boolean;
|
||||
}
|
||||
|
||||
export class ScraperError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public url: string,
|
||||
public originalError?: Error
|
||||
) {
|
||||
super(message);
|
||||
this.name = 'ScraperError';
|
||||
}
|
||||
}
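To make the multi-page options above concrete, a minimal sketch of a hypothetical configuration object; the values are illustrative, not taken from the shipped sites.json:

```typescript
import { MultiPageConfig } from './types';

// Invented example: follow ?page=2, ?page=3, ... up to 10 pages.
const multiPage: MultiPageConfig = {
  enabled: true,
  strategy: 'url-pattern',
  pageParam: 'page',
  maxPages: 10,
};
```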
frontend/src/lib/scraper/utils/cache.ts (new file, 35 lines)
@@ -0,0 +1,35 @@
|
||||
export class ScraperCache {
|
||||
private cache: Map<string, { data: any; timestamp: number }> = new Map();
|
||||
private ttl: number;
|
||||
|
||||
constructor(ttlMs: number = 300000) { // 5 minutes default
|
||||
this.ttl = ttlMs;
|
||||
}
|
||||
|
||||
get(key: string): any | null {
|
||||
const entry = this.cache.get(key);
|
||||
if (!entry) return null;
|
||||
|
||||
if (Date.now() - entry.timestamp > this.ttl) {
|
||||
this.cache.delete(key);
|
||||
return null;
|
||||
}
|
||||
|
||||
return entry.data;
|
||||
}
|
||||
|
||||
set(key: string, data: any): void {
|
||||
this.cache.set(key, {
|
||||
data,
|
||||
timestamp: Date.now()
|
||||
});
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.cache.clear();
|
||||
}
|
||||
|
||||
size(): number {
|
||||
return this.cache.size;
|
||||
}
|
||||
}
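A minimal usage sketch of the cache above; the URLs and TTL are placeholders:

```typescript
import { ScraperCache } from './cache';

const cache = new ScraperCache(60_000); // 1-minute TTL for this example

cache.set('https://example.com/story/1', '<html>...</html>');
cache.get('https://example.com/story/1'); // returns the HTML until the TTL expires
cache.get('https://example.com/story/2'); // null: never cached
```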
frontend/src/lib/scraper/utils/rateLimit.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
|
||||
export class RateLimiter {
|
||||
private lastRequest: number = 0;
|
||||
private minDelay: number;
|
||||
|
||||
constructor(minDelayMs: number = 1000) {
|
||||
this.minDelay = minDelayMs;
|
||||
}
|
||||
|
||||
async throttle(): Promise<void> {
|
||||
const now = Date.now();
|
||||
const timeSinceLastRequest = now - this.lastRequest;
|
||||
|
||||
if (timeSinceLastRequest < this.minDelay) {
|
||||
await this.delay(this.minDelay - timeSinceLastRequest);
|
||||
}
|
||||
|
||||
this.lastRequest = Date.now();
|
||||
}
|
||||
|
||||
private delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
}
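A short sketch of how the limiter above spaces out sequential fetches; the URLs are placeholders:

```typescript
import { RateLimiter } from './rateLimit';

const limiter = new RateLimiter(1000); // at most one request per second

async function fetchSequentially(urls: string[]): Promise<string[]> {
  const bodies: string[] = [];
  for (const url of urls) {
    await limiter.throttle(); // waits if the previous request was less than 1s ago
    const res = await fetch(url);
    bodies.push(await res.text());
  }
  return bodies;
}
```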
frontend/src/lib/scraper/utils/urlParser.ts (new file, 61 lines)
@@ -0,0 +1,61 @@
|
||||
export class UrlParser {
|
||||
static getDomain(url: string): string {
|
||||
try {
|
||||
const urlObj = new URL(url);
|
||||
return urlObj.hostname.replace(/^www\./, '');
|
||||
} catch (error) {
|
||||
throw new Error(`Invalid URL: ${url}`);
|
||||
}
|
||||
}
|
||||
|
||||
static validateUrl(url: string): boolean {
|
||||
try {
|
||||
const urlObj = new URL(url);
|
||||
return urlObj.protocol === 'http:' || urlObj.protocol === 'https:';
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
static buildPageUrl(baseUrl: string, pageNum: number, config: any): string {
|
||||
try {
|
||||
const urlObj = new URL(baseUrl);
|
||||
if (config.pageParam) {
|
||||
urlObj.searchParams.set(config.pageParam, pageNum.toString());
|
||||
} else if (config.urlPattern) {
|
||||
// Replace {page} or similar patterns in URL
|
||||
return config.urlPattern.replace(/\{page\}/g, pageNum.toString());
|
||||
}
|
||||
return urlObj.toString();
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to build page URL: ${error}`);
|
||||
}
|
||||
}
|
||||
|
||||
static normalizeUrl(url: string, baseUrl?: string): string {
|
||||
try {
|
||||
if (url.startsWith('http://') || url.startsWith('https://')) {
|
||||
return url;
|
||||
}
|
||||
|
||||
if (baseUrl) {
|
||||
return new URL(url, baseUrl).toString();
|
||||
}
|
||||
|
||||
return url;
|
||||
} catch (error) {
|
||||
throw new Error(`Failed to normalize URL: ${url}`);
|
||||
}
|
||||
}
|
||||
|
||||
static extractDomainConfig(url: string, sitesConfig: any): any {
|
||||
const domain = this.getDomain(url);
|
||||
const config = sitesConfig.sites[domain];
|
||||
|
||||
if (!config) {
|
||||
throw new Error(`Unsupported site: ${domain}`);
|
||||
}
|
||||
|
||||
return config;
|
||||
}
|
||||
}
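Expected behaviour of the URL helpers above, sketched with made-up URLs:

```typescript
import { UrlParser } from './urlParser';

UrlParser.getDomain('https://www.example.com/story/123');       // 'example.com'
UrlParser.validateUrl('ftp://example.com/file');                 // false: only http/https pass
UrlParser.normalizeUrl('/story/456', 'https://example.com/a');   // 'https://example.com/story/456'

// With a pageParam-style config, buildPageUrl sets the query parameter:
UrlParser.buildPageUrl('https://example.com/story/123', 2, { pageParam: 'page' });
// 'https://example.com/story/123?page=2'
```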
frontend/src/lib/settings.ts (new file, 33 lines)
@@ -0,0 +1,33 @@
|
||||
interface Settings {
|
||||
theme: 'light' | 'dark';
|
||||
fontFamily: 'serif' | 'sans' | 'mono';
|
||||
fontSize: 'small' | 'medium' | 'large' | 'extra-large';
|
||||
readingWidth: 'narrow' | 'medium' | 'wide';
|
||||
readingSpeed: number; // words per minute
|
||||
}
|
||||
|
||||
const defaultSettings: Settings = {
|
||||
theme: 'light',
|
||||
fontFamily: 'serif',
|
||||
fontSize: 'medium',
|
||||
readingWidth: 'medium',
|
||||
readingSpeed: 200,
|
||||
};
|
||||
|
||||
export const getReadingSpeed = (): number => {
|
||||
try {
|
||||
const savedSettings = localStorage.getItem('storycove-settings');
|
||||
if (savedSettings) {
|
||||
const parsed = JSON.parse(savedSettings);
|
||||
return parsed.readingSpeed || defaultSettings.readingSpeed;
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Failed to parse saved settings:', error);
|
||||
}
|
||||
return defaultSettings.readingSpeed;
|
||||
};
|
||||
|
||||
export const calculateReadingTime = (wordCount: number): number => {
|
||||
const wordsPerMinute = getReadingSpeed();
|
||||
return Math.max(1, Math.round(wordCount / wordsPerMinute));
|
||||
};
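With the default of 200 words per minute, the reading-time helper above rounds to whole minutes and never reports less than one; the import path is illustrative:

```typescript
import { calculateReadingTime } from '@/lib/settings';

calculateReadingTime(5000); // 25 minutes (5000 / 200)
calculateReadingTime(90);   // 1 minute (clamped to the minimum)
```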
@@ -14,6 +14,7 @@ export interface Story {
|
||||
rating?: number;
|
||||
coverPath?: string;
|
||||
tags: Tag[];
|
||||
tagNames?: string[] | null; // Used in search results
|
||||
createdAt: string;
|
||||
updatedAt: string;
|
||||
}
|
||||
@@ -41,6 +42,7 @@ export interface Tag {
|
||||
id: string;
|
||||
name: string;
|
||||
storyCount?: number;
|
||||
collectionCount?: number;
|
||||
createdAt?: string;
|
||||
updatedAt?: string;
|
||||
}
|
||||
@@ -56,6 +58,11 @@ export interface AuthResponse {
|
||||
expiresIn: number;
|
||||
}
|
||||
|
||||
export interface FacetCount {
|
||||
value: string;
|
||||
count: number;
|
||||
}
|
||||
|
||||
export interface SearchResult {
|
||||
results: Story[];
|
||||
totalHits: number;
|
||||
@@ -63,6 +70,7 @@ export interface SearchResult {
|
||||
perPage: number;
|
||||
query: string;
|
||||
searchTimeMs: number;
|
||||
facets?: Record<string, FacetCount[]>;
|
||||
}
|
||||
|
||||
export interface PagedResult<T> {
|
||||
@@ -85,6 +93,7 @@ export interface Collection {
|
||||
coverImagePath?: string;
|
||||
isArchived: boolean;
|
||||
tags: Tag[];
|
||||
tagNames?: string[] | null; // Used in search results
|
||||
collectionStories?: CollectionStory[];
|
||||
stories?: CollectionStory[]; // For compatibility
|
||||
storyCount: number;
|
||||
|
||||
File diff suppressed because one or more lines are too long
nginx.conf (18 lines)
@@ -28,7 +28,23 @@ http {
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
}
|
||||
|
||||
# Backend API routes
|
||||
# Scraping routes - completely separate from /api/ to avoid conflicts
|
||||
location /scrape/ {
|
||||
proxy_pass http://frontend/scrape/;
|
||||
proxy_http_version 1.1;
|
||||
proxy_set_header Upgrade $http_upgrade;
|
||||
proxy_set_header Connection 'upgrade';
|
||||
proxy_set_header Host $host;
|
||||
proxy_set_header X-Real-IP $remote_addr;
|
||||
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
|
||||
proxy_set_header X-Forwarded-Proto $scheme;
|
||||
proxy_cache_bypass $http_upgrade;
|
||||
proxy_connect_timeout 60s;
|
||||
proxy_send_timeout 60s;
|
||||
proxy_read_timeout 60s;
|
||||
}
|
||||
|
||||
# Backend API routes (fallback for all other /api/ routes)
|
||||
location /api/ {
|
||||
proxy_pass http://backend/api/;
|
||||
proxy_set_header Host $host;
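The `/scrape/` location above routes scraping requests to the Next.js frontend (which runs the scraper server-side) rather than the Java backend. A hedged sketch of a browser-side call through this proxy; the exact route path depends on how the frontend exposes its scrape handler:

```typescript
// Assumes the frontend serves a POST handler reachable as /scrape/story behind nginx.
async function importFromUrl(storyUrl: string) {
  const res = await fetch('/scrape/story', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ url: storyUrl }),
  });
  if (!res.ok) {
    throw new Error(`Scrape request failed: ${res.status}`);
  }
  return res.json(); // ScrapedStory-shaped payload: title, author, content, ...
}
```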
package-lock.json (generated, 304 lines)
@@ -2,5 +2,307 @@
|
||||
"name": "StoryCove",
|
||||
"lockfileVersion": 3,
|
||||
"requires": true,
|
||||
"packages": {}
|
||||
"packages": {
|
||||
"": {
|
||||
"dependencies": {
|
||||
"cheerio": "^1.1.2"
|
||||
}
|
||||
},
|
||||
"node_modules/boolbase": {
|
||||
"version": "1.0.0",
|
||||
"resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz",
|
||||
"integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==",
|
||||
"license": "ISC"
|
||||
},
|
||||
"node_modules/cheerio": {
|
||||
"version": "1.1.2",
|
||||
"resolved": "https://registry.npmjs.org/cheerio/-/cheerio-1.1.2.tgz",
|
||||
"integrity": "sha512-IkxPpb5rS/d1IiLbHMgfPuS0FgiWTtFIm/Nj+2woXDLTZ7fOT2eqzgYbdMlLweqlHbsZjxEChoVK+7iph7jyQg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"cheerio-select": "^2.1.0",
|
||||
"dom-serializer": "^2.0.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.2.2",
|
||||
"encoding-sniffer": "^0.2.1",
|
||||
"htmlparser2": "^10.0.0",
|
||||
"parse5": "^7.3.0",
|
||||
"parse5-htmlparser2-tree-adapter": "^7.1.0",
|
||||
"parse5-parser-stream": "^7.1.2",
|
||||
"undici": "^7.12.0",
|
||||
"whatwg-mimetype": "^4.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=20.18.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/cheeriojs/cheerio?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/cheerio-select": {
|
||||
"version": "2.1.0",
|
||||
"resolved": "https://registry.npmjs.org/cheerio-select/-/cheerio-select-2.1.0.tgz",
|
||||
"integrity": "sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"boolbase": "^1.0.0",
|
||||
"css-select": "^5.1.0",
|
||||
"css-what": "^6.1.0",
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.0.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
},
|
||||
"node_modules/css-select": {
|
||||
"version": "5.2.2",
|
||||
"resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz",
|
||||
"integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"boolbase": "^1.0.0",
|
||||
"css-what": "^6.1.0",
|
||||
"domhandler": "^5.0.2",
|
||||
"domutils": "^3.0.1",
|
||||
"nth-check": "^2.0.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
},
|
||||
"node_modules/css-what": {
|
||||
"version": "6.2.2",
|
||||
"resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz",
|
||||
"integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">= 6"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
},
|
||||
"node_modules/dom-serializer": {
|
||||
"version": "2.0.0",
|
||||
"resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-2.0.0.tgz",
|
||||
"integrity": "sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.2",
|
||||
"entities": "^4.2.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/cheeriojs/dom-serializer?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/domelementtype": {
|
||||
"version": "2.3.0",
|
||||
"resolved": "https://registry.npmjs.org/domelementtype/-/domelementtype-2.3.0.tgz",
|
||||
"integrity": "sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==",
|
||||
"funding": [
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
],
|
||||
"license": "BSD-2-Clause"
|
||||
},
|
||||
"node_modules/domhandler": {
|
||||
"version": "5.0.3",
|
||||
"resolved": "https://registry.npmjs.org/domhandler/-/domhandler-5.0.3.tgz",
|
||||
"integrity": "sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">= 4"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/domhandler?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/domutils": {
|
||||
"version": "3.2.2",
|
||||
"resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz",
|
||||
"integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"dom-serializer": "^2.0.0",
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/domutils?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/encoding-sniffer": {
|
||||
"version": "0.2.1",
|
||||
"resolved": "https://registry.npmjs.org/encoding-sniffer/-/encoding-sniffer-0.2.1.tgz",
|
||||
"integrity": "sha512-5gvq20T6vfpekVtqrYQsSCFZ1wEg5+wW0/QaZMWkFr6BqD3NfKs0rLCx4rrVlSWJeZb5NBJgVLswK/w2MWU+Gw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"iconv-lite": "^0.6.3",
|
||||
"whatwg-encoding": "^3.1.1"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/encoding-sniffer?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/entities": {
|
||||
"version": "4.5.0",
|
||||
"resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz",
|
||||
"integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/htmlparser2": {
|
||||
"version": "10.0.0",
|
||||
"resolved": "https://registry.npmjs.org/htmlparser2/-/htmlparser2-10.0.0.tgz",
|
||||
"integrity": "sha512-TwAZM+zE5Tq3lrEHvOlvwgj1XLWQCtaaibSN11Q+gGBAS7Y1uZSWwXXRe4iF6OXnaq1riyQAPFOBtYc77Mxq0g==",
|
||||
"funding": [
|
||||
"https://github.com/fb55/htmlparser2?sponsor=1",
|
||||
{
|
||||
"type": "github",
|
||||
"url": "https://github.com/sponsors/fb55"
|
||||
}
|
||||
],
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"domelementtype": "^2.3.0",
|
||||
"domhandler": "^5.0.3",
|
||||
"domutils": "^3.2.1",
|
||||
"entities": "^6.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/htmlparser2/node_modules/entities": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
|
||||
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/iconv-lite": {
|
||||
"version": "0.6.3",
|
||||
"resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz",
|
||||
"integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"safer-buffer": ">= 2.1.2 < 3.0.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=0.10.0"
|
||||
}
|
||||
},
|
||||
"node_modules/nth-check": {
|
||||
"version": "2.1.1",
|
||||
"resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz",
|
||||
"integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==",
|
||||
"license": "BSD-2-Clause",
|
||||
"dependencies": {
|
||||
"boolbase": "^1.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/nth-check?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5": {
|
||||
"version": "7.3.0",
|
||||
"resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz",
|
||||
"integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"entities": "^6.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/inikulin/parse5?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5-htmlparser2-tree-adapter": {
|
||||
"version": "7.1.0",
|
||||
"resolved": "https://registry.npmjs.org/parse5-htmlparser2-tree-adapter/-/parse5-htmlparser2-tree-adapter-7.1.0.tgz",
|
||||
"integrity": "sha512-ruw5xyKs6lrpo9x9rCZqZZnIUntICjQAd0Wsmp396Ul9lN/h+ifgVV1x1gZHi8euej6wTfpqX8j+BFQxF0NS/g==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"domhandler": "^5.0.3",
|
||||
"parse5": "^7.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/inikulin/parse5?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5-parser-stream": {
|
||||
"version": "7.1.2",
|
||||
"resolved": "https://registry.npmjs.org/parse5-parser-stream/-/parse5-parser-stream-7.1.2.tgz",
|
||||
"integrity": "sha512-JyeQc9iwFLn5TbvvqACIF/VXG6abODeB3Fwmv/TGdLk2LfbWkaySGY72at4+Ty7EkPZj854u4CrICqNk2qIbow==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"parse5": "^7.0.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/inikulin/parse5?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/parse5/node_modules/entities": {
|
||||
"version": "6.0.1",
|
||||
"resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz",
|
||||
"integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==",
|
||||
"license": "BSD-2-Clause",
|
||||
"engines": {
|
||||
"node": ">=0.12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/fb55/entities?sponsor=1"
|
||||
}
|
||||
},
|
||||
"node_modules/safer-buffer": {
|
||||
"version": "2.1.2",
|
||||
"resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz",
|
||||
"integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==",
|
||||
"license": "MIT"
|
||||
},
|
||||
"node_modules/undici": {
|
||||
"version": "7.12.0",
|
||||
"resolved": "https://registry.npmjs.org/undici/-/undici-7.12.0.tgz",
|
||||
"integrity": "sha512-GrKEsc3ughskmGA9jevVlIOPMiiAHJ4OFUtaAH+NhfTUSiZ1wMPIQqQvAJUrJspFXJt3EBWgpAeoHEDVT1IBug==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=20.18.1"
|
||||
}
|
||||
},
|
||||
"node_modules/whatwg-encoding": {
|
||||
"version": "3.1.1",
|
||||
"resolved": "https://registry.npmjs.org/whatwg-encoding/-/whatwg-encoding-3.1.1.tgz",
|
||||
"integrity": "sha512-6qN4hJdMwfYBtE3YBTTHhoeuUrDBPZmbQaxWAqSALV/MeEnR5z1xd8UKud2RAkFoPkmB+hli1TZSnyi84xz1vQ==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"iconv-lite": "0.6.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
},
|
||||
"node_modules/whatwg-mimetype": {
|
||||
"version": "4.0.0",
|
||||
"resolved": "https://registry.npmjs.org/whatwg-mimetype/-/whatwg-mimetype-4.0.0.tgz",
|
||||
"integrity": "sha512-QaKxh0eNIi2mE9p2vEdzfagOKHCcj1pJ56EEHGQOVxp8r9/iszLUUV7v89x9O1p/T+NlTM5W7jW6+cz4Fq1YVg==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=18"
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
storycove-scraper-spec.md (new file, 474 lines)
@@ -0,0 +1,474 @@
|
||||
# StoryCove Web Scraper Feature Specification
|
||||
|
||||
## Overview
|
||||
|
||||
The Web Scraper feature allows users to import stories from external websites into StoryCove by providing URLs. The scraper extracts story metadata and content using configurable selectors defined in a JSON configuration file.
|
||||
|
||||
## Feature Requirements
|
||||
|
||||
### Core Functionality
|
||||
|
||||
1. **Single Story Import**: Users can provide a story URL and the scraper will extract:
|
||||
- Title (required)
|
||||
- Author (required)
|
||||
- Content (required)
|
||||
- Summary (optional)
|
||||
- Cover Image (optional)
|
||||
- Tags (optional)
|
||||
|
||||
2. **Author Page Scanning**: Users can provide an author page URL to:
|
||||
- Discover all stories by that author
|
||||
- Present a selectable list of stories
|
||||
- Allow bulk import of selected stories
|
||||
|
||||
3. **Multi-page Story Support**: Handle stories split across multiple pages by:
|
||||
- Detecting pagination
|
||||
- Fetching all pages
|
||||
- Merging content in correct order
|
||||
|
||||
### User Interface Flow
|
||||
|
||||
1. **Add Story View Enhancement**:
|
||||
```
|
||||
[Manual Entry] | [Import from URL]
|
||||
|
||||
When "Import from URL" selected:
|
||||
- URL input field
|
||||
- "Fetch" button
|
||||
- Loading indicator during fetch
|
||||
- Pre-filled form with scraped data
|
||||
- Ability to edit before saving
|
||||
```
|
||||
|
||||
2. **Bulk Import View** (future enhancement):
|
||||
```
|
||||
- URL input for author page
|
||||
- "Scan for Stories" button
|
||||
- Checkbox list of discovered stories
|
||||
- "Import Selected" button
|
||||
- Progress indicator
|
||||
```
|
||||
|
||||
## Technical Implementation
|
||||
|
||||
### Architecture
|
||||
|
||||
```
|
||||
/lib/scraper/
|
||||
├── config/
|
||||
│ └── sites.json # Site configurations
|
||||
├── scraper.ts # Main scraper class
|
||||
├── strategies/ # Strategy implementations
|
||||
│ ├── index.ts
|
||||
│ ├── textExtractor.ts
|
||||
│ ├── linkExtractor.ts
|
||||
│ └── contentCleaner.ts
|
||||
├── utils/
|
||||
│ ├── rateLimit.ts
|
||||
│ ├── cache.ts
|
||||
│ └── urlParser.ts
|
||||
└── types.ts # TypeScript definitions
|
||||
```
|
||||
|
||||
### API Routes
|
||||
|
||||
```typescript
|
||||
// /app/api/scrape/story/route.ts
|
||||
POST /api/scrape/story
|
||||
Body: { url: string }
|
||||
Response: {
|
||||
title: string,
|
||||
author: string,
|
||||
content: string,
|
||||
summary?: string,
|
||||
coverImage?: string,
|
||||
tags?: string[]
|
||||
}
|
||||
|
||||
// /app/api/scrape/author/route.ts
|
||||
POST /api/scrape/author
|
||||
Body: { url: string }
|
||||
Response: {
|
||||
stories: Array<{
|
||||
url: string,
|
||||
title: string,
|
||||
author: string,
|
||||
summary?: string
|
||||
}>
|
||||
}
|
||||
```
|
||||
|
||||
### Core Classes
|
||||
|
||||
```typescript
|
||||
// /lib/scraper/types.ts
|
||||
interface SiteConfig {
|
||||
story: StorySelectors;
|
||||
authorPage: AuthorPageSelectors;
|
||||
}
|
||||
|
||||
interface StorySelectors {
|
||||
title: string | SelectorStrategy;
|
||||
author: string | SelectorStrategy;
|
||||
content: string | SelectorStrategy;
|
||||
summary?: string | SelectorStrategy;
|
||||
coverImage?: string | SelectorStrategy;
|
||||
tags?: string | SelectorStrategy;
|
||||
multiPage?: MultiPageConfig;
|
||||
}
|
||||
|
||||
interface SelectorStrategy {
|
||||
strategy: string;
|
||||
[key: string]: any;
|
||||
}
|
||||
|
||||
interface ScrapedStory {
|
||||
title: string;
|
||||
author: string;
|
||||
content: string;
|
||||
summary?: string;
|
||||
coverImage?: string;
|
||||
tags?: string[];
|
||||
sourceUrl: string;
|
||||
}
|
||||
```
|
||||
|
||||
### Main Scraper Implementation
|
||||
|
||||
```typescript
|
||||
// /lib/scraper/scraper.ts
|
||||
import * as cheerio from 'cheerio';
|
||||
import { SiteConfig, ScrapedStory } from './types';
|
||||
import sitesConfig from './config/sites.json';
|
||||
|
||||
export class StoryScraper {
|
||||
private config: Record<string, SiteConfig>;
|
||||
private cache: Map<string, any>;
|
||||
|
||||
constructor() {
|
||||
this.config = sitesConfig.sites;
|
||||
this.cache = new Map();
|
||||
}
|
||||
|
||||
async scrapeStory(url: string): Promise<ScrapedStory> {
|
||||
const domain = this.getDomain(url);
|
||||
const siteConfig = this.config[domain];
|
||||
|
||||
if (!siteConfig) {
|
||||
throw new Error(`Unsupported site: ${domain}`);
|
||||
}
|
||||
|
||||
const html = await this.fetchWithCache(url);
|
||||
const $ = cheerio.load(html);
|
||||
|
||||
const story: ScrapedStory = {
|
||||
title: await this.extractField($, siteConfig.story.title, html),
|
||||
author: await this.extractField($, siteConfig.story.author, html),
|
||||
content: await this.extractContent($, siteConfig.story, url),
|
||||
sourceUrl: url
|
||||
};
|
||||
|
||||
// Extract optional fields
|
||||
if (siteConfig.story.summary) {
|
||||
story.summary = await this.extractField($, siteConfig.story.summary, html);
|
||||
}
|
||||
|
||||
return story;
|
||||
}
|
||||
|
||||
private async extractField(
|
||||
$: cheerio.CheerioAPI,
|
||||
selector: string | SelectorStrategy,
|
||||
html: string
|
||||
): Promise<string> {
|
||||
if (typeof selector === 'string') {
|
||||
// Simple CSS selector
|
||||
return $(selector).first().text().trim();
|
||||
}
|
||||
|
||||
// Strategy-based extraction
|
||||
return await this.executeStrategy($, selector, html);
|
||||
}
|
||||
|
||||
private async executeStrategy(
|
||||
$: cheerio.CheerioAPI,
|
||||
strategy: SelectorStrategy,
|
||||
html: string
|
||||
): Promise<string> {
|
||||
switch (strategy.strategy) {
|
||||
case 'text-pattern':
|
||||
return this.extractByTextPattern(html, strategy);
|
||||
case 'link-with-path':
|
||||
return this.extractLinkWithPath($, strategy);
|
||||
case 'text-blocks':
|
||||
return this.extractTextBlocks($, strategy);
|
||||
// ... other strategies
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Strategy Implementations
|
||||
|
||||
```typescript
|
||||
// /lib/scraper/strategies/textExtractor.ts
|
||||
export function extractByTextPattern(
|
||||
html: string,
|
||||
config: TextPatternStrategy
|
||||
): string {
|
||||
const regex = new RegExp(config.pattern, 'i');
|
||||
const match = html.match(regex);
|
||||
return match ? match[config.group || 1].trim() : '';
|
||||
}
|
||||
|
||||
export function extractTextBlocks(
|
||||
$: cheerio.CheerioAPI,
|
||||
config: TextBlockStrategy
|
||||
): string {
|
||||
const blocks: Array<{element: any, text: string}> = [];
|
||||
|
||||
$('*').each((_, elem) => {
|
||||
const $elem = $(elem);
|
||||
const text = $elem.clone().children().remove().end().text().trim();
|
||||
|
||||
if (text.length >= (config.minLength || 500)) {
|
||||
blocks.push({ element: elem, text });
|
||||
}
|
||||
});
|
||||
|
||||
// Find the block that likely contains story content
|
||||
const storyBlock = blocks.find(block => {
|
||||
const hasHints = config.containerHints?.some(hint =>
|
||||
$(block.element).attr('class')?.includes(hint) ||
|
||||
$(block.element).attr('id')?.includes(hint)
|
||||
);
|
||||
return hasHints || blocks.length === 1;
|
||||
});
|
||||
|
||||
return storyBlock ? $(storyBlock.element).html() || '' : '';
|
||||
}
|
||||
```
|
||||
|
||||
### Rate Limiting
|
||||
|
||||
```typescript
|
||||
// /lib/scraper/utils/rateLimit.ts
|
||||
export class RateLimiter {
|
||||
private lastRequest: number = 0;
|
||||
private minDelay: number;
|
||||
|
||||
constructor(minDelayMs: number = 1000) {
|
||||
this.minDelay = minDelayMs;
|
||||
}
|
||||
|
||||
async throttle(): Promise<void> {
|
||||
const now = Date.now();
|
||||
const timeSinceLastRequest = now - this.lastRequest;
|
||||
|
||||
if (timeSinceLastRequest < this.minDelay) {
|
||||
await this.delay(this.minDelay - timeSinceLastRequest);
|
||||
}
|
||||
|
||||
this.lastRequest = Date.now();
|
||||
}
|
||||
|
||||
private delay(ms: number): Promise<void> {
|
||||
return new Promise(resolve => setTimeout(resolve, ms));
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Multi-page Story Handling

```typescript
// /lib/scraper/scraper.ts (addition)
private async extractContent(
  $: cheerio.CheerioAPI,
  storyConfig: StorySelectors,
  url: string
): Promise<string> {
  let content = await this.extractField($, storyConfig.content, $.html());

  if (storyConfig.multiPage?.enabled) {
    const additionalPages = await this.fetchAdditionalPages(
      $,
      url,
      storyConfig.multiPage
    );

    for (const pageHtml of additionalPages) {
      const $page = cheerio.load(pageHtml);
      const pageContent = await this.extractField(
        $page,
        storyConfig.content,
        pageHtml
      );
      content += '\n\n' + pageContent;
    }
  }

  return content;
}

private async fetchAdditionalPages(
  $: cheerio.CheerioAPI,
  baseUrl: string,
  config: MultiPageConfig
): Promise<string[]> {
  const pages: string[] = [];
  let $current = $;
  let currentUrl = baseUrl;
  let pageNum = 2;

  while (pageNum <= (config.maxPages || 20)) {
    let nextUrl: string | null = null;

    if (config.strategy === 'url-pattern') {
      nextUrl = this.buildPageUrl(baseUrl, pageNum, config);
    } else if (config.nextPageSelector) {
      // Look for the "next page" link on the most recently fetched page,
      // not the first page, so next-link pagination actually advances
      const nextLink = $current(config.nextPageSelector).attr('href');
      if (nextLink) {
        nextUrl = new URL(nextLink, currentUrl).href;
      }
    }

    if (!nextUrl) break;

    try {
      await this.rateLimiter.throttle();
      const html = await this.fetchWithCache(nextUrl);
      pages.push(html);
      $current = cheerio.load(html);
      currentUrl = nextUrl;
      pageNum++;
    } catch (error) {
      console.error(`Failed to fetch page ${pageNum}:`, error);
      break;
    }
  }

  return pages;
}
```

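`buildPageUrl` is referenced above but not shown in this excerpt. A minimal sketch, assuming the `url-pattern` strategy appends the page number as a query parameter named by `pageParam` from the site's `multiPage` config:

```typescript
// Minimal sketch (assumed behavior): build page 2, 3, ... by setting a query
// parameter, e.g. https://example.com/story/123?page=2
private buildPageUrl(baseUrl: string, pageNum: number, config: MultiPageConfig): string {
  const url = new URL(baseUrl);
  url.searchParams.set(config.pageParam || 'page', String(pageNum));
  return url.href;
}
```

Sites that encode the page in the path rather than a query parameter would need their own variant of this helper.
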
### Error Handling

```typescript
// /lib/scraper/scraper.ts (addition)
async scrapeStory(url: string): Promise<ScrapedStory> {
  try {
    // ... existing implementation
  } catch (error) {
    if (error instanceof Error) {
      throw new ScraperError(
        `Failed to scrape ${url}: ${error.message}`,
        url,
        error
      );
    }
    throw error;
  }
}

export class ScraperError extends Error {
  constructor(
    message: string,
    public url: string,
    public originalError?: Error
  ) {
    super(message);
    this.name = 'ScraperError';
  }
}
```

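The `globalOptions` below include `retryAttempts`, but no retry logic appears in this excerpt. A hedged sketch of how transient fetch failures could be retried before surfacing as a `ScraperError` (the helper name and backoff values are assumptions):

```typescript
// Hypothetical retry helper: runs fetchFn up to retryAttempts times with simple
// linear backoff, rethrowing the last error if every attempt fails.
async function fetchWithRetry(
  fetchFn: () => Promise<string>,
  retryAttempts: number = 3,
  backoffMs: number = 500
): Promise<string> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= retryAttempts; attempt++) {
    try {
      return await fetchFn();
    } catch (error) {
      lastError = error;
      // wait a little longer after each failed attempt
      await new Promise(resolve => setTimeout(resolve, backoffMs * attempt));
    }
  }
  throw lastError;
}
```
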
## Configuration File Structure

```json
{
  "sites": {
    "domain.com": {
      "story": {
        "title": "selector or strategy object",
        "author": "selector or strategy object",
        "content": "selector or strategy object",
        "summary": "optional selector or strategy",
        "coverImage": "optional selector or strategy",
        "tags": "optional selector or strategy",
        "multiPage": {
          "enabled": true,
          "strategy": "url-pattern|next-link",
          "nextPageSelector": "a.next-page",
          "pageParam": "page",
          "maxPages": 20
        }
      },
      "authorPage": {
        "storyLinks": "selector or strategy object",
        "pagination": {
          "enabled": true,
          "nextPageSelector": "a.next"
        }
      }
    }
  },
  "globalOptions": {
    "userAgent": "Mozilla/5.0...",
    "timeout": 30000,
    "retryAttempts": 3,
    "rateLimitMs": 1000
  }
}
```

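How the scraper resolves a site entry from this file is not shown in this excerpt. A minimal lookup sketch, keyed by the URL's hostname; the type names and return shape are assumptions, not part of the spec above:

```typescript
// Hypothetical lookup helper: resolves the per-site entry by hostname.
// Returning undefined for unknown hosts doubles as the domain allowlist check.
import type { ScraperConfig, SiteConfig } from './types'; // assumed type names

export function getSiteConfig(config: ScraperConfig, url: string): SiteConfig | undefined {
  const hostname = new URL(url).hostname.replace(/^www\./, '');
  return config.sites[hostname];
}
```
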
## Usage Example

```typescript
// In a Next.js API route
import { StoryScraper, ScraperError } from '@/lib/scraper/scraper';

export async function POST(request: Request) {
  const { url } = await request.json();

  try {
    const scraper = new StoryScraper();
    const story = await scraper.scrapeStory(url);

    return Response.json(story);
  } catch (error) {
    if (error instanceof ScraperError) {
      return Response.json(
        { error: error.message },
        { status: 400 }
      );
    }

    return Response.json(
      { error: 'Internal server error' },
      { status: 500 }
    );
  }
}
```

## Testing Considerations

1. **Unit Tests**: Test individual strategies and extractors
2. **Integration Tests**: Test against saved HTML samples (see the sketch below)
3. **Mock External Requests**: Use saved HTML fixtures to avoid hitting real sites
4. **Edge Cases**: Empty content, missing fields, malformed HTML
5. **Rate Limiting**: Verify delays are properly applied

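A minimal sketch of the fixture-based approach from items 2-3, assuming Vitest; the fixture path and expected values are illustrative, not part of the project layout above:

```typescript
// Illustrative Vitest test: parse a saved HTML fixture instead of hitting a live
// site, then check that the text-block extractor finds the story container.
import { describe, expect, it } from 'vitest';
import { readFileSync } from 'node:fs';
import * as cheerio from 'cheerio';
import { extractTextBlocks } from '@/lib/scraper/strategies/textExtractor';

describe('extractTextBlocks', () => {
  it('extracts the story body from a saved sample page', () => {
    const html = readFileSync('tests/fixtures/sample-story.html', 'utf-8');
    const $ = cheerio.load(html);

    const content = extractTextBlocks($, {
      minLength: 500,
      containerHints: ['story'],
    });

    expect(content.length).toBeGreaterThan(500);
  });
});
```
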
## Security Considerations

1. **URL Validation**: Only accept HTTP/HTTPS URLs
2. **Domain Allowlist**: Restrict to configured domains (see the sketch below)
3. **Content Sanitization**: Clean HTML before storage
4. **Request Timeouts**: Prevent hanging on slow sites
5. **Rate Limiting**: Prevent abuse of the scraping endpoint

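A hedged sketch combining items 1 and 2; the helper name is hypothetical, and the allowlist is assumed to come from the keys of the `sites` map in the configuration file:

```typescript
// Illustrative guard: accept only http(s) URLs whose hostname is configured.
export function validateScrapeUrl(rawUrl: string, allowedDomains: string[]): URL {
  let url: URL;
  try {
    url = new URL(rawUrl);
  } catch {
    throw new Error('Invalid URL');
  }

  if (url.protocol !== 'http:' && url.protocol !== 'https:') {
    throw new Error('Only HTTP and HTTPS URLs are allowed');
  }

  const hostname = url.hostname.replace(/^www\./, '');
  if (!allowedDomains.includes(hostname)) {
    throw new Error(`Domain not in allowlist: ${hostname}`);
  }

  return url;
}
```
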
## Future Enhancements

1. **Browser Automation**: Use Playwright for JavaScript-rendered content
2. **AI Content Extraction**: Use LLMs for sites without clear patterns
3. **User-Submitted Configurations**: Allow users to define selectors
4. **Scheduled Imports**: Periodic author page checking
5. **Import History**: Track what has been imported to avoid duplicates