Full parallel implementation of typesense and opensearch

This commit is contained in:
Stefan Hardegger
2025-09-20 09:40:09 +02:00
parent 54df3c471e
commit f1773873d4
20 changed files with 2869 additions and 290 deletions

View File

@@ -1,5 +1,7 @@
package com.storycove.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.apache.hc.client5.http.auth.AuthScope;
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
@@ -8,13 +10,13 @@ import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBu
import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.util.Timeout;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.OpenSearchTransport;
import org.opensearch.client.transport.httpclient5.ApacheHttpClient5TransportBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.boot.context.properties.EnableConfigurationProperties;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@@ -26,19 +28,17 @@ import java.security.KeyStore;
import java.security.cert.X509Certificate;
@Configuration
@EnableConfigurationProperties(OpenSearchProperties.class)
public class OpenSearchConfig {
private static final Logger logger = LoggerFactory.getLogger(OpenSearchConfig.class);
private final OpenSearchProperties properties;
public OpenSearchConfig(OpenSearchProperties properties) {
public OpenSearchConfig(@Qualifier("openSearchProperties") OpenSearchProperties properties) {
this.properties = properties;
}
@Bean
@ConditionalOnProperty(name = "storycove.search.engine", havingValue = "opensearch")
public OpenSearchClient openSearchClient() throws Exception {
logger.info("Initializing OpenSearch client for profile: {}", properties.getProfile());
@@ -51,13 +51,23 @@ public class OpenSearchConfig {
// Create connection manager with pooling
PoolingAsyncClientConnectionManager connectionManager = createConnectionManager(sslContext);
// Create the transport with all configurations
// Create custom ObjectMapper for proper date serialization
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.registerModule(new JavaTimeModule());
objectMapper.disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
// Create the transport with all configurations and custom Jackson mapper
OpenSearchTransport transport = ApacheHttpClient5TransportBuilder
.builder(new HttpHost(properties.getScheme(), properties.getHost(), properties.getPort()))
.setMapper(new JacksonJsonpMapper(objectMapper))
.setHttpClientConfigCallback(httpClientBuilder -> {
httpClientBuilder
.setDefaultCredentialsProvider(credentialsProvider)
.setConnectionManager(connectionManager);
// Only set credentials provider if authentication is configured
if (properties.getUsername() != null && !properties.getUsername().isEmpty() &&
properties.getPassword() != null && !properties.getPassword().isEmpty()) {
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
httpClientBuilder.setConnectionManager(connectionManager);
// Set timeouts
httpClientBuilder.setDefaultRequestConfig(
@@ -81,13 +91,22 @@ public class OpenSearchConfig {
private BasicCredentialsProvider createCredentialsProvider() {
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
credentialsProvider.setCredentials(
new AuthScope(properties.getHost(), properties.getPort()),
new UsernamePasswordCredentials(
properties.getUsername(),
properties.getPassword() != null ? properties.getPassword().toCharArray() : new char[0]
)
);
// Only set credentials if username and password are provided
if (properties.getUsername() != null && !properties.getUsername().isEmpty() &&
properties.getPassword() != null && !properties.getPassword().isEmpty()) {
credentialsProvider.setCredentials(
new AuthScope(properties.getHost(), properties.getPort()),
new UsernamePasswordCredentials(
properties.getUsername(),
properties.getPassword().toCharArray()
)
);
logger.info("OpenSearch credentials configured for user: {}", properties.getUsername());
} else {
logger.info("OpenSearch running without authentication (no credentials configured)");
}
return credentialsProvider;
}
@@ -184,8 +203,9 @@ public class OpenSearchConfig {
response.version().number(),
response.clusterName());
} catch (Exception e) {
logger.error("Failed to connect to OpenSearch cluster", e);
throw new RuntimeException("OpenSearch connection failed", e);
logger.warn("OpenSearch connection test failed during initialization: {}", e.getMessage());
logger.debug("OpenSearch connection test full error", e);
// Don't throw exception here - let the client be created and handle failures in service methods
}
}
}

View File

@@ -0,0 +1,296 @@
package com.storycove.controller;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import com.storycove.service.AuthorService;
import com.storycove.service.OpenSearchService;
import com.storycove.service.SearchMigrationManager;
import com.storycove.service.StoryService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;
import java.util.List;
import java.util.Map;
/**
* TEMPORARY ADMIN CONTROLLER - DELETE THIS ENTIRE CLASS WHEN TYPESENSE IS REMOVED
*
* This controller provides admin endpoints for managing the search engine migration.
* It allows real-time switching between engines and enabling/disabling dual-write.
*
* CLEANUP INSTRUCTIONS:
* 1. Delete this entire file: AdminSearchController.java
* 2. Remove any frontend components that call these endpoints
*/
@RestController
@RequestMapping("/api/admin/search")
public class AdminSearchController {
private static final Logger logger = LoggerFactory.getLogger(AdminSearchController.class);
@Autowired
private SearchMigrationManager migrationManager;
@Autowired(required = false)
private OpenSearchService openSearchService;
@Autowired
private StoryService storyService;
@Autowired
private AuthorService authorService;
/**
* Get current search engine configuration status
*/
@GetMapping("/status")
public ResponseEntity<SearchMigrationManager.SearchMigrationStatus> getStatus() {
try {
SearchMigrationManager.SearchMigrationStatus status = migrationManager.getStatus();
return ResponseEntity.ok(status);
} catch (Exception e) {
logger.error("Error getting search migration status", e);
return ResponseEntity.internalServerError().build();
}
}
/**
* Update search engine configuration
*/
@PostMapping("/configure")
public ResponseEntity<String> configureSearchEngine(@RequestBody SearchEngineConfigRequest request) {
try {
logger.info("Updating search engine configuration: engine={}, dualWrite={}",
request.getEngine(), request.isDualWrite());
// Validate engine
if (!"typesense".equalsIgnoreCase(request.getEngine()) &&
!"opensearch".equalsIgnoreCase(request.getEngine())) {
return ResponseEntity.badRequest().body("Invalid engine. Must be 'typesense' or 'opensearch'");
}
// Update configuration
migrationManager.updateConfiguration(request.getEngine(), request.isDualWrite());
return ResponseEntity.ok("Search engine configuration updated successfully");
} catch (Exception e) {
logger.error("Error updating search engine configuration", e);
return ResponseEntity.internalServerError().body("Failed to update configuration: " + e.getMessage());
}
}
/**
* Enable dual-write mode (writes to both engines)
*/
@PostMapping("/dual-write/enable")
public ResponseEntity<String> enableDualWrite() {
try {
String currentEngine = migrationManager.getCurrentSearchEngine();
migrationManager.updateConfiguration(currentEngine, true);
logger.info("Dual-write enabled for engine: {}", currentEngine);
return ResponseEntity.ok("Dual-write enabled");
} catch (Exception e) {
logger.error("Error enabling dual-write", e);
return ResponseEntity.internalServerError().body("Failed to enable dual-write: " + e.getMessage());
}
}
/**
* Disable dual-write mode
*/
@PostMapping("/dual-write/disable")
public ResponseEntity<String> disableDualWrite() {
try {
String currentEngine = migrationManager.getCurrentSearchEngine();
migrationManager.updateConfiguration(currentEngine, false);
logger.info("Dual-write disabled for engine: {}", currentEngine);
return ResponseEntity.ok("Dual-write disabled");
} catch (Exception e) {
logger.error("Error disabling dual-write", e);
return ResponseEntity.internalServerError().body("Failed to disable dual-write: " + e.getMessage());
}
}
/**
* Switch to OpenSearch engine
*/
@PostMapping("/switch/opensearch")
public ResponseEntity<String> switchToOpenSearch() {
try {
if (!migrationManager.canSwitchToOpenSearch()) {
return ResponseEntity.badRequest().body("OpenSearch is not available or healthy");
}
boolean currentDualWrite = migrationManager.isDualWriteEnabled();
migrationManager.updateConfiguration("opensearch", currentDualWrite);
logger.info("Switched to OpenSearch with dual-write: {}", currentDualWrite);
return ResponseEntity.ok("Switched to OpenSearch");
} catch (Exception e) {
logger.error("Error switching to OpenSearch", e);
return ResponseEntity.internalServerError().body("Failed to switch to OpenSearch: " + e.getMessage());
}
}
/**
* Switch to Typesense engine (rollback)
*/
@PostMapping("/switch/typesense")
public ResponseEntity<String> switchToTypesense() {
try {
if (!migrationManager.canSwitchToTypesense()) {
return ResponseEntity.badRequest().body("Typesense is not available");
}
boolean currentDualWrite = migrationManager.isDualWriteEnabled();
migrationManager.updateConfiguration("typesense", currentDualWrite);
logger.info("Switched to Typesense with dual-write: {}", currentDualWrite);
return ResponseEntity.ok("Switched to Typesense");
} catch (Exception e) {
logger.error("Error switching to Typesense", e);
return ResponseEntity.internalServerError().body("Failed to switch to Typesense: " + e.getMessage());
}
}
/**
* Emergency rollback to Typesense with dual-write disabled
*/
@PostMapping("/emergency-rollback")
public ResponseEntity<String> emergencyRollback() {
try {
migrationManager.updateConfiguration("typesense", false);
logger.warn("Emergency rollback to Typesense executed");
return ResponseEntity.ok("Emergency rollback completed - switched to Typesense only");
} catch (Exception e) {
logger.error("Error during emergency rollback", e);
return ResponseEntity.internalServerError().body("Emergency rollback failed: " + e.getMessage());
}
}
/**
* Reindex all data in OpenSearch (equivalent to Typesense reindex)
*/
@PostMapping("/opensearch/reindex")
public ResponseEntity<Map<String, Object>> reindexOpenSearch() {
try {
logger.info("Starting OpenSearch full reindex");
if (!migrationManager.canSwitchToOpenSearch()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch is not available or healthy"
));
}
// Get all data from services (similar to Typesense reindex)
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index directly in OpenSearch
if (openSearchService != null) {
openSearchService.bulkIndexStories(allStories);
openSearchService.bulkIndexAuthors(allAuthors);
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch service not available"
));
}
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Reindexed %d stories and %d authors in OpenSearch",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed
));
} catch (Exception e) {
logger.error("Error during OpenSearch reindex", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "OpenSearch reindex failed: " + e.getMessage()
));
}
}
/**
* Recreate OpenSearch indices (equivalent to Typesense collection recreation)
*/
@PostMapping("/opensearch/recreate")
public ResponseEntity<Map<String, Object>> recreateOpenSearchIndices() {
try {
logger.info("Starting OpenSearch indices recreation");
if (!migrationManager.canSwitchToOpenSearch()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch is not available or healthy"
));
}
// Recreate OpenSearch indices directly
if (openSearchService != null) {
openSearchService.recreateIndices();
} else {
logger.error("OpenSearchService not available for index recreation");
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearchService not available"
));
}
// Now populate the freshly created indices directly in OpenSearch
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
openSearchService.bulkIndexStories(allStories);
openSearchService.bulkIndexAuthors(allAuthors);
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Recreated OpenSearch indices and indexed %d stories and %d authors",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
"totalCount", totalIndexed
));
} catch (Exception e) {
logger.error("Error during OpenSearch indices recreation", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "OpenSearch indices recreation failed: " + e.getMessage()
));
}
}
/**
* DTO for search engine configuration requests
*/
public static class SearchEngineConfigRequest {
private String engine;
private boolean dualWrite;
public SearchEngineConfigRequest() {}
public SearchEngineConfigRequest(String engine, boolean dualWrite) {
this.engine = engine;
this.dualWrite = dualWrite;
}
public String getEngine() { return engine; }
public void setEngine(String engine) { this.engine = engine; }
public boolean isDualWrite() { return dualWrite; }
public void setDualWrite(boolean dualWrite) { this.dualWrite = dualWrite; }
}
}

View File

@@ -4,6 +4,7 @@ import com.storycove.dto.*;
import com.storycove.entity.Author;
import com.storycove.service.AuthorService;
import com.storycove.service.ImageService;
import com.storycove.service.SearchServiceAdapter;
import com.storycove.service.TypesenseService;
import jakarta.servlet.http.HttpServletRequest;
import jakarta.validation.Valid;
@@ -33,11 +34,13 @@ public class AuthorController {
private final AuthorService authorService;
private final ImageService imageService;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
public AuthorController(AuthorService authorService, ImageService imageService, TypesenseService typesenseService) {
public AuthorController(AuthorService authorService, ImageService imageService, TypesenseService typesenseService, SearchServiceAdapter searchServiceAdapter) {
this.authorService = authorService;
this.imageService = imageService;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
}
@GetMapping
@@ -258,7 +261,17 @@ public class AuthorController {
@RequestParam(defaultValue = "name") String sortBy,
@RequestParam(defaultValue = "asc") String sortOrder) {
SearchResultDto<AuthorSearchDto> searchResults = typesenseService.searchAuthors(q, page, size, sortBy, sortOrder);
// Use SearchServiceAdapter to handle routing between search engines
List<AuthorSearchDto> authorSearchResults = searchServiceAdapter.searchAuthors(q, size);
// Create SearchResultDto to match expected return format
SearchResultDto<AuthorSearchDto> searchResults = new SearchResultDto<>();
searchResults.setResults(authorSearchResults);
searchResults.setQuery(q);
searchResults.setPage(page);
searchResults.setPerPage(size);
searchResults.setTotalHits(authorSearchResults.size());
searchResults.setSearchTimeMs(0); // SearchServiceAdapter doesn't provide timing
// Convert AuthorSearchDto results to AuthorDto
SearchResultDto<AuthorDto> results = new SearchResultDto<>();

View File

@@ -42,6 +42,7 @@ public class StoryController {
private final HtmlSanitizationService sanitizationService;
private final ImageService imageService;
private final TypesenseService typesenseService;
private final SearchServiceAdapter searchServiceAdapter;
private final CollectionService collectionService;
private final ReadingTimeService readingTimeService;
private final EPUBImportService epubImportService;
@@ -54,6 +55,7 @@ public class StoryController {
ImageService imageService,
CollectionService collectionService,
@Autowired(required = false) TypesenseService typesenseService,
SearchServiceAdapter searchServiceAdapter,
ReadingTimeService readingTimeService,
EPUBImportService epubImportService,
EPUBExportService epubExportService) {
@@ -64,6 +66,7 @@ public class StoryController {
this.imageService = imageService;
this.collectionService = collectionService;
this.typesenseService = typesenseService;
this.searchServiceAdapter = searchServiceAdapter;
this.readingTimeService = readingTimeService;
this.epubImportService = epubImportService;
this.epubExportService = epubExportService;
@@ -326,7 +329,7 @@ public class StoryController {
@RequestParam(required = false) Integer maxRating,
@RequestParam(required = false) String sortBy,
@RequestParam(required = false) String sortDir,
@RequestParam(required = false) String facetBy,
@RequestParam(required = false) List<String> facetBy,
// Advanced filters
@RequestParam(required = false) Integer minWordCount,
@RequestParam(required = false) Integer maxWordCount,
@@ -345,16 +348,35 @@ public class StoryController {
@RequestParam(required = false) Boolean hiddenGemsOnly) {
if (typesenseService != null) {
SearchResultDto<StorySearchDto> results = typesenseService.searchStories(
query, page, size, authors, tags, minRating, maxRating, sortBy, sortDir, facetBy,
minWordCount, maxWordCount, createdAfter, createdBefore, lastReadAfter, lastReadBefore,
unratedOnly, readingStatus, hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter,
minTagCount, popularOnly, hiddenGemsOnly);
// Use SearchServiceAdapter to handle routing between search engines
try {
// Convert authors list to single author string (for now, use first author)
String authorFilter = (authors != null && !authors.isEmpty()) ? authors.get(0) : null;
// DEBUG: Log all received parameters
logger.info("CONTROLLER DEBUG - Received parameters:");
logger.info(" readingStatus: '{}'", readingStatus);
logger.info(" seriesFilter: '{}'", seriesFilter);
logger.info(" hasReadingProgress: {}", hasReadingProgress);
logger.info(" hasCoverImage: {}", hasCoverImage);
logger.info(" createdAfter: '{}'", createdAfter);
logger.info(" lastReadAfter: '{}'", lastReadAfter);
logger.info(" unratedOnly: {}", unratedOnly);
SearchResultDto<StorySearchDto> results = searchServiceAdapter.searchStories(
query, tags, authorFilter, seriesFilter, minWordCount, maxWordCount,
minRating != null ? minRating.floatValue() : null,
null, // isRead - now handled by readingStatus advanced filter
null, // isFavorite - now handled by readingStatus advanced filter
sortBy, sortDir, page, size, facetBy,
// Advanced filters
createdAfter, createdBefore, lastReadAfter, lastReadBefore,
unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
return ResponseEntity.ok(results);
} else {
// Fallback to basic search if Typesense is not available
return ResponseEntity.badRequest().body(null);
} catch (Exception e) {
logger.error("Search failed", e);
return ResponseEntity.internalServerError().body(null);
}
}
@@ -363,10 +385,12 @@ public class StoryController {
@RequestParam String query,
@RequestParam(defaultValue = "5") int limit) {
if (typesenseService != null) {
List<String> suggestions = typesenseService.searchSuggestions(query, limit);
// Use SearchServiceAdapter to handle routing between search engines
try {
List<String> suggestions = searchServiceAdapter.getTagSuggestions(query, limit);
return ResponseEntity.ok(suggestions);
} else {
} catch (Exception e) {
logger.error("Failed to get search suggestions", e);
return ResponseEntity.ok(new ArrayList<>());
}
}

View File

@@ -17,6 +17,7 @@ public class StorySearchDto {
// Reading status
private Boolean isRead;
private Integer readingPosition;
private LocalDateTime lastReadAt;
// Author info
@@ -32,6 +33,9 @@ public class StorySearchDto {
private LocalDateTime createdAt;
private LocalDateTime updatedAt;
// Alias for createdAt to match frontend expectations
private LocalDateTime dateAdded;
// Search-specific fields
private double searchScore;
@@ -120,6 +124,14 @@ public class StorySearchDto {
public void setLastReadAt(LocalDateTime lastReadAt) {
this.lastReadAt = lastReadAt;
}
public Integer getReadingPosition() {
return readingPosition;
}
public void setReadingPosition(Integer readingPosition) {
this.readingPosition = readingPosition;
}
public UUID getAuthorId() {
return authorId;
@@ -176,6 +188,14 @@ public class StorySearchDto {
public void setUpdatedAt(LocalDateTime updatedAt) {
this.updatedAt = updatedAt;
}
public LocalDateTime getDateAdded() {
return dateAdded;
}
public void setDateAdded(LocalDateTime dateAdded) {
this.dateAdded = dateAdded;
}
public double getSearchScore() {
return searchScore;

View File

@@ -0,0 +1,473 @@
package com.storycove.service;
import com.storycove.dto.AuthorSearchDto;
import com.storycove.dto.SearchResultDto;
import com.storycove.dto.StorySearchDto;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;
import java.util.List;
import java.util.UUID;
/**
* TEMPORARY MIGRATION MANAGER - DELETE THIS ENTIRE CLASS WHEN TYPESENSE IS REMOVED
*
* This class handles dual-write functionality and engine switching during the
* migration from Typesense to OpenSearch. It's designed to be completely removed
* once the migration is complete.
*
* CLEANUP INSTRUCTIONS:
* 1. Delete this entire file: SearchMigrationManager.java
* 2. Update SearchServiceAdapter to call OpenSearchService directly
* 3. Remove migration-related configuration properties
* 4. Remove migration-related admin endpoints and UI
*/
@Component
public class SearchMigrationManager {
private static final Logger logger = LoggerFactory.getLogger(SearchMigrationManager.class);
@Autowired(required = false)
private TypesenseService typesenseService;
@Autowired(required = false)
private OpenSearchService openSearchService;
@Value("${storycove.search.engine:typesense}")
private String primaryEngine;
@Value("${storycove.search.dual-write:false}")
private boolean dualWrite;
// ===============================
// READ OPERATIONS (single engine)
// ===============================
public SearchResultDto<StorySearchDto> searchStories(String query, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
String sortBy, String sortOrder, int page, int size,
List<String> facetBy,
// Advanced filters
String createdAfter, String createdBefore,
String lastReadAfter, String lastReadBefore,
Boolean unratedOnly, String readingStatus,
Boolean hasReadingProgress, Boolean hasCoverImage,
String sourceDomain, String seriesFilter,
Integer minTagCount, Boolean popularOnly,
Boolean hiddenGemsOnly) {
boolean openSearchAvailable = openSearchService != null;
boolean openSearchConnected = openSearchAvailable ? openSearchService.testConnection() : false;
boolean routingCondition = "opensearch".equalsIgnoreCase(primaryEngine) && openSearchAvailable;
logger.info("SEARCH ROUTING DEBUG:");
logger.info(" Primary engine: '{}'", primaryEngine);
logger.info(" OpenSearch available: {}", openSearchAvailable);
logger.info(" OpenSearch connected: {}", openSearchConnected);
logger.info(" Routing condition result: {}", routingCondition);
logger.info(" Will route to: {}", routingCondition ? "OpenSearch" : "Typesense");
if (routingCondition) {
logger.info("ROUTING TO OPENSEARCH");
return openSearchService.searchStories(query, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, sortBy, sortOrder, page, size, facetBy,
createdAfter, createdBefore, lastReadAfter, lastReadBefore, unratedOnly, readingStatus,
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter, minTagCount, popularOnly,
hiddenGemsOnly);
} else if (typesenseService != null) {
logger.info("ROUTING TO TYPESENSE");
// Convert parameters to match TypesenseService signature
return typesenseService.searchStories(
query, page, size, tags, null, minWordCount, maxWordCount,
null, null, null, null, minRating != null ? minRating.intValue() : null,
null, null, sortBy, sortOrder, null, null, isRead, isFavorite,
author, series, null, null, null);
} else {
logger.error("No search service available! Primary engine: {}, OpenSearch: {}, Typesense: {}",
primaryEngine, openSearchService != null, typesenseService != null);
return new SearchResultDto<>(List.of(), 0, page, size, query != null ? query : "", 0);
}
}
public List<StorySearchDto> getRandomStories(int count, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
Long seed) {
logger.debug("Getting random stories using primary engine: {}", primaryEngine);
if ("opensearch".equalsIgnoreCase(primaryEngine) && openSearchService != null) {
return openSearchService.getRandomStories(count, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, seed);
} else if (typesenseService != null) {
// TypesenseService doesn't have getRandomStories, use random story ID approach
List<StorySearchDto> results = new java.util.ArrayList<>();
for (int i = 0; i < count; i++) {
var randomId = typesenseService.getRandomStoryId(null, tags, seed != null ? seed + i : null);
// Note: This is a simplified approach - full implementation would need story lookup
}
return results;
} else {
logger.error("No search service available for random stories");
return List.of();
}
}
public String getRandomStoryId(Long seed) {
logger.debug("Getting random story ID using primary engine: {}", primaryEngine);
if ("opensearch".equalsIgnoreCase(primaryEngine) && openSearchService != null) {
return openSearchService.getRandomStoryId(seed);
} else if (typesenseService != null) {
var randomId = typesenseService.getRandomStoryId(null, null, seed);
return randomId.map(UUID::toString).orElse(null);
} else {
logger.error("No search service available for random story ID");
return null;
}
}
public List<AuthorSearchDto> searchAuthors(String query, int limit) {
logger.debug("Searching authors using primary engine: {}", primaryEngine);
if ("opensearch".equalsIgnoreCase(primaryEngine) && openSearchService != null) {
return openSearchService.searchAuthors(query, limit);
} else if (typesenseService != null) {
var result = typesenseService.searchAuthors(query, 0, limit, null, null);
return result.getResults();
} else {
logger.error("No search service available for author search");
return List.of();
}
}
public List<String> getTagSuggestions(String query, int limit) {
logger.debug("Getting tag suggestions using primary engine: {}", primaryEngine);
if ("opensearch".equalsIgnoreCase(primaryEngine) && openSearchService != null) {
return openSearchService.getTagSuggestions(query, limit);
} else if (typesenseService != null) {
// TypesenseService may not have getTagSuggestions - return empty for now
logger.warn("Tag suggestions not implemented for Typesense");
return List.of();
} else {
logger.error("No search service available for tag suggestions");
return List.of();
}
}
// ===============================
// WRITE OPERATIONS (dual-write capable)
// ===============================
public void indexStory(Story story) {
logger.debug("Indexing story with dual-write: {}, primary engine: {}", dualWrite, primaryEngine);
// Write to OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.indexStory(story);
logger.debug("Successfully indexed story {} in OpenSearch", story.getId());
} catch (Exception e) {
logger.error("Failed to index story {} in OpenSearch", story.getId(), e);
}
} else {
logger.warn("OpenSearch service not available for indexing story {}", story.getId());
}
}
// Write to Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.indexStory(story);
logger.debug("Successfully indexed story {} in Typesense", story.getId());
} catch (Exception e) {
logger.error("Failed to index story {} in Typesense", story.getId(), e);
}
} else {
logger.warn("Typesense service not available for indexing story {}", story.getId());
}
}
}
public void updateStory(Story story) {
logger.debug("Updating story with dual-write: {}, primary engine: {}", dualWrite, primaryEngine);
// Update in OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.updateStory(story);
logger.debug("Successfully updated story {} in OpenSearch", story.getId());
} catch (Exception e) {
logger.error("Failed to update story {} in OpenSearch", story.getId(), e);
}
}
}
// Update in Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.updateStory(story);
logger.debug("Successfully updated story {} in Typesense", story.getId());
} catch (Exception e) {
logger.error("Failed to update story {} in Typesense", story.getId(), e);
}
}
}
}
public void deleteStory(UUID storyId) {
logger.debug("Deleting story with dual-write: {}, primary engine: {}", dualWrite, primaryEngine);
// Delete from OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.deleteStory(storyId);
logger.debug("Successfully deleted story {} from OpenSearch", storyId);
} catch (Exception e) {
logger.error("Failed to delete story {} from OpenSearch", storyId, e);
}
}
}
// Delete from Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.deleteStory(storyId.toString());
logger.debug("Successfully deleted story {} from Typesense", storyId);
} catch (Exception e) {
logger.error("Failed to delete story {} from Typesense", storyId, e);
}
}
}
}
public void indexAuthor(Author author) {
logger.debug("Indexing author with dual-write: {}, primary engine: {}", dualWrite, primaryEngine);
// Index in OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.indexAuthor(author);
logger.debug("Successfully indexed author {} in OpenSearch", author.getId());
} catch (Exception e) {
logger.error("Failed to index author {} in OpenSearch", author.getId(), e);
}
}
}
// Index in Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.indexAuthor(author);
logger.debug("Successfully indexed author {} in Typesense", author.getId());
} catch (Exception e) {
logger.error("Failed to index author {} in Typesense", author.getId(), e);
}
}
}
}
public void updateAuthor(Author author) {
logger.debug("Updating author with dual-write: {}, primary engine: {}", dualWrite, primaryEngine);
// Update in OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.updateAuthor(author);
logger.debug("Successfully updated author {} in OpenSearch", author.getId());
} catch (Exception e) {
logger.error("Failed to update author {} in OpenSearch", author.getId(), e);
}
}
}
// Update in Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.updateAuthor(author);
logger.debug("Successfully updated author {} in Typesense", author.getId());
} catch (Exception e) {
logger.error("Failed to update author {} in Typesense", author.getId(), e);
}
}
}
}
public void deleteAuthor(UUID authorId) {
logger.debug("Deleting author with dual-write: {}, primary engine: {}", dualWrite, primaryEngine);
// Delete from OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.deleteAuthor(authorId);
logger.debug("Successfully deleted author {} from OpenSearch", authorId);
} catch (Exception e) {
logger.error("Failed to delete author {} from OpenSearch", authorId, e);
}
}
}
// Delete from Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.deleteAuthor(authorId.toString());
logger.debug("Successfully deleted author {} from Typesense", authorId);
} catch (Exception e) {
logger.error("Failed to delete author {} from Typesense", authorId, e);
}
}
}
}
public void bulkIndexStories(List<Story> stories) {
logger.debug("Bulk indexing {} stories with dual-write: {}, primary engine: {}",
stories.size(), dualWrite, primaryEngine);
// Bulk index in OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.bulkIndexStories(stories);
logger.info("Successfully bulk indexed {} stories in OpenSearch", stories.size());
} catch (Exception e) {
logger.error("Failed to bulk index {} stories in OpenSearch", stories.size(), e);
}
}
}
// Bulk index in Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.bulkIndexStories(stories);
logger.info("Successfully bulk indexed {} stories in Typesense", stories.size());
} catch (Exception e) {
logger.error("Failed to bulk index {} stories in Typesense", stories.size(), e);
}
}
}
}
public void bulkIndexAuthors(List<Author> authors) {
logger.debug("Bulk indexing {} authors with dual-write: {}, primary engine: {}",
authors.size(), dualWrite, primaryEngine);
// Bulk index in OpenSearch
if ("opensearch".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (openSearchService != null) {
try {
openSearchService.bulkIndexAuthors(authors);
logger.info("Successfully bulk indexed {} authors in OpenSearch", authors.size());
} catch (Exception e) {
logger.error("Failed to bulk index {} authors in OpenSearch", authors.size(), e);
}
}
}
// Bulk index in Typesense
if ("typesense".equalsIgnoreCase(primaryEngine) || dualWrite) {
if (typesenseService != null) {
try {
typesenseService.bulkIndexAuthors(authors);
logger.info("Successfully bulk indexed {} authors in Typesense", authors.size());
} catch (Exception e) {
logger.error("Failed to bulk index {} authors in Typesense", authors.size(), e);
}
}
}
}
// ===============================
// UTILITY METHODS
// ===============================
public boolean isSearchServiceAvailable() {
if ("opensearch".equalsIgnoreCase(primaryEngine)) {
return openSearchService != null && openSearchService.testConnection();
} else {
return typesenseService != null;
}
}
public String getCurrentSearchEngine() {
return primaryEngine;
}
public boolean isDualWriteEnabled() {
return dualWrite;
}
public boolean canSwitchToOpenSearch() {
return openSearchService != null && openSearchService.testConnection();
}
public boolean canSwitchToTypesense() {
return typesenseService != null;
}
public OpenSearchService getOpenSearchService() {
return openSearchService;
}
/**
* Update configuration at runtime (for admin interface)
* Note: This requires @RefreshScope to work properly
*/
public void updateConfiguration(String engine, boolean enableDualWrite) {
logger.info("Updating search configuration: engine={}, dualWrite={}", engine, enableDualWrite);
this.primaryEngine = engine;
this.dualWrite = enableDualWrite;
}
/**
* Get current configuration status for admin interface
*/
public SearchMigrationStatus getStatus() {
return new SearchMigrationStatus(
primaryEngine,
dualWrite,
typesenseService != null,
openSearchService != null && openSearchService.testConnection()
);
}
/**
* DTO for search migration status
*/
public static class SearchMigrationStatus {
private final String primaryEngine;
private final boolean dualWrite;
private final boolean typesenseAvailable;
private final boolean openSearchAvailable;
public SearchMigrationStatus(String primaryEngine, boolean dualWrite,
boolean typesenseAvailable, boolean openSearchAvailable) {
this.primaryEngine = primaryEngine;
this.dualWrite = dualWrite;
this.typesenseAvailable = typesenseAvailable;
this.openSearchAvailable = openSearchAvailable;
}
public String getPrimaryEngine() { return primaryEngine; }
public boolean isDualWrite() { return dualWrite; }
public boolean isTypesenseAvailable() { return typesenseAvailable; }
public boolean isOpenSearchAvailable() { return openSearchAvailable; }
}
}

View File

@@ -0,0 +1,196 @@
package com.storycove.service;
import com.storycove.dto.AuthorSearchDto;
import com.storycove.dto.SearchResultDto;
import com.storycove.dto.StorySearchDto;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import java.util.List;
import java.util.UUID;
/**
* Service adapter that provides a unified interface for search operations.
*
* This adapter delegates to SearchMigrationManager during the migration period,
* which will be removed once Typesense is completely eliminated.
*
* POST-MIGRATION: This class will be simplified to call OpenSearchService directly.
*/
@Service
public class SearchServiceAdapter {
private static final Logger logger = LoggerFactory.getLogger(SearchServiceAdapter.class);
@Autowired
private SearchMigrationManager migrationManager;
// ===============================
// SEARCH OPERATIONS
// ===============================
/**
* Search stories with unified interface
*/
public SearchResultDto<StorySearchDto> searchStories(String query, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
String sortBy, String sortOrder, int page, int size,
List<String> facetBy,
// Advanced filters
String createdAfter, String createdBefore,
String lastReadAfter, String lastReadBefore,
Boolean unratedOnly, String readingStatus,
Boolean hasReadingProgress, Boolean hasCoverImage,
String sourceDomain, String seriesFilter,
Integer minTagCount, Boolean popularOnly,
Boolean hiddenGemsOnly) {
return migrationManager.searchStories(query, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, sortBy, sortOrder, page, size, facetBy,
createdAfter, createdBefore, lastReadAfter, lastReadBefore, unratedOnly, readingStatus,
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter, minTagCount, popularOnly,
hiddenGemsOnly);
}
/**
* Get random stories with unified interface
*/
public List<StorySearchDto> getRandomStories(int count, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
Long seed) {
return migrationManager.getRandomStories(count, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, seed);
}
/**
* Get random story ID with unified interface
*/
public String getRandomStoryId(Long seed) {
return migrationManager.getRandomStoryId(seed);
}
/**
* Search authors with unified interface
*/
public List<AuthorSearchDto> searchAuthors(String query, int limit) {
return migrationManager.searchAuthors(query, limit);
}
/**
* Get tag suggestions with unified interface
*/
public List<String> getTagSuggestions(String query, int limit) {
return migrationManager.getTagSuggestions(query, limit);
}
// ===============================
// INDEX OPERATIONS
// ===============================
/**
* Index a story with unified interface (supports dual-write)
*/
public void indexStory(Story story) {
migrationManager.indexStory(story);
}
/**
* Update a story in the index with unified interface (supports dual-write)
*/
public void updateStory(Story story) {
migrationManager.updateStory(story);
}
/**
* Delete a story from the index with unified interface (supports dual-write)
*/
public void deleteStory(UUID storyId) {
migrationManager.deleteStory(storyId);
}
/**
* Index an author with unified interface (supports dual-write)
*/
public void indexAuthor(Author author) {
migrationManager.indexAuthor(author);
}
/**
* Update an author in the index with unified interface (supports dual-write)
*/
public void updateAuthor(Author author) {
migrationManager.updateAuthor(author);
}
/**
* Delete an author from the index with unified interface (supports dual-write)
*/
public void deleteAuthor(UUID authorId) {
migrationManager.deleteAuthor(authorId);
}
/**
* Bulk index stories with unified interface (supports dual-write)
*/
public void bulkIndexStories(List<Story> stories) {
migrationManager.bulkIndexStories(stories);
}
/**
* Bulk index authors with unified interface (supports dual-write)
*/
public void bulkIndexAuthors(List<Author> authors) {
migrationManager.bulkIndexAuthors(authors);
}
// ===============================
// UTILITY METHODS
// ===============================
/**
* Check if search service is available and healthy
*/
public boolean isSearchServiceAvailable() {
return migrationManager.isSearchServiceAvailable();
}
/**
* Get current search engine name
*/
public String getCurrentSearchEngine() {
return migrationManager.getCurrentSearchEngine();
}
/**
* Check if dual-write is enabled
*/
public boolean isDualWriteEnabled() {
return migrationManager.isDualWriteEnabled();
}
/**
* Check if we can switch to OpenSearch
*/
public boolean canSwitchToOpenSearch() {
return migrationManager.canSwitchToOpenSearch();
}
/**
* Check if we can switch to Typesense
*/
public boolean canSwitchToTypesense() {
return migrationManager.canSwitchToTypesense();
}
/**
* Get current migration status for admin interface
*/
public SearchMigrationManager.SearchMigrationStatus getMigrationStatus() {
return migrationManager.getStatus();
}
}

View File

@@ -19,6 +19,12 @@ spring:
max-file-size: 256MB # Increased for backup restore
max-request-size: 260MB # Slightly higher to account for form data
jackson:
serialization:
write-dates-as-timestamps: false
deserialization:
adjust-dates-to-context-time-zone: false
server:
port: 8080
@@ -34,6 +40,7 @@ storycove:
password: ${APP_PASSWORD} # REQUIRED: No default password for security
search:
engine: ${SEARCH_ENGINE:typesense} # typesense or opensearch
dual-write: ${SEARCH_DUAL_WRITE:false} # enable dual-write during migration
typesense:
api-key: ${TYPESENSE_API_KEY:xyz}
host: ${TYPESENSE_HOST:localhost}
@@ -44,9 +51,9 @@ storycove:
# Connection settings
host: ${OPENSEARCH_HOST:localhost}
port: ${OPENSEARCH_PORT:9200}
scheme: ${OPENSEARCH_SCHEME:https}
username: ${OPENSEARCH_USERNAME:admin}
password: ${OPENSEARCH_PASSWORD} # REQUIRED when using OpenSearch
scheme: ${OPENSEARCH_SCHEME:http}
username: ${OPENSEARCH_USERNAME:}
password: ${OPENSEARCH_PASSWORD:} # Empty when security is disabled
# Environment-specific configuration
profile: ${SPRING_PROFILES_ACTIVE:development} # development, staging, production