replacing opensearch with solr

Stefan Hardegger
2025-09-22 09:44:50 +02:00
parent 9e684a956b
commit 87f37567fb
40 changed files with 2000 additions and 3464 deletions

View File

@@ -1,211 +0,0 @@
package com.storycove.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule;
import org.apache.hc.client5.http.auth.AuthScope;
import org.apache.hc.client5.http.auth.UsernamePasswordCredentials;
import org.apache.hc.client5.http.impl.auth.BasicCredentialsProvider;
import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManager;
import org.apache.hc.client5.http.impl.nio.PoolingAsyncClientConnectionManagerBuilder;
import org.apache.hc.client5.http.ssl.ClientTlsStrategyBuilder;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.util.Timeout;
import org.opensearch.client.json.jackson.JacksonJsonpMapper;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.transport.OpenSearchTransport;
import org.opensearch.client.transport.httpclient5.ApacheHttpClient5TransportBuilder;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManager;
import javax.net.ssl.X509TrustManager;
import java.io.FileInputStream;
import java.security.KeyStore;
import java.security.cert.X509Certificate;
@Configuration
public class OpenSearchConfig {
private static final Logger logger = LoggerFactory.getLogger(OpenSearchConfig.class);
private final OpenSearchProperties properties;
public OpenSearchConfig(@Qualifier("openSearchProperties") OpenSearchProperties properties) {
this.properties = properties;
}
@Bean
public OpenSearchClient openSearchClient() throws Exception {
logger.info("Initializing OpenSearch client for profile: {}", properties.getProfile());
// Create credentials provider
BasicCredentialsProvider credentialsProvider = createCredentialsProvider();
// Create SSL context based on environment
SSLContext sslContext = createSSLContext();
// Create connection manager with pooling
PoolingAsyncClientConnectionManager connectionManager = createConnectionManager(sslContext);
// Create custom ObjectMapper for proper date serialization
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.registerModule(new JavaTimeModule());
objectMapper.disable(com.fasterxml.jackson.databind.SerializationFeature.WRITE_DATES_AS_TIMESTAMPS);
// Create the transport with all configurations and custom Jackson mapper
OpenSearchTransport transport = ApacheHttpClient5TransportBuilder
.builder(new HttpHost(properties.getScheme(), properties.getHost(), properties.getPort()))
.setMapper(new JacksonJsonpMapper(objectMapper))
.setHttpClientConfigCallback(httpClientBuilder -> {
// Only set credentials provider if authentication is configured
if (properties.getUsername() != null && !properties.getUsername().isEmpty() &&
properties.getPassword() != null && !properties.getPassword().isEmpty()) {
httpClientBuilder.setDefaultCredentialsProvider(credentialsProvider);
}
httpClientBuilder.setConnectionManager(connectionManager);
// Set timeouts
httpClientBuilder.setDefaultRequestConfig(
org.apache.hc.client5.http.config.RequestConfig.custom()
.setConnectionRequestTimeout(Timeout.ofMilliseconds(properties.getConnection().getTimeout()))
.setResponseTimeout(Timeout.ofMilliseconds(properties.getConnection().getSocketTimeout()))
.build()
);
return httpClientBuilder;
})
.build();
OpenSearchClient client = new OpenSearchClient(transport);
// Test connection
testConnection(client);
return client;
}
private BasicCredentialsProvider createCredentialsProvider() {
BasicCredentialsProvider credentialsProvider = new BasicCredentialsProvider();
// Only set credentials if username and password are provided
if (properties.getUsername() != null && !properties.getUsername().isEmpty() &&
properties.getPassword() != null && !properties.getPassword().isEmpty()) {
credentialsProvider.setCredentials(
new AuthScope(properties.getHost(), properties.getPort()),
new UsernamePasswordCredentials(
properties.getUsername(),
properties.getPassword().toCharArray()
)
);
logger.info("OpenSearch credentials configured for user: {}", properties.getUsername());
} else {
logger.info("OpenSearch running without authentication (no credentials configured)");
}
return credentialsProvider;
}
private SSLContext createSSLContext() throws Exception {
SSLContext sslContext;
if (isProduction() && !properties.getSecurity().isTrustAllCertificates()) {
// Production SSL configuration with proper certificate validation
sslContext = createProductionSSLContext();
} else {
// Development SSL configuration (trust all certificates)
sslContext = createDevelopmentSSLContext();
}
return sslContext;
}
private SSLContext createProductionSSLContext() throws Exception {
logger.info("Configuring production SSL context with certificate validation");
SSLContext sslContext = SSLContext.getInstance("TLS");
// Load custom keystore/truststore if provided
if (properties.getSecurity().getTruststorePath() != null) {
KeyStore trustStore = KeyStore.getInstance("JKS");
try (FileInputStream fis = new FileInputStream(properties.getSecurity().getTruststorePath())) {
trustStore.load(fis, properties.getSecurity().getTruststorePassword().toCharArray());
}
javax.net.ssl.TrustManagerFactory tmf =
javax.net.ssl.TrustManagerFactory.getInstance(javax.net.ssl.TrustManagerFactory.getDefaultAlgorithm());
tmf.init(trustStore);
sslContext.init(null, tmf.getTrustManagers(), null);
} else {
// Use default system SSL context for production
sslContext.init(null, null, null);
}
return sslContext;
}
private SSLContext createDevelopmentSSLContext() throws Exception {
logger.warn("Configuring development SSL context - TRUSTING ALL CERTIFICATES (not for production!)");
SSLContext sslContext = SSLContext.getInstance("TLS");
sslContext.init(null, new TrustManager[] {
new X509TrustManager() {
public X509Certificate[] getAcceptedIssuers() { return null; }
public void checkClientTrusted(X509Certificate[] certs, String authType) {}
public void checkServerTrusted(X509Certificate[] certs, String authType) {}
}
}, null);
return sslContext;
}
private PoolingAsyncClientConnectionManager createConnectionManager(SSLContext sslContext) {
PoolingAsyncClientConnectionManagerBuilder builder = PoolingAsyncClientConnectionManagerBuilder.create();
// Configure TLS strategy
if (properties.getScheme().equals("https")) {
if (isProduction() && properties.getSecurity().isSslVerification()) {
// Production TLS with hostname verification
builder.setTlsStrategy(ClientTlsStrategyBuilder.create()
.setSslContext(sslContext)
.build());
} else {
// Development TLS without hostname verification
builder.setTlsStrategy(ClientTlsStrategyBuilder.create()
.setSslContext(sslContext)
.setHostnameVerifier((hostname, session) -> true)
.build());
}
}
PoolingAsyncClientConnectionManager connectionManager = builder.build();
// Configure connection pool settings
connectionManager.setMaxTotal(properties.getConnection().getMaxConnectionsTotal());
connectionManager.setDefaultMaxPerRoute(properties.getConnection().getMaxConnectionsPerRoute());
return connectionManager;
}
private boolean isProduction() {
return "production".equalsIgnoreCase(properties.getProfile());
}
private void testConnection(OpenSearchClient client) {
try {
var response = client.info();
logger.info("OpenSearch connection successful - Version: {}, Cluster: {}",
response.version().number(),
response.clusterName());
} catch (Exception e) {
logger.warn("OpenSearch connection test failed during initialization: {}", e.getMessage());
logger.debug("OpenSearch connection test full error", e);
// Don't throw exception here - let the client be created and handle failures in service methods
}
}
}

View File

@@ -1,164 +0,0 @@
package com.storycove.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@Component
@ConfigurationProperties(prefix = "storycove.opensearch")
public class OpenSearchProperties {
private String host = "localhost";
private int port = 9200;
private String scheme = "https";
private String username = "admin";
private String password;
private String profile = "development";
private Security security = new Security();
private Connection connection = new Connection();
private Indices indices = new Indices();
private Bulk bulk = new Bulk();
private Health health = new Health();
// Getters and setters
public String getHost() { return host; }
public void setHost(String host) { this.host = host; }
public int getPort() { return port; }
public void setPort(int port) { this.port = port; }
public String getScheme() { return scheme; }
public void setScheme(String scheme) { this.scheme = scheme; }
public String getUsername() { return username; }
public void setUsername(String username) { this.username = username; }
public String getPassword() { return password; }
public void setPassword(String password) { this.password = password; }
public String getProfile() { return profile; }
public void setProfile(String profile) { this.profile = profile; }
public Security getSecurity() { return security; }
public void setSecurity(Security security) { this.security = security; }
public Connection getConnection() { return connection; }
public void setConnection(Connection connection) { this.connection = connection; }
public Indices getIndices() { return indices; }
public void setIndices(Indices indices) { this.indices = indices; }
public Bulk getBulk() { return bulk; }
public void setBulk(Bulk bulk) { this.bulk = bulk; }
public Health getHealth() { return health; }
public void setHealth(Health health) { this.health = health; }
public static class Security {
private boolean sslVerification = false;
private boolean trustAllCertificates = true;
private String keystorePath;
private String keystorePassword;
private String truststorePath;
private String truststorePassword;
// Getters and setters
public boolean isSslVerification() { return sslVerification; }
public void setSslVerification(boolean sslVerification) { this.sslVerification = sslVerification; }
public boolean isTrustAllCertificates() { return trustAllCertificates; }
public void setTrustAllCertificates(boolean trustAllCertificates) { this.trustAllCertificates = trustAllCertificates; }
public String getKeystorePath() { return keystorePath; }
public void setKeystorePath(String keystorePath) { this.keystorePath = keystorePath; }
public String getKeystorePassword() { return keystorePassword; }
public void setKeystorePassword(String keystorePassword) { this.keystorePassword = keystorePassword; }
public String getTruststorePath() { return truststorePath; }
public void setTruststorePath(String truststorePath) { this.truststorePath = truststorePath; }
public String getTruststorePassword() { return truststorePassword; }
public void setTruststorePassword(String truststorePassword) { this.truststorePassword = truststorePassword; }
}
public static class Connection {
private int timeout = 30000;
private int socketTimeout = 60000;
private int maxConnectionsPerRoute = 10;
private int maxConnectionsTotal = 30;
private boolean retryOnFailure = true;
private int maxRetries = 3;
// Getters and setters
public int getTimeout() { return timeout; }
public void setTimeout(int timeout) { this.timeout = timeout; }
public int getSocketTimeout() { return socketTimeout; }
public void setSocketTimeout(int socketTimeout) { this.socketTimeout = socketTimeout; }
public int getMaxConnectionsPerRoute() { return maxConnectionsPerRoute; }
public void setMaxConnectionsPerRoute(int maxConnectionsPerRoute) { this.maxConnectionsPerRoute = maxConnectionsPerRoute; }
public int getMaxConnectionsTotal() { return maxConnectionsTotal; }
public void setMaxConnectionsTotal(int maxConnectionsTotal) { this.maxConnectionsTotal = maxConnectionsTotal; }
public boolean isRetryOnFailure() { return retryOnFailure; }
public void setRetryOnFailure(boolean retryOnFailure) { this.retryOnFailure = retryOnFailure; }
public int getMaxRetries() { return maxRetries; }
public void setMaxRetries(int maxRetries) { this.maxRetries = maxRetries; }
}
public static class Indices {
private int defaultShards = 1;
private int defaultReplicas = 0;
private String refreshInterval = "1s";
// Getters and setters
public int getDefaultShards() { return defaultShards; }
public void setDefaultShards(int defaultShards) { this.defaultShards = defaultShards; }
public int getDefaultReplicas() { return defaultReplicas; }
public void setDefaultReplicas(int defaultReplicas) { this.defaultReplicas = defaultReplicas; }
public String getRefreshInterval() { return refreshInterval; }
public void setRefreshInterval(String refreshInterval) { this.refreshInterval = refreshInterval; }
}
public static class Bulk {
private int actions = 1000;
private long size = 5242880; // 5MB
private int timeout = 10000;
private int concurrentRequests = 1;
// Getters and setters
public int getActions() { return actions; }
public void setActions(int actions) { this.actions = actions; }
public long getSize() { return size; }
public void setSize(long size) { this.size = size; }
public int getTimeout() { return timeout; }
public void setTimeout(int timeout) { this.timeout = timeout; }
public int getConcurrentRequests() { return concurrentRequests; }
public void setConcurrentRequests(int concurrentRequests) { this.concurrentRequests = concurrentRequests; }
}
public static class Health {
private int checkInterval = 30000;
private int slowQueryThreshold = 5000;
private boolean enableMetrics = true;
// Getters and setters
public int getCheckInterval() { return checkInterval; }
public void setCheckInterval(int checkInterval) { this.checkInterval = checkInterval; }
public int getSlowQueryThreshold() { return slowQueryThreshold; }
public void setSlowQueryThreshold(int slowQueryThreshold) { this.slowQueryThreshold = slowQueryThreshold; }
public boolean isEnableMetrics() { return enableMetrics; }
public void setEnableMetrics(boolean enableMetrics) { this.enableMetrics = enableMetrics; }
}
}

View File

@@ -0,0 +1,57 @@
package com.storycove.config;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.impl.HttpSolrClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
@Configuration
@ConditionalOnProperty(
value = "storycove.search.engine",
havingValue = "solr",
matchIfMissing = false
)
public class SolrConfig {
private static final Logger logger = LoggerFactory.getLogger(SolrConfig.class);
private final SolrProperties properties;
public SolrConfig(SolrProperties properties) {
this.properties = properties;
}
@Bean
public SolrClient solrClient() {
logger.info("Initializing Solr client with URL: {}", properties.getUrl());
HttpSolrClient.Builder builder = new HttpSolrClient.Builder(properties.getUrl())
.withConnectionTimeout(properties.getConnection().getTimeout())
.withSocketTimeout(properties.getConnection().getSocketTimeout());
SolrClient client = builder.build();
logger.info("Solr running without authentication");
// Test connection
testConnection(client);
return client;
}
private void testConnection(SolrClient client) {
try {
// Test connection by pinging the server
var response = client.ping();
logger.info("Solr connection successful - Response time: {}ms",
response.getElapsedTime());
} catch (Exception e) {
logger.warn("Solr connection test failed during initialization: {}", e.getMessage());
logger.debug("Solr connection test full error", e);
// Don't throw exception here - let the client be created and handle failures in service methods
}
}
}
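
Note: SolrConfig is only instantiated when the search engine selector points at Solr. A minimal configuration sketch for enabling it, assuming a YAML-style application.yml (the property names follow the @ConditionalOnProperty value above and the storycove.solr prefix declared in SolrProperties in the next file):

storycove:
  search:
    engine: solr                        # activates SolrConfig/SolrService (matchIfMissing = false)
  solr:
    url: http://localhost:8983/solr     # default base URL from SolrProperties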

View File

@@ -0,0 +1,140 @@
package com.storycove.config;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.stereotype.Component;
@Component
@ConfigurationProperties(prefix = "storycove.solr")
public class SolrProperties {
private String url = "http://localhost:8983/solr";
private String username;
private String password;
private Cores cores = new Cores();
private Connection connection = new Connection();
private Query query = new Query();
private Commit commit = new Commit();
private Health health = new Health();
// Getters and setters
public String getUrl() { return url; }
public void setUrl(String url) { this.url = url; }
public String getUsername() { return username; }
public void setUsername(String username) { this.username = username; }
public String getPassword() { return password; }
public void setPassword(String password) { this.password = password; }
public Cores getCores() { return cores; }
public void setCores(Cores cores) { this.cores = cores; }
public Connection getConnection() { return connection; }
public void setConnection(Connection connection) { this.connection = connection; }
public Query getQuery() { return query; }
public void setQuery(Query query) { this.query = query; }
public Commit getCommit() { return commit; }
public void setCommit(Commit commit) { this.commit = commit; }
public Health getHealth() { return health; }
public void setHealth(Health health) { this.health = health; }
public static class Cores {
private String stories = "storycove_stories";
private String authors = "storycove_authors";
// Getters and setters
public String getStories() { return stories; }
public void setStories(String stories) { this.stories = stories; }
public String getAuthors() { return authors; }
public void setAuthors(String authors) { this.authors = authors; }
}
public static class Connection {
private int timeout = 30000;
private int socketTimeout = 60000;
private int maxConnectionsPerRoute = 10;
private int maxConnectionsTotal = 30;
private boolean retryOnFailure = true;
private int maxRetries = 3;
// Getters and setters
public int getTimeout() { return timeout; }
public void setTimeout(int timeout) { this.timeout = timeout; }
public int getSocketTimeout() { return socketTimeout; }
public void setSocketTimeout(int socketTimeout) { this.socketTimeout = socketTimeout; }
public int getMaxConnectionsPerRoute() { return maxConnectionsPerRoute; }
public void setMaxConnectionsPerRoute(int maxConnectionsPerRoute) { this.maxConnectionsPerRoute = maxConnectionsPerRoute; }
public int getMaxConnectionsTotal() { return maxConnectionsTotal; }
public void setMaxConnectionsTotal(int maxConnectionsTotal) { this.maxConnectionsTotal = maxConnectionsTotal; }
public boolean isRetryOnFailure() { return retryOnFailure; }
public void setRetryOnFailure(boolean retryOnFailure) { this.retryOnFailure = retryOnFailure; }
public int getMaxRetries() { return maxRetries; }
public void setMaxRetries(int maxRetries) { this.maxRetries = maxRetries; }
}
public static class Query {
private int defaultRows = 10;
private int maxRows = 1000;
private String defaultOperator = "AND";
private boolean highlight = true;
private boolean facets = true;
// Getters and setters
public int getDefaultRows() { return defaultRows; }
public void setDefaultRows(int defaultRows) { this.defaultRows = defaultRows; }
public int getMaxRows() { return maxRows; }
public void setMaxRows(int maxRows) { this.maxRows = maxRows; }
public String getDefaultOperator() { return defaultOperator; }
public void setDefaultOperator(String defaultOperator) { this.defaultOperator = defaultOperator; }
public boolean isHighlight() { return highlight; }
public void setHighlight(boolean highlight) { this.highlight = highlight; }
public boolean isFacets() { return facets; }
public void setFacets(boolean facets) { this.facets = facets; }
}
public static class Commit {
private boolean softCommit = true;
private int commitWithin = 1000;
private boolean waitSearcher = false;
// Getters and setters
public boolean isSoftCommit() { return softCommit; }
public void setSoftCommit(boolean softCommit) { this.softCommit = softCommit; }
public int getCommitWithin() { return commitWithin; }
public void setCommitWithin(int commitWithin) { this.commitWithin = commitWithin; }
public boolean isWaitSearcher() { return waitSearcher; }
public void setWaitSearcher(boolean waitSearcher) { this.waitSearcher = waitSearcher; }
}
public static class Health {
private int checkInterval = 30000;
private int slowQueryThreshold = 5000;
private boolean enableMetrics = true;
// Getters and setters
public int getCheckInterval() { return checkInterval; }
public void setCheckInterval(int checkInterval) { this.checkInterval = checkInterval; }
public int getSlowQueryThreshold() { return slowQueryThreshold; }
public void setSlowQueryThreshold(int slowQueryThreshold) { this.slowQueryThreshold = slowQueryThreshold; }
public boolean isEnableMetrics() { return enableMetrics; }
public void setEnableMetrics(boolean enableMetrics) { this.enableMetrics = enableMetrics; }
}
}
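
For reference, an abridged configuration sketch that mirrors the defaults coded above (illustrative only; the values are the field defaults, written with Spring's relaxed kebab-case binding). Note that username and password are declared here, but the SolrConfig shown earlier builds the client without authentication:

storycove:
  solr:
    url: http://localhost:8983/solr
    cores:
      stories: storycove_stories
      authors: storycove_authors
    connection:
      timeout: 30000            # connect timeout, ms
      socket-timeout: 60000     # ms
      max-connections-per-route: 10
      max-connections-total: 30
    query:
      default-rows: 10
      max-rows: 1000
      default-operator: AND
      highlight: true
      facets: true
    commit:
      soft-commit: true
      commit-within: 1000       # ms
      wait-searcher: false
    health:
      check-interval: 30000     # ms
      slow-query-threshold: 5000
      enable-metrics: true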

View File

@@ -3,7 +3,7 @@ package com.storycove.controller;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import com.storycove.service.AuthorService;
import com.storycove.service.OpenSearchService;
import com.storycove.service.SolrService;
import com.storycove.service.SearchServiceAdapter;
import com.storycove.service.StoryService;
import org.slf4j.Logger;
@@ -16,7 +16,7 @@ import java.util.List;
import java.util.Map;
/**
* Admin controller for managing OpenSearch operations.
* Admin controller for managing Solr operations.
* Provides endpoints for reindexing and index management.
*/
@RestController
@@ -35,7 +35,7 @@ public class AdminSearchController {
private AuthorService authorService;
@Autowired(required = false)
private OpenSearchService openSearchService;
private SolrService solrService;
/**
* Get current search status
@@ -48,7 +48,7 @@ public class AdminSearchController {
return ResponseEntity.ok(Map.of(
"primaryEngine", status.getPrimaryEngine(),
"dualWrite", status.isDualWrite(),
"openSearchAvailable", status.isOpenSearchAvailable()
"solrAvailable", status.isSolrAvailable()
));
} catch (Exception e) {
logger.error("Error getting search status", e);
@@ -59,17 +59,17 @@ public class AdminSearchController {
}
/**
* Reindex all data in OpenSearch
* Reindex all data in Solr
*/
@PostMapping("/opensearch/reindex")
public ResponseEntity<Map<String, Object>> reindexOpenSearch() {
@PostMapping("/solr/reindex")
public ResponseEntity<Map<String, Object>> reindexSolr() {
try {
logger.info("Starting OpenSearch full reindex");
logger.info("Starting Solr full reindex");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch is not available or healthy"
"error", "Solr is not available or healthy"
));
}
@@ -77,14 +77,14 @@ public class AdminSearchController {
List<Story> allStories = storyService.findAllWithAssociations();
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index directly in OpenSearch
if (openSearchService != null) {
openSearchService.bulkIndexStories(allStories);
openSearchService.bulkIndexAuthors(allAuthors);
// Bulk index directly in Solr
if (solrService != null) {
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch service not available"
"error", "Solr service not available"
));
}
@@ -92,7 +92,7 @@ public class AdminSearchController {
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Reindexed %d stories and %d authors in OpenSearch",
"message", String.format("Reindexed %d stories and %d authors in Solr",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
@@ -100,36 +100,36 @@ public class AdminSearchController {
));
} catch (Exception e) {
logger.error("Error during OpenSearch reindex", e);
logger.error("Error during Solr reindex", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "OpenSearch reindex failed: " + e.getMessage()
"error", "Solr reindex failed: " + e.getMessage()
));
}
}
/**
* Recreate OpenSearch indices
* Recreate Solr indices
*/
@PostMapping("/opensearch/recreate")
public ResponseEntity<Map<String, Object>> recreateOpenSearchIndices() {
@PostMapping("/solr/recreate")
public ResponseEntity<Map<String, Object>> recreateSolrIndices() {
try {
logger.info("Starting OpenSearch indices recreation");
logger.info("Starting Solr indices recreation");
if (!searchServiceAdapter.isSearchServiceAvailable()) {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch is not available or healthy"
"error", "Solr is not available or healthy"
));
}
// Recreate indices
if (openSearchService != null) {
openSearchService.recreateIndices();
if (solrService != null) {
solrService.recreateIndices();
} else {
return ResponseEntity.badRequest().body(Map.of(
"success", false,
"error", "OpenSearch service not available"
"error", "Solr service not available"
));
}
@@ -138,14 +138,14 @@ public class AdminSearchController {
List<Author> allAuthors = authorService.findAllWithStories();
// Bulk index after recreation
openSearchService.bulkIndexStories(allStories);
openSearchService.bulkIndexAuthors(allAuthors);
solrService.bulkIndexStories(allStories);
solrService.bulkIndexAuthors(allAuthors);
int totalIndexed = allStories.size() + allAuthors.size();
return ResponseEntity.ok(Map.of(
"success", true,
"message", String.format("Recreated OpenSearch indices and indexed %d stories and %d authors",
"message", String.format("Recreated Solr indices and indexed %d stories and %d authors",
allStories.size(), allAuthors.size()),
"storiesCount", allStories.size(),
"authorsCount", allAuthors.size(),
@@ -153,10 +153,10 @@ public class AdminSearchController {
));
} catch (Exception e) {
logger.error("Error during OpenSearch indices recreation", e);
logger.error("Error during Solr indices recreation", e);
return ResponseEntity.internalServerError().body(Map.of(
"success", false,
"error", "OpenSearch indices recreation failed: " + e.getMessage()
"error", "Solr indices recreation failed: " + e.getMessage()
));
}
}
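
The admin endpoints move from /opensearch/* to /solr/*. A usage sketch follows; the controller's class-level request mapping and the server port are not part of this hunk, so the host and base path below are hypothetical placeholders:

# hypothetical host, port, and base path
curl -X POST http://localhost:8080/api/admin/search/solr/reindex
curl -X POST http://localhost:8080/api/admin/search/solr/recreate

The status payload now reports solrAvailable in place of openSearchAvailable, alongside primaryEngine and dualWrite.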

View File

@@ -291,7 +291,7 @@ public class CollectionController {
// Collections are not indexed in search engine yet
return ResponseEntity.ok(Map.of(
"success", true,
"message", "Collections indexing not yet implemented in OpenSearch",
"message", "Collections indexing not yet implemented in Solr",
"count", allCollections.size()
));
} catch (Exception e) {

View File

@@ -33,6 +33,18 @@ public class SearchResultDto<T> {
this.searchTimeMs = searchTimeMs;
this.facets = facets;
}
// Simple constructor for basic search results with facet list
public SearchResultDto(List<T> results, long totalHits, int resultCount, List<FacetCountDto> facetsList) {
this.results = results;
this.totalHits = totalHits;
this.page = 0;
this.perPage = resultCount;
this.query = "";
this.searchTimeMs = 0;
// NOTE: the facetsList argument is not converted yet - facets are initialized to an empty map
this.facets = java.util.Collections.emptyMap();
}
// Getters and Setters
public List<T> getResults() {

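The added convenience constructor is the empty-result fallback used by SolrService. A small usage sketch (as the comment above notes, the facet list is not converted yet, so the facets field always ends up as an empty map, while page, query, and searchTimeMs are defaulted):

// mirrors the fallback in SolrService.searchStories(...)
SearchResultDto<StorySearchDto> empty =
        new SearchResultDto<>(Collections.emptyList(), 0, 0, Collections.emptyList());
// facets is an empty map, page == 0, perPage == resultCount (0 here),
// query == "" and searchTimeMs == 0
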
View File

@@ -132,7 +132,7 @@ public class AuthorService {
validateAuthorForCreate(author);
Author savedAuthor = authorRepository.save(author);
// Index in OpenSearch
// Index in Solr
searchServiceAdapter.indexAuthor(savedAuthor);
return savedAuthor;
@@ -150,7 +150,7 @@ public class AuthorService {
updateAuthorFields(existingAuthor, authorUpdates);
Author savedAuthor = authorRepository.save(existingAuthor);
// Update in OpenSearch
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
@@ -166,7 +166,7 @@ public class AuthorService {
authorRepository.delete(author);
// Remove from OpenSearch
// Remove from Solr
searchServiceAdapter.deleteAuthor(id);
}
@@ -175,7 +175,7 @@ public class AuthorService {
author.addUrl(url);
Author savedAuthor = authorRepository.save(author);
// Update in OpenSearch
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
@@ -186,7 +186,7 @@ public class AuthorService {
author.removeUrl(url);
Author savedAuthor = authorRepository.save(author);
// Update in OpenSearch
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
@@ -221,7 +221,7 @@ public class AuthorService {
logger.debug("Saved author rating: {} for author: {}",
refreshedAuthor.getAuthorRating(), refreshedAuthor.getName());
// Update in OpenSearch
// Update in Solr
searchServiceAdapter.updateAuthor(refreshedAuthor);
return refreshedAuthor;
@@ -265,7 +265,7 @@ public class AuthorService {
author.setAvatarImagePath(avatarPath);
Author savedAuthor = authorRepository.save(author);
// Update in OpenSearch
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;
@@ -276,7 +276,7 @@ public class AuthorService {
author.setAvatarImagePath(null);
Author savedAuthor = authorRepository.save(author);
// Update in OpenSearch
// Update in Solr
searchServiceAdapter.updateAuthor(savedAuthor);
return savedAuthor;

View File

@@ -55,8 +55,8 @@ public class CollectionService {
*/
public SearchResultDto<Collection> searchCollections(String query, List<String> tags, boolean includeArchived, int page, int limit) {
// Collections are currently handled at database level, not indexed in search engine
// Return empty result for now as collections search is not implemented in OpenSearch
logger.warn("Collections search not yet implemented in OpenSearch, returning empty results");
// Return empty result for now as collections search is not implemented in Solr
logger.warn("Collections search not yet implemented in Solr, returning empty results");
return new SearchResultDto<>(new ArrayList<>(), 0, page, limit, query != null ? query : "", 0);
}

View File

@@ -115,7 +115,7 @@ public class LibraryService implements ApplicationContextAware {
/**
* Switch to library after authentication with forced reindexing
* This ensures OpenSearch is always up-to-date after login
* This ensures Solr is always up-to-date after login
*/
public synchronized void switchToLibraryAfterAuthentication(String libraryId) throws Exception {
logger.info("Switching to library after authentication: {} (forcing reindex)", libraryId);
@@ -154,15 +154,15 @@ public class LibraryService implements ApplicationContextAware {
// Set new active library (datasource routing handled by SmartRoutingDataSource)
currentLibraryId = libraryId;
// OpenSearch indexes are global - no per-library initialization needed
logger.debug("Library switched to OpenSearch mode for library: {}", libraryId);
// Solr indexes are global - no per-library initialization needed
logger.debug("Library switched to Solr mode for library: {}", libraryId);
logger.info("Successfully switched to library: {}", library.getName());
// Perform complete reindex AFTER library switch is fully complete
// This ensures database routing is properly established
if (forceReindex || !libraryId.equals(previousLibraryId)) {
logger.debug("Starting post-switch OpenSearch reindex for library: {}", libraryId);
logger.debug("Starting post-switch Solr reindex for library: {}", libraryId);
// Run reindex asynchronously to avoid blocking authentication response
// and allow time for database routing to fully stabilize
@@ -171,7 +171,7 @@ public class LibraryService implements ApplicationContextAware {
try {
// Give routing time to stabilize
Thread.sleep(500);
logger.debug("Starting async OpenSearch reindex for library: {}", finalLibraryId);
logger.debug("Starting async Solr reindex for library: {}", finalLibraryId);
SearchServiceAdapter searchService = applicationContext.getBean(SearchServiceAdapter.class);
// Get all stories and authors for reindexing
@@ -184,12 +184,12 @@ public class LibraryService implements ApplicationContextAware {
searchService.bulkIndexStories(allStories);
searchService.bulkIndexAuthors(allAuthors);
logger.info("Completed async OpenSearch reindexing for library: {} ({} stories, {} authors)",
logger.info("Completed async Solr reindexing for library: {} ({} stories, {} authors)",
finalLibraryId, allStories.size(), allAuthors.size());
} catch (Exception e) {
logger.warn("Failed to async reindex OpenSearch for library {}: {}", finalLibraryId, e.getMessage());
logger.warn("Failed to async reindex Solr for library {}: {}", finalLibraryId, e.getMessage());
}
}, "OpenSearchReindex-" + libraryId).start();
}, "SolrReindex-" + libraryId).start();
}
}
@@ -525,8 +525,8 @@ public class LibraryService implements ApplicationContextAware {
// 1. Create image directory structure
initializeImageDirectories(library);
// 2. OpenSearch indexes are global and managed automatically
// No per-library initialization needed for OpenSearch
// 2. Solr indexes are global and managed automatically
// No per-library initialization needed for Solr
logger.debug("Successfully initialized resources for library: {}", library.getName());
@@ -760,7 +760,7 @@ public class LibraryService implements ApplicationContextAware {
private void closeCurrentResources() {
// No need to close datasource - SmartRoutingDataSource handles this
// OpenSearch service is managed by Spring - no explicit cleanup needed
// Solr service is managed by Spring - no explicit cleanup needed
// Don't clear currentLibraryId here - only when explicitly switching
}

View File

@@ -1,133 +0,0 @@
package com.storycove.service;
import com.storycove.config.OpenSearchProperties;
import org.opensearch.client.opensearch.OpenSearchClient;
import org.opensearch.client.opensearch.cluster.HealthRequest;
import org.opensearch.client.opensearch.cluster.HealthResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.actuate.health.Health;
import org.springframework.boot.actuate.health.HealthIndicator;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import java.time.LocalDateTime;
import java.util.concurrent.atomic.AtomicReference;
@Service
@ConditionalOnProperty(name = "storycove.search.engine", havingValue = "opensearch")
public class OpenSearchHealthService implements HealthIndicator {
private static final Logger logger = LoggerFactory.getLogger(OpenSearchHealthService.class);
private final OpenSearchClient openSearchClient;
private final OpenSearchProperties properties;
private final AtomicReference<Health> lastKnownHealth = new AtomicReference<>(Health.unknown().build());
private LocalDateTime lastCheckTime = LocalDateTime.now();
@Autowired
public OpenSearchHealthService(OpenSearchClient openSearchClient, OpenSearchProperties properties) {
this.openSearchClient = openSearchClient;
this.properties = properties;
}
@Override
public Health health() {
return lastKnownHealth.get();
}
@Scheduled(fixedDelayString = "#{@openSearchProperties.health.checkInterval}")
public void performHealthCheck() {
try {
HealthResponse clusterHealth = openSearchClient.cluster().health(
HealthRequest.of(h -> h.timeout(t -> t.time("10s")))
);
Health.Builder healthBuilder = Health.up()
.withDetail("cluster_name", clusterHealth.clusterName())
.withDetail("status", clusterHealth.status().jsonValue())
.withDetail("number_of_nodes", clusterHealth.numberOfNodes())
.withDetail("number_of_data_nodes", clusterHealth.numberOfDataNodes())
.withDetail("active_primary_shards", clusterHealth.activePrimaryShards())
.withDetail("active_shards", clusterHealth.activeShards())
.withDetail("relocating_shards", clusterHealth.relocatingShards())
.withDetail("initializing_shards", clusterHealth.initializingShards())
.withDetail("unassigned_shards", clusterHealth.unassignedShards())
.withDetail("last_check", LocalDateTime.now());
// Check if cluster status is concerning
switch (clusterHealth.status()) {
case Red:
healthBuilder = Health.down()
.withDetail("reason", "Cluster status is RED - some primary shards are unassigned");
break;
case Yellow:
if (isProduction()) {
healthBuilder = Health.down()
.withDetail("reason", "Cluster status is YELLOW - some replica shards are unassigned (critical in production)");
} else {
// Yellow is acceptable in development (single node clusters)
healthBuilder.withDetail("warning", "Cluster status is YELLOW - acceptable for development");
}
break;
case Green:
// All good
break;
}
lastKnownHealth.set(healthBuilder.build());
lastCheckTime = LocalDateTime.now();
if (properties.getHealth().isEnableMetrics()) {
logMetrics(clusterHealth);
}
} catch (Exception e) {
logger.error("OpenSearch health check failed", e);
Health unhealthyStatus = Health.down()
.withDetail("error", e.getMessage())
.withDetail("last_successful_check", lastCheckTime)
.withDetail("current_time", LocalDateTime.now())
.build();
lastKnownHealth.set(unhealthyStatus);
}
}
private void logMetrics(HealthResponse clusterHealth) {
logger.info("OpenSearch Cluster Metrics - Status: {}, Nodes: {}, Active Shards: {}, Unassigned: {}",
clusterHealth.status().jsonValue(),
clusterHealth.numberOfNodes(),
clusterHealth.activeShards(),
clusterHealth.unassignedShards());
}
private boolean isProduction() {
return "production".equalsIgnoreCase(properties.getProfile());
}
/**
* Manual health check for immediate status
*/
public boolean isClusterHealthy() {
Health currentHealth = lastKnownHealth.get();
return currentHealth.getStatus() == org.springframework.boot.actuate.health.Status.UP;
}
/**
* Get detailed cluster information
*/
public String getClusterInfo() {
try {
var info = openSearchClient.info();
return String.format("OpenSearch %s (Cluster: %s, Lucene: %s)",
info.version().number(),
info.clusterName(),
info.version().luceneVersion());
} catch (Exception e) {
return "Unable to retrieve cluster information: " + e.getMessage();
}
}
}

View File

@@ -16,7 +16,7 @@ import java.util.UUID;
/**
* Service adapter that provides a unified interface for search operations.
*
* This adapter directly delegates to OpenSearchService.
* This adapter directly delegates to SolrService.
*/
@Service
public class SearchServiceAdapter {
@@ -24,7 +24,7 @@ public class SearchServiceAdapter {
private static final Logger logger = LoggerFactory.getLogger(SearchServiceAdapter.class);
@Autowired
private OpenSearchService openSearchService;
private SolrService solrService;
// ===============================
// SEARCH OPERATIONS
@@ -46,11 +46,20 @@ public class SearchServiceAdapter {
String sourceDomain, String seriesFilter,
Integer minTagCount, Boolean popularOnly,
Boolean hiddenGemsOnly) {
return openSearchService.searchStories(query, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, sortBy, sortOrder, page, size, facetBy,
createdAfter, createdBefore, lastReadAfter, lastReadBefore, unratedOnly, readingStatus,
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter, minTagCount, popularOnly,
hiddenGemsOnly);
logger.info("SearchServiceAdapter: delegating search to SolrService");
try {
SearchResultDto<StorySearchDto> result = solrService.searchStories(query, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, sortBy, sortOrder, page, size, facetBy,
createdAfter, createdBefore, lastReadAfter, lastReadBefore, unratedOnly, readingStatus,
hasReadingProgress, hasCoverImage, sourceDomain, seriesFilter, minTagCount, popularOnly,
hiddenGemsOnly);
logger.info("SearchServiceAdapter: received result with {} stories and {} facets",
result.getResults().size(), result.getFacets().size());
return result;
} catch (Exception e) {
logger.error("SearchServiceAdapter: error during search", e);
throw e;
}
}
/**
@@ -60,7 +69,7 @@ public class SearchServiceAdapter {
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
Long seed) {
return openSearchService.getRandomStories(count, tags, author, series, minWordCount, maxWordCount,
return solrService.getRandomStories(count, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, seed);
}
@@ -69,7 +78,7 @@ public class SearchServiceAdapter {
*/
public void recreateIndices() {
try {
openSearchService.recreateIndices();
solrService.recreateIndices();
} catch (Exception e) {
logger.error("Failed to recreate search indices", e);
throw new RuntimeException("Failed to recreate search indices", e);
@@ -93,21 +102,21 @@ public class SearchServiceAdapter {
* Get random story ID with unified interface
*/
public String getRandomStoryId(Long seed) {
return openSearchService.getRandomStoryId(seed);
return solrService.getRandomStoryId(seed);
}
/**
* Search authors with unified interface
*/
public List<AuthorSearchDto> searchAuthors(String query, int limit) {
return openSearchService.searchAuthors(query, limit);
return solrService.searchAuthors(query, limit);
}
/**
* Get tag suggestions with unified interface
*/
public List<String> getTagSuggestions(String query, int limit) {
return openSearchService.getTagSuggestions(query, limit);
return solrService.getTagSuggestions(query, limit);
}
// ===============================
@@ -115,88 +124,88 @@ public class SearchServiceAdapter {
// ===============================
/**
* Index a story in OpenSearch
* Index a story in Solr
*/
public void indexStory(Story story) {
try {
openSearchService.indexStory(story);
solrService.indexStory(story);
} catch (Exception e) {
logger.error("Failed to index story {}", story.getId(), e);
}
}
/**
* Update a story in OpenSearch
* Update a story in Solr
*/
public void updateStory(Story story) {
try {
openSearchService.updateStory(story);
solrService.updateStory(story);
} catch (Exception e) {
logger.error("Failed to update story {}", story.getId(), e);
}
}
/**
* Delete a story from OpenSearch
* Delete a story from Solr
*/
public void deleteStory(UUID storyId) {
try {
openSearchService.deleteStory(storyId);
solrService.deleteStory(storyId);
} catch (Exception e) {
logger.error("Failed to delete story {}", storyId, e);
}
}
/**
* Index an author in OpenSearch
* Index an author in Solr
*/
public void indexAuthor(Author author) {
try {
openSearchService.indexAuthor(author);
solrService.indexAuthor(author);
} catch (Exception e) {
logger.error("Failed to index author {}", author.getId(), e);
}
}
/**
* Update an author in OpenSearch
* Update an author in Solr
*/
public void updateAuthor(Author author) {
try {
openSearchService.updateAuthor(author);
solrService.updateAuthor(author);
} catch (Exception e) {
logger.error("Failed to update author {}", author.getId(), e);
}
}
/**
* Delete an author from OpenSearch
* Delete an author from Solr
*/
public void deleteAuthor(UUID authorId) {
try {
openSearchService.deleteAuthor(authorId);
solrService.deleteAuthor(authorId);
} catch (Exception e) {
logger.error("Failed to delete author {}", authorId, e);
}
}
/**
* Bulk index stories in OpenSearch
* Bulk index stories in Solr
*/
public void bulkIndexStories(List<Story> stories) {
try {
openSearchService.bulkIndexStories(stories);
solrService.bulkIndexStories(stories);
} catch (Exception e) {
logger.error("Failed to bulk index {} stories", stories.size(), e);
}
}
/**
* Bulk index authors in OpenSearch
* Bulk index authors in Solr
*/
public void bulkIndexAuthors(List<Author> authors) {
try {
openSearchService.bulkIndexAuthors(authors);
solrService.bulkIndexAuthors(authors);
} catch (Exception e) {
logger.error("Failed to bulk index {} authors", authors.size(), e);
}
@@ -210,14 +219,14 @@ public class SearchServiceAdapter {
* Check if search service is available and healthy
*/
public boolean isSearchServiceAvailable() {
return openSearchService.testConnection();
return solrService.testConnection();
}
/**
* Get current search engine name
*/
public String getCurrentSearchEngine() {
return "opensearch";
return "solr";
}
/**
@@ -228,10 +237,10 @@ public class SearchServiceAdapter {
}
/**
* Check if we can switch to OpenSearch
* Check if we can switch to Solr
*/
public boolean canSwitchToOpenSearch() {
return true; // Already using OpenSearch
public boolean canSwitchToSolr() {
return true; // Already using Solr
}
/**
@@ -246,10 +255,10 @@ public class SearchServiceAdapter {
*/
public SearchStatus getSearchStatus() {
return new SearchStatus(
"opensearch",
"solr",
false, // no dual-write
false, // no typesense
openSearchService.testConnection()
solrService.testConnection()
);
}
@@ -260,19 +269,19 @@ public class SearchServiceAdapter {
private final String primaryEngine;
private final boolean dualWrite;
private final boolean typesenseAvailable;
private final boolean openSearchAvailable;
private final boolean solrAvailable;
public SearchStatus(String primaryEngine, boolean dualWrite,
boolean typesenseAvailable, boolean openSearchAvailable) {
boolean typesenseAvailable, boolean solrAvailable) {
this.primaryEngine = primaryEngine;
this.dualWrite = dualWrite;
this.typesenseAvailable = typesenseAvailable;
this.openSearchAvailable = openSearchAvailable;
this.solrAvailable = solrAvailable;
}
public String getPrimaryEngine() { return primaryEngine; }
public boolean isDualWrite() { return dualWrite; }
public boolean isTypesenseAvailable() { return typesenseAvailable; }
public boolean isOpenSearchAvailable() { return openSearchAvailable; }
public boolean isSolrAvailable() { return solrAvailable; }
}
}
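
With the adapter now hard-wired to Solr, the status it reports is fully determined by SolrService. A small usage sketch of the accessors defined above:

var status = searchServiceAdapter.getSearchStatus();
status.getPrimaryEngine();      // "solr"
status.isDualWrite();           // false
status.isTypesenseAvailable();  // false (legacy flag retained on the DTO)
status.isSolrAvailable();       // result of solrService.testConnection()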

View File

@@ -0,0 +1,931 @@
package com.storycove.service;
import com.storycove.config.SolrProperties;
import com.storycove.dto.AuthorSearchDto;
import com.storycove.dto.FacetCountDto;
import com.storycove.dto.SearchResultDto;
import com.storycove.dto.StorySearchDto;
import com.storycove.entity.Author;
import com.storycove.entity.Story;
import org.apache.solr.client.solrj.SolrClient;
import org.apache.solr.client.solrj.SolrQuery;
import org.apache.solr.client.solrj.SolrServerException;
import org.apache.solr.client.solrj.response.QueryResponse;
import org.apache.solr.client.solrj.response.UpdateResponse;
import org.apache.solr.common.SolrDocument;
import org.apache.solr.common.SolrDocumentList;
import org.apache.solr.common.SolrInputDocument;
import org.apache.solr.common.params.ModifiableSolrParams;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.condition.ConditionalOnProperty;
import org.springframework.context.annotation.Lazy;
import org.springframework.stereotype.Service;
import jakarta.annotation.PostConstruct;
import java.io.IOException;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.*;
import java.util.stream.Collectors;
@Service
@ConditionalOnProperty(
value = "storycove.search.engine",
havingValue = "solr",
matchIfMissing = false
)
public class SolrService {
private static final Logger logger = LoggerFactory.getLogger(SolrService.class);
@Autowired(required = false)
private SolrClient solrClient;
@Autowired
private SolrProperties properties;
@Autowired
@Lazy
private ReadingTimeService readingTimeService;
@PostConstruct
public void initializeCores() {
if (!isAvailable()) {
logger.debug("Solr client not available - skipping core initialization");
return;
}
try {
logger.debug("Testing Solr cores availability...");
testCoreAvailability(properties.getCores().getStories());
testCoreAvailability(properties.getCores().getAuthors());
logger.debug("Solr cores are available");
} catch (Exception e) {
logger.error("Failed to test Solr cores availability", e);
}
}
// ===============================
// CORE MANAGEMENT
// ===============================
private void testCoreAvailability(String coreName) throws IOException, SolrServerException {
SolrQuery query = new SolrQuery("*:*");
query.setRows(0);
QueryResponse response = solrClient.query(coreName, query);
logger.debug("Core {} is available - found {} documents", coreName, response.getResults().getNumFound());
}
// ===============================
// STORY INDEXING
// ===============================
public void indexStory(Story story) throws IOException {
if (!isAvailable()) {
logger.debug("Solr not available - skipping story indexing");
return;
}
try {
logger.debug("Indexing story: {} ({})", story.getTitle(), story.getId());
SolrInputDocument doc = createStoryDocument(story);
UpdateResponse response = solrClient.add(properties.getCores().getStories(), doc,
properties.getCommit().getCommitWithin());
if (response.getStatus() == 0) {
logger.debug("Successfully indexed story: {}", story.getId());
} else {
logger.warn("Story indexing returned non-zero status: {}", response.getStatus());
}
} catch (SolrServerException e) {
logger.error("Failed to index story: {}", story.getId(), e);
throw new IOException("Failed to index story", e);
}
}
public void updateStory(Story story) throws IOException {
// For Solr, update is the same as index (upsert behavior)
indexStory(story);
}
public void deleteStory(UUID storyId) throws IOException {
if (!isAvailable()) {
logger.debug("Solr not available - skipping story deletion");
return;
}
try {
logger.debug("Deleting story from index: {}", storyId);
UpdateResponse response = solrClient.deleteById(properties.getCores().getStories(),
storyId.toString(), properties.getCommit().getCommitWithin());
if (response.getStatus() == 0) {
logger.debug("Successfully deleted story: {}", storyId);
} else {
logger.warn("Story deletion returned non-zero status: {}", response.getStatus());
}
} catch (SolrServerException e) {
logger.error("Failed to delete story: {}", storyId, e);
throw new IOException("Failed to delete story", e);
}
}
// ===============================
// AUTHOR INDEXING
// ===============================
public void indexAuthor(Author author) throws IOException {
if (!isAvailable()) {
logger.debug("Solr not available - skipping author indexing");
return;
}
try {
logger.debug("Indexing author: {} ({})", author.getName(), author.getId());
SolrInputDocument doc = createAuthorDocument(author);
UpdateResponse response = solrClient.add(properties.getCores().getAuthors(), doc,
properties.getCommit().getCommitWithin());
if (response.getStatus() == 0) {
logger.debug("Successfully indexed author: {}", author.getId());
} else {
logger.warn("Author indexing returned non-zero status: {}", response.getStatus());
}
} catch (SolrServerException e) {
logger.error("Failed to index author: {}", author.getId(), e);
throw new IOException("Failed to index author", e);
}
}
public void updateAuthor(Author author) throws IOException {
// For Solr, update is the same as index (upsert behavior)
indexAuthor(author);
}
public void deleteAuthor(UUID authorId) throws IOException {
if (!isAvailable()) {
logger.debug("Solr not available - skipping author deletion");
return;
}
try {
logger.debug("Deleting author from index: {}", authorId);
UpdateResponse response = solrClient.deleteById(properties.getCores().getAuthors(),
authorId.toString(), properties.getCommit().getCommitWithin());
if (response.getStatus() == 0) {
logger.debug("Successfully deleted author: {}", authorId);
} else {
logger.warn("Author deletion returned non-zero status: {}", response.getStatus());
}
} catch (SolrServerException e) {
logger.error("Failed to delete author: {}", authorId, e);
throw new IOException("Failed to delete author", e);
}
}
// ===============================
// BULK OPERATIONS
// ===============================
public void bulkIndexStories(List<Story> stories) throws IOException {
if (!isAvailable() || stories.isEmpty()) {
logger.debug("Solr not available or empty stories list - skipping bulk indexing");
return;
}
try {
logger.debug("Bulk indexing {} stories", stories.size());
List<SolrInputDocument> docs = stories.stream()
.map(this::createStoryDocument)
.collect(Collectors.toList());
UpdateResponse response = solrClient.add(properties.getCores().getStories(), docs,
properties.getCommit().getCommitWithin());
if (response.getStatus() == 0) {
logger.debug("Successfully bulk indexed {} stories", stories.size());
} else {
logger.warn("Bulk story indexing returned non-zero status: {}", response.getStatus());
}
} catch (SolrServerException e) {
logger.error("Failed to bulk index stories", e);
throw new IOException("Failed to bulk index stories", e);
}
}
public void bulkIndexAuthors(List<Author> authors) throws IOException {
if (!isAvailable() || authors.isEmpty()) {
logger.debug("Solr not available or empty authors list - skipping bulk indexing");
return;
}
try {
logger.debug("Bulk indexing {} authors", authors.size());
List<SolrInputDocument> docs = authors.stream()
.map(this::createAuthorDocument)
.collect(Collectors.toList());
UpdateResponse response = solrClient.add(properties.getCores().getAuthors(), docs,
properties.getCommit().getCommitWithin());
if (response.getStatus() == 0) {
logger.debug("Successfully bulk indexed {} authors", authors.size());
} else {
logger.warn("Bulk author indexing returned non-zero status: {}", response.getStatus());
}
} catch (SolrServerException e) {
logger.error("Failed to bulk index authors", e);
throw new IOException("Failed to bulk index authors", e);
}
}
// ===============================
// DOCUMENT CREATION
// ===============================
private SolrInputDocument createStoryDocument(Story story) {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", story.getId().toString());
doc.addField("title", story.getTitle());
doc.addField("description", story.getDescription());
doc.addField("sourceUrl", story.getSourceUrl());
doc.addField("coverPath", story.getCoverPath());
doc.addField("wordCount", story.getWordCount());
doc.addField("rating", story.getRating());
doc.addField("volume", story.getVolume());
doc.addField("isRead", story.getIsRead());
doc.addField("readingPosition", story.getReadingPosition());
if (story.getLastReadAt() != null) {
doc.addField("lastReadAt", formatDateTime(story.getLastReadAt()));
}
if (story.getAuthor() != null) {
doc.addField("authorId", story.getAuthor().getId().toString());
doc.addField("authorName", story.getAuthor().getName());
}
if (story.getSeries() != null) {
doc.addField("seriesId", story.getSeries().getId().toString());
doc.addField("seriesName", story.getSeries().getName());
}
if (story.getTags() != null && !story.getTags().isEmpty()) {
List<String> tagNames = story.getTags().stream()
.map(tag -> tag.getName())
.collect(Collectors.toList());
doc.addField("tagNames", tagNames);
}
doc.addField("createdAt", formatDateTime(story.getCreatedAt()));
doc.addField("updatedAt", formatDateTime(story.getUpdatedAt()));
doc.addField("dateAdded", formatDateTime(story.getCreatedAt()));
return doc;
}
private SolrInputDocument createAuthorDocument(Author author) {
SolrInputDocument doc = new SolrInputDocument();
doc.addField("id", author.getId().toString());
doc.addField("name", author.getName());
doc.addField("notes", author.getNotes());
doc.addField("authorRating", author.getAuthorRating());
doc.addField("avatarImagePath", author.getAvatarImagePath());
if (author.getUrls() != null && !author.getUrls().isEmpty()) {
doc.addField("urls", author.getUrls());
}
// Calculate derived fields
if (author.getStories() != null) {
doc.addField("storyCount", author.getStories().size());
OptionalDouble avgRating = author.getStories().stream()
.filter(story -> story.getRating() != null && story.getRating() > 0)
.mapToInt(Story::getRating)
.average();
if (avgRating.isPresent()) {
doc.addField("averageStoryRating", avgRating.getAsDouble());
}
}
doc.addField("createdAt", formatDateTime(author.getCreatedAt()));
doc.addField("updatedAt", formatDateTime(author.getUpdatedAt()));
return doc;
}
private String formatDateTime(LocalDateTime dateTime) {
if (dateTime == null) return null;
return dateTime.format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + "Z";
}
// ===============================
// UTILITY METHODS
// ===============================
public boolean isAvailable() {
return solrClient != null;
}
public boolean testConnection() {
if (!isAvailable()) {
return false;
}
try {
// Test connectivity with a zero-row query against the stories core
testCoreAvailability(properties.getCores().getStories());
return true;
} catch (Exception e) {
logger.debug("Solr connection test failed", e);
return false;
}
}
// ===============================
// SEARCH OPERATIONS
// ===============================
public SearchResultDto<StorySearchDto> searchStories(String query, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
String sortBy, String sortOrder, int page, int size,
List<String> facetBy,
// Advanced filters
String createdAfter, String createdBefore,
String lastReadAfter, String lastReadBefore,
Boolean unratedOnly, String readingStatus,
Boolean hasReadingProgress, Boolean hasCoverImage,
String sourceDomain, String seriesFilter,
Integer minTagCount, Boolean popularOnly,
Boolean hiddenGemsOnly) {
if (!isAvailable()) {
logger.debug("Solr not available - returning empty search results");
return new SearchResultDto<StorySearchDto>(Collections.emptyList(), 0, 0, Collections.emptyList());
}
try {
SolrQuery solrQuery = new SolrQuery();
// Set query
if (query == null || query.trim().isEmpty()) {
solrQuery.setQuery("*:*");
} else {
// Use the eDisMax query parser for better relevance
solrQuery.setQuery(query);
solrQuery.set("defType", "edismax");
solrQuery.set("qf", "title^3.0 description^2.0 authorName^2.0 seriesName^1.5 tagNames^1.0");
solrQuery.set("mm", "2<-1 5<-2 6<90%"); // Minimum should match
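// (1-2 terms: all required; 3-5: one may be missing; 6: two may be missing; 7+: 90% must match)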
}
// Apply filters
applySearchFilters(solrQuery, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, createdAfter, createdBefore, lastReadAfter,
lastReadBefore, unratedOnly, readingStatus, hasReadingProgress, hasCoverImage,
sourceDomain, seriesFilter, minTagCount, popularOnly, hiddenGemsOnly);
// Pagination
solrQuery.setStart(page * size);
solrQuery.setRows(size);
// Sorting
if (sortBy != null && !sortBy.isEmpty()) {
SolrQuery.ORDER order = "desc".equalsIgnoreCase(sortOrder) ?
SolrQuery.ORDER.desc : SolrQuery.ORDER.asc;
solrQuery.setSort(sortBy, order);
} else {
// Default relevance sorting
solrQuery.setSort("score", SolrQuery.ORDER.desc);
}
// Enable highlighting
if (properties.getQuery().isHighlight()) {
solrQuery.setHighlight(true);
solrQuery.addHighlightField("title");
solrQuery.addHighlightField("description");
solrQuery.setHighlightSimplePre("<em>");
solrQuery.setHighlightSimplePost("</em>");
solrQuery.setHighlightFragsize(150);
}
// Enable faceting
if (properties.getQuery().isFacets()) {
solrQuery.setFacet(true);
// Use optimized facet fields for better performance
solrQuery.addFacetField("authorName_facet");
solrQuery.addFacetField("tagNames_facet");
solrQuery.addFacetField("seriesName_facet");
solrQuery.addFacetField("rating");
solrQuery.addFacetField("isRead");
solrQuery.setFacetMinCount(1);
solrQuery.setFacetSort("count");
solrQuery.setFacetLimit(100); // Limit facet results for performance
}
// Log the query being sent to Solr for troubleshooting
logger.info("SolrService: Executing Solr query: {}", solrQuery);
QueryResponse response = solrClient.query(properties.getCores().getStories(), solrQuery);
logger.info("SolrService: Query executed successfully, found {} results", response.getResults().getNumFound());
return buildStorySearchResult(response);
} catch (Exception e) {
logger.error("Story search failed for query: {}", query, e);
return new SearchResultDto<StorySearchDto>(Collections.emptyList(), 0, 0, Collections.emptyList());
}
}
public List<AuthorSearchDto> searchAuthors(String query, int limit) {
if (!isAvailable()) {
logger.debug("Solr not available - returning empty author search results");
return Collections.emptyList();
}
try {
SolrQuery solrQuery = new SolrQuery();
// Set query
if (query == null || query.trim().isEmpty()) {
solrQuery.setQuery("*:*");
} else {
solrQuery.setQuery(query);
solrQuery.set("defType", "edismax");
solrQuery.set("qf", "name^3.0 notes^1.0 urls^0.5");
}
solrQuery.setRows(limit);
solrQuery.setSort("storyCount", SolrQuery.ORDER.desc);
QueryResponse response = solrClient.query(properties.getCores().getAuthors(), solrQuery);
return buildAuthorSearchResults(response);
} catch (Exception e) {
logger.error("Author search failed for query: {}", query, e);
return Collections.emptyList();
}
}
public List<String> getTagSuggestions(String query, int limit) {
if (!isAvailable() || query == null || query.trim().isEmpty()) {
return Collections.emptyList();
}
try {
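// Narrow candidates with a wildcard query on tagNames, then facet on
// tagNames_facet to collect the distinct tag values that match.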
SolrQuery solrQuery = new SolrQuery();
solrQuery.setQuery("tagNames:*" + escapeQueryChars(query.trim()) + "*");
solrQuery.setRows(0);
solrQuery.setFacet(true);
solrQuery.addFacetField("tagNames_facet");
solrQuery.setFacetMinCount(1);
solrQuery.setFacetLimit(limit);
QueryResponse response = solrClient.query(properties.getCores().getStories(), solrQuery);
return response.getFacetField("tagNames_facet").getValues().stream()
.map(facet -> facet.getName())
.filter(name -> name.toLowerCase().contains(query.toLowerCase()))
.collect(Collectors.toList());
} catch (Exception e) {
logger.error("Tag suggestions failed for query: {}", query, e);
return Collections.emptyList();
}
}
public String getRandomStoryId(Long seed) {
if (!isAvailable()) {
return null;
}
try {
SolrQuery solrQuery = new SolrQuery("*:*");
solrQuery.setRows(1);
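// Sorting on "random_<seed>" relies on a RandomSortField dynamic field (random_*)
// being defined in the Solr schema; reusing the same seed yields a stable ordering.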
if (seed != null) {
solrQuery.setSort("random_" + seed, SolrQuery.ORDER.asc);
} else {
solrQuery.setSort("random_" + System.currentTimeMillis(), SolrQuery.ORDER.asc);
}
QueryResponse response = solrClient.query(properties.getCores().getStories(), solrQuery);
if (response.getResults().size() > 0) {
return (String) response.getResults().get(0).getFieldValue("id");
}
} catch (Exception e) {
logger.error("Random story ID retrieval failed", e);
}
return null;
}
// ===============================
// RESULT BUILDING
// ===============================
private SearchResultDto<StorySearchDto> buildStorySearchResult(QueryResponse response) {
SolrDocumentList results = response.getResults();
List<StorySearchDto> stories = new ArrayList<>();
for (SolrDocument doc : results) {
StorySearchDto story = convertToStorySearchDto(doc);
// Add highlights
if (response.getHighlighting() != null) {
String id = (String) doc.getFieldValue("id");
Map<String, List<String>> highlights = response.getHighlighting().get(id);
if (highlights != null) {
List<String> allHighlights = highlights.values().stream()
.flatMap(List::stream)
.collect(Collectors.toList());
story.setHighlights(allHighlights);
}
}
stories.add(story);
}
// Build facets organized by field name
Map<String, List<FacetCountDto>> facetsMap = new HashMap<>();
if (response.getFacetFields() != null) {
response.getFacetFields().forEach(facetField -> {
String fieldName = facetField.getName();
List<FacetCountDto> fieldFacets = new ArrayList<>();
facetField.getValues().forEach(count -> {
fieldFacets.add(new FacetCountDto(count.getName(), (int) count.getCount()));
});
facetsMap.put(fieldName, fieldFacets);
});
}
return new SearchResultDto<StorySearchDto>(stories, (int) results.getNumFound(),
0, stories.size(), "", 0, facetsMap);
}
private List<AuthorSearchDto> buildAuthorSearchResults(QueryResponse response) {
return response.getResults().stream()
.map(this::convertToAuthorSearchDto)
.collect(Collectors.toList());
}
private StorySearchDto convertToStorySearchDto(SolrDocument doc) {
StorySearchDto story = new StorySearchDto();
story.setId(UUID.fromString((String) doc.getFieldValue("id")));
story.setTitle((String) doc.getFieldValue("title"));
story.setDescription((String) doc.getFieldValue("description"));
story.setSourceUrl((String) doc.getFieldValue("sourceUrl"));
story.setCoverPath((String) doc.getFieldValue("coverPath"));
story.setWordCount((Integer) doc.getFieldValue("wordCount"));
story.setRating((Integer) doc.getFieldValue("rating"));
story.setVolume((Integer) doc.getFieldValue("volume"));
story.setIsRead((Boolean) doc.getFieldValue("isRead"));
story.setReadingPosition((Integer) doc.getFieldValue("readingPosition"));
// Handle dates
story.setLastReadAt(parseDateTimeFromSolr(doc.getFieldValue("lastReadAt")));
story.setCreatedAt(parseDateTimeFromSolr(doc.getFieldValue("createdAt")));
story.setUpdatedAt(parseDateTimeFromSolr(doc.getFieldValue("updatedAt")));
story.setDateAdded(parseDateTimeFromSolr(doc.getFieldValue("dateAdded")));
// Handle author
String authorIdStr = (String) doc.getFieldValue("authorId");
if (authorIdStr != null) {
story.setAuthorId(UUID.fromString(authorIdStr));
}
story.setAuthorName((String) doc.getFieldValue("authorName"));
// Handle series
String seriesIdStr = (String) doc.getFieldValue("seriesId");
if (seriesIdStr != null) {
story.setSeriesId(UUID.fromString(seriesIdStr));
}
story.setSeriesName((String) doc.getFieldValue("seriesName"));
// Handle tags
Collection<Object> tagValues = doc.getFieldValues("tagNames");
if (tagValues != null) {
List<String> tagNames = tagValues.stream()
.map(Object::toString)
.collect(Collectors.toList());
story.setTagNames(tagNames);
}
return story;
}
private AuthorSearchDto convertToAuthorSearchDto(SolrDocument doc) {
AuthorSearchDto author = new AuthorSearchDto();
author.setId(UUID.fromString((String) doc.getFieldValue("id")));
author.setName((String) doc.getFieldValue("name"));
author.setNotes((String) doc.getFieldValue("notes"));
author.setAuthorRating((Integer) doc.getFieldValue("authorRating"));
author.setAvatarImagePath((String) doc.getFieldValue("avatarImagePath"));
author.setStoryCount((Integer) doc.getFieldValue("storyCount"));
Double avgRating = (Double) doc.getFieldValue("averageStoryRating");
if (avgRating != null) {
author.setAverageStoryRating(avgRating);
}
// Handle URLs
Collection<Object> urlValues = doc.getFieldValues("urls");
if (urlValues != null) {
List<String> urls = urlValues.stream()
.map(Object::toString)
.collect(Collectors.toList());
author.setUrls(urls);
}
// Handle dates
author.setCreatedAt(parseDateTimeFromSolr(doc.getFieldValue("createdAt")));
author.setUpdatedAt(parseDateTimeFromSolr(doc.getFieldValue("updatedAt")));
return author;
}
private LocalDateTime parseDateTime(String dateStr) {
if (dateStr == null || dateStr.isEmpty()) {
return null;
}
try {
// Remove 'Z' suffix if present and parse
String cleanDate = dateStr.endsWith("Z") ? dateStr.substring(0, dateStr.length() - 1) : dateStr;
return LocalDateTime.parse(cleanDate, DateTimeFormatter.ISO_LOCAL_DATE_TIME);
} catch (Exception e) {
logger.warn("Failed to parse date: {}", dateStr, e);
return null;
}
}
private LocalDateTime parseDateTimeFromSolr(Object dateValue) {
if (dateValue == null) {
return null;
}
if (dateValue instanceof Date) {
// Solr returns dates as UTC instants; convert back to LocalDateTime in UTC
// to stay consistent with formatDateTime(), which labels indexed values as UTC
return ((Date) dateValue).toInstant()
.atZone(java.time.ZoneOffset.UTC)
.toLocalDateTime();
} else if (dateValue instanceof String) {
return parseDateTime((String) dateValue);
} else {
logger.warn("Unexpected date type: {}", dateValue.getClass());
return null;
}
}
public List<StorySearchDto> getRandomStories(int count, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
Long seed) {
if (!isAvailable()) {
return Collections.emptyList();
}
try {
SolrQuery solrQuery = new SolrQuery("*:*");
// Apply filters
applySearchFilters(solrQuery, tags, author, series, minWordCount, maxWordCount,
minRating, isRead, isFavorite, null, null, null, null, null, null,
null, null, null, null, null, null, null);
solrQuery.setRows(count);
// Use random sorting
if (seed != null) {
solrQuery.setSort("random_" + seed, SolrQuery.ORDER.asc);
} else {
solrQuery.setSort("random_" + System.currentTimeMillis(), SolrQuery.ORDER.asc);
}
QueryResponse response = solrClient.query(properties.getCores().getStories(), solrQuery);
return response.getResults().stream()
.map(this::convertToStorySearchDto)
.collect(Collectors.toList());
} catch (Exception e) {
logger.error("Random stories retrieval failed", e);
return Collections.emptyList();
}
}
// ===============================
// FILTER APPLICATION
// ===============================
private void applySearchFilters(SolrQuery solrQuery, List<String> tags, String author,
String series, Integer minWordCount, Integer maxWordCount,
Float minRating, Boolean isRead, Boolean isFavorite,
String createdAfter, String createdBefore,
String lastReadAfter, String lastReadBefore,
Boolean unratedOnly, String readingStatus,
Boolean hasReadingProgress, Boolean hasCoverImage,
String sourceDomain, String seriesFilter,
Integer minTagCount, Boolean popularOnly,
Boolean hiddenGemsOnly) {
List<String> filters = new ArrayList<>();
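// The *_facet fields are assumed to be non-tokenized string copies (copyField)
// in the schema, so the quoted filters below match whole values exactly.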
// Tag filters - use facet field for exact matching
if (tags != null && !tags.isEmpty()) {
String tagFilter = tags.stream()
.map(tag -> "tagNames_facet:\"" + escapeQueryChars(tag) + "\"")
.collect(Collectors.joining(" AND "));
filters.add("(" + tagFilter + ")");
}
// Author filter - use facet field for exact matching
if (author != null && !author.trim().isEmpty()) {
filters.add("authorName_facet:\"" + escapeQueryChars(author.trim()) + "\"");
}
// Series filter - use facet field for exact matching
if (series != null && !series.trim().isEmpty()) {
filters.add("seriesName_facet:\"" + escapeQueryChars(series.trim()) + "\"");
}
// Word count filters
if (minWordCount != null && maxWordCount != null) {
filters.add("wordCount:[" + minWordCount + " TO " + maxWordCount + "]");
} else if (minWordCount != null) {
filters.add("wordCount:[" + minWordCount + " TO *]");
} else if (maxWordCount != null) {
filters.add("wordCount:[* TO " + maxWordCount + "]");
}
// Rating filter
if (minRating != null) {
filters.add("rating:[" + minRating.intValue() + " TO *]");
}
// Read status filter
if (isRead != null) {
filters.add("isRead:" + isRead);
}
// Date filters - convert to ISO format for Solr
if (createdAfter != null) {
filters.add("createdAt:[" + formatDateForSolr(createdAfter) + " TO *]");
}
if (createdBefore != null) {
filters.add("createdAt:[* TO " + formatDateForSolr(createdBefore) + "]");
}
// Last read date filters
if (lastReadAfter != null) {
filters.add("lastReadAt:[" + formatDateForSolr(lastReadAfter) + " TO *]");
}
if (lastReadBefore != null) {
filters.add("lastReadAt:[* TO " + formatDateForSolr(lastReadBefore) + "]");
}
// Unrated filter
if (unratedOnly != null && unratedOnly) {
filters.add("((*:* -rating:[1 TO *]) OR rating:0)"); // unrated: missing rating field, rating 0, or anything below 1
}
// Cover image filter
if (hasCoverImage != null) {
if (hasCoverImage) {
filters.add("coverPath:[\"\" TO *]"); // Has non-empty value
} else {
filters.add("-coverPath:[\"\" TO *]"); // No value or empty
}
}
// Reading status filter
if (readingStatus != null && !readingStatus.equals("all")) {
switch (readingStatus) {
case "unread":
// Unread: isRead is false AND readingPosition is 0 (or null)
filters.add("isRead:false AND (readingPosition:0 OR (*:* -readingPosition:[1 TO *]))");
break;
case "started":
// Started: has reading progress but not finished
filters.add("readingPosition:[1 TO *] AND isRead:false");
break;
case "completed":
// Completed: isRead is true
filters.add("isRead:true");
break;
}
}
// Reading progress filter
if (hasReadingProgress != null) {
if (hasReadingProgress) {
filters.add("readingPosition:[1 TO *]"); // Has reading progress
} else {
filters.add("(readingPosition:0 OR (*:* -readingPosition:[1 TO *]))"); // No reading progress
}
}
// Series filter
if (seriesFilter != null && !seriesFilter.equals("all")) {
switch (seriesFilter) {
case "standalone":
// Standalone: no series (seriesId is null or empty)
filters.add("(*:* -seriesId:[\"\" TO *])");
break;
case "series":
// Part of series: has seriesId
filters.add("seriesId:[\"\" TO *]");
break;
case "firstInSeries":
// First in series: has seriesId and volume is 1
filters.add("seriesId:[\"\" TO *] AND volume:1");
break;
case "lastInSeries":
// This would require complex logic to determine last volume per series
// For now, just filter by having a series (can be enhanced later)
filters.add("seriesId:[\"\" TO *]");
break;
}
}
// Source domain filter
if (sourceDomain != null && !sourceDomain.trim().isEmpty()) {
// Extract domain from sourceUrl field
filters.add("sourceUrl:*" + escapeQueryChars(sourceDomain.trim()) + "*");
}
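// Note: isFavorite, minTagCount, popularOnly and hiddenGemsOnly are accepted
// but not yet translated into filter queries.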
// Apply all filters
for (String filter : filters) {
solrQuery.addFilterQuery(filter);
}
}
public void recreateCores() throws IOException {
logger.warn("Solr core recreation is not handled by this service - manage cores via the Solr CoreAdmin API or admin UI");
// Note: recreating cores requires CoreAdmin operations (UNLOAD/CREATE) that are
// typically performed through the Solr admin interface, provisioning scripts,
// or by restarting with fresh cores
}
public void recreateIndices() throws IOException {
recreateCores();
}
/**
* Escape special characters in Solr query strings
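* Mirrors SolrJ's ClientUtils.escapeQueryChars; whitespace is also escaped so
* values can be embedded safely in filter queries.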
*/
private String escapeQueryChars(String s) {
if (s == null) return null;
StringBuilder sb = new StringBuilder();
for (int i = 0; i < s.length(); i++) {
char c = s.charAt(i);
// These characters are part of the query syntax and must be escaped
if (c == '\\' || c == '+' || c == '-' || c == '!' || c == '(' || c == ')' || c == ':'
|| c == '^' || c == '[' || c == ']' || c == '\"' || c == '{' || c == '}' || c == '~'
|| c == '*' || c == '?' || c == '|' || c == '&' || c == ';' || c == '/'
|| Character.isWhitespace(c)) {
sb.append('\\');
}
sb.append(c);
}
return sb.toString();
}
/**
* Format date string for Solr queries
*/
private String formatDateForSolr(String dateStr) {
if (dateStr == null || dateStr.isEmpty()) {
return dateStr;
}
try {
// Already a full ISO-8601 instant
if (dateStr.endsWith("Z")) {
return dateStr;
}
// Date-time without a zone designator: assume UTC so Solr accepts it
if (dateStr.contains("T")) {
return dateStr + "Z";
}
// Convert date string like "2025-08-23" to ISO format "2025-08-23T00:00:00Z"
if (dateStr.matches("\\d{4}-\\d{2}-\\d{2}")) {
return dateStr + "T00:00:00Z";
}
return dateStr;
} catch (Exception e) {
logger.warn("Failed to format date for Solr: {}", dateStr, e);
return dateStr;
}
}
}

View File

@@ -39,54 +39,46 @@ storycove:
auth:
password: ${APP_PASSWORD} # REQUIRED: No default password for security
search:
engine: opensearch # OpenSearch is the only search engine
opensearch:
engine: solr # Apache Solr search engine
solr:
# Connection settings
host: ${OPENSEARCH_HOST:localhost}
port: ${OPENSEARCH_PORT:9200}
scheme: ${OPENSEARCH_SCHEME:http}
username: ${OPENSEARCH_USERNAME:}
password: ${OPENSEARCH_PASSWORD:} # Empty when security is disabled
url: ${SOLR_URL:http://solr:8983/solr}
username: ${SOLR_USERNAME:}
password: ${SOLR_PASSWORD:}
# Environment-specific configuration
profile: ${SPRING_PROFILES_ACTIVE:development} # development, staging, production
# Core configuration
cores:
stories: ${SOLR_STORIES_CORE:storycove_stories}
authors: ${SOLR_AUTHORS_CORE:storycove_authors}
# Security settings
security:
ssl-verification: ${OPENSEARCH_SSL_VERIFICATION:false}
trust-all-certificates: ${OPENSEARCH_TRUST_ALL_CERTS:true}
keystore-path: ${OPENSEARCH_KEYSTORE_PATH:}
keystore-password: ${OPENSEARCH_KEYSTORE_PASSWORD:}
truststore-path: ${OPENSEARCH_TRUSTSTORE_PATH:}
truststore-password: ${OPENSEARCH_TRUSTSTORE_PASSWORD:}
# Connection pool settings
# Connection settings
connection:
timeout: ${OPENSEARCH_CONNECTION_TIMEOUT:30000} # 30 seconds
socket-timeout: ${OPENSEARCH_SOCKET_TIMEOUT:60000} # 60 seconds
max-connections-per-route: ${OPENSEARCH_MAX_CONN_PER_ROUTE:10}
max-connections-total: ${OPENSEARCH_MAX_CONN_TOTAL:30}
retry-on-failure: ${OPENSEARCH_RETRY_ON_FAILURE:true}
max-retries: ${OPENSEARCH_MAX_RETRIES:3}
timeout: ${SOLR_CONNECTION_TIMEOUT:30000} # 30 seconds
socket-timeout: ${SOLR_SOCKET_TIMEOUT:60000} # 60 seconds
max-connections-per-route: ${SOLR_MAX_CONN_PER_ROUTE:10}
max-connections-total: ${SOLR_MAX_CONN_TOTAL:30}
retry-on-failure: ${SOLR_RETRY_ON_FAILURE:true}
max-retries: ${SOLR_MAX_RETRIES:3}
# Index settings
indices:
default-shards: ${OPENSEARCH_DEFAULT_SHARDS:1}
default-replicas: ${OPENSEARCH_DEFAULT_REPLICAS:0}
refresh-interval: ${OPENSEARCH_REFRESH_INTERVAL:1s}
# Query settings
query:
default-rows: ${SOLR_DEFAULT_ROWS:10}
max-rows: ${SOLR_MAX_ROWS:1000}
default-operator: ${SOLR_DEFAULT_OPERATOR:AND}
highlight: ${SOLR_ENABLE_HIGHLIGHT:true}
facets: ${SOLR_ENABLE_FACETS:true}
# Bulk operations
bulk:
actions: ${OPENSEARCH_BULK_ACTIONS:1000}
size: ${OPENSEARCH_BULK_SIZE:5242880} # 5MB
timeout: ${OPENSEARCH_BULK_TIMEOUT:10000} # 10 seconds
concurrent-requests: ${OPENSEARCH_BULK_CONCURRENT:1}
# Commit settings
commit:
soft-commit: ${SOLR_SOFT_COMMIT:true}
commit-within: ${SOLR_COMMIT_WITHIN:1000} # 1 second
wait-searcher: ${SOLR_WAIT_SEARCHER:false}
# Health and monitoring
health:
check-interval: ${OPENSEARCH_HEALTH_CHECK_INTERVAL:30000} # 30 seconds
slow-query-threshold: ${OPENSEARCH_SLOW_QUERY_THRESHOLD:5000} # 5 seconds
enable-metrics: ${OPENSEARCH_ENABLE_METRICS:true}
check-interval: ${SOLR_HEALTH_CHECK_INTERVAL:30000} # 30 seconds
slow-query-threshold: ${SOLR_SLOW_QUERY_THRESHOLD:5000} # 5 seconds
enable-metrics: ${SOLR_ENABLE_METRICS:true}
images:
storage-path: ${IMAGE_STORAGE_PATH:/app/images}
@@ -100,8 +92,8 @@ management:
show-details: when-authorized
show-components: always
health:
opensearch:
enabled: ${OPENSEARCH_HEALTH_ENABLED:true}
solr:
enabled: ${SOLR_HEALTH_ENABLED:true}
logging:
level:

View File

@@ -1,178 +0,0 @@
# OpenSearch Configuration - Best Practices Implementation
## Overview
This directory contains a production-ready OpenSearch configuration following industry best practices for security, scalability, and maintainability.
## Architecture
### 📁 Directory Structure
```
opensearch/
├── config/
│ ├── opensearch-development.yml # Development-specific settings
│ └── opensearch-production.yml # Production-specific settings
├── mappings/
│ ├── stories-mapping.json # Story index mapping
│ ├── authors-mapping.json # Author index mapping
│ └── collections-mapping.json # Collection index mapping
├── templates/
│ ├── stories-template.json # Index template for stories_*
│ └── index-lifecycle-policy.json # ILM policy for index management
└── README.md # This file
```
## ✅ Best Practices Implemented
### 🔒 **Security**
- **Environment-Aware SSL Configuration**
- Production: Full certificate validation with custom truststore support
- Development: Optional certificate validation for local development
- **Proper Authentication**: Basic auth with secure credential management
- **Connection Security**: TLS 1.3 support with hostname verification
### 🏗️ **Configuration Management**
- **Externalized Configuration**: JSON/YAML files instead of hardcoded values
- **Environment-Specific Settings**: Different configs for dev/staging/prod
- **Type-Safe Properties**: Strongly-typed configuration classes
- **Validation**: Configuration validation at startup
### 📈 **Scalability & Performance**
- **Connection Pooling**: Configurable connection pool with timeout management
- **Environment-Aware Sharding**:
- Development: 1 shard, 0 replicas (single node)
- Production: 3 shards, 1 replica (high availability)
- **Bulk Operations**: Optimized bulk indexing with configurable batch sizes
- **Index Templates**: Automatic application of settings to new indexes
### 🔄 **Index Lifecycle Management**
- **Automated Index Rollover**: Based on size, document count, and age
- **Hot-Warm-Cold Architecture**: Optimized storage costs
- **Retention Policies**: Automatic cleanup of old data
- **Force Merge**: Optimization in warm phase
### 📊 **Monitoring & Observability**
- **Health Checks**: Automatic cluster health monitoring
- **Spring Boot Actuator**: Health endpoints for monitoring systems
- **Metrics Collection**: Configurable performance metrics
- **Slow Query Detection**: Configurable thresholds for query performance
### 🛡️ **Error Handling & Resilience**
- **Connection Retry Logic**: Automatic retry with backoff
- **Circuit Breaker Pattern**: Fail-fast for unhealthy clusters
- **Graceful Degradation**: Continues operating when OpenSearch is unavailable
- **Detailed Error Logging**: Comprehensive error tracking
## 🚀 Usage
### Development Environment
```yaml
# application-development.yml
storycove:
opensearch:
profile: development
security:
ssl-verification: false
trust-all-certificates: true
indices:
default-shards: 1
default-replicas: 0
```
### Production Environment
```yaml
# application-production.yml
storycove:
opensearch:
profile: production
security:
ssl-verification: true
trust-all-certificates: false
truststore-path: /etc/ssl/opensearch-truststore.jks
indices:
default-shards: 3
default-replicas: 1
```
## 📋 Environment Variables
### Required
- `OPENSEARCH_PASSWORD`: Admin password for OpenSearch cluster
### Optional (with sensible defaults)
- `OPENSEARCH_HOST`: Cluster hostname (default: localhost)
- `OPENSEARCH_PORT`: Cluster port (default: 9200)
- `OPENSEARCH_USERNAME`: Admin username (default: admin)
- `OPENSEARCH_SSL_VERIFICATION`: Enable SSL verification (default: false for dev)
- `OPENSEARCH_MAX_CONN_TOTAL`: Max connections (default: 30 for dev, 200 for prod)
## 🎯 Index Templates
Index templates automatically apply configuration to new indexes:
```json
{
"index_patterns": ["stories_*"],
"template": {
"settings": {
"number_of_shards": "#{ENV_SPECIFIC}",
"analysis": {
"analyzer": {
"story_analyzer": {
"type": "standard",
"stopwords": "_english_"
}
}
}
}
}
}
```
## 🔍 Health Monitoring
Access health information:
- **Application Health**: `/actuator/health`
- **OpenSearch Specific**: `/actuator/health/opensearch`
- **Detailed Metrics**: Available when `enable-metrics: true`
## 🔄 Deployment Strategy
Recommended deployment approach:
1. **Development**: Test OpenSearch configuration locally
2. **Staging**: Validate performance and accuracy in a staging environment
3. **Production**: Deploy with proper monitoring and backup procedures
## 🛠️ Troubleshooting
### Common Issues
1. **SSL Certificate Errors**
- Development: Set `trust-all-certificates: true`
- Production: Provide valid truststore path
2. **Connection Timeouts**
- Increase `connection.timeout` values
- Check network connectivity and firewall rules
3. **Index Creation Failures**
- Verify cluster health with `/actuator/health/opensearch`
- Check OpenSearch logs for detailed error messages
4. **Performance Issues**
- Monitor slow queries with configurable thresholds
- Adjust bulk operation settings
- Review shard allocation and replica settings
## 🔮 Future Enhancements
- **Multi-Cluster Support**: Connect to multiple OpenSearch clusters
- **Advanced Security**: Integration with OpenSearch Security plugin
- **Custom Analyzers**: Domain-specific text analysis
- **Index Aliases**: Zero-downtime index updates
- **Machine Learning**: Integration with OpenSearch ML features
---
This configuration provides a solid foundation that scales from development to enterprise production environments while maintaining security, performance, and operational excellence.

View File

@@ -1,32 +0,0 @@
# OpenSearch Development Configuration
opensearch:
cluster:
name: "storycove-dev"
initial_master_nodes: ["opensearch-node"]
# Development settings - single node, minimal resources
indices:
default_settings:
number_of_shards: 1
number_of_replicas: 0
refresh_interval: "1s"
# Security settings for development
security:
ssl_verification: false
trust_all_certificates: true
# Connection settings
connection:
timeout: "30s"
socket_timeout: "60s"
max_connections_per_route: 10
max_connections_total: 30
# Index management
index_management:
auto_create_templates: true
template_patterns:
stories: "stories_*"
authors: "authors_*"
collections: "collections_*"

View File

@@ -1,60 +0,0 @@
# OpenSearch Production Configuration
opensearch:
cluster:
name: "storycove-prod"
# Production settings - multi-shard, with replicas
indices:
default_settings:
number_of_shards: 3
number_of_replicas: 1
refresh_interval: "30s"
max_result_window: 50000
# Index lifecycle policies
lifecycle:
hot_phase_duration: "7d"
warm_phase_duration: "30d"
cold_phase_duration: "90d"
delete_after: "1y"
# Security settings for production
security:
ssl_verification: true
trust_all_certificates: false
certificate_verification: true
tls_version: "TLSv1.3"
# Connection settings
connection:
timeout: "10s"
socket_timeout: "30s"
max_connections_per_route: 50
max_connections_total: 200
retry_on_failure: true
max_retries: 3
retry_delay: "1s"
# Performance tuning
performance:
bulk_actions: 1000
bulk_size: "5MB"
bulk_timeout: "10s"
concurrent_requests: 4
# Monitoring and observability
monitoring:
health_check_interval: "30s"
slow_query_threshold: "5s"
enable_metrics: true
# Index management
index_management:
auto_create_templates: true
template_patterns:
stories: "stories_*"
authors: "authors_*"
collections: "collections_*"
retention_policy:
enabled: true
default_retention: "1y"

View File

@@ -1,79 +0,0 @@
{
"settings": {
"number_of_shards": 1,
"number_of_replicas": 0,
"analysis": {
"analyzer": {
"name_analyzer": {
"type": "standard",
"stopwords": "_english_"
},
"autocomplete_analyzer": {
"type": "custom",
"tokenizer": "standard",
"filter": ["lowercase", "edge_ngram"]
}
},
"filter": {
"edge_ngram": {
"type": "edge_ngram",
"min_gram": 2,
"max_gram": 20
}
}
}
},
"mappings": {
"properties": {
"id": {
"type": "keyword"
},
"name": {
"type": "text",
"analyzer": "name_analyzer",
"fields": {
"autocomplete": {
"type": "text",
"analyzer": "autocomplete_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"bio": {
"type": "text",
"analyzer": "name_analyzer"
},
"urls": {
"type": "keyword"
},
"imageUrl": {
"type": "keyword"
},
"storyCount": {
"type": "integer"
},
"averageRating": {
"type": "float"
},
"totalWordCount": {
"type": "long"
},
"totalReadingTime": {
"type": "integer"
},
"createdAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"updatedAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"libraryId": {
"type": "keyword"
}
}
}
}

View File

@@ -1,73 +0,0 @@
{
"settings": {
"number_of_shards": 1,
"number_of_replicas": 0,
"analysis": {
"analyzer": {
"collection_analyzer": {
"type": "standard",
"stopwords": "_english_"
},
"autocomplete_analyzer": {
"type": "custom",
"tokenizer": "standard",
"filter": ["lowercase", "edge_ngram"]
}
},
"filter": {
"edge_ngram": {
"type": "edge_ngram",
"min_gram": 2,
"max_gram": 20
}
}
}
},
"mappings": {
"properties": {
"id": {
"type": "keyword"
},
"name": {
"type": "text",
"analyzer": "collection_analyzer",
"fields": {
"autocomplete": {
"type": "text",
"analyzer": "autocomplete_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"description": {
"type": "text",
"analyzer": "collection_analyzer"
},
"storyCount": {
"type": "integer"
},
"totalWordCount": {
"type": "long"
},
"averageRating": {
"type": "float"
},
"isPublic": {
"type": "boolean"
},
"createdAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"updatedAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"libraryId": {
"type": "keyword"
}
}
}
}

View File

@@ -1,120 +0,0 @@
{
"settings": {
"number_of_shards": 1,
"number_of_replicas": 0,
"analysis": {
"analyzer": {
"story_analyzer": {
"type": "standard",
"stopwords": "_english_"
},
"autocomplete_analyzer": {
"type": "custom",
"tokenizer": "standard",
"filter": ["lowercase", "edge_ngram"]
}
},
"filter": {
"edge_ngram": {
"type": "edge_ngram",
"min_gram": 2,
"max_gram": 20
}
}
}
},
"mappings": {
"properties": {
"id": {
"type": "keyword"
},
"title": {
"type": "text",
"analyzer": "story_analyzer",
"fields": {
"autocomplete": {
"type": "text",
"analyzer": "autocomplete_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"content": {
"type": "text",
"analyzer": "story_analyzer"
},
"summary": {
"type": "text",
"analyzer": "story_analyzer"
},
"authorNames": {
"type": "text",
"analyzer": "story_analyzer",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"authorIds": {
"type": "keyword"
},
"tagNames": {
"type": "keyword"
},
"seriesTitle": {
"type": "text",
"analyzer": "story_analyzer",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"seriesId": {
"type": "keyword"
},
"wordCount": {
"type": "integer"
},
"rating": {
"type": "float"
},
"readingTime": {
"type": "integer"
},
"language": {
"type": "keyword"
},
"status": {
"type": "keyword"
},
"createdAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"updatedAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"publishedAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"isRead": {
"type": "boolean"
},
"isFavorite": {
"type": "boolean"
},
"readingProgress": {
"type": "float"
},
"libraryId": {
"type": "keyword"
}
}
}
}

View File

@@ -1,77 +0,0 @@
{
"policy": {
"description": "StoryCove index lifecycle policy",
"default_state": "hot",
"states": [
{
"name": "hot",
"actions": [
{
"rollover": {
"min_size": "50gb",
"min_doc_count": 1000000,
"min_age": "7d"
}
}
],
"transitions": [
{
"state_name": "warm",
"conditions": {
"min_age": "7d"
}
}
]
},
{
"name": "warm",
"actions": [
{
"replica_count": {
"number_of_replicas": 0
}
},
{
"force_merge": {
"max_num_segments": 1
}
}
],
"transitions": [
{
"state_name": "cold",
"conditions": {
"min_age": "30d"
}
}
]
},
{
"name": "cold",
"actions": [],
"transitions": [
{
"state_name": "delete",
"conditions": {
"min_age": "365d"
}
}
]
},
{
"name": "delete",
"actions": [
{
"delete": {}
}
]
}
],
"ism_template": [
{
"index_patterns": ["stories_*", "authors_*", "collections_*"],
"priority": 100
}
]
}
}

View File

@@ -1,124 +0,0 @@
{
"index_patterns": ["stories_*"],
"priority": 1,
"template": {
"settings": {
"number_of_shards": 1,
"number_of_replicas": 0,
"analysis": {
"analyzer": {
"story_analyzer": {
"type": "standard",
"stopwords": "_english_"
},
"autocomplete_analyzer": {
"type": "custom",
"tokenizer": "standard",
"filter": ["lowercase", "edge_ngram"]
}
},
"filter": {
"edge_ngram": {
"type": "edge_ngram",
"min_gram": 2,
"max_gram": 20
}
}
}
},
"mappings": {
"properties": {
"id": {
"type": "keyword"
},
"title": {
"type": "text",
"analyzer": "story_analyzer",
"fields": {
"autocomplete": {
"type": "text",
"analyzer": "autocomplete_analyzer"
},
"keyword": {
"type": "keyword"
}
}
},
"content": {
"type": "text",
"analyzer": "story_analyzer"
},
"summary": {
"type": "text",
"analyzer": "story_analyzer"
},
"authorNames": {
"type": "text",
"analyzer": "story_analyzer",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"authorIds": {
"type": "keyword"
},
"tagNames": {
"type": "keyword"
},
"seriesTitle": {
"type": "text",
"analyzer": "story_analyzer",
"fields": {
"keyword": {
"type": "keyword"
}
}
},
"seriesId": {
"type": "keyword"
},
"wordCount": {
"type": "integer"
},
"rating": {
"type": "float"
},
"readingTime": {
"type": "integer"
},
"language": {
"type": "keyword"
},
"status": {
"type": "keyword"
},
"createdAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"updatedAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"publishedAt": {
"type": "date",
"format": "strict_date_optional_time||epoch_millis"
},
"isRead": {
"type": "boolean"
},
"isFavorite": {
"type": "boolean"
},
"readingProgress": {
"type": "float"
},
"libraryId": {
"type": "keyword"
}
}
}
}
}

View File

@@ -19,11 +19,14 @@ storycove:
auth:
password: test-password
search:
engine: opensearch
opensearch:
engine: solr
solr:
host: localhost
port: 9200
port: 8983
scheme: http
cores:
stories: storycove_stories
authors: storycove_authors
images:
storage-path: /tmp/test-images