diff --git a/.github/workflows/snyk.yml b/.github/workflows/snyk.yml index f6947db..7496d57 100644 --- a/.github/workflows/snyk.yml +++ b/.github/workflows/snyk.yml @@ -24,6 +24,6 @@ jobs: args: --sarif-file-output=snyk.sarif --include-provided-dependencies=false - name: Upload result to GitHub Code Scanning - uses: github/codeql-action/upload-sarif@v2 + uses: github/codeql-action/upload-sarif@v3 with: sarif_file: snyk.sarif diff --git a/README.md b/README.md index cf87a06..9b798ac 100644 --- a/README.md +++ b/README.md @@ -1,5 +1,8 @@ # Elasticsearch Appender +[v1]: https://github.com/Chavaillaz/elasticsearch-log4j-appender/wiki/Appender-1.x-‐-Log4j-1.x +[v2]: https://github.com/Chavaillaz/elasticsearch-log4j-appender/wiki/Appender-2.x-‐-Log4j-2.x + ![Quality Gate](https://github.com/chavaillaz/elasticsearch-log4j-appender/actions/workflows/sonarcloud.yml/badge.svg) ![Dependency Check](https://github.com/chavaillaz/elasticsearch-log4j-appender/actions/workflows/snyk.yml/badge.svg) [![Maven Central](https://maven-badges.herokuapp.com/maven-central/com.chavaillaz/elasticsearch-log4j-appender/badge.svg)](https://maven-badges.herokuapp.com/maven-central/com.chavaillaz/elasticsearch-log4j-appender) @@ -8,21 +11,21 @@ Elasticsearch appender allows you to send log events directly from Log4j to an elastic cluster. The delivery of logs is asynchronous (i.e. not on the main thread) and therefore will not block execution of the program. 
-| Appender version | Log4j version | Elasticsearch version | -|------------------|---------------|-----------------------| -| 1.0.0 | 1.2.17 | 7.x | -| 2.0.0 | 2.17.1 | 7.x | -| 2.1.0 | 2.17.2 | 8.x | -| 2.1.1 | 2.19.0 | 8.x | -| 2.1.2 | 2.20.0 | 8.x | -| 2.1.3 | 2.24.1 | 8.x | +| Appender version | Log4j version | Elasticsearch version | Documentation | +|------------------|---------------|-----------------------|-----------------| +| 1.0.0 | 1.2.17 | 7.x | [Version 1][v1] | +| 2.0.0 | 2.17.1 | 7.x | [Version 2][v2] | +| 2.1.0 | 2.17.2 | 8.x | [Version 2][v2] | +| 2.1.1 | 2.19.0 | 8.x | [Version 2][v2] | +| 2.1.2 | 2.20.0 | 8.x | [Version 2][v2] | +| 2.1.3 | 2.24.1 | 8.x | [Version 2][v2] | +| 3.0.0 | 2.24.1 | 8.x | See below | ## Installation The dependency is available in maven central (see badge and table above for the version): ```xml - com.chavaillaz elasticsearch-log4j-appender @@ -33,36 +36,27 @@ You then have to configure log4j in order to include this appender (see configur ## Configuration -In the configuration file, add a new appender `ElasticsearchAppender` (from package `com.chavaillaz.appender.log4j`) -with the following properties **(please note that for Log4j2 they all start with an uppercase letter)**: - -- `applicationName` (default `unknown`). It can also be specified as environment variable or system - property `APPLICATION`. -- `hostName` (default is the machine host name). It can also be specified as environment variable or system - property `HOST`. -- `environmentName` (default `local`). It can be specified also as environment variable or system property `ENV`. -- `elasticConverter` (default `com.chavaillaz.appender.log4j.converter.DefaultEventConverter`) is the class used to - convert a logging event into a key/value document to be stored in Elasticsearch. It can also be specified as - environment variable or system property `CONVERTER`. 
-- `elasticIndex` (default `ha`) and `elasticIndexSuffix` (default `-yyyy.MM.dd`) form together the index name where the - messages are sent to. Note that `elasticIndexSuffix` must contain a format pattern suitable for `DateTimeFormatter`. - They can also both be specified with environment variables or system properties `INDEX` and `INDEX_SUFFIX`. -- `elasticUrl` is the address of the server (or its load balancer) in the format `[scheme://][host]:[port]`. The scheme - is optional and defaults to `http`. -- `elasticApiKey` (encoded) or `elasticUser` + `elasticPassword` are the credentials for the server. -- `elasticParallelExecution` (default `true`) specifies the way the messages are sent to the server - (`true` send them in a separate thread and `false` send them sequentially). -- `elasticBatchSize` (default `1`) is the number of messages threshold triggering the sending to the server. -- `elasticBatchInitialDelay` (default `1000`) is the time in milliseconds before a first batch of messages is sent to - the server (after appender startup, even if the batch of messages is incomplete, meaning not reaching the threshold). -- `elasticBatchDelay` (default `1000`) is the time in milliseconds between two cleanups of incomplete batches. If after - this time there are less than `elasticBatchSize` messages waiting to be sent, the batch is sent nonetheless. Note that - once `elasticBatchSize` messages are waiting, the batch is sent immediately, without any delay. - -Note that `elasticUrl` is the only mandatory configuration to give, except if you need to overwrite the default value of -another ones. 
- -### XML file example (log4j version 2) +In the Log4j configuration file, add a new appender `ElasticsearchAppender` using package +`com.chavaillaz.appender.log4j.elastic` with the following properties: + +| Appender Property | Environment / System variable | Default value | Description | +|-------------------|-------------------------------|-----------------------------|-----------------------------------------------------------------------------------------------------------------------------------------| +| Application | APP | `unknown` | The name of the application generating the logs. | +| Host | HOST | Machine host name | The name of the host on which the application is running. | +| Environment | ENV | `local` | The name of the environment in which the application is running. | +| Converter | CONVERTER | `[...].DefaultLogConverter` | The path of the class used to convert logging events into key/value documents to be stored. | +| Index | INDEX | `ha` | The name of the Elasticsearch index to which the documents are sent. | +| IndexSuffix | INDEX_SUFFIX | - | The suffix added to the index name (using current date) in a format pattern suitable for `DateTimeFormatter`. | +| Url | ELASTIC_URL | - | The address of Elasticsearch in the format `scheme://host:port`. | +| User | ELASTIC_USER | - | The username to use as credentials to access Elasticsearch. | +| Password | ELASTIC_PASSWORD | - | The password to use as credentials to access Elasticsearch. | +| ApiKey | ELASTIC_API_KEY | - | The API key (already encoded) to use as credentials to access Elasticsearch. | +| FlushThreshold | - | `100` | The threshold number of messages triggering the transmission of documents to the server. | +| FlushInterval | - | `5000` | The time (ms) between two automatic flushes, which are triggering the transmission of logs, even if not reaching the defined threshold. 
| + +Note that `Url` is the only mandatory configuration, except if you need to overwrite the default value of another ones. + +## XML file example ```xml @@ -73,18 +67,16 @@ another ones. - myApplication - local - com.chavaillaz.appender.log4j.converter.DefaultEventConverter - http://localhost:9300 - ha - -yyyy.MM.dd - elastic - changeme - true - 10 - 1000 - 1000 + myApplication + local + com.chavaillaz.appender.log4j.DefaultLogConverter + ha + -yyyy.MM.dd + http://localhost:9300 + elastic + changeme + 100 + 5000 @@ -96,75 +88,6 @@ another ones. ``` -### XML file example (log4j version 1) - -```xml - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -``` - -### Property file example (log4j version 1) - -``` -log4j.appender.ELASTIC=com.chavaillaz.appender.log4j.ElasticsearchAppender -log4j.appender.ELASTIC.applicationName=myApplication -log4j.appender.ELASTIC.environmentName=myEnvironment -log4j.appender.ELASTIC.elasticConverter=com.package.CustomEventConverter -log4j.appender.ELASTIC.elasticUrl=myElasticsearchUrl -log4j.appender.ELASTIC.elasticIndex=myIndex -log4j.appender.ELASTIC.elasticIndexSuffix=-yyyy.MM.dd -log4j.appender.ELASTIC.elasticUser=myUser -log4j.appender.ELASTIC.elasticPassword=myPassword -log4j.appender.ELASTIC.elasticParallelExecution=true -log4j.appender.ELASTIC.elasticBatchSize=10 -log4j.appender.ELASTIC.elasticBatchInitialDelay=1000 -log4j.appender.ELASTIC.elasticBatchDelay=1000 - -# Avoid to propagate the logs from ElasticAppender to itself (cyclic) -log4j.logger.com.chavaillaz.appender=WARN, CONSOLE -log4j.logger.org.apache.http=WARN, CONSOLE -log4j.logger.org.elasticsearch.client=WARN, CONSOLE -log4j.additivity.com.chavaillaz.appender=false -log4j.additivity.org.apache.http=false -log4j.additivity.org.elasticsearch.client=false -``` - ## Contributing If you have a feature request or found a bug, you can: diff --git a/pom.xml b/pom.xml index f3fdc41..b981c25 100644 --- a/pom.xml +++ b/pom.xml @@ -6,7 +6,7 @@ 
com.chavaillaz elasticsearch-log4j-appender - 2.1.0-SNAPSHOT + 3.0.0-SNAPSHOT jar ${project.groupId}.${project.artifactId} diff --git a/src/main/java/com/chavaillaz/appender/CommonUtils.java b/src/main/java/com/chavaillaz/appender/CommonUtils.java new file mode 100644 index 0000000..adab801 --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/CommonUtils.java @@ -0,0 +1,55 @@ +package com.chavaillaz.appender; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.Optional; + +import javax.net.ssl.SSLContext; +import lombok.SneakyThrows; +import lombok.experimental.UtilityClass; +import org.apache.http.conn.ssl.TrustAllStrategy; +import org.apache.http.ssl.SSLContextBuilder; + +/** + * Common utility methods. + */ +@UtilityClass +public class CommonUtils { + + /** + * Searches an environment or system property value. + * + * @param key The key to search in the properties + * @param defaultValue The default value to use in case the given key is not found + * @return The value found in environment/system properties or the given default value + */ + public static String getProperty(String key, String defaultValue) { + return Optional.ofNullable(System.getenv(key)).orElse(System.getProperty(key, defaultValue)); + } + + /** + * Creates a permissive SSL context trusting everything. + * + * @return The SSL context + */ + @SneakyThrows + public static SSLContext createSSLContext() { + return new SSLContextBuilder() + .loadTrustMaterial(null, TrustAllStrategy.INSTANCE) + .build(); + } + + /** + * Gets the current host name of the machine. 
+ * + * @return The host name or {@code localhost} if it cannot be determined + */ + public static String getInitialHostname() { + try { + return InetAddress.getLocalHost().getCanonicalHostName(); + } catch (UnknownHostException e) { + return "localhost"; + } + } + +} diff --git a/src/main/java/com/chavaillaz/appender/LogConfiguration.java b/src/main/java/com/chavaillaz/appender/LogConfiguration.java new file mode 100644 index 0000000..513e369 --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/LogConfiguration.java @@ -0,0 +1,82 @@ +package com.chavaillaz.appender; + +import java.time.Duration; + +/** + * Configuration for logs transmission. + */ +public interface LogConfiguration { + + /** + * Gets the name of the application generating the logs. + * + * @return The application name + */ + String getApplication(); + + /** + * Sets the name of the application generating the logs. + * + * @param application The application name + */ + void setApplication(String application); + + /** + * Gets the name of the host on which the application is running. + * + * @return The host name + */ + String getHost(); + + /** + * Sets the name of the host on which the application is running. + * + * @param host The host name + */ + void setHost(String host); + + /** + * Gets the environment in which the application is running. + * + * @return The environment name + */ + String getEnvironment(); + + /** + * Sets the environment in which the application is running. + * + * @param environment The environment name + */ + void setEnvironment(String environment); + + /** + * Gets the threshold number of messages triggering the transmission of the logs. + * + * @return The threshold number of messages + */ + long getFlushThreshold(); + + /** + * Sets the threshold number of messages triggering the transmission of the logs. 
+ * + * @param messageNumber The threshold number of messages + */ + void setFlushThreshold(long messageNumber); + + /** + * Gets the maximum time interval between two flushes. + * After each interval, the transmission of logs is triggered, even if not reaching the threshold number of messages. + * + * @return The maximum interval between two flushes + */ + Duration getFlushInterval(); + + /** + * Sets the maximum time interval between two flushes. + * After each interval, the transmission of logs is triggered, even if not reaching the threshold number of messages. + * + * @param flushInterval The maximum interval between two flushes + */ + void setFlushInterval(Duration flushInterval); + +} diff --git a/src/main/java/com/chavaillaz/appender/LogDelivery.java b/src/main/java/com/chavaillaz/appender/LogDelivery.java new file mode 100644 index 0000000..5026a68 --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/LogDelivery.java @@ -0,0 +1,31 @@ +package com.chavaillaz.appender; + +import java.util.List; +import java.util.Map; + +/** + * Transmission of logs to an external monitoring system. + */ +public interface LogDelivery extends AutoCloseable { + + /** + * Sends the given record data. + * + * @param document The record data + */ + void send(Map document); + + /** + * Sends the given list of record data. + * + * @param documents The list of record data + */ + void send(List> documents); + + /** + * Sends the pending records (generating a partially filled batch). + * Intended to be called periodically to clean out pending messages. 
+ */ + void flush(); + +} diff --git a/src/main/java/com/chavaillaz/appender/log4j/AbstractLogDeliveryAppender.java b/src/main/java/com/chavaillaz/appender/log4j/AbstractLogDeliveryAppender.java new file mode 100644 index 0000000..106f1de --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/log4j/AbstractLogDeliveryAppender.java @@ -0,0 +1,94 @@ +package com.chavaillaz.appender.log4j; + +import static java.lang.Thread.currentThread; +import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; +import static java.util.concurrent.TimeUnit.MILLISECONDS; + +import java.util.concurrent.ScheduledExecutorService; +import java.util.concurrent.TimeUnit; + +import com.chavaillaz.appender.LogConfiguration; +import com.chavaillaz.appender.LogDelivery; +import lombok.Getter; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.appender.AbstractAppender; +import org.apache.logging.log4j.core.config.Property; + +/** + * Abstract appender with basic implementation for transmissions of logs from Log4j2. 
+ * + * @param The configuration type + */ +public abstract class AbstractLogDeliveryAppender extends AbstractAppender { + + private final ScheduledExecutorService threadPool = newSingleThreadScheduledExecutor(); + + @Getter + private final C logConfiguration; + + @Getter + private LogDelivery logDeliveryHandler; + + protected AbstractLogDeliveryAppender(String name, Filter filter, Layout layout, C configuration) { + super(name, filter, layout, true, Property.EMPTY_ARRAY); + this.logConfiguration = configuration; + } + + @Override + public void start() { + try { + logDeliveryHandler = createDeliveryHandler(); + } catch (Exception e) { + error("Client configuration error", e); + } + + if (logConfiguration.getFlushThreshold() > 1) { + threadPool.scheduleWithFixedDelay( + logDeliveryHandler::flush, + logConfiguration.getFlushInterval().toMillis(), + logConfiguration.getFlushInterval().toMillis(), + MILLISECONDS); + } + + super.start(); + } + + /** + * Creates the client for logs transmission. + */ + public abstract LogDelivery createDeliveryHandler(); + + @Override + public void append(LogEvent loggingEvent) { + threadPool.submit(createDeliveryTask(loggingEvent)); + } + + /** + * Creates a runnable in order to transmit the given log. 
+ * + * @param loggingEvent The logging event to send + */ + public abstract Runnable createDeliveryTask(LogEvent loggingEvent); + + @Override + public boolean stop(long timeout, TimeUnit timeUnit) { + try { + threadPool.shutdown(); + threadPool.awaitTermination(timeout, timeUnit); + } catch (InterruptedException e) { + error("Thread interrupted during termination", e); + currentThread().interrupt(); + } finally { + try { + logDeliveryHandler.close(); + } catch (Exception e) { + error("Log delivery closing error", e); + } + super.stop(timeout, timeUnit); + } + return true; + } + +} diff --git a/src/main/java/com/chavaillaz/appender/log4j/converter/DefaultEventConverter.java b/src/main/java/com/chavaillaz/appender/log4j/DefaultLogConverter.java similarity index 75% rename from src/main/java/com/chavaillaz/appender/log4j/converter/DefaultEventConverter.java rename to src/main/java/com/chavaillaz/appender/log4j/DefaultLogConverter.java index 2170bb2..91fcc15 100644 --- a/src/main/java/com/chavaillaz/appender/log4j/converter/DefaultEventConverter.java +++ b/src/main/java/com/chavaillaz/appender/log4j/DefaultLogConverter.java @@ -1,4 +1,4 @@ -package com.chavaillaz.appender.log4j.converter; +package com.chavaillaz.appender.log4j; import java.io.PrintWriter; import java.io.StringWriter; @@ -8,13 +8,13 @@ import java.util.Map; import java.util.Optional; -import com.chavaillaz.appender.log4j.ElasticsearchAppender; +import com.chavaillaz.appender.LogConfiguration; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.LogEvent; import org.apache.logging.log4j.message.Message; /** - * Default converter converting the following fields: + * Default implementation converting the following fields: *
    *
  • datetime: Date of logging event
  • *
  • host: Taken from appender configuration
  • @@ -32,27 +32,29 @@ *
  • stacktrace: Stacktrace of the exception
  • *
*/ -public class DefaultEventConverter implements EventConverter { +public class DefaultLogConverter implements LogConverter { + + private LogConfiguration configuration; @Override - public String getDateField() { - return "datetime"; + public void configure(LogConfiguration configuration) { + this.configuration = configuration; } @Override - public Map convert(ElasticsearchAppender appender, LogEvent event) { + public Map convert(LogEvent event) { Map data = new HashMap<>(); - writeBasic(data, appender, event); + writeBasic(data, event); writeThrowable(data, event); writeMDC(data, event); return data; } - protected void writeBasic(Map json, ElasticsearchAppender appender, LogEvent event) { - json.put(getDateField(), Instant.ofEpochMilli(event.getInstant().getEpochMillisecond()).toString()); - json.put("host", appender.getConfiguration().getHostName()); - json.put("environment", appender.getConfiguration().getEnvironmentName()); - json.put("application", appender.getConfiguration().getApplicationName()); + protected void writeBasic(Map json, LogEvent event) { + json.put("datetime", Instant.ofEpochMilli(event.getInstant().getEpochMillisecond()).toString()); + json.put("host", configuration.getHost()); + json.put("environment", configuration.getEnvironment()); + json.put("application", configuration.getApplication()); json.put("logger", event.getLoggerFqcn()); json.put("level", Optional.of(event) .map(LogEvent::getLevel) diff --git a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchAppender.java b/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchAppender.java deleted file mode 100644 index 91be756..0000000 --- a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchAppender.java +++ /dev/null @@ -1,199 +0,0 @@ -package com.chavaillaz.appender.log4j; - -import static com.chavaillaz.appender.log4j.ElasticsearchUtils.getInitialHostname; -import static com.chavaillaz.appender.log4j.ElasticsearchUtils.getProperty; -import static 
java.lang.Thread.currentThread; -import static java.util.concurrent.Executors.newSingleThreadScheduledExecutor; -import static java.util.concurrent.TimeUnit.MILLISECONDS; -import static org.apache.logging.log4j.core.Appender.ELEMENT_TYPE; -import static org.apache.logging.log4j.core.Core.CATEGORY_NAME; -import static org.apache.logging.log4j.core.layout.PatternLayout.createDefaultLayout; - -import java.util.concurrent.ScheduledExecutorService; -import java.util.concurrent.TimeUnit; - -import com.chavaillaz.appender.log4j.converter.DefaultEventConverter; -import lombok.Getter; -import lombok.Setter; -import lombok.ToString; -import lombok.ToString.Exclude; -import lombok.extern.log4j.Log4j2; -import org.apache.logging.log4j.core.Filter; -import org.apache.logging.log4j.core.Layout; -import org.apache.logging.log4j.core.LogEvent; -import org.apache.logging.log4j.core.appender.AbstractAppender; -import org.apache.logging.log4j.core.config.Property; -import org.apache.logging.log4j.core.config.plugins.Plugin; -import org.apache.logging.log4j.core.config.plugins.PluginBuilderAttribute; -import org.apache.logging.log4j.core.config.plugins.PluginBuilderFactory; -import org.apache.logging.log4j.core.config.plugins.PluginElement; -import org.apache.logging.log4j.core.config.plugins.validation.constraints.Required; - -/** - * Appender using Elasticsearch to store logging events. 
- */ -@Log4j2 -@Getter -@ToString -@Plugin(name = "ElasticsearchAppender", category = CATEGORY_NAME, elementType = ELEMENT_TYPE) -public class ElasticsearchAppender extends AbstractAppender { - - @Exclude - private final ScheduledExecutorService threadPool = newSingleThreadScheduledExecutor(); - private final ElasticsearchConfiguration configuration; - private ElasticsearchSender client; - - protected ElasticsearchAppender(String name, Filter filter, Layout layout, ElasticsearchConfiguration configuration) { - super(name, filter, layout, true, Property.EMPTY_ARRAY); - this.configuration = configuration; - } - - @PluginBuilderFactory - public static Builder builder() { - return new Builder(); - } - - /** - * Gets the Elasticsearch sender. - * - * @return The client to send messages - */ - protected ElasticsearchSender getClient() { - return client; - } - - /** - * Creates the elasticsearch client. - */ - @Override - public void start() { - try { - client = new ElasticsearchSender(configuration); - client.open(); - } catch (Exception e) { - error("Client configuration error", e); - } - - if (configuration.getBatchSize() > 1) { - threadPool.scheduleWithFixedDelay( - client::sendPartialBatches, - configuration.getBatchInitialDelay(), - configuration.getBatchDelay(), - MILLISECONDS); - } - - super.start(); - } - - /** - * Submits the logging event to insert the document if it reaches the severity threshold. - * - * @param loggingEvent The logging event to send. - */ - @Override - public void append(LogEvent loggingEvent) { - ElasticsearchAppenderTask task = new ElasticsearchAppenderTask(this, loggingEvent); - if (configuration.isParallelExecution()) { - threadPool.submit(task); - } else { - task.call(); - } - } - - /** - * Closes Elasticsearch client. 
- */ - @Override - public boolean stop(long timeout, TimeUnit timeUnit) { - threadPool.shutdown(); - try { - threadPool.awaitTermination(timeout, timeUnit); - } catch (InterruptedException e) { - error("Thread interrupted during termination", e); - currentThread().interrupt(); - } finally { - client.close(); - super.stop(timeout, timeUnit); - } - return true; - } - - @Setter - @Getter - public static class Builder implements org.apache.logging.log4j.core.util.Builder { - - @PluginBuilderAttribute - @Required(message = "No appender name provided") - private String name; - - @PluginElement("Layout") - private Layout layout = createDefaultLayout(); - - @PluginElement("Filter") - private Filter filter; - - @PluginBuilderAttribute("ApplicationName") - private String applicationName = getProperty("APPLICATION", "unknown"); - - @PluginBuilderAttribute("HostName") - private String hostName = getProperty("HOST", getInitialHostname()); - - @PluginBuilderAttribute("EnvironmentName") - private String environmentName = getProperty("ENV", "local"); - - @PluginBuilderAttribute("ElasticConverter") - private String elasticConverter = getProperty("CONVERTER", DefaultEventConverter.class.getName()); - - @PluginBuilderAttribute("ElasticIndex") - private String elasticIndex = getProperty("INDEX", "ha"); - - @PluginBuilderAttribute("ElasticIndexSuffix") - private String elasticIndexSuffix = getProperty("INDEX_SUFFIX", "-yyyy.MM.dd"); - - @PluginBuilderAttribute("ElasticUrl") - private String elasticUrl; - - @PluginBuilderAttribute("ElasticUser") - private String elasticUser; - - @PluginBuilderAttribute("ElasticPassword") - private String elasticPassword; - - @PluginBuilderAttribute("ElasticApiKey") - private String elasticApiKey; - - @PluginBuilderAttribute("ElasticParallelExecution") - private boolean elasticParallelExecution = true; - - @PluginBuilderAttribute("ElasticBatchSize") - private int elasticBatchSize = 1; - - @PluginBuilderAttribute("ElasticBatchDelay") - private long 
elasticBatchDelay = 1000; - - @PluginBuilderAttribute("ElasticBatchInitialDelay") - private long elasticBatchInitialDelay = 1000; - - @Override - public ElasticsearchAppender build() { - ElasticsearchConfiguration configuration = new ElasticsearchConfiguration(); - configuration.setApplicationName(getApplicationName()); - configuration.setHostName(getHostName()); - configuration.setEnvironmentName(getEnvironmentName()); - configuration.setEventConverter(getElasticConverter()); - configuration.setIndex(getElasticIndex()); - configuration.setIndexSuffix(getElasticIndexSuffix()); - configuration.setUrl(getElasticUrl()); - configuration.setUser(getElasticUser()); - configuration.setPassword(getElasticPassword()); - configuration.setApiKey(getElasticApiKey()); - configuration.setParallelExecution(isElasticParallelExecution()); - configuration.setBatchSize(getElasticBatchSize()); - configuration.setBatchDelay(getElasticBatchDelay()); - configuration.setBatchInitialDelay(getElasticBatchInitialDelay()); - return new ElasticsearchAppender(getName(), getFilter(), getLayout(), configuration); - } - - } - -} \ No newline at end of file diff --git a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTask.java b/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTask.java deleted file mode 100644 index b2c62fe..0000000 --- a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTask.java +++ /dev/null @@ -1,42 +0,0 @@ -package com.chavaillaz.appender.log4j; - -import java.util.concurrent.Callable; - -import com.chavaillaz.appender.log4j.converter.EventConverter; -import org.apache.logging.log4j.core.LogEvent; - -/** - * Simple callable that inserts the logging event into Elasticsearch. - */ -public class ElasticsearchAppenderTask implements Callable { - - private final ElasticsearchAppender appender; - private final LogEvent loggingEvent; - - /** - * Creates a new task to send an event to Elasticsearch. 
- * - * @param appender The appender from which the task is launched - * @param loggingEvent The logging event to send - */ - public ElasticsearchAppenderTask(ElasticsearchAppender appender, LogEvent loggingEvent) { - this.appender = appender; - this.loggingEvent = loggingEvent; - } - - /** - * Called by the executor service and inserts the document into Elasticsearch. - * - * @return The logging event - */ - @Override - public LogEvent call() { - if (appender.getClient() != null) { - EventConverter converter = appender.getClient().getConfiguration().getEventConverter(); - appender.getClient().send(converter.convert(appender, loggingEvent)); - } - return loggingEvent; - } - -} - diff --git a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchConfiguration.java b/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchConfiguration.java deleted file mode 100644 index 4f63d98..0000000 --- a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchConfiguration.java +++ /dev/null @@ -1,75 +0,0 @@ -package com.chavaillaz.appender.log4j; - -import java.lang.reflect.Constructor; -import java.time.OffsetDateTime; -import java.time.format.DateTimeFormatter; - -import com.chavaillaz.appender.log4j.converter.DefaultEventConverter; -import com.chavaillaz.appender.log4j.converter.EventConverter; -import lombok.Getter; -import lombok.Setter; -import lombok.extern.log4j.Log4j2; - -@Log4j2 -@Getter -@Setter -public class ElasticsearchConfiguration { - - private String applicationName; - private String hostName; - private String environmentName; - private EventConverter eventConverter; - private String index; - private DateTimeFormatter indexSuffixFormatter; - private String url; - private String user; - private String password; - private String apiKey; - private boolean parallelExecution; - private int batchSize; - private long batchDelay; - private long batchInitialDelay; - - /** - * Sets the event converter by instantiating the given class name. 
- * Note that it has to have an empty constructor. - * - * @param className The class name of the converter - */ - public void setEventConverter(String className) { - try { - Class clazz = Class.forName(className); - Constructor constructor = clazz.getConstructor(); - this.eventConverter = (EventConverter) constructor.newInstance(); - } catch (Exception e) { - this.eventConverter = new DefaultEventConverter(); - } - } - - /** - * Sets the date suffix to use when generating the index to which send the documents. - * Note that it has to follow a pattern recognized by {@link DateTimeFormatter}. - * - * @param indexSuffix The index suffix - */ - public void setIndexSuffix(String indexSuffix) { - this.indexSuffixFormatter = DateTimeFormatter.ofPattern(indexSuffix); - } - - /** - * Generates the index name using the suffix if present. - * - * @param utcDateTime The date and time of the event in the format {@link DateTimeFormatter#ISO_OFFSET_DATE_TIME} - * or {@code null} to avoid using the index prefix if defined - * @return The computed index name - */ - public String generateIndexName(String utcDateTime) { - if (utcDateTime != null && getIndexSuffixFormatter() != null) { - OffsetDateTime odt = OffsetDateTime.parse(utcDateTime); - return getIndex() + odt.format(getIndexSuffixFormatter()); - } else { - return getIndex(); - } - } - -} diff --git a/src/main/java/com/chavaillaz/appender/log4j/LogConverter.java b/src/main/java/com/chavaillaz/appender/log4j/LogConverter.java new file mode 100644 index 0000000..3106ea9 --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/log4j/LogConverter.java @@ -0,0 +1,28 @@ +package com.chavaillaz.appender.log4j; + +import java.util.Map; + +import com.chavaillaz.appender.LogConfiguration; +import org.apache.logging.log4j.core.LogEvent; + +/** + * Conversion of events to records for logs transmission. + */ +public interface LogConverter { + + /** + * Converts a logging event into multiple properties (key/value) to log. 
+ * + * @param event The logging event + * @return The {@link Map} containing the properties to send + */ + Map convert(LogEvent event); + + /** + * Configures the converter with the given settings. + * + * @param configuration The configuration + */ + void configure(LogConfiguration configuration); + +} diff --git a/src/main/java/com/chavaillaz/appender/log4j/converter/EventConverter.java b/src/main/java/com/chavaillaz/appender/log4j/converter/EventConverter.java deleted file mode 100644 index 5dd057d..0000000 --- a/src/main/java/com/chavaillaz/appender/log4j/converter/EventConverter.java +++ /dev/null @@ -1,29 +0,0 @@ -package com.chavaillaz.appender.log4j.converter; - -import java.util.Map; - -import com.chavaillaz.appender.log4j.ElasticsearchAppender; -import org.apache.logging.log4j.core.LogEvent; - -/** - * Interface to implement in order to customize the creation of key/value document from the event that will be stored. - */ -public interface EventConverter { - - /** - * Gets the field used as date of the event. - * - * @return The date field name - */ - String getDateField(); - - /** - * Converts a logging event into multiple properties (key/value) to log in Elasticsearch. 
- * - * @param appender The logging appender - * @param event The logging event - * @return The {@link Map} containing the properties to send - */ - Map convert(ElasticsearchAppender appender, LogEvent event); - -} diff --git a/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchAppender.java b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchAppender.java new file mode 100644 index 0000000..fefc75c --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchAppender.java @@ -0,0 +1,126 @@ +package com.chavaillaz.appender.log4j.elastic; + +import static com.chavaillaz.appender.CommonUtils.getInitialHostname; +import static com.chavaillaz.appender.CommonUtils.getProperty; +import static org.apache.logging.log4j.core.Appender.ELEMENT_TYPE; +import static org.apache.logging.log4j.core.Core.CATEGORY_NAME; +import static org.apache.logging.log4j.core.layout.PatternLayout.createDefaultLayout; + +import java.time.Duration; + +import com.chavaillaz.appender.LogDelivery; +import com.chavaillaz.appender.log4j.AbstractLogDeliveryAppender; +import com.chavaillaz.appender.log4j.DefaultLogConverter; +import com.chavaillaz.appender.log4j.LogConverter; +import lombok.Getter; +import lombok.Setter; +import org.apache.logging.log4j.core.Filter; +import org.apache.logging.log4j.core.Layout; +import org.apache.logging.log4j.core.LogEvent; +import org.apache.logging.log4j.core.config.plugins.Plugin; +import org.apache.logging.log4j.core.config.plugins.PluginBuilderAttribute; +import org.apache.logging.log4j.core.config.plugins.PluginBuilderFactory; +import org.apache.logging.log4j.core.config.plugins.PluginElement; +import org.apache.logging.log4j.core.config.plugins.validation.constraints.Required; + +/** + * Appender implementation using Elasticsearch for transmissions of logs from Log4j. 
+ */ +@Plugin(name = "ElasticsearchAppender", category = CATEGORY_NAME, elementType = ELEMENT_TYPE) +public class ElasticsearchAppender extends AbstractLogDeliveryAppender { + + protected ElasticsearchAppender(String name, Filter filter, Layout layout, ElasticsearchConfiguration configuration) { + super(name, filter, layout, configuration); + } + + @PluginBuilderFactory + public static Builder builder() { + return new Builder(); + } + + @Override + public LogDelivery createDeliveryHandler() { + return new ElasticsearchLogDelivery(getLogConfiguration()); + } + + @Override + public Runnable createDeliveryTask(LogEvent loggingEvent) { + return () -> { + if (getLogDeliveryHandler() != null) { + LogConverter converter = getLogConfiguration().getConverter(); + getLogDeliveryHandler().send(converter.convert(loggingEvent)); + } + }; + } + + @Setter + @Getter + public static class Builder implements org.apache.logging.log4j.core.util.Builder { + + @PluginBuilderAttribute + @Required(message = "No appender name provided") + private String name; + + @PluginElement("Layout") + private Layout layout = createDefaultLayout(); + + @PluginElement("Filter") + private Filter filter; + + @PluginBuilderAttribute("Application") + private String applicationName = getProperty("APP", "unknown"); + + @PluginBuilderAttribute("Host") + private String hostName = getProperty("HOST", getInitialHostname()); + + @PluginBuilderAttribute("Environment") + private String environmentName = getProperty("ENV", "local"); + + @PluginBuilderAttribute("Converter") + private String elasticConverter = getProperty("CONVERTER", DefaultLogConverter.class.getName()); + + @PluginBuilderAttribute("Index") + private String elasticIndex = getProperty("INDEX", "ha"); + + @PluginBuilderAttribute("IndexSuffix") + private String elasticIndexSuffix = getProperty("INDEX_SUFFIX", ""); + + @PluginBuilderAttribute("Url") + private String elasticUrl = getProperty("ELASTIC_URL", null); + + @PluginBuilderAttribute("User") + 
private String elasticUser = getProperty("ELASTIC_USER", null); + + @PluginBuilderAttribute("Password") + private String elasticPassword = getProperty("ELASTIC_PASSWORD", null); + + @PluginBuilderAttribute("ApiKey") + private String elasticApiKey = getProperty("ELASTIC_API_KEY", null); + + @PluginBuilderAttribute("FlushThreshold") + private long flushThreshold = 100; + + @PluginBuilderAttribute("FlushInterval") + private long flushInterval = 5_000; + + @Override + public ElasticsearchAppender build() { + ElasticsearchConfiguration configuration = new ElasticsearchConfiguration(); + configuration.setApplication(getApplicationName()); + configuration.setHost(getHostName()); + configuration.setEnvironment(getEnvironmentName()); + configuration.setConverter(getElasticConverter()); + configuration.setIndex(getElasticIndex()); + configuration.setIndexSuffix(getElasticIndexSuffix()); + configuration.setUrl(getElasticUrl()); + configuration.setUser(getElasticUser()); + configuration.setPassword(getElasticPassword()); + configuration.setApiKey(getElasticApiKey()); + configuration.setFlushThreshold(getFlushThreshold()); + configuration.setFlushInterval(Duration.ofMillis(getFlushInterval())); + return new ElasticsearchAppender(getName(), getFilter(), getLayout(), configuration); + } + + } + +} \ No newline at end of file diff --git a/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchConfiguration.java b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchConfiguration.java new file mode 100644 index 0000000..ebc2ff1 --- /dev/null +++ b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchConfiguration.java @@ -0,0 +1,91 @@ +package com.chavaillaz.appender.log4j.elastic; + +import static org.apache.commons.lang3.StringUtils.EMPTY; + +import java.lang.reflect.Constructor; +import java.time.Duration; +import java.time.OffsetDateTime; +import java.time.format.DateTimeFormatter; +import java.util.Optional; + +import 
com.chavaillaz.appender.LogConfiguration; +import com.chavaillaz.appender.log4j.DefaultLogConverter; +import com.chavaillaz.appender.log4j.LogConverter; +import lombok.Getter; +import lombok.Setter; + +/** + * Configuration for logs transmissions using Elasticsearch. + */ +@Getter +@Setter +public class ElasticsearchConfiguration implements LogConfiguration { + + private String application; + private String host; + private String environment; + private LogConverter converter; + private String index; + private DateTimeFormatter indexSuffix; + private String url; + private String user; + private String password; + private String apiKey; + private long flushThreshold; + private Duration flushInterval; + + /** + * Sets the logs converter by instantiating the given class name. + * Note that it must have an empty constructor. + * + * @param className The class name of the converter + */ + public void setConverter(String className) { + try { + Class clazz = Class.forName(className); + Constructor constructor = clazz.getConstructor(); + this.converter = (LogConverter) constructor.newInstance(); + } catch (Exception e) { + this.converter = new DefaultLogConverter(); + } + this.converter.configure(this); + } + + /** + * Sets the date suffix to use when generating the index to which send the documents. + * Note that it has to follow a pattern recognized by {@link DateTimeFormatter}. + * + * @param indexSuffix The index suffix + */ + public void setIndexSuffix(String indexSuffix) { + this.indexSuffix = DateTimeFormatter.ofPattern(Optional.ofNullable(indexSuffix).orElse(EMPTY)); + } + + /** + * Generates the index name using the suffix if present. 
+ * + * @param dateTime The date and time of the event in the format {@link DateTimeFormatter#ISO_OFFSET_DATE_TIME} + * or {@code null} to avoid using the index suffix if defined + * @return The computed index name + */ + public String generateIndexName(String dateTime) { + return generateIndexName(Optional.ofNullable(dateTime) + .map(OffsetDateTime::parse) + .orElse(null)); + } + + /** + * Generates the index name using the suffix if present. + * + * @param dateTime The date and time of the event or {@code null} to avoid using the index suffix if defined + * @return The computed index name + */ + public String generateIndexName(OffsetDateTime dateTime) { + if (dateTime != null && getIndexSuffix() != null) { + return getIndex() + dateTime.format(getIndexSuffix()); + } else { + return getIndex(); + } + } + +} diff --git a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchSender.java b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchLogDelivery.java similarity index 61% rename from src/main/java/com/chavaillaz/appender/log4j/ElasticsearchSender.java rename to src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchLogDelivery.java index 0c2688a..6576111 100644 --- a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchSender.java +++ b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchLogDelivery.java @@ -1,7 +1,8 @@ -package com.chavaillaz.appender.log4j; +package com.chavaillaz.appender.log4j.elastic; -import static com.chavaillaz.appender.log4j.ElasticsearchUtils.createClient; -import static com.chavaillaz.appender.log4j.ElasticsearchUtils.createSSLContext; +import static com.chavaillaz.appender.CommonUtils.createSSLContext; +import static com.chavaillaz.appender.log4j.elastic.ElasticsearchUtils.createClient; +import static java.time.OffsetDateTime.now; import static java.util.Collections.singletonList; import static org.apache.commons.lang3.StringUtils.isNotBlank; @@ -12,14 +13,15 @@ import 
co.elastic.clients.elasticsearch.ElasticsearchClient; import co.elastic.clients.elasticsearch.core.BulkRequest; import co.elastic.clients.elasticsearch.core.BulkResponse; +import com.chavaillaz.appender.LogDelivery; import lombok.Getter; import lombok.extern.log4j.Log4j2; /** - * Data sender to Elasticsearch. + * Implementation of logs transmission for Elasticsearch. */ @Log4j2 -public class ElasticsearchSender implements AutoCloseable { +public class ElasticsearchLogDelivery implements LogDelivery { private final List> batch = new ArrayList<>(); @@ -27,22 +29,16 @@ public class ElasticsearchSender implements AutoCloseable { private final ElasticsearchConfiguration configuration; @Getter - private ElasticsearchClient client; + private final ElasticsearchClient client; /** - * Creates a new data sender. + * Creates a new logs delivery handler for Elasticsearch. * * @param configuration The configuration to use */ - public ElasticsearchSender(ElasticsearchConfiguration configuration) { + public ElasticsearchLogDelivery(ElasticsearchConfiguration configuration) { this.configuration = configuration; - } - /** - * Opens the connection to Elasticsearch. - * This method has to be called before sending data. - */ - public void open() { if (isNotBlank(configuration.getApiKey())) { client = createClient(configuration.getUrl(), createSSLContext(), configuration.getApiKey()); } else { @@ -50,46 +46,35 @@ public void open() { } } - /** - * Sends the record data in Elasticsearch. - * - * @param record The record data - */ - public void send(Map record) { - send(singletonList(record)); + @Override + public void send(Map document) { + send(singletonList(document)); } - /** - * Sends the list of record data in Elasticsearch. 
- * - * @param records The list of record data - */ - public void send(List> records) { - if (records != null && !records.isEmpty()) { - stackAndSend(records); + @Override + public void send(List> documents) { + if (documents != null && !documents.isEmpty()) { + stackAndSend(documents); } } - /** - * Sends a partially filled batch. - * Intended to be called periodically to clean out pending log messages - */ - public synchronized void sendPartialBatches() { + @Override + public synchronized void flush() { if (!batch.isEmpty()) { - log.debug("Sending partial batch of {}/{}", batch.size(), configuration.getBatchSize()); + log.debug("Sending partial batch of {}/{}", batch.size(), configuration.getFlushThreshold()); sendBatch(); } } private synchronized void stackAndSend(List> data) { batch.addAll(data); - log.trace("Batch size {}/{}", batch.size(), configuration.getBatchSize()); - if (batch.size() >= configuration.getBatchSize()) { + log.trace("Batch size {}/{}", batch.size(), configuration.getFlushThreshold()); + if (batch.size() >= configuration.getFlushThreshold()) { sendBatch(); } } - private void sendBatch() { + private synchronized void sendBatch() { if (!batch.isEmpty() && sendBulk(batch)) { batch.clear(); } @@ -99,10 +84,9 @@ private boolean sendBulk(List> documents) { try { BulkRequest.Builder builder = new BulkRequest.Builder(); for (Map document : documents) { - String dateTime = document.get(configuration.getEventConverter().getDateField()).toString(); builder.operations(operation -> operation .index(index -> index - .index(configuration.generateIndexName(dateTime)) + .index(configuration.generateIndexName(now())) .document(document))); } @@ -118,7 +102,7 @@ private boolean sendBulk(List> documents) { } @Override - public synchronized void close() { + public void close() { if (client != null) { sendBatch(); } diff --git a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchUtils.java 
b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchUtils.java similarity index 56% rename from src/main/java/com/chavaillaz/appender/log4j/ElasticsearchUtils.java rename to src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchUtils.java index 7f3d3d5..58f46aa 100644 --- a/src/main/java/com/chavaillaz/appender/log4j/ElasticsearchUtils.java +++ b/src/main/java/com/chavaillaz/appender/log4j/elastic/ElasticsearchUtils.java @@ -1,45 +1,28 @@ -package com.chavaillaz.appender.log4j; +package com.chavaillaz.appender.log4j.elastic; import static org.apache.http.auth.AuthScope.ANY; -import java.net.InetAddress; -import java.net.UnknownHostException; -import java.util.Optional; - import co.elastic.clients.elasticsearch.ElasticsearchClient; import co.elastic.clients.json.jackson.JacksonJsonpMapper; import co.elastic.clients.transport.rest_client.RestClientTransport; import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import javax.net.ssl.SSLContext; -import lombok.SneakyThrows; import lombok.experimental.UtilityClass; -import lombok.extern.log4j.Log4j2; import org.apache.http.Header; import org.apache.http.HttpHost; import org.apache.http.auth.UsernamePasswordCredentials; import org.apache.http.client.CredentialsProvider; import org.apache.http.conn.ssl.NoopHostnameVerifier; -import org.apache.http.conn.ssl.TrustAllStrategy; import org.apache.http.impl.client.BasicCredentialsProvider; import org.apache.http.message.BasicHeader; -import org.apache.http.ssl.SSLContextBuilder; import org.elasticsearch.client.RestClient; -@Log4j2 +/** + * Elasticsearch specific utility methods. + */ @UtilityClass public class ElasticsearchUtils { - /** - * Searches an environment or system property value. 
- * - * @param key The key to search in the properties - * @param defaultValue The default value to use in case the given key is not found - * @return The value found in environment/system properties or the given default value - */ - public static String getProperty(String key, String defaultValue) { - return Optional.ofNullable(System.getenv(key)).orElse(System.getProperty(key, defaultValue)); - } - /** * Creates a new Elasticsearch client. * @@ -52,50 +35,47 @@ public static String getProperty(String key, String defaultValue) { public static ElasticsearchClient createClient(String url, SSLContext sslContext, String username, String password) { CredentialsProvider credentialsProvider = new BasicCredentialsProvider(); credentialsProvider.setCredentials(ANY, new UsernamePasswordCredentials(username, password)); - return createClient(createRestClient(url, sslContext, credentialsProvider)); + return createClient(RestClient + .builder(HttpHost.create(url)) + .setHttpClientConfigCallback(httpClientBuilder -> + httpClientBuilder + .setDefaultCredentialsProvider(credentialsProvider) + .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) + .setSSLContext(sslContext)) + .build()); } + /** + * Creates a new Elasticsearch client. 
+ * + * @param url The URL of the Elasticsearch instance to reach + * @param sslContext The secure socket protocol implementation + * @param apiKey The encoded API key to authenticate + * @return The Elasticsearch client with the given configuration + */ public static ElasticsearchClient createClient(String url, SSLContext sslContext, String apiKey) { Header headerApiKey = new BasicHeader("Authorization", "ApiKey " + apiKey); - return createClient(createRestClient(url, sslContext, null, headerApiKey)); - } - - public static ElasticsearchClient createClient(RestClient restClient) { - JacksonJsonpMapper jsonMapper = new JacksonJsonpMapper(); - jsonMapper.objectMapper().registerModule(new JavaTimeModule()); - return new ElasticsearchClient(new RestClientTransport(restClient, jsonMapper)); - } - - public static RestClient createRestClient(String url, SSLContext sslContext, CredentialsProvider credentialsProvider, Header... defaultHeaders) { - return RestClient.builder(HttpHost.create(url)) - .setDefaultHeaders(defaultHeaders) + return createClient(RestClient + .builder(HttpHost.create(url)) + .setDefaultHeaders(new Header[]{headerApiKey}) .setHttpClientConfigCallback(httpClientBuilder -> httpClientBuilder - .setDefaultCredentialsProvider(credentialsProvider) .setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE) .setSSLContext(sslContext)) - .build(); - } - - @SneakyThrows - public static SSLContext createSSLContext() { - return new SSLContextBuilder() - .loadTrustMaterial(null, TrustAllStrategy.INSTANCE) - .build(); + .build()); } /** - * Gets the current host name of the machine. + * Creates a new Elasticsearch client using the given REST client + * and using a customized JSON Mapper with Java 8 Date/Time Module. 
* - * @return The host name or {@code localhost} if it cannot be determined + * @param restClient The REST client to use + * @return The Elasticsearch client with the given configuration */ - public static String getInitialHostname() { - try { - return InetAddress.getLocalHost().getCanonicalHostName(); - } catch (UnknownHostException e) { - log.warn("Host cannot be determined", e); - return "localhost"; - } + public static ElasticsearchClient createClient(RestClient restClient) { + JacksonJsonpMapper jsonMapper = new JacksonJsonpMapper(); + jsonMapper.objectMapper().registerModule(new JavaTimeModule()); + return new ElasticsearchClient(new RestClientTransport(restClient, jsonMapper)); } } diff --git a/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTest.java b/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTest.java index b5c6e82..85262b6 100644 --- a/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTest.java +++ b/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchAppenderTest.java @@ -1,6 +1,6 @@ package com.chavaillaz.appender.log4j; -import static com.chavaillaz.appender.log4j.ElasticsearchUtils.createClient; +import static com.chavaillaz.appender.log4j.elastic.ElasticsearchUtils.createClient; import static java.net.InetAddress.getLocalHost; import static java.time.Duration.ofSeconds; import static org.apache.logging.log4j.Level.INFO; @@ -17,17 +17,18 @@ import co.elastic.clients.elasticsearch._types.Refresh; import co.elastic.clients.elasticsearch.core.search.Hit; import co.elastic.clients.elasticsearch.security.CreateApiKeyRequest; +import com.chavaillaz.appender.log4j.elastic.ElasticsearchAppender; import javax.net.ssl.SSLContext; import org.apache.logging.log4j.ThreadContext; import org.apache.logging.log4j.core.impl.Log4jLogEvent; import org.apache.logging.log4j.message.SimpleMessage; -import org.junit.jupiter.params.ParameterizedTest; -import org.junit.jupiter.params.provider.ValueSource; +import 
org.junit.jupiter.api.Test; import org.testcontainers.elasticsearch.ElasticsearchContainer; import org.testcontainers.utility.DockerImageName; class ElasticsearchAppenderTest { + // Default user and password for the docker image from Elastic public static final String ELASTICSEARCH_USERNAME = "elastic"; public static final String ELASTICSEARCH_PASSWORD = "changeme"; public static final DockerImageName ELASTICSEARCH_IMAGE = DockerImageName @@ -43,22 +44,17 @@ protected static String createApiKey(ElasticsearchClient client) throws IOExcept .encoded(); } - protected static ElasticsearchAppender createAppender(String application, String hostname, String elastic, boolean parallel) { + protected static ElasticsearchAppender createAppender(String application, String hostname, String elastic) { ElasticsearchAppender.Builder builder = ElasticsearchAppender.builder(); builder.setName("ElasticAppender"); builder.setApplicationName(application); builder.setHostName(hostname); builder.setElasticUrl(elastic); - // Default user and password for the docker image from Elastic builder.setElasticUser(ELASTICSEARCH_USERNAME); builder.setElasticPassword(ELASTICSEARCH_PASSWORD); - // Need to be done synchronously for the test - builder.setElasticParallelExecution(parallel); - builder.setElasticBatchDelay(10); - builder.setElasticBatchSize(5); - ElasticsearchAppender appender = builder.build(); - appender.start(); - return appender; + builder.setFlushInterval(500); + builder.setFlushThreshold(5); + return builder.build(); } protected static List searchLog(ElasticsearchClient client, String index, String id) throws IOException { @@ -76,9 +72,8 @@ protected static List searchLog(ElasticsearchClient client, St .toList(); } - @ParameterizedTest - @ValueSource(booleans = {true, false}) - void systemTestWithElasticsearch(boolean parallel) throws Exception { + @Test + void systemTestWithElasticsearch() throws Exception { try (ElasticsearchContainer container = new 
ElasticsearchContainer(ELASTICSEARCH_IMAGE)) { container.start(); @@ -89,10 +84,11 @@ void systemTestWithElasticsearch(boolean parallel) throws Exception { SSLContext ssl = container.createSslContextFromCa(); ElasticsearchClient client = createClient(address, ssl, ELASTICSEARCH_USERNAME, ELASTICSEARCH_PASSWORD); ElasticsearchClient apiClient = createClient(address, ssl, createApiKey(client)); - ElasticsearchAppender appender = createAppender("myApplication", getLocalHost().getHostName(), address, parallel); + ElasticsearchAppender appender = createAppender("myApplication", getLocalHost().getHostName(), address); ThreadContext.put("key", "value"); // When + appender.start(); Log4jLogEvent event = Log4jLogEvent.newBuilder() .setMessage(new SimpleMessage(id)) .setLoggerFqcn(logger) @@ -104,12 +100,12 @@ void systemTestWithElasticsearch(boolean parallel) throws Exception { appender.stop(); // Then - List logs = searchLog(apiClient, appender.getConfiguration().getIndex(), id); + List logs = searchLog(apiClient, appender.getLogConfiguration().getIndex(), id); assertEquals(1, logs.size()); ElasticsearchLog log = logs.get(0); - assertEquals(appender.getConfiguration().getHostName(), log.getHost()); - assertEquals(appender.getConfiguration().getEnvironmentName(), log.getEnvironment()); - assertEquals(appender.getConfiguration().getApplicationName(), log.getApplication()); + assertEquals(appender.getLogConfiguration().getHost(), log.getHost()); + assertEquals(appender.getLogConfiguration().getEnvironment(), log.getEnvironment()); + assertEquals(appender.getLogConfiguration().getApplication(), log.getApplication()); assertEquals(logger, log.getLogger()); assertEquals(INFO.toString(), log.getLevel()); assertEquals(id, log.getLogmessage()); diff --git a/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchConfigurationTest.java b/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchConfigurationTest.java index 7f78443..ba91bcb 100644 --- 
a/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchConfigurationTest.java +++ b/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchConfigurationTest.java @@ -1,14 +1,13 @@ package com.chavaillaz.appender.log4j; -import com.chavaillaz.appender.log4j.converter.DefaultEventConverter; -import com.chavaillaz.appender.log4j.converter.EventConverter; -import org.junit.jupiter.api.Test; +import static java.time.OffsetDateTime.now; +import static org.assertj.core.api.Assertions.assertThat; import java.time.OffsetDateTime; import java.time.format.DateTimeFormatter; -import static java.time.OffsetDateTime.now; -import static org.assertj.core.api.Assertions.assertThat; +import com.chavaillaz.appender.log4j.elastic.ElasticsearchConfiguration; +import org.junit.jupiter.api.Test; class ElasticsearchConfigurationTest { @@ -34,13 +33,13 @@ void testGenerateIndexNameDate() { void testWrongConverter() { // Given ElasticsearchConfiguration configuration = new ElasticsearchConfiguration(); - configuration.setEventConverter("package.does.not.exist.NotFoundConverter"); + configuration.setConverter("package.does.not.exist.NotFoundConverter"); // When - EventConverter converter = configuration.getEventConverter(); + LogConverter converter = configuration.getConverter(); // Then - assertThat(converter.getClass()).isEqualTo(DefaultEventConverter.class); + assertThat(converter.getClass()).isEqualTo(DefaultLogConverter.class); } } diff --git a/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchLog.java b/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchLog.java index 058dc5e..eb92dde 100644 --- a/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchLog.java +++ b/src/test/java/com/chavaillaz/appender/log4j/ElasticsearchLog.java @@ -1,10 +1,10 @@ package com.chavaillaz.appender.log4j; +import java.time.LocalDateTime; + import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import lombok.Data; -import java.time.LocalDateTime; - @Data 
@JsonIgnoreProperties(ignoreUnknown = true) public class ElasticsearchLog {