package com.newrelic.labs;

import java.util.Map;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;

@JsonIgnoreProperties(ignoreUnknown = true)
public class LogEntry {
	private final String message;
	private final String applicationName;
	private final String name;
	private final String logtype;
	private final long timestamp;
	// Fix: the 7-arg constructor previously accepted these two values and
	// silently discarded them; they are now retained so per-entry data
	// survives (e.g. when an entry is re-queued after a failed send).
	private final Map<String, Object> custom;
	private final boolean mergeCustomFields;

	/**
	 * Full constructor used by the appender; retains the per-entry custom
	 * fields and the merge flag (formerly ignored).
	 */
	public LogEntry(String message, String applicationName, String name, String logtype, long timestamp,
			Map<String, Object> custom, boolean mergeCustomFields) {
		this.message = message;
		this.applicationName = applicationName;
		this.name = name;
		this.logtype = logtype;
		this.timestamp = timestamp;
		this.custom = custom;
		this.mergeCustomFields = mergeCustomFields;
	}

	// Default constructor for Jackson
	public LogEntry() {
		this.message = null;
		this.applicationName = null;
		this.name = null;
		this.logtype = null;
		this.timestamp = 0L;
		this.custom = null;
		this.mergeCustomFields = false;
	}

	/** Jackson creator used when deserializing a re-queued log event. */
	@JsonCreator
	public LogEntry(@JsonProperty("message") String message, @JsonProperty("applicationname") String applicationName,
			@JsonProperty("name") String name, @JsonProperty("logtype") String logtype,
			@JsonProperty("timestamp") long timestamp) {
		this.message = message;
		this.applicationName = applicationName;
		this.name = name;
		this.logtype = logtype;
		this.timestamp = timestamp;
		this.custom = null;
		this.mergeCustomFields = false;
	}

	public String getMessage() {
		return message;
	}

	public String getApplicationName() {
		return applicationName;
	}

	public String getName() {
		return name;
	}

	public String getLogType() {
		return logtype;
	}

	public long getTimestamp() {
		return timestamp;
	}

	/**
	 * Per-entry custom fields; may be null. Excluded from Jackson bean
	 * serialization so the wire payload built via convertValue is unchanged.
	 */
	@JsonIgnore
	public Map<String, Object> getCustom() {
		return custom;
	}

	/** Whether custom fields should be merged as top-level attributes; excluded from serialization. */
	@JsonIgnore
	public boolean isMergeCustomFields() {
		return mergeCustomFields;
	}
}
package com.newrelic.labs;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.zip.GZIPOutputStream;

import com.fasterxml.jackson.databind.ObjectMapper;

import okhttp3.MediaType;
import okhttp3.OkHttpClient;
import okhttp3.Request;
import okhttp3.RequestBody;
import okhttp3.Response;

public class LogForwarder {
	private final BlockingQueue<LogEntry> logQueue;
	private final String apiKey;
	private final String apiURL;
	private final OkHttpClient client;
	private final ObjectMapper objectMapper;
	private final long maxMessageSize;
	// 1.0.5
	private final int maxRetries;
	private final long timeout; // New parameter for connection timeout
	// 1.0.5

	public LogForwarder(String apiKey, String apiURL, long maxMessageSize, BlockingQueue<LogEntry> logQueue,
			int maxRetries, long timeout) {
		this.apiKey = apiKey;
		this.apiURL = apiURL;
		this.maxMessageSize = maxMessageSize;
		this.logQueue = logQueue;
		this.maxRetries = maxRetries;
		this.timeout = timeout;
		this.client = new OkHttpClient.Builder().connectTimeout(timeout, TimeUnit.MILLISECONDS).build();
		this.objectMapper = new ObjectMapper();

	}

	public boolean isInitialized() {
		return apiKey != null && apiURL != null;
	}

	public boolean flush(List<LogEntry> logEntries, boolean mergeCustomFields, Map<String, Object> customFields) {
		InetAddress localhost = null;
		boolean bStatus = false;
		try {
			localhost = InetAddress.getLocalHost();
		} catch (UnknownHostException e) {
			e.printStackTrace();
		}

		String hostname = localhost != null ? localhost.getHostName() : "unknown";

		try {
			List<Map<String, Object>> logEvents = new ArrayList<>();
			for (LogEntry entry : logEntries) {
				Map<String, Object> logEvent = objectMapper.convertValue(entry, LowercaseKeyMap.class);
				logEvent.put("hostname", hostname);
				logEvent.put("logtype", entry.getLogType());
				logEvent.put("timestamp", entry.getTimestamp());
				logEvent.put("applicationName", entry.getApplicationName());
				logEvent.put("name", entry.getName());
				logEvent.put("source", "NRBatchingAppender");

				// Add custom fields
				if (customFields != null) {
					if (mergeCustomFields) {
						// Traverse all keys and add each field separately
						Map<String, Object> customFields1 = customFields;
						for (Map.Entry<String, Object> field : customFields1.entrySet()) {
							logEvent.put(field.getKey(), field.getValue());
						}
					} else {
						// Directly add the custom fields as a single entry
						logEvent.put("custom", customFields);
					}
				}

				logEvents.add(logEvent);
			}

			String jsonPayload = objectMapper.writeValueAsString(logEvents);
			byte[] compressedPayload = gzipCompress(jsonPayload);

			if (compressedPayload.length > maxMessageSize) {
				// System.out.println("splitAndSendLogs: Called size exceeded " +
				// compressedPayload.length);
				bStatus = splitAndSendLogs(logEntries, mergeCustomFields, customFields);
			} else {
				bStatus = sendLogs(logEvents);
			}
		} catch (IOException e) {
			System.err.println("Error during log forwarding: " + e.getMessage());
			bStatus = false;
		}
		return bStatus;
	}

	private boolean splitAndSendLogs(List<LogEntry> logEntries, boolean mergeCustomFields,
			Map<String, Object> customFields) throws IOException {

		List<LogEntry> subBatch = new ArrayList<>();
		int currentSize = 0;
		boolean bStatus = false;
		for (LogEntry entry : logEntries) {
			Map<String, Object> logEvent = objectMapper.convertValue(entry, LowercaseKeyMap.class);
			logEvent.put("hostname", InetAddress.getLocalHost().getHostName());
			logEvent.put("logtype", entry.getLogType());
			logEvent.put("timestamp", entry.getTimestamp());
			logEvent.put("applicationName", entry.getApplicationName());
			logEvent.put("name", entry.getName());
			logEvent.put("source", "NRBatchingAppender");

			// Add custom fields
			if (customFields != null) {
				if (mergeCustomFields) {
					// Traverse all keys and add each field separately
					Map<String, Object> customFields1 = customFields;
					for (Map.Entry<String, Object> field : customFields1.entrySet()) {
						logEvent.put(field.getKey(), field.getValue());
					}
				} else {
					// Directly add the custom fields as a single entry
					logEvent.put("custom", customFields);
				}
			}

			String entryJson = objectMapper.writeValueAsString(logEvent);
			int entrySize = gzipCompress(entryJson).length;
			if (currentSize + entrySize > maxMessageSize) {
				bStatus = sendLogs(convertToLogEvents(subBatch, mergeCustomFields, customFields));
				subBatch.clear();
				currentSize = 0;
			}
			subBatch.add(entry);
			currentSize += entrySize;
		}
		if (!subBatch.isEmpty()) {
			bStatus = sendLogs(convertToLogEvents(subBatch, mergeCustomFields, customFields));
		}
		return bStatus;
	}

	private List<Map<String, Object>> convertToLogEvents(List<LogEntry> logEntries, boolean mergeCustomFields,
			Map<String, Object> customFields) {
		List<Map<String, Object>> logEvents = new ArrayList<>();
		try {
			InetAddress localhost = InetAddress.getLocalHost();
			String hostname = localhost.getHostName();

			for (LogEntry entry : logEntries) {
				Map<String, Object> logEvent = objectMapper.convertValue(entry, LowercaseKeyMap.class);
				logEvent.put("hostname", hostname);
				logEvent.put("logtype", entry.getLogType());
				logEvent.put("timestamp", entry.getTimestamp());
				logEvent.put("applicationName", entry.getApplicationName());
				logEvent.put("name", entry.getName());
				logEvent.put("source", "NRBatchingAppender");

				// Add custom fields
				if (customFields != null) {
					if (mergeCustomFields) {
						// Traverse all keys and add each field separately
						Map<String, Object> customFields1 = customFields;
						for (Map.Entry<String, Object> field : customFields1.entrySet()) {
							logEvent.put(field.getKey(), field.getValue());
						}
					} else {
						// Directly add the custom fields as a single entry
						logEvent.put("custom", customFields);
					}
				}

				logEvents.add(logEvent);
			}
		} catch (UnknownHostException e) {
			System.err.println("Error resolving local host: " + e.getMessage());
		}
		return logEvents;
	}

	private boolean sendLogs(List<Map<String, Object>> logEvents) throws IOException {
		String jsonPayload = objectMapper.writeValueAsString(logEvents);
		byte[] compressedPayload = gzipCompress(jsonPayload);

		MediaType mediaType = MediaType.parse("application/json");

		RequestBody requestBody = RequestBody.create(compressedPayload, mediaType);
		Request request = new Request.Builder().url(apiURL).post(requestBody).addHeader("X-License-Key", apiKey)
				.addHeader("Content-Type", "application/json").addHeader("Content-Encoding", "gzip").build();

		try (Response response = client.newCall(request).execute()) {
			if (!response.isSuccessful()) {
				System.err.println("Failed to send logs to New Relic: " + response.code() + " - " + response.message());
				System.err.println("Response body: " + response.body().string());
				requeueLogs(logEvents); // Requeue logs if the response is not successful
				return false;
			} else {
				// Comment out the following lines to prevent infinite loop
				// LocalDateTime timestamp = LocalDateTime.now();
				// System.out.println("Logs sent to New Relic successfully: " + "at " +
				// timestamp + " size: "
				// + compressedPayload.length + " Bytes");
				// System.out.println("Response: " + response.body().string());
			}
		} catch (IOException e) {
			System.err.println("Error during log forwarding: " + e.getMessage());
			requeueLogs(logEvents); // Requeue logs if an exception occurs
			return false;
		}
		return true;
	}

	private void requeueLogs(List<Map<String, Object>> logEvents) {

		for (Map<String, Object> logEvent : logEvents) {
			try {
				// Log the contents of logEvent
				// System.out.println("logEvent: " + logEvent);

				// Convert logEvent to LogEntry
				LogEntry logEntry = objectMapper.convertValue(logEvent, LogEntry.class);

				// Log the contents of the converted LogEntry
				// System.out.println("Converted LogEntry: ");
				// System.out.println(" message: " + logEntry.getMessage());
				// System.out.println(" applicationName: " + logEntry.getApplicationName());
				// System.out.println(" name: " + logEntry.getName());
				// System.out.println(" logtype: " + logEntry.getLogType());
				// System.out.println(" timestamp: " + logEntry.getTimestamp());

				// Requeue the log entry
				logQueue.put(logEntry);
			} catch (InterruptedException e) {
				Thread.currentThread().interrupt();
				System.err.println("Failed to requeue log entry: " + logEvent);
			} catch (IllegalArgumentException e) {
				System.err.println("Failed to convert log event to LogEntry: " + logEvent);
			}
		}

		System.err.println("Network issue - NewRelicBatchingAppenderhas re-queued " + logEvents.size() + " entries"
				+ " : queue size " + logQueue.size());
	}

	private byte[] gzipCompress(String input) throws IOException {
		ByteArrayOutputStream bos = new ByteArrayOutputStream();
		try (GZIPOutputStream gzipOS = new GZIPOutputStream(bos)) {
			gzipOS.write(input.getBytes());
		}
		return bos.toByteArray();
	}

	public void close(boolean mergeCustomFields, Map<String, Object> customFields) {
		List<LogEntry> remainingLogs = new ArrayList<>();
		logQueue.drainTo(remainingLogs);
		if (!remainingLogs.isEmpty()) {
			System.out.println("Flushing remaining " + remainingLogs.size() + " log events to New Relic...");
			flush(remainingLogs, mergeCustomFields, customFields);
		}
	}
}

package com.newrelic.labs;

import java.util.HashMap;
import java.util.Map;

@SuppressWarnings("serial")
public class LowercaseKeyMap extends HashMap<String, Object> {

	/** Stores {@code value} under the lower-cased form of {@code key}. */
	@Override
	public Object put(String key, Object value) {
		return super.put(key.toLowerCase(), value);
	}

	/** Copies every mapping from {@code m}, lower-casing each key via {@link #put}. */
	@Override
	public void putAll(Map<? extends String, ? extends Object> m) {
		m.forEach(this::put);
	}
}
package com.newrelic.labs;

import java.io.Serializable;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.logging.log4j.Logger;
import org.apache.logging.log4j.core.Filter;
import org.apache.logging.log4j.core.Layout;
import org.apache.logging.log4j.core.LogEvent;
import org.apache.logging.log4j.core.appender.AbstractAppender;
import org.apache.logging.log4j.core.config.Property;
import org.apache.logging.log4j.core.config.plugins.Plugin;
import org.apache.logging.log4j.core.config.plugins.PluginAttribute;
import org.apache.logging.log4j.core.config.plugins.PluginElement;
import org.apache.logging.log4j.core.config.plugins.PluginFactory;
import org.apache.logging.log4j.core.layout.PatternLayout;
import org.apache.logging.log4j.status.StatusLogger;

@Plugin(name = "NewRelicBatchingAppender", category = "Core", elementType = "appender", printObject = true)
public class NewRelicBatchingAppender extends AbstractAppender {

	private final BlockingQueue<LogEntry> queue;

	private final String apiKey;
	private final String apiUrl;
	private final String applicationName;
	private final String logType;
	private final boolean mergeCustomFields;
	private final String name;
	private final LogForwarder logForwarder;
	private static final Logger logger = StatusLogger.getLogger();
	private int attempt = 0; // Track attempts across harvest cycles

	private final int batchSize;
	private final long maxMessageSize;
	private final long flushInterval;
	private final Map<String, Object> customFields;
	private final int maxRetries;

	private static final int DEFAULT_BATCH_SIZE = 5000;
	private static final long DEFAULT_MAX_MESSAGE_SIZE = 1048576; // 1 MB
	private static final long DEFAULT_FLUSH_INTERVAL = 120000; // 2 minutes
	private static final String LOG_TYPE = "muleLog"; // defaultType
	private static final boolean MERGE_CUSTOM_FIELDS = false; // by default there will be a separate field custom block
																// for custom fields i.e. custom.attribute1

	protected NewRelicBatchingAppender(String name, Filter filter, Layout<? extends Serializable> layout,
			final boolean ignoreExceptions, String apiKey, String apiUrl, String applicationName, Integer batchSize,
			Long maxMessageSize, Long flushInterval, String logType, String customFields, Boolean mergeCustomFields,
			int maxRetries, long timeout) {
		super(name, filter, layout, ignoreExceptions, Property.EMPTY_ARRAY);
		this.queue = new LinkedBlockingQueue<>();
		this.apiKey = apiKey;
		this.apiUrl = apiUrl;
		this.applicationName = applicationName;
		this.name = name;
		this.maxRetries = maxRetries;
		this.batchSize = batchSize != null ? batchSize : DEFAULT_BATCH_SIZE;
		this.maxMessageSize = maxMessageSize != null ? maxMessageSize : DEFAULT_MAX_MESSAGE_SIZE;
		this.flushInterval = flushInterval != null ? flushInterval : DEFAULT_FLUSH_INTERVAL;
		this.logType = ((logType != null) && (logType.length() > 0)) ? logType : LOG_TYPE;
		this.customFields = parsecustomFields(customFields);
		this.mergeCustomFields = mergeCustomFields != null ? mergeCustomFields : MERGE_CUSTOM_FIELDS;
		this.logForwarder = new LogForwarder(apiKey, apiUrl, this.maxMessageSize, this.queue, maxRetries, timeout);
		startFlushingTask();
	}

	private Map<String, Object> parsecustomFields(String customFields) {
		Map<String, Object> custom = new HashMap<>();
		if (customFields != null && !customFields.isEmpty()) {
			String[] pairs = customFields.split(",");
			for (String pair : pairs) {
				String[] keyValue = pair.split("=");
				if (keyValue.length == 2) {
					custom.put(keyValue[0], keyValue[1]);
				}
			}
		}
		return custom;
	}

	@PluginFactory
	public static NewRelicBatchingAppender createAppender(@PluginAttribute("name") String name,
			@PluginElement("Layout") Layout<? extends Serializable> layout,
			@PluginElement("Filter") final Filter filter, @PluginAttribute("apiKey") String apiKey,
			@PluginAttribute("apiUrl") String apiUrl, @PluginAttribute("applicationName") String applicationName,
			@PluginAttribute(value = "batchSize") Integer batchSize,
			@PluginAttribute(value = "maxMessageSize") Long maxMessageSize, @PluginAttribute("logType") String logType,
			@PluginAttribute(value = "flushInterval") Long flushInterval,
			@PluginAttribute("customFields") String customFields,
			@PluginAttribute(value = "mergeCustomFields") Boolean mergeCustomFields,
			@PluginAttribute(value = "maxRetries") Integer maxRetries,
			@PluginAttribute(value = "timeout") Long timeout) {

		if (name == null) {
			logger.error("No name provided for NewRelicBatchingAppender");
			return null;
		}

		if (layout == null) {
			layout = PatternLayout.createDefaultLayout();
		}

		if (apiKey == null || apiUrl == null || applicationName == null) {
			logger.error("API key, API URL, and application name must be provided for NewRelicBatchingAppender");
			return null;
		}

		int retries = maxRetries != null ? maxRetries : 3; // Default to 3 retries if not specified
		long connectionTimeout = timeout != null ? timeout : 30000; // Default to 30 seconds if not specified

		return new NewRelicBatchingAppender(name, filter, layout, true, apiKey, apiUrl, applicationName, batchSize,
				maxMessageSize, flushInterval, logType, customFields, mergeCustomFields, retries, connectionTimeout);
	}

	@Override
	public void append(LogEvent event) {
		if (!checkEntryConditions()) {
			logger.warn("Appender not initialized. Dropping log entry");
			return;
		}

		String message = new String(getLayout().toByteArray(event));
		String loggerName = event.getLoggerName();
		long timestamp = event.getTimeMillis(); // Capture the log creation timestamp

		// Extract MuleAppName from the message
		String muleAppName = extractMuleAppName(message);

		logger.debug("Queueing message for New Relic: " + message);

		try {
			// Extract custom fields from the event context
			Map<String, Object> custom = new HashMap<>(extractcustom(event));
			// Add static custom fields from configuration without a prefix
			for (Entry<String, Object> entry : this.customFields.entrySet()) {
				custom.putIfAbsent(entry.getKey(), entry.getValue());
			}
			// Directly add to the queue
			queue.add(
					new LogEntry(message, applicationName, muleAppName, logType, timestamp, custom, mergeCustomFields));
			// Check if the batch size is reached and flush immediately
			if (queue.size() >= batchSize) {
				if (attempt == 0) {
					boolean bStatus = flushQueue();
					if (!bStatus) {
						attempt++;
						logger.warn("Attempt {} failed. Retrying in next harvest cycle...", attempt);
						logger.warn("batchsize check is now disabled due to unhealthy connection");
					} else {
						logger.debug("Batchsize-check: Successfully sent logs.");
					}
				} else {
					logger.debug(
							"Skipping {}/{} sending log entries to New Relic ( batchsize check )  - harvest cycle did not report healthy connection",
							batchSize, queue.size());
				}
			}
		} catch (Exception e) {
			logger.error("Unable to insert log entry into log queue. ", e);
		}
	}

	private boolean flushQueue() {
		List<LogEntry> batch = new ArrayList<>();
		boolean bStatus = false;
		queue.drainTo(batch, batchSize);
		if (!batch.isEmpty()) {
			logger.debug("Flushing {}/{} log entries to New Relic", batch.size(), queue.size() + batch.size());
			bStatus = logForwarder.flush(batch, mergeCustomFields, customFields);
		}
		return bStatus;
	}

	private Map<String, Object> extractcustom(LogEvent event) {
		Map<String, Object> custom = new HashMap<>();
		event.getContextData().forEach(custom::put);
		return custom;
	}

	private String extractMuleAppName(String message) {
		Pattern pattern = Pattern.compile("\\[.*?\\]\\..*?\\[([^\\]]+)\\]");
		Matcher matcher = pattern.matcher(message);
		if (matcher.find()) {
			return matcher.group(1);
		}
		return "generic";
	}

	private boolean checkEntryConditions() {
		boolean initialized = logForwarder != null && logForwarder.isInitialized();
		logger.debug("Check entry conditions: " + initialized);
		return initialized;
	}

	private void startFlushingTask() {
		Runnable flushTask = new Runnable() {

			@Override
			public void run() {
				while (true) {
					try {
						logger.debug("Flushing task running...");
						List<LogEntry> batch = new ArrayList<>();
						queue.drainTo(batch, batchSize);

						if (!batch.isEmpty()) {
							logger.debug("Flushing {}/{} log entries to New Relic", batch.size(),
									queue.size() + batch.size());
							boolean success = logForwarder.flush(batch, mergeCustomFields, customFields);

							if (success) {
								logger.debug("Harvest Cycle: Successfully sent logs.");
								attempt = 0; // Reset attempt counter on success
							} else {
								attempt++;
								logger.warn("Attempt {} failed. Retrying in next cycle...", attempt);
							}

							if (attempt >= maxRetries) {
								logger.error("Exhausted all retry attempts across cycles. Discarding {} logs.",
										queue.size());
								queue.clear(); // Clear the queue after maxRetries
								attempt = 0; // Reset attempt counter after discarding

								logger.debug("Queue Size: {} ", queue.size());
							}
						}

						// Wait for the next harvest cycle
						Thread.sleep(flushInterval);
					} catch (InterruptedException e) {
						Thread.currentThread().interrupt();
						logger.error("Flushing task interrupted", e);
						break;
					}
				}
			}
		};

		Thread flushThread = new Thread(flushTask);
		flushThread.setDaemon(true);
		flushThread.start();

		// Log the configuration settings in use
		logger.info(
				"NewRelicBatchingAppender initialized with settings: batchSize={}, maxMessageSize={}, flushInterval={}",
				batchSize, maxMessageSize, flushInterval);
	}

	@Override
	public boolean stop(final long timeout, final TimeUnit timeUnit) {
		logger.debug("Stopping NewRelicBatchingAppender {}", getName());
		setStopping();
		final boolean stopped = super.stop(timeout, timeUnit, false);
		try {
			logForwarder.close(mergeCustomFields, customFields);
		} catch (Exception e) {
			logger.error("Unable to close appender", e);
		}
		setStopped();
		logger.debug("NewRelicBatchingAppender {} has been stopped", getName());
		return stopped;
	}
}