From c7596dcefc45112a0a8c6094449cd3f044ac2225 Mon Sep 17 00:00:00 2001
From: baubakg
Date: Wed, 23 Apr 2025 18:56:55 +0200
Subject: [PATCH 01/26] Refactored code for enrichment

---
 .../tests/logparser/core/LogData.java         | 184 +++++++++++-------
 .../tests/logparser/core/EnrichmentTests.java | 131 ++++++++-----
 2 files changed, 199 insertions(+), 116 deletions(-)

diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java b/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java
index 612a61c..737d401 100644
--- a/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java
+++ b/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java
@@ -76,7 +76,8 @@ public void setEntries(Map in_logMap) {
     }
 
     /**
-     * This method adds an entry to the log data. If the entry already exists we just increment the frequence
+     * This method adds an entry to the log data. If the entry already exists we
+     * just increment the frequence
      *
      * Author : gandomi
      *
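Not part of the patch: as an orientation aid, the sketch below shows roughly how the LogData accessor and enrichment API documented in these hunks is driven, mirroring the calls made in the EnrichmentTests changes further down in this commit. The generic signatures are assumptions reconstructed from the Javadoc (this rendering of the diff drops type parameters), and the in_logData argument stands for a LogData instance populated elsewhere, for example by the fetchTestLogEntry() fixture of those tests.

import java.util.HashMap;
import java.util.Map;

import org.hamcrest.Matcher;
import org.hamcrest.Matchers;

import com.adobe.campaign.tests.logparser.core.LogData;

public class LogDataEnrichmentSketch {

    // Illustration only; "throws Exception" avoids guessing the package of the
    // checked IncorrectParseDefinitionException declared by get() and put().
    static void illustrate(LogData<?> in_logData) throws Exception {

        // Accessors documented in the surrounding hunks: read and overwrite a single
        // value of the entry stored under key "12", addressed by its definition title
        Object l_oldValue = in_logData.get("12", "AAZ");
        in_logData.put("12", "AAZ", "newValue");

        // Enrichment (the subject of this commit): every entry matching the Hamcrest
        // query map receives the extra key/value pair(s)
        Map<String, Matcher> l_queryMap = new HashMap<>();
        l_queryMap.put("AAZ", Matchers.startsWith("12"));

        in_logData.enrichData(l_queryMap, "TIT", "TAT");         // a single key/value
        in_logData.enrichData(l_queryMap, Map.of("TOT", "TET")); // several values at once
        in_logData.enrichEmpty("TIT", "TUT");                    // default for entries left unset

        // Enriched values behave like any parsed value, e.g. for filtering
        Map<String, Matcher> l_filterMap = new HashMap<>();
        l_filterMap.put("TIT", Matchers.equalTo("TAT"));
        LogData<?> l_filtered = in_logData.filterBy(l_filterMap);
    }
}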

@@ -96,7 +97,8 @@ public void addEntry(T lt_cubeEntry) {
     }
 
     /**
-     * This method allows you to access an entry in the log data. For this you need the key of the Data
+     * This method allows you to access an entry in the log data. For this you need
+     * the key of the Data
      *
      * Author : gandomi
      *
@@ -108,7 +110,8 @@ public T get(String in_dataEntryKey) {
     }
 
     /**
-     * This method allows you to access a value within the cube map. For this you need the key of the Data and the title
+     * This method allows you to access a value within the cube map. For this you
+     * need the key of the Data and the title
      * of the value
      *
* Author : gandomi @@ -116,7 +119,8 @@ public T get(String in_dataEntryKey) { * @param in_dataEntryKey The key with which the data has been stored * @param in_valueKey The identity of the value. * @return The key value for the given entry. null if not found - * @throws IncorrectParseDefinitionException If the given valueKey was not found in the definition + * @throws IncorrectParseDefinitionException If the given valueKey was not found + * in the definition */ public Object get(String in_dataEntryKey, String in_valueKey) throws IncorrectParseDefinitionException { @@ -136,7 +140,8 @@ public Object get(String in_dataEntryKey, String in_valueKey) throws IncorrectPa } /** - * This method allows you to change a specific value in the log data. For this, you need the key and the parse + * This method allows you to change a specific value in the log data. For this, + * you need the key and the parse * definition title to find the value *

* Author : gandomi @@ -144,7 +149,9 @@ public Object get(String in_dataEntryKey, String in_valueKey) throws IncorrectPa * @param in_dataEntryKey The key with which the data has been stored * @param in_valueKey The identity of the value. * @param in_newValue The new value of the entry value - * @throws IncorrectParseDefinitionException When there is no entry for the given in_dataEntryKey and in_valueKey + * @throws IncorrectParseDefinitionException When there is no entry for the + * given in_dataEntryKey and + * in_valueKey */ public void put(String in_dataEntryKey, String in_valueKey, Object in_newValue) throws IncorrectParseDefinitionException { @@ -188,18 +195,23 @@ public boolean equals(Object obj) { } /** - * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the + * Here we create a new LogDataObject with the given ParseDefinitionEntry. This + * method performs a groupby for the * given value. The frequence will also take into account the original frequence *

* Author : gandomi * - * @param in_parseDefinitionEntryKey The key name of the parse definition perform the GroupBy on - * @param in_transformationClass The class to which we should transform the cube data + * @param in_parseDefinitionEntryKey The key name of the parse definition + * perform the GroupBy on + * @param in_transformationClass The class to which we should transform the + * cube data * @param The return type of the group by cube. * @return a new LogData Object containing the groupBy values - * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry + * @throws IncorrectParseDefinitionException If the key is not in the + * ParseDefinitions of the Log data + * entry */ - LogData groupBy(String in_parseDefinitionEntryKey, + LogData groupBy(String in_parseDefinitionEntryKey, Class in_transformationClass) throws IncorrectParseDefinitionException { @@ -207,23 +219,28 @@ LogData groupBy(String in_parseDefinitionEntryKey, } /** - * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the + * Here we create a new LogDataObject with the given ParseDefinitionEntry. This + * method performs a groupby for the * given value. The frequence will also take into account the original frequence *

* Author : gandomi * - * @param in_parseDefinitionEntryKeyList The list of key names of the parse definition perform the GroupBy on - * @param in_transformationClass The class to which we should transform the cube data + * @param in_parseDefinitionEntryKeyList The list of key names of the parse + * definition perform the GroupBy on + * @param in_transformationClass The class to which we should transform + * the cube data * @param The return type of the group by cube. * @return a new LogData Object containing the groupBy values - * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry + * @throws IncorrectParseDefinitionException If the key is not in the + * ParseDefinitions of the Log data + * entry */ LogData groupBy(List in_parseDefinitionEntryKeyList, Class in_transformationClass) throws IncorrectParseDefinitionException { LogData lr_cubeData = new LogData<>(); - //Creating new Definition + // Creating new Definition ParseDefinition l_cubeDefinition = new ParseDefinition( "cube " + String.join("-", in_parseDefinitionEntryKeyList)); @@ -231,19 +248,20 @@ LogData groupBy(List in_parseDefinitionEntryK l_cubeDefinition.addEntry(new ParseDefinitionEntry(lt_keyName)); } - //Filling STDLogData + // Filling STDLogData for (T lt_entry : getEntries().values()) { Map lt_cubeEntryValues = new HashMap<>(); U lt_cubeEntry = null; try { lt_cubeEntry = in_transformationClass.getDeclaredConstructor().newInstance(); - } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) { + } catch (InstantiationException | IllegalAccessException | InvocationTargetException + | NoSuchMethodException e) { throw new LogParserPostManipulationException("Problem creating new host for our new grouping.", e); } lt_cubeEntry.setParseDefinition(l_cubeDefinition); for (String lt_parseDefinitionEntryKey : in_parseDefinitionEntryKeyList) { - //Merge with original headers + // Merge with original headers if (!lt_entry.fetchHeaders().contains(lt_parseDefinitionEntryKey)) { throw new IncorrectParseDefinitionException("The given header name " + lt_parseDefinitionEntryKey + " was not among the stored data"); @@ -258,18 +276,23 @@ LogData groupBy(List in_parseDefinitionEntryK lr_cubeData.addEntry(lt_cubeEntry); } + return lr_cubeData; } /** - * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the + * Here we create a new LogDataObject with the given ParseDefinitionEntry. This + * method performs a groupby for the * given value. The frequence will also take into account the original frequence *

* Author : gandomi * - * @param in_parseDefinitionEntryKeyList The list of key names of the parse definition perform the GroupBy on + * @param in_parseDefinitionEntryKeyList The list of key names of the parse + * definition perform the GroupBy on * @return a new LogData Object containing the groupBy values - * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry + * @throws IncorrectParseDefinitionException If the key is not in the + * ParseDefinitions of the Log data + * entry */ public LogData groupBy(List in_parseDefinitionEntryKeyList) throws IncorrectParseDefinitionException { @@ -277,14 +300,18 @@ public LogData groupBy(List in_parseDefinitionEntryKeyList } /** - * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the + * Here we create a new LogDataObject with the given ParseDefinitionEntry. This + * method performs a groupby for the * given value. The frequence will also take into account the original frequence *

* Author : gandomi * - * @param in_parseDefinitionEntryKey The key name of the parse definition perform the GroupBy on + * @param in_parseDefinitionEntryKey The key name of the parse definition + * perform the GroupBy on * @return a new LogData Object containing the groupBy values - * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry + * @throws IncorrectParseDefinitionException If the key is not in the + * ParseDefinitions of the Log data + * entry */ public LogData groupBy(String in_parseDefinitionEntryKey) throws IncorrectParseDefinitionException { @@ -296,7 +323,8 @@ public LogData groupBy(String in_parseDefinitionEntryKey) *

* Author : gandomi * - * @param in_filterKeyValues A map of <String,Matcher> representation the values we want to find + * @param in_filterKeyValues A map of <String,Matcher> representation the + * values we want to find * @return a new LogDataObject containing only the filtered values */ public LogData filterBy(Map in_filterKeyValues) { @@ -312,11 +340,13 @@ public LogData filterBy(Map in_filterKeyValues) { } /** - * This method searches the LogData for an entry with a specific value for a parse definition entry name + * This method searches the LogData for an entry with a specific value for a + * parse definition entry name *

* Author : gandomi * - * @param in_parseDefinitionName The name of the parse definition entry under which we search for a value + * @param in_parseDefinitionName The name of the parse definition entry under + * which we search for a value * @param in_searchValue The matcher * @return a new LogDataObject containing only the searched values */ @@ -332,7 +362,8 @@ public LogData searchEntries(String in_parseDefinitionName, Matcher in_search *

* Author : gandomi * - * @param in_searchKeyValues A map of <String,Matcher> representation the values we want to find + * @param in_searchKeyValues A map of <String,Matcher> representation the + * values we want to find * @return a new LogDataObject containing only the filtered values */ public LogData searchEntries(Map in_searchKeyValues) { @@ -345,7 +376,8 @@ public LogData searchEntries(Map in_searchKeyValues) { *

* Author : gandomi * - * @param in_parseDefinitionName The name of the parse definition entry under which we search for a value + * @param in_parseDefinitionName The name of the parse definition entry under + * which we search for a value * @param in_searchValue The search value * @return true if the search terms could be found. Otherwise false */ @@ -361,7 +393,8 @@ public boolean isEntryPresent(String in_parseDefinitionName, String in_searchVal *

* Author : gandomi * - * @param in_searchKeyValues A map of <String,Object> representation the values we want to find + * @param in_searchKeyValues A map of <String,Object> representation the + * values we want to find * @return true if the search terms could be found. Otherwise false */ public boolean isEntryPresent(Map in_searchKeyValues) { @@ -369,7 +402,8 @@ public boolean isEntryPresent(Map in_searchKeyValues) { } /** - * Exports the current LogData to a standard CSV file. By default the file will have an escaped version of the Parse + * Exports the current LogData to a standard CSV file. By default the file will + * have an escaped version of the Parse * Definition as the name * * @return a CSV file containing the LogData @@ -390,7 +424,9 @@ public File exportLogDataToCSV() throws LogDataExportToFileException { } /** - * Exports the current LogData to a standard CSV file with a name you give. By default the file will have an escaped version of the Parse + * Exports the current LogData to a standard CSV file with a name you give. By + * default the file will have an escaped version of the Parse + * * @param in_fileName a filename to store the CSV export * @return a CSV file containing the LogData */ @@ -432,15 +468,14 @@ public File exportLogDataToCSV(Collection in_headerSet, String in_csvFil throw new LogDataExportToFileException("Encountered error while exporting the log data to a CSV file.", ex); } - - return l_exportFile; } /** - * Exports the current LogData to an HTML file as a table. The headers will be extracted directly from the entries. + * Exports the current LogData to an HTML file as a table. The headers will be + * extracted directly from the entries. * - * @param in_reportTitle The title of the report + * @param in_reportTitle The title of the report * @param in_htmlFileName The file name to export * @return an HTML file containing the LogData as a table */ @@ -458,8 +493,8 @@ public File exportLogDataToHTML(String in_reportTitle, String in_htmlFileName) { /** * Exports the current LogData to an HTML file as a table. 
* - * @param in_headerSet A set of headers to be used as keys for exporting - * @param in_reportTitle The title of the report + * @param in_headerSet A set of headers to be used as keys for exporting + * @param in_reportTitle The title of the report * @param in_htmlFileName The file name to export * @return an HTML file containing the LogData as a table */ @@ -470,7 +505,7 @@ public File exportLogDataToHTML(Collection in_headerSet, String in_repor l_exportFile = LogParserFileUtils.createNewFile(in_htmlFileName); StringBuilder sb = new StringBuilder(); sb.append(HTMLReportUtils.fetchSTDPageStart("diffTable.css")); - //Creating the overview report + // Creating the overview report sb.append(HTMLReportUtils.fetchHeader(1, in_reportTitle)); sb.append("Here is an listing of out findings."); sb.append(HTMLReportUtils.fetchTableStartBracket()); @@ -480,7 +515,8 @@ public File exportLogDataToHTML(Collection in_headerSet, String in_repor for (StdLogEntry lt_entry : this.getEntries().values()) { Map lt_values = lt_entry.fetchValueMapPrintable(); sb.append(HTMLReportUtils.ROW_START); - in_headerSet.stream().map(h -> lt_values.get(h)).forEach(j -> sb.append(HTMLReportUtils.fetchCell_TD(j))); + in_headerSet.stream().map(h -> lt_values.get(h)) + .forEach(j -> sb.append(HTMLReportUtils.fetchCell_TD(j))); sb.append(HTMLReportUtils.ROW_END); } @@ -498,7 +534,8 @@ public File exportLogDataToHTML(Collection in_headerSet, String in_repor } /** - * Exports the current LogData to a standard JSON file. By default, the file will have an escape version of the Parse + * Exports the current LogData to a standard JSON file. By default, the file + * will have an escape version of the Parse * Definition as the name * * @return a JSON file containing the LogData @@ -507,7 +544,8 @@ public File exportLogDataToJSON() throws LogDataExportToFileException { T l_firstEntry = this.fetchFirst(); if (l_firstEntry != null) { - return exportLogDataToJSON(l_firstEntry.fetchHeaders(), l_firstEntry.getParseDefinition().fetchEscapedTitle() + "-export.json"); + return exportLogDataToJSON(l_firstEntry.fetchHeaders(), + l_firstEntry.getParseDefinition().fetchEscapedTitle() + "-export.json"); } else { log.warn("No Log data to export. 
Please load the log data before re-attempting"); return null; @@ -534,7 +572,7 @@ public File exportLogDataToJSON(String in_jsonFileName) throws LogDataExportToFi /** * Exports the current LogData to an JSON file * - * @param in_headerSet A set of headers to be used as keys for exporting + * @param in_headerSet A set of headers to be used as keys for exporting * @param in_jsonFileName The file name to export * @return a JSON file containing the LogData * @throws LogDataExportToFileException If the file could not be exported @@ -544,13 +582,13 @@ public File exportLogDataToJSON(Collection in_headerSet, String in_jsonF File l_exportFile; try { - l_exportFile= LogParserFileUtils.createNewFile(in_jsonFileName); + l_exportFile = LogParserFileUtils.createNewFile(in_jsonFileName); List> jsonList = new ArrayList<>(); jsonList.addAll(this.getEntries().values().stream().map(StdLogEntry::fetchValueMapPrintable) .collect(Collectors.toList())); - ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.writeValue(l_exportFile, jsonList); + ObjectMapper objectMapper = new ObjectMapper(); + objectMapper.writeValue(l_exportFile, jsonList); } catch (IOException e) { throw new LogDataExportToFileException("Encountered error while exporting the log data to a JSON file.", e); @@ -559,8 +597,10 @@ public File exportLogDataToJSON(Collection in_headerSet, String in_jsonF } /** - * This method compares two LogData objects and returns the differences. The difference is map of - * LogDataComparisons. The values of the delta and the deltaRatio are negative if the frequency is decreasing or has + * This method compares two LogData objects and returns the differences. The + * difference is map of + * LogDataComparisons. The values of the delta and the deltaRatio are negative + * if the frequency is decreasing or has * been removed. * * @param in_logData A LogData @@ -592,19 +632,22 @@ public Map> compare(LogData in_logData) { /** * returns the first entry in the log data + * * @return a LogDataEntry, null if there are no entries */ protected T fetchFirst() { - return this.getEntries().values().stream().findFirst().orElse(null); + return this.getEntries().values().stream().findFirst().orElse(null); } /** * Returns the definition with which this LogData was created - * @return a ParseDefinition Object. Null if there are no entries in the log data + * + * @return a ParseDefinition Object. 
Null if there are no entries in the log + * data */ public ParseDefinition fetchParseDefinition() { var l_firstEntry = this.fetchFirst(); - if (l_firstEntry ==null) { + if (l_firstEntry == null) { return null; } @@ -612,29 +655,38 @@ public ParseDefinition fetchParseDefinition() { } /** - * Enriches the log data with the given values provided there are lines that match the query map - * @param in_queryMap A map definition entry and Matchers - * @param in_entryName The name of the entry to be added + * Enriches the log data with the given values provided there are lines that + * match the query map + * + * @param in_queryMap A map definition entry and Matchers + * @param in_entryName The name of the entry to be added * @param in_entryValue The value of the entry to be added */ public void enrichData(Map in_queryMap, String in_entryName, String in_entryValue) { - //add the entry to the definition - fetchParseDefinition().addEntry(new ParseDefinitionEntry(in_entryName)); - //Iterate over the entries - getEntries().entrySet().stream().filter(e -> e.getValue().matches(in_queryMap)).forEach(e -> { - e.getValue().put(in_entryName, in_entryValue); - }); + /* + * // add the entry to the definition + * fetchParseDefinition().addEntry(new ParseDefinitionEntry(in_entryName)); + * + * // Iterate over the entries + * getEntries().entrySet().stream().filter(e -> + * e.getValue().matches(in_queryMap)).forEach(e -> { + * e.getValue().put(in_entryName, in_entryValue); + * }); + */ + enrichData(in_queryMap, Map.of(in_entryName, in_entryValue)); } /** - * Enriches the log data with the given values provided there are lines that match the query map - * @param in_queryMap A map definition entry and Matchers + * Enriches the log data with the given values provided there are lines that + * match the query map + * + * @param in_queryMap A map definition entry and Matchers * @param keyValueToEnrich A map of key value pairs to be added to the log data */ public void enrichData(Map in_queryMap, Map keyValueToEnrich) { - //Iterate over the entries + // Iterate over the entries getEntries().entrySet().stream() .filter(e -> e.getValue().matches(in_queryMap)).forEach(e -> { keyValueToEnrich.forEach((in_entryName, in_entryValue) -> { @@ -645,10 +697,10 @@ public void enrichData(Map in_queryMap, Map key }); } - /** * Enriches the log data which have not been set with the given values - * @param in_entryName The name of the entry to be added + * + * @param in_entryName The name of the entry to be added * @param in_entryValue The value of the entry to be added */ public void enrichEmpty(String in_entryName, String in_entryValue) { diff --git a/src/test/java/com/adobe/campaign/tests/logparser/core/EnrichmentTests.java b/src/test/java/com/adobe/campaign/tests/logparser/core/EnrichmentTests.java index 64bd60d..9e1570d 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/core/EnrichmentTests.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/core/EnrichmentTests.java @@ -25,7 +25,6 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; -import static org.testng.Assert.assertThrows; public class EnrichmentTests { @@ -33,11 +32,12 @@ public class EnrichmentTests { public void testSimpleEnrichment() { LogData l_cubeData = fetchTestLogEntry(); - //Checks before enrichment + // Checks before enrichment assertThat(l_cubeData.getEntries().size(), Matchers.is(3)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), 
Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); - ////enrich logData + //// enrich logData // Prepare inputs Map l_queryMap = new HashMap<>(); l_queryMap.put("AAZ", Matchers.startsWith("12")); @@ -46,13 +46,17 @@ public void testSimpleEnrichment() { LogData l_filteredLogs = l_cubeData.filterBy(Map.of("TIT", Matchers.equalTo("TAT"))); assertThat("We should have enriched two entries", l_filteredLogs.getEntries().size(), Matchers.equalTo(2)); - List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition().getDefinitionEntries(); - assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size()+1)); + List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition() + .getDefinitionEntries(); + assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), + Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size() + 1)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("12").get("TIT"), Matchers.equalTo("TAT")); - assertThat(l_cubeData.get("112").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("112").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("112").get("TIT"), Matchers.equalTo("")); l_cubeData.exportLogDataToHTML("dsd", "enriched.html"); @@ -62,11 +66,12 @@ public void testSimpleEnrichment() { public void testDoubleEnrichment() { LogData l_cubeData = fetchTestLogEntry(); - //Checks before enrichment + // Checks before enrichment assertThat(l_cubeData.getEntries().size(), Matchers.is(3)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); - ////enrich logData + //// enrich logData // Prepare inputs Map l_queryMap = new HashMap<>(); l_queryMap.put("AAZ", Matchers.startsWith("12")); @@ -80,13 +85,17 @@ public void testDoubleEnrichment() { LogData l_filteredLogs = l_cubeData.filterBy(Map.of("TIT", Matchers.equalTo("TAT"))); assertThat("We should have enriched two entries", l_filteredLogs.getEntries().size(), Matchers.equalTo(2)); - List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition().getDefinitionEntries(); - assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size()+1)); + List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition() + .getDefinitionEntries(); + assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), + Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size() + 1)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", 
"TIT")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("12").get("TIT"), Matchers.equalTo("TAT")); - assertThat(l_cubeData.get("112").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("112").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("112").get("TIT"), Matchers.equalTo("TUT")); l_cubeData.exportLogDataToHTML("dsd", "enriched.html"); @@ -96,11 +105,12 @@ public void testDoubleEnrichment() { public void testDoubleEnrichment_updateUnset() { LogData l_cubeData = fetchTestLogEntry(); - //Checks before enrichment + // Checks before enrichment assertThat(l_cubeData.getEntries().size(), Matchers.is(3)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); - ////enrich logData + //// enrich logData // Prepare inputs Map l_queryMap = new HashMap<>(); l_queryMap.put("AAZ", Matchers.startsWith("12")); @@ -112,10 +122,12 @@ public void testDoubleEnrichment_updateUnset() { l_cubeData.enrichEmpty("TIT", "TUT"); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("12").get("TIT"), Matchers.equalTo("TAT")); - assertThat(l_cubeData.get("112").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("112").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("112").get("TIT"), Matchers.equalTo("TUT")); l_cubeData.exportLogDataToHTML("dsd", "enriched.html"); @@ -138,9 +150,10 @@ public void testDoubleEnrichment_SDK() throws StringParseException { Map l_queryMap2 = Map.of("code", Matchers.startsWith("SOP")); l_entries.enrichData(l_queryMap2, "category", "AVERAGE"); - - assertThat("We should have the correct value", l_entries.get("INT-150612").get("category"), is(equalTo("GREAT"))); - assertThat("We should have the correct value", l_entries.get("SOP-338921").get("category"), is(equalTo("AVERAGE"))); + assertThat("We should have the correct value", l_entries.get("INT-150612").get("category"), + is(equalTo("GREAT"))); + assertThat("We should have the correct value", l_entries.get("SOP-338921").get("category"), + is(equalTo("AVERAGE"))); } @Test @@ -159,8 +172,8 @@ public void testDoubleEnrichment_SDK_timeBased() throws StringParseException { ZonedDateTime start = ZonedDateTime.parse("2024-06-13T03:00:10.727Z", DateTimeFormatter.ISO_ZONED_DATE_TIME); ZonedDateTime end = ZonedDateTime.parse("2024-06-13T11:00:19.727Z", DateTimeFormatter.ISO_ZONED_DATE_TIME); - - Map l_queryMap = Map.of("timeOfLog", Matchers.allOf(Matchers.greaterThanOrEqualTo(start), Matchers.lessThanOrEqualTo(end))); + Map l_queryMap = Map.of("timeOfLog", + Matchers.allOf(Matchers.greaterThanOrEqualTo(start), Matchers.lessThanOrEqualTo(end))); l_entries.enrichData(l_queryMap, "test", "Test1"); assertThat("We should have the correct 
value", l_entries.get("INT-150612").get("test"), is(equalTo("Test1"))); @@ -173,11 +186,12 @@ public void testDoubleEnrichment_SDK_timeBased() throws StringParseException { public void testSimpleEnrichmentWithMap() { LogData l_cubeData = fetchTestLogEntry(); - //Checks before enrichment + // Checks before enrichment assertThat(l_cubeData.getEntries().size(), Matchers.is(3)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); - ////enrich logData + //// enrich logData // Prepare inputs Map l_queryMap = new HashMap<>(); l_queryMap.put("AAZ", Matchers.startsWith("12")); @@ -189,13 +203,17 @@ public void testSimpleEnrichmentWithMap() { LogData l_filteredLogs = l_cubeData.filterBy(Map.of("TIT", Matchers.equalTo("TAT"))); assertThat("We should have enriched two entries", l_filteredLogs.getEntries().size(), Matchers.equalTo(2)); - List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition().getDefinitionEntries(); - assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size()+1)); + List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition() + .getDefinitionEntries(); + assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), + Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size() + 1)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("12").get("TIT"), Matchers.equalTo("TAT")); - assertThat(l_cubeData.get("112").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); + assertThat(l_cubeData.get("112").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT")); assertThat(l_cubeData.get("112").get("TIT"), Matchers.equalTo("")); l_cubeData.exportLogDataToHTML("dsd", "enriched.html"); @@ -205,11 +223,12 @@ public void testSimpleEnrichmentWithMap() { public void testSimpleEnrichmentWithEmptyMap() { LogData l_cubeData = fetchTestLogEntry(); - //Checks before enrichment + // Checks before enrichment assertThat(l_cubeData.getEntries().size(), Matchers.is(3)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); - ////enrich logData + //// enrich logData // Prepare inputs Map l_queryMap = new HashMap<>(); l_queryMap.put("AAZ", Matchers.startsWith("12")); @@ -218,10 +237,14 @@ public void testSimpleEnrichmentWithEmptyMap() { l_cubeData.enrichData(l_queryMap, keyValueToEnrich); LogData l_filteredLogs = l_cubeData.filterBy(Map.of("AAZ", Matchers.equalTo("12"))); - List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition().getDefinitionEntries(); - assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size())); - 
assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); - assertThat(l_cubeData.get("112").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition() + .getDefinitionEntries(); + assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), + Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size())); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("112").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); l_cubeData.exportLogDataToHTML("dsd", "enriched.html"); } @@ -230,11 +253,12 @@ public void testSimpleEnrichmentWithEmptyMap() { public void testDoubleEnrichmentWithMap() { LogData l_cubeData = fetchTestLogEntry(); - //Checks before enrichment + // Checks before enrichment assertThat(l_cubeData.getEntries().size(), Matchers.is(3)); - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence")); + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence")); - ////enrich logData + //// enrich logData // Prepare inputs Map l_queryMap = new HashMap<>(); l_queryMap.put("AAZ", Matchers.startsWith("12")); @@ -257,14 +281,19 @@ public void testDoubleEnrichmentWithMap() { LogData l_filteredLogs = l_cubeData.filterBy(Map.of("TIT", Matchers.equalTo("TAT"))); assertThat("We should have enriched two entries", l_filteredLogs.getEntries().size(), Matchers.equalTo(2)); LogData l_secondFilteredLogs = l_cubeData.filterBy(Map.of("TOT", Matchers.equalTo("TET"))); - assertThat("We should have enriched two entries", l_secondFilteredLogs.getEntries().size(), Matchers.equalTo(2)); - List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition().getDefinitionEntries(); - assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size()+2)); - - assertThat(l_cubeData.get("12").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT", "TOT")); + assertThat("We should have enriched two entries", l_secondFilteredLogs.getEntries().size(), + Matchers.equalTo(2)); + List l_definitionEntries = l_filteredLogs.get("12").getParseDefinition() + .getDefinitionEntries(); + assertThat("We should have one additional Parse definition entry", l_definitionEntries.size(), + Matchers.equalTo(generateTestParseDefinition().getDefinitionEntries().size() + 2)); + + assertThat(l_cubeData.get("12").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT", "TOT")); assertThat(l_cubeData.get("12").get("TIT"), Matchers.equalTo("TAT")); - assertThat(l_cubeData.get("112").fetchStoredHeaders(), Matchers.containsInAnyOrder("key","AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT", "TOT")); + assertThat(l_cubeData.get("112").fetchStoredHeaders(), + Matchers.containsInAnyOrder("key", "AAZ", "ZZZ", "BAU", "DAT", "frequence", "TIT", "TOT")); assertThat(l_cubeData.get("112").get("TIT"), Matchers.equalTo("TUT")); assertThat(l_cubeData.get("112").get("TOT"), Matchers.equalTo("TUTU")); @@ -294,8 +323,10 @@ 
public void testDoubleEnrichment_SDK_WithMap() throws StringParseException { Map l_queryMap2 = Map.of("code", Matchers.startsWith("SOP")); l_entries.enrichData(l_queryMap2, keyValueToEnrich); - assertThat("We should have the correct value", l_entries.get("INT-150612").get("category"), is(equalTo("GREAT"))); - assertThat("We should have the correct value", l_entries.get("SOP-338921").get("category"), is(equalTo("AVERAGE"))); + assertThat("We should have the correct value", l_entries.get("INT-150612").get("category"), + is(equalTo("GREAT"))); + assertThat("We should have the correct value", l_entries.get("SOP-338921").get("category"), + is(equalTo("AVERAGE"))); } private static LogData fetchTestLogEntry() { From 6b37c28f24d30e111d401443b8f17201cfa829bd Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 19:40:15 +0200 Subject: [PATCH 02/26] Issue #203 adding guard rails --- .../tests/logparser/core/ParseGuardRails.java | 51 + .../logparser/core/StringParseFactory.java | 155 +- .../tests/logparser/utils/MemoryUtils.java | 24 + .../tests/logparser/core/ParseTesting.java | 2143 +++++++++-------- 4 files changed, 1286 insertions(+), 1087 deletions(-) create mode 100644 src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java create mode 100644 src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java new file mode 100644 index 0000000..5a40af8 --- /dev/null +++ b/src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java @@ -0,0 +1,51 @@ +/* + * Copyright 2022 Adobe + * All Rights Reserved. + * + * NOTICE: Adobe permits you to use, modify, and distribute this file in + * accordance with the terms of the Adobe license agreement accompanying + * it. 
+ */ +package com.adobe.campaign.tests.logparser.core; + +import java.util.HashMap; +import java.util.Map; + +import com.adobe.campaign.tests.logparser.utils.MemoryUtils; + +/** + * Class to store guard rails for parsing operations + */ +public class ParseGuardRails { + public static final Map fileSizeLimitations = new HashMap<>(); + public static final long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); + public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_FILEENTRY_LIMIT", "-1")); + public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_HEAP_LIMIT", "-1")); + + public static void reset() { + fileSizeLimitations.clear(); + FILE_ENTRY_LIMIT = -1; + HEAP_LIMIT = -1; + } + + /** + * Check if the current count has reached the entry limit + * + * @param currentCount the current count of entries + * @return true if the current count has reached the entry limit, false + * otherwise + */ + public static boolean hasReachedEntryLimit(int currentCount) { + return FILE_ENTRY_LIMIT > -1 && currentCount >= FILE_ENTRY_LIMIT; + } + + /** + * Check if the current memory usage has reached the memory limit + * + * @return true if the current memory usage has reached the memory limit, false + * otherwise + */ + public static boolean hasReachedHeapLimit() { + return HEAP_LIMIT > -1 && (MemoryUtils.getCurrentHeapSizeMB() - HEAP_SIZE_AT_START) >= HEAP_LIMIT; + } +} \ No newline at end of file diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java b/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java index efa0f29..3146698 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java @@ -10,6 +10,8 @@ import com.adobe.campaign.tests.logparser.exceptions.LogParserSDKDefinitionException; import com.adobe.campaign.tests.logparser.exceptions.StringParseException; +import com.adobe.campaign.tests.logparser.utils.MemoryUtils; + import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -38,16 +40,20 @@ private StringParseFactory() { * Author : gandomi * * @param in_logFiles - * A collection of log file paths + * A collection of log file paths * @param in_parseDefinition - * The parsing rules as defined in the class ParseDefinition + * The parsing rules as defined in the class + * ParseDefinition * @param in_classTarget - * The target class that will be storing the results - * @param The type of data (subclass of {@link StdLogEntry}) we want to create and store - * @param The collection type with which we receive the parameter in_logFiles + * The target class that will be storing the results + * @param The type of data (subclass of {@link StdLogEntry}) + * we want to create and store + * @param The collection type with which we receive the + * parameter in_logFiles * @return A map of String and a Sub-class of {@link StdLogEntry} * @throws StringParseException - * When there are logical rules when parsing the given string + * When there are logical rules when parsing the + * given string * */ public static > Map extractLogEntryMap( @@ -64,13 +70,12 @@ public static > Map l_foundEntries = new HashMap<>(); long totalBytesAnalyzed = 0; - //Fetch File + // Fetch File for (String l_currentLogFile : in_logFiles) { int lt_foundEntryCount = 0; - int i = 0; - totalBytesAnalyzed+= new File(l_currentLogFile).length(); + totalBytesAnalyzed += new 
File(l_currentLogFile).length(); log.info("Parsing file {}", l_currentLogFile); try (BufferedReader reader = new BufferedReader(new FileReader(l_currentLogFile))) { @@ -87,22 +92,41 @@ public static > Map {log.info("Found {} entries in file {}", v, k); l_totalEntries.addAndGet(v);}); + l_foundEntries.forEach((k, v) -> { + log.info("Found {} entries in file {}", v, k); + l_totalEntries.addAndGet(v); + }); // Convert the bytes to Kilobytes (1 KB = 1024 Bytes) long fileSizeInKB = totalBytesAnalyzed / 1024; // Convert the KB to MegaBytes (1 MB = 1024 KBytes) long fileSizeInMB = fileSizeInKB / 1024; - log.info("RESULT : Found {} entries, {} unique keys in {} files, and {}Mb of data.", l_totalEntries, lr_entries.keySet().size(), l_foundEntries.keySet().size(),fileSizeInMB); + log.info("RESULT : Found {} entries, {} unique keys in {} files, and {}Mb of data.", l_totalEntries, + lr_entries.keySet().size(), l_foundEntries.keySet().size(), fileSizeInMB); return lr_entries; } @@ -112,17 +136,18 @@ public static > Map void updateEntryMapWithParsedData(final String in_logFile, final String in_logLine, @@ -134,23 +159,25 @@ static void updateEntryMapWithParsedData(final String in try { lt_entry = in_classTarget.getDeclaredConstructor().newInstance(); } catch (InstantiationException | IllegalAccessException e) { - throw new LogParserSDKDefinitionException("Structural Problems whith calling the Default constructor in SDK Parser Class", e); + throw new LogParserSDKDefinitionException( + "Structural Problems whith calling the Default constructor in SDK Parser Class", e); } catch (InvocationTargetException e) { - throw new LogParserSDKDefinitionException("Problems when calling the Default constructor in SDK Parser Class", e); + throw new LogParserSDKDefinitionException( + "Problems when calling the Default constructor in SDK Parser Class", e); } catch (NoSuchMethodException e) { throw new LogParserSDKDefinitionException("Missing Default constructor in SDK Parser Class", e); } lt_entry.setParseDefinition(in_parseDefinition); - var lt_fileObject = new File(in_logFile != null ? in_logFile : STD_DEFAULT_ENTRY_FILENAME); if (in_parseDefinition.isStoreFileName()) { lt_entry.setLogFileName(lt_fileObject.getName()); } if (in_parseDefinition.isStoreFilePath()) { - lt_entry.updatePath(lt_fileObject.exists() ? lt_fileObject.getParentFile().getPath() : STD_DEFAULT_ENTRY_FILENAME); + lt_entry.updatePath( + lt_fileObject.exists() ? 
lt_fileObject.getParentFile().getPath() : STD_DEFAULT_ENTRY_FILENAME); } lt_entry.setValuesFromMap(lt_lineResult); @@ -171,12 +198,13 @@ static void updateEntryMapWithParsedData(final String in * Author : gandomi * * @param in_logString - * A string that is to be parsed + * A string that is to be parsed * @param in_parseDefinition - * The parse definition rules for parsing the string + * The parse definition rules for parsing the string * @return A Map of strings containing the parse results of one parsed line * @throws StringParseException - * When there are logical rules when parsing the given string + * When there are logical rules when parsing the + * given string * */ public static Map parseString(String in_logString, ParseDefinition in_parseDefinition) @@ -191,12 +219,13 @@ public static Map parseString(String in_logString, ParseDefiniti * Author : gandomi * * @param in_stringToParse - * A string that is to be parsed + * A string that is to be parsed * @param in_parsRule - * A single parse definition item + * A single parse definition item * @return A Map of strings containing the parse results of one parsed line * @throws StringParseException - * When there are logical rules when parsing the given string + * When there are logical rules when parsing the + * given string * */ protected static Map parseString(String in_stringToParse, @@ -211,12 +240,13 @@ protected static Map parseString(String in_stringToParse, * Author : gandomi * * @param in_logString - * A string that is to be parsed + * A string that is to be parsed * @param in_parsRuleList - * A list of ParseDefinitionEntries + * A list of ParseDefinitionEntries * @return A Map of strings containing the parse results of one parsed line * @throws StringParseException - * When there are logical rules when parsing the given string + * When there are logical rules when parsing the + * given string * */ protected static Map parseString(String in_logString, @@ -240,19 +270,19 @@ protected static Map parseString(String in_logString, * Author : gandomi * * @param in_stringValue - * A string that is to be parsed + * A string that is to be parsed * @param in_parseDefinition - * A parse definition entry to parse the given string + * A parse definition entry to parse the given string * @return A string representing the value corresponding to the * ParseDefinition * @throws StringParseException - * Whenever the start and end markers are not found + * Whenever the start and end markers are not found * */ public static String fetchValue(String in_stringValue, ParseDefinitionEntry in_parseDefinition) throws StringParseException { - //Fetch where to start looking from + // Fetch where to start looking from final int l_startLocation = in_parseDefinition.fetchStartPosition(in_stringValue); final int l_endLocation = in_parseDefinition.fetchEndPosition(in_stringValue); @@ -267,13 +297,13 @@ public static String fetchValue(String in_stringValue, ParseDefinitionEntry in_p + in_parseDefinition.getTitle() + " in string \n" + in_stringValue + "."); } - //Anonymize + // Anonymize String rawExtraction = in_stringValue.substring(l_startLocation, l_endLocation).trim(); String lr_extraction = rawExtraction; for (String lt_anonymizer : in_parseDefinition.getAnonymizers().stream().filter(a -> stringsCorrespond(a, rawExtraction)).collect( - Collectors.toList())) { + Collectors.toList())) { lr_extraction = anonymizeString(lt_anonymizer.trim(), rawExtraction); } return lr_extraction; @@ -287,10 +317,11 @@ public static String fetchValue(String in_stringValue, 
ParseDefinitionEntry in_p * Author : gandomi * * @param in_logString - * A string that is to be parsed + * A string that is to be parsed * @param in_definitionList - * A list of parse definitions that will be used to fetch the values - * in the given string + * A list of parse definitions that will be used to + * fetch the values + * in the given string * @return true if the Parse Definition rules can be applied to the given * string * @@ -298,7 +329,7 @@ public static String fetchValue(String in_stringValue, ParseDefinitionEntry in_p protected static boolean isStringCompliant(String in_logString, List in_definitionList) { String l_workingString = in_logString; - //For every definition start and end. Check that the index follows + // For every definition start and end. Check that the index follows for (ParseDefinitionEntry lt_parseDefinitionItem : in_definitionList) { final int lt_startPosition = lt_parseDefinitionItem.fetchStartPosition(l_workingString); @@ -312,7 +343,7 @@ protected static boolean isStringCompliant(String in_logString, return false; } - //The delta is only relevant if we are preserving the value + // The delta is only relevant if we are preserving the value if ((lt_startPosition >= lt_endPosition) && lt_parseDefinitionItem.isToPreserve()) { return false; } @@ -330,10 +361,11 @@ protected static boolean isStringCompliant(String in_logString, * Author : gandomi * * @param in_logString - * A string that is to be parsed + * A string that is to be parsed * @param in_parseDefinition - * A list of parse definitions that will be used to fetch the values - * in the given string + * A list of parse definitions that will be used to + * fetch the values + * in the given string * @return true if the Parse Definition rules can be applied to the given * string * @@ -343,11 +375,14 @@ public static boolean isStringCompliant(String in_logString, ParseDefinition in_ } /** - * This method lets us know if the found String corresponds to the stored String. The stored string will have escape + * This method lets us know if the found String corresponds to the stored + * String. The stored string will have escape * characters like the log4J FormatMessages I.e. '{} * - * @param in_templateString A stored string reference containing the standard escape chracters '{} - * @param in_candidateString The string coming from the log file that we want to check correspondance with + * @param in_templateString A stored string reference containing the standard + * escape chracters '{} + * @param in_candidateString The string coming from the log file that we want to + * check correspondance with * @return true of the Strings correspond to each other */ public static boolean stringsCorrespond(String in_templateString, String in_candidateString) { @@ -367,14 +402,17 @@ public static boolean stringsCorrespond(String in_templateString, String in_cand } /** - * This method anonymizes a string based on a template string. If the template contains {} the corresponding value - * in the candidate string will be replaced. We also have the opportuning to ignore certain parts of the string by + * This method anonymizes a string based on a template string. If the template + * contains {} the corresponding value + * in the candidate string will be replaced. We also have the opportuning to + * ignore certain parts of the string by * passing []. *

* Author : gandomi * * @param in_templateString A string that is to be parsed - * @param in_candidateString A list of parse definitions that will be used to fetch the values in the given string + * @param in_candidateString A list of parse definitions that will be used to + * fetch the values in the given string * @return A string that is anonymized based on the template string */ public static String anonymizeString(String in_templateString, String in_candidateString) { @@ -391,13 +429,13 @@ public static String anonymizeString(String in_templateString, String in_candida l_keep = (l_keep < 0) ? NOT_FOUND_COEF : l_keep; int l_escapeIdx = Math.min(l_replace, l_keep); - //If replace is before keep recursively call the function up to the keep + // If replace is before keep recursively call the function up to the keep if (l_replace < l_keep) { int nextCandidateIdx = fetchNextExtractionIdxOfCandidate(in_templateString, in_candidateString, l_escapeIdx); - lr_string.append(in_templateString.substring(0, l_escapeIdx+2)); + lr_string.append(in_templateString.substring(0, l_escapeIdx + 2)); if (l_escapeIdx + 2 < in_templateString.length()) { lr_string.append(anonymizeString(in_templateString.substring(l_escapeIdx + 2), @@ -410,11 +448,11 @@ public static String anonymizeString(String in_templateString, String in_candida lr_string.append(in_candidateString.substring(0, nextCandidateIdx)); - //If keep is before replace recursively call the function up to the replace + // If keep is before replace recursively call the function up to the replace lr_string.append(anonymizeString(in_templateString.substring(l_escapeIdx + 2), in_candidateString.substring(nextCandidateIdx))); } else { - //If both are equal we can replace the values + // If both are equal we can replace the values lr_string.append(in_candidateString); } @@ -424,12 +462,13 @@ public static String anonymizeString(String in_templateString, String in_candida private static int fetchNextExtractionIdxOfCandidate(String in_templateString, String in_candidateString, int in_fromIdx) { - //find the next point of interest + // find the next point of interest int candSearchString = Math.min(in_templateString.indexOf("{}", in_fromIdx + 1) * -1, in_templateString.indexOf("[]", in_fromIdx + 1) * -1) * -1; return in_candidateString.indexOf( - (candSearchString < 0) ? in_templateString.substring(in_fromIdx + 2) : in_templateString.substring( - in_fromIdx + 2, candSearchString)); + (candSearchString < 0) ? in_templateString.substring(in_fromIdx + 2) + : in_templateString.substring( + in_fromIdx + 2, candSearchString)); } } diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java new file mode 100644 index 0000000..75e07ac --- /dev/null +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java @@ -0,0 +1,24 @@ +/* + * Copyright 2022 Adobe + * All Rights Reserved. + * + * NOTICE: Adobe permits you to use, modify, and distribute this file in + * accordance with the terms of the Adobe license agreement accompanying + * it. 
+ */ +package com.adobe.campaign.tests.logparser.utils; + +/** + * Utility class for memory-related operations + */ +public class MemoryUtils { + + /** + * Gets the current heap size in megabytes + * + * @return the current heap size in MB + */ + public static long getCurrentHeapSizeMB() { + return Runtime.getRuntime().totalMemory() / (1024 * 1024); + } +} \ No newline at end of file diff --git a/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java b/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java index 5075874..05b4edf 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java @@ -12,1487 +12,1572 @@ import static org.hamcrest.Matchers.*; import static org.testng.Assert.assertThrows; +import java.io.File; import java.util.*; import org.hamcrest.Matchers; +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import com.adobe.campaign.tests.logparser.exceptions.StringParseException; public class ParseTesting { - // private final String STD_GREP_STRING = IntegroGrepFactory.ACS_RestPath.regularExpression; + // private final String STD_GREP_STRING = + // IntegroGrepFactory.ACS_RestPath.regularExpression; + + @AfterMethod + @BeforeMethod + public void afterMethod() { + System.clearProperty("PROP_LOGPARSER_FILEENTRY_LIMIT"); + ParseGuardRails.reset(); + } - @Test - public void testItems() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + @Test + public void testItems() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - //Create a parse definition - ParseDefinitionEntry l_dateDefinition = new ParseDefinitionEntry(); + // Create a parse definition + ParseDefinitionEntry l_dateDefinition = new ParseDefinitionEntry(); - l_dateDefinition.setTitle("logDate"); - l_dateDefinition.setStart("["); - l_dateDefinition.setEnd("]"); + l_dateDefinition.setTitle("logDate"); + l_dateDefinition.setStart("["); + l_dateDefinition.setEnd("]"); - assertThat("We should have the correct definitions for the name", l_dateDefinition.getTitle(), - is(equalTo("logDate"))); - assertThat("We should have the correct definitions for the start string", l_dateDefinition.getStart(), - is(equalTo("["))); - assertThat("We should have the correct definitions for the end string", l_dateDefinition.getEnd(), - is(equalTo("]"))); + assertThat("We should have the correct definitions for the name", l_dateDefinition.getTitle(), + is(equalTo("logDate"))); + assertThat("We should have the correct definitions for the start string", l_dateDefinition.getStart(), + is(equalTo("["))); + assertThat("We should have the correct definitions for the end string", l_dateDefinition.getEnd(), + is(equalTo("]"))); - Map l_result = StringParseFactory.parseString(l_apacheLogString, l_dateDefinition); + Map l_result = StringParseFactory.parseString(l_apacheLogString, l_dateDefinition); - assertThat("We should have a result", l_result, is(notNullValue())); + assertThat("We should have a result", l_result, is(notNullValue())); - assertThat("We should have 
an entry for logDate", l_result.containsKey("logDate")); - assertThat("We should have the correct value for logDate", l_result.get("logDate"), - is(equalTo("02/Apr/2020:08:08:28 +0200"))); + assertThat("We should have an entry for logDate", l_result.containsKey("logDate")); + assertThat("We should have the correct value for logDate", l_result.get("logDate"), + is(equalTo("02/Apr/2020:08:08:28 +0200"))); - } + } - @Test - public void testItemParseDefinitionOfASimpleDate() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + @Test + public void testItemParseDefinitionOfASimpleDate() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - //Create a parse definition - ParseDefinitionEntry l_dateDefinition = new ParseDefinitionEntry(); + // Create a parse definition + ParseDefinitionEntry l_dateDefinition = new ParseDefinitionEntry(); - l_dateDefinition.setTitle("logDate"); - l_dateDefinition.setStart("["); - l_dateDefinition.setEnd("]"); + l_dateDefinition.setTitle("logDate"); + l_dateDefinition.setStart("["); + l_dateDefinition.setEnd("]"); - assertThat("We should have the correct value for logDate", - StringParseFactory.fetchValue(l_apacheLogString, l_dateDefinition), - is(equalTo("02/Apr/2020:08:08:28 +0200"))); + assertThat("We should have the correct value for logDate", + StringParseFactory.fetchValue(l_apacheLogString, l_dateDefinition), + is(equalTo("02/Apr/2020:08:08:28 +0200"))); - } + } + @Test(description = "A case where the separators are the same and larger than 1 character") + public void testItemParseDefinitionOfASimpleDateCase2() throws StringParseException { + String l_apacheLogString = "2330:DEBUG | 2020-04-03 17:46:38 | [main] core.NextTests (NextTests.java:209) - Before driver instantiation"; - @Test(description = "A case where the separators are the same and larger than 1 character") - public void testItemParseDefinitionOfASimpleDateCase2() throws StringParseException { - String l_apacheLogString = "2330:DEBUG | 2020-04-03 17:46:38 | [main] core.NextTests (NextTests.java:209) - Before driver instantiation"; + // Create a parse definition + ParseDefinitionEntry l_dateDefinition = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_dateDefinition = new ParseDefinitionEntry(); + l_dateDefinition.setTitle("logDate"); + l_dateDefinition.setStart(" | "); + l_dateDefinition.setEnd(" | "); - l_dateDefinition.setTitle("logDate"); - l_dateDefinition.setStart(" | "); - l_dateDefinition.setEnd(" | "); + assertThat("We should have the correct value for logDate", + StringParseFactory.fetchValue(l_apacheLogString, l_dateDefinition), + is(equalTo("2020-04-03 17:46:38"))); - assertThat("We should have the correct value for logDate", - StringParseFactory.fetchValue(l_apacheLogString, l_dateDefinition), - is(equalTo("2020-04-03 17:46:38"))); + } - } + @Test + public void testVerbDefinition() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void 
testVerbDefinition() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + assertThat("We should have the correct value for verb", + StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), + is(equalTo("GET"))); - assertThat("We should have the correct value for verb", - StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), is(equalTo("GET"))); + } - } + @Test + public void testImmediatelyAfter() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testImmediatelyAfter() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinition l_parseD = new ParseDefinition("testing immediatelyAfter"); + ParseDefinitionEntry l_def1 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinition l_parseD = new ParseDefinition("testing immediatelyAfter"); - ParseDefinitionEntry l_def1 = new ParseDefinitionEntry(); + l_def1.setTitle("section"); + l_def1.setStart("afthost32."); + l_def1.setEnd("."); - l_def1.setTitle("section"); - l_def1.setStart("afthost32."); - l_def1.setEnd("."); + ParseDefinitionEntry l_def2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_def2 = new ParseDefinitionEntry(); + l_def2.setTitle("product"); + l_def2.setStart("."); + l_def2.setEnd("."); - l_def2.setTitle("product"); - l_def2.setStart("."); - l_def2.setEnd("."); + l_parseD.addEntry(l_def1); + l_parseD.addEntry(l_def2); - l_parseD.addEntry(l_def1); - l_parseD.addEntry(l_def2); + Map l_currentValues = StringParseFactory.parseString(l_apacheLogString, l_parseD); - Map l_currentValues = StringParseFactory.parseString(l_apacheLogString, l_parseD); + assertThat("We should have two entries", l_currentValues.keySet(), + Matchers.containsInAnyOrder("section", "product")); - assertThat("We should have two entries", l_currentValues.keySet(), Matchers.containsInAnyOrder("section", "product")); + assertThat("We should have the correct value for section", l_currentValues.get("section"), + is(equalTo("qa"))); - assertThat("We should have the correct value for section", l_currentValues.get("section"), - is(equalTo("qa"))); + assertThat("We should have the correct value for product", l_currentValues.get("product"), + is(equalTo("campaign"))); - assertThat("We should have the correct value for product", l_currentValues.get("product"), is(equalTo("campaign"))); + } - } + @Test + public void testAPIDefinition() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 
+0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testAPIDefinition() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("api"); + l_verbDefinition2.setStart(" /rest/head/"); + l_verbDefinition2.setEnd(" "); - l_verbDefinition2.setTitle("api"); - l_verbDefinition2.setStart(" /rest/head/"); - l_verbDefinition2.setEnd(" "); + assertThat("We should have the correct value for api", + StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), + is(equalTo("workflow/WKF193"))); - assertThat("We should have the correct value for api", - StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), - is(equalTo("workflow/WKF193"))); + } - } + @Test + public void testCaseSensitive() throws StringParseException { + String logString = "J_BfmC8mfw==|soapaction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591"; - @Test - public void testCaseSensitive() throws StringParseException { - String logString = "J_BfmC8mfw==|soapaction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591"; + // Create a parse definition + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart("soapAction:"); + l_apiDefinition.setEnd("#"); + l_apiDefinition.setCaseSensitive(false); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart("soapAction:"); - l_apiDefinition.setEnd("#"); - l_apiDefinition.setCaseSensitive(false); + assertThat(l_apiDefinition.isCaseSensitive(), is(equalTo(false))); - assertThat(l_apiDefinition.isCaseSensitive(), is(equalTo(false))); + assertThat("We should have found the correct path", + StringParseFactory.fetchValue(logString, l_apiDefinition), + is(equalTo("xtk%3aqueryDef"))); - assertThat("We should have found the correct path", - StringParseFactory.fetchValue(logString, l_apiDefinition), is(equalTo("xtk%3aqueryDef"))); + Map l_entries = StringParseFactory.parseString(logString, l_apiDefinition); - Map l_entries = StringParseFactory.parseString(logString, l_apiDefinition); + assertThat("We should have the correct value for api", l_entries.get("path"), + is(equalTo("xtk%3aqueryDef"))); - assertThat("We should have the correct value for api", l_entries.get("path"), - is(equalTo("xtk%3aqueryDef"))); + } - } + @Test + public void testCaseSensitive2() throws StringParseException { + String logString = "J_BfmC8mfw==|soapAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591"; - @Test - public void testCaseSensitive2() throws StringParseException { - String logString = "J_BfmC8mfw==|soapAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591"; + // Create a parse definition + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart("SOAPACTION:"); + l_apiDefinition.setEnd("#"); + l_apiDefinition.setCaseSensitive(false); - 
l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart("SOAPACTION:"); - l_apiDefinition.setEnd("#"); - l_apiDefinition.setCaseSensitive(false); + assertThat(l_apiDefinition.isCaseSensitive(), is(equalTo(false))); - assertThat(l_apiDefinition.isCaseSensitive(), is(equalTo(false))); + assertThat("We should have found the correct path", + StringParseFactory.fetchValue(logString, l_apiDefinition), + is(equalTo("xtk%3aqueryDef"))); - assertThat("We should have found the correct path", - StringParseFactory.fetchValue(logString, l_apiDefinition), is(equalTo("xtk%3aqueryDef"))); + Map l_entries = StringParseFactory.parseString(logString, l_apiDefinition); - Map l_entries = StringParseFactory.parseString(logString, l_apiDefinition); + assertThat("We should have the correct value for api", l_entries.get("path"), + is(equalTo("xtk%3aqueryDef"))); - assertThat("We should have the correct value for api", l_entries.get("path"), - is(equalTo("xtk%3aqueryDef"))); + } - } + @Test(description = "In this case we check if the class ApacheLogEntry correctly stores a path with filters") + public void testAPIDefinition2() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:06:25:44 +0200] \"POST /rest/head/session HTTP/1.1\" 201 5385 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test(description = "In this case we check if the class ApacheLogEntry correctly stores a path with filters") - public void testAPIDefinition2() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:06:25:44 +0200] \"POST /rest/head/session HTTP/1.1\" 201 5385 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd(" "); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd(" "); + ParseDefinition l_parseDefinition = new ParseDefinition("rest calls"); + l_parseDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); - ParseDefinition l_parseDefinition = new ParseDefinition("rest calls"); - l_parseDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); + Map l_currentValues = StringParseFactory.parseString(l_apacheLogString, + l_parseDefinition); - Map l_currentValues = StringParseFactory.parseString(l_apacheLogString, - l_parseDefinition); + assertThat("We should have two entries", l_currentValues.size(), is(equalTo(2))); - assertThat("We should have two entries", l_currentValues.size(), is(equalTo(2))); + assertThat("We should be able to find our path", l_currentValues.get("path"), is(equalTo("session"))); - assertThat("We should be able to find our path", l_currentValues.get("path"), is(equalTo("session"))); + GenericEntry l_entry = new GenericEntry(l_parseDefinition); + l_entry.setValuesFromMap(l_currentValues); - 
GenericEntry l_entry = new GenericEntry(l_parseDefinition); - l_entry.setValuesFromMap(l_currentValues); + assertThat("We should have the correct value for api", l_entry.fetchValueMap().get("path"), + is(equalTo("session"))); - assertThat("We should have the correct value for api", l_entry.fetchValueMap().get("path"), - is(equalTo("session"))); + } - } + @Test + public void testAPIDefinitionRestHead() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testAPIDefinitionRestHead() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("api"); + l_verbDefinition2.setStart(" /rest/head/"); + l_verbDefinition2.setEnd("/"); - l_verbDefinition2.setTitle("api"); - l_verbDefinition2.setStart(" /rest/head/"); - l_verbDefinition2.setEnd("/"); + assertThat("We should have the correct value for api", + StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), + is(equalTo("workflow"))); - assertThat("We should have the correct value for api", - StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), is(equalTo("workflow"))); + } - } + @Test + public void testAPIDefinitionRestHeadNegative() { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /xtk/logon.jssp HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testAPIDefinitionRestHeadNegative() { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /xtk/logon.jssp HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("api"); + l_verbDefinition2.setStart(" /rest/head/"); + l_verbDefinition2.setEnd("/"); - l_verbDefinition2.setTitle("api"); - l_verbDefinition2.setStart(" /rest/head/"); - l_verbDefinition2.setEnd("/"); + assertThrows(StringParseException.class, + () -> StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2)); - assertThrows(StringParseException.class, - () -> StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2)); + } - } + @Test + public void testAPIDefinitionRestHeadNegative2() { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /xtk/logon.jssp HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testAPIDefinitionRestHeadNegative2() { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /xtk/logon.jssp HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new 
ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("api"); + l_verbDefinition2.setStart(" /xtk/"); + l_verbDefinition2.setEnd("@"); - l_verbDefinition2.setTitle("api"); - l_verbDefinition2.setStart(" /xtk/"); - l_verbDefinition2.setEnd("@"); + assertThrows(StringParseException.class, + () -> StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2)); - assertThrows(StringParseException.class, - () -> StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2)); + } - } + @Test + public void testAnswer() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testAnswer() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("response"); + l_verbDefinition2.setStart("\" "); + l_verbDefinition2.setEnd(" "); - l_verbDefinition2.setTitle("response"); - l_verbDefinition2.setStart("\" "); - l_verbDefinition2.setEnd(" "); + assertThat("We should have the correct value for api", + StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), + is(equalTo("200"))); + } - assertThat("We should have the correct value for api", - StringParseFactory.fetchValue(l_apacheLogString, l_verbDefinition2), is(equalTo("200"))); - } + @Test(description = "Checking that we correctly identify the message") + public void testingStartOfLine() throws StringParseException { + String logString = "2020-06-15T17:17:20.728Z 70011 70030 2 info soap"; - @Test(description = "Checking that we correctly identify the message") - public void testingStartOfLine() throws StringParseException { - String logString = "2020-06-15T17:17:20.728Z 70011 70030 2 info soap"; + ParseDefinitionEntry l_definition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_definition = new ParseDefinitionEntry(); + l_definition.setTitle("Time"); + l_definition.setStart(null); + l_definition.setEnd(" "); - l_definition.setTitle("Time"); - l_definition.setStart(null); - l_definition.setEnd(" "); + assertThat("The end should be identitified as SOL", l_definition.isStartStartOfLine()); - assertThat("The end should be identitified as SOL", l_definition.isStartStartOfLine()); + assertThat("The end position should be the length of the string", + l_definition.fetchEndPosition(logString), is(equalTo(24))); - assertThat("The end position should be the length of the string", - l_definition.fetchEndPosition(logString), is(equalTo(24))); + assertThat(StringParseFactory.fetchValue(logString, l_definition), + is(equalTo("2020-06-15T17:17:20.728Z"))); - assertThat(StringParseFactory.fetchValue(logString, l_definition), - is(equalTo("2020-06-15T17:17:20.728Z"))); + ParseDefinitionEntry l_definition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_definition2 = new ParseDefinitionEntry(); + l_definition2.setTitle("Time"); + l_definition2.setStartStartOfLine(); + l_definition2.setEnd(" "); - 
l_definition2.setTitle("Time"); - l_definition2.setStartStartOfLine(); - l_definition2.setEnd(" "); + assertThat("The end should be identitified as SOL", l_definition2.isStartStartOfLine()); - assertThat("The end should be identitified as SOL", l_definition2.isStartStartOfLine()); + } - } + @Test(description = "Checking that we correctly identify the message") + public void testingStartOfLine2() throws StringParseException { + String logString = "2020-06-15T17:17:20.728Z 70011 70030 2 info soap"; - @Test(description = "Checking that we correctly identify the message") - public void testingStartOfLine2() throws StringParseException { - String logString = "2020-06-15T17:17:20.728Z 70011 70030 2 info soap"; + ParseDefinitionEntry l_definition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_definition = new ParseDefinitionEntry(); + l_definition.setTitle("Time"); + l_definition.setEnd(" "); - l_definition.setTitle("Time"); - l_definition.setEnd(" "); + assertThat("The end should be identitified as SOL", l_definition.isStartStartOfLine()); - assertThat("The end should be identitified as SOL", l_definition.isStartStartOfLine()); + assertThat("The end position should be the length of the string", + l_definition.fetchEndPosition(logString), is(equalTo(24))); - assertThat("The end position should be the length of the string", - l_definition.fetchEndPosition(logString), is(equalTo(24))); + assertThat(StringParseFactory.fetchValue(logString, l_definition), + is(equalTo("2020-06-15T17:17:20.728Z"))); - assertThat(StringParseFactory.fetchValue(logString, l_definition), - is(equalTo("2020-06-15T17:17:20.728Z"))); + } - } + @Test(description = "Checking that we correctly identify the message") + public void testingEndOfLine() throws StringParseException { + String l_resultString = " - Before driver instantiation"; - @Test(description = "Checking that we correctly identify the message") - public void testingEndOfLine() throws StringParseException { - String l_resultString = " - Before driver instantiation"; + ParseDefinitionEntry l_definition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_definition = new ParseDefinitionEntry(); + l_definition.setTitle("sourceFileLine"); + l_definition.setStart(" - "); + l_definition.setEndEOL(); - l_definition.setTitle("sourceFileLine"); - l_definition.setStart(" - "); - l_definition.setEndEOL(); + assertThat("The end should be identitified as EOL", l_definition.isEndEOL()); - assertThat("The end should be identitified as EOL", l_definition.isEndEOL()); + assertThat("The end position should be the length of the string", + l_definition.fetchEndPosition(l_resultString), is(equalTo(l_resultString.length()))); - assertThat("The end position should be the length of the string", - l_definition.fetchEndPosition(l_resultString), is(equalTo(l_resultString.length()))); + assertThat(StringParseFactory.fetchValue(l_resultString, l_definition), + is(equalTo("Before driver instantiation"))); - assertThat(StringParseFactory.fetchValue(l_resultString, l_definition), - is(equalTo("Before driver instantiation"))); + } - } + @Test + public void testStringCompliance() { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testStringCompliance() { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 
HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd("/"); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd("/"); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_verbDefinition2); + l_definitionList.add(l_apiDefinition); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_verbDefinition2); - l_definitionList.add(l_apiDefinition); + assertThat("The given string does not comply to the expected format", + StringParseFactory.isStringCompliant(l_apacheLogString, l_definitionList)); + } - assertThat("The given string does not comply to the expected format", - StringParseFactory.isStringCompliant(l_apacheLogString, l_definitionList)); - } + @Test + public void testStringComplianceNegative1() { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.61 - - [02/Apr/2020:06:26:09 +0200] \"GET /xtk/logon.jssp?code=..--7--D4C89vHhLLCL_yJguuL3kiR3PRM0T-UaOCk_Q162y6HIbdL2YA9L1ErbA2NLnen-- HTTP/1.1\" 302 938 \"https://auth-stg1.services.adobe.com/en_US/index.html?callback=https%3A%2F%2Fims-na1-stg1.adobelogin.com%2Fims%2Fadobeid%2FCampaignRD1%2FAdobeID%2Fcode%3Fredirect_uri%3Dhttps%253A%252F%252Fafthost32.qa.campaign.adobe.com%252Fxtk%252Flogon.jssp&client_id=CampaignRD1&scope=AdobeID%2Cperson%2Csession%2Cadditional_info.projectedProductContext%2Cread_organizations%2Cadditional_info.user_image_url%2Cwrite_pc%2Copenid%2Ctriggers%2Caudiencemanager_api&denied_callback=https%3A%2F%2Fims-na1-stg1.adobelogin.com%2Fims%2Fdenied%2FCampaignRD1%3Fredirect_uri%3Dhttps%253A%252F%252Fafthost32.qa.campaign.adobe.com%252Fxtk%252Flogon.jssp%26response_type%3Dcode&relay=f1cff3df-18ac-44a2-bc5f-b89331c11084&locale=en_US&flow_type=code&idp_flow_type=login\" \"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36\""; - @Test - public void testStringComplianceNegative1() { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.61 - - [02/Apr/2020:06:26:09 +0200] \"GET /xtk/logon.jssp?code=..--7--D4C89vHhLLCL_yJguuL3kiR3PRM0T-UaOCk_Q162y6HIbdL2YA9L1ErbA2NLnen-- HTTP/1.1\" 302 938 \"https://auth-stg1.services.adobe.com/en_US/index.html?callback=https%3A%2F%2Fims-na1-stg1.adobelogin.com%2Fims%2Fadobeid%2FCampaignRD1%2FAdobeID%2Fcode%3Fredirect_uri%3Dhttps%253A%252F%252Fafthost32.qa.campaign.adobe.com%252Fxtk%252Flogon.jssp&client_id=CampaignRD1&scope=AdobeID%2Cperson%2Csession%2Cadditional_info.projectedProductContext%2Cread_organizations%2Cadditional_info.user_image_url%2Cwrite_pc%2Copenid%2Ctriggers%2Caudiencemanager_api&denied_callback=https%3A%2F%2Fims-na1-stg1.adobelogin.com%2Fims%2Fdenied%2FCampaignRD1%3Fredirect_uri%3Dhttps%253A%252F%252Fafthost32.qa.campaign.adobe.com%252Fxtk%252Flogon.jssp%26response_type%3Dcode&relay=f1cff3df-18ac-44a2-bc5f-b89331c11084&locale=en_US&flow_type=code&idp_flow_type=login\" 
\"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/80.0.3987.149 Safari/537.36\""; + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd("/"); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd("/"); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_verbDefinition2); + l_definitionList.add(l_apiDefinition); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_verbDefinition2); - l_definitionList.add(l_apiDefinition); + assertThat("The given string does not comply to the expected format", + not(StringParseFactory.isStringCompliant(l_apacheLogString, l_definitionList))); + } - assertThat("The given string does not comply to the expected format", - not(StringParseFactory.isStringCompliant(l_apacheLogString, l_definitionList))); - } + @Test + public void testStringComplianceNegative_searchStringDifferentCase() { + String l_apacheLogString = "-:443 10.10.247.65 - - [02/Apr/2020:07:11:04 +0200] \"-\" 408 4640 \"-\" \"-\""; - @Test - public void testStringComplianceNegative_searchStringDifferentCase() { - String l_apacheLogString = "-:443 10.10.247.65 - - [02/Apr/2020:07:11:04 +0200] \"-\" 408 4640 \"-\" \"-\""; + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd("/"); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd("/"); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_verbDefinition2); + l_definitionList.add(l_apiDefinition); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_verbDefinition2); - l_definitionList.add(l_apiDefinition); + assertThat("The given string does not comply to the expected format", + not(StringParseFactory.isStringCompliant(l_apacheLogString, l_definitionList))); + } - assertThat("The given string does not comply to the expected format", - not(StringParseFactory.isStringCompliant(l_apacheLogString, l_definitionList))); - } + @Test + public void testTwoItems() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testTwoItems() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 
+0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_dateDefinition1 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_dateDefinition1 = new ParseDefinitionEntry(); + l_dateDefinition1.setTitle("logDate"); + l_dateDefinition1.setStart("["); + l_dateDefinition1.setEnd("]"); - l_dateDefinition1.setTitle("logDate"); - l_dateDefinition1.setStart("["); - l_dateDefinition1.setEnd("]"); + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_dateDefinition1); + l_definitionList.add(l_verbDefinition2); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_dateDefinition1); - l_definitionList.add(l_verbDefinition2); + Map l_result = StringParseFactory.parseString(l_apacheLogString, l_definitionList); - Map l_result = StringParseFactory.parseString(l_apacheLogString, l_definitionList); + assertThat("We should have a result", l_result, is(notNullValue())); - assertThat("We should have a result", l_result, is(notNullValue())); + assertThat("We should have two entries", l_result.size(), is(equalTo(2))); - assertThat("We should have two entries", l_result.size(), is(equalTo(2))); + assertThat("We should have an entry for logDate", l_result.containsKey("logDate")); + assertThat("We should have the correct value for logDate", l_result.get("logDate"), + is(equalTo("02/Apr/2020:08:08:28 +0200"))); - assertThat("We should have an entry for logDate", l_result.containsKey("logDate")); - assertThat("We should have the correct value for logDate", l_result.get("logDate"), - is(equalTo("02/Apr/2020:08:08:28 +0200"))); + assertThat("We should have an entry for verb", l_result.containsKey("verb")); + assertThat("We should have the correct value for logDate", l_result.get("verb"), is(equalTo("GET"))); - assertThat("We should have an entry for verb", l_result.containsKey("verb")); - assertThat("We should have the correct value for logDate", l_result.get("verb"), is(equalTo("GET"))); + } - } + @Test + public void testThreeItems() throws StringParseException { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - @Test - public void testThreeItems() throws StringParseException { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_dateDefinition1 = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_dateDefinition1 = new ParseDefinitionEntry(); + l_dateDefinition1.setTitle("logDate"); + l_dateDefinition1.setStart("["); + l_dateDefinition1.setEnd("]"); - l_dateDefinition1.setTitle("logDate"); - l_dateDefinition1.setStart("["); - l_dateDefinition1.setEnd("]"); + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new 
ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /"); + l_apiDefinition.setEnd(" "); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /"); - l_apiDefinition.setEnd(" "); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_dateDefinition1); + l_definitionList.add(l_verbDefinition2); + l_definitionList.add(l_apiDefinition); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_dateDefinition1); - l_definitionList.add(l_verbDefinition2); - l_definitionList.add(l_apiDefinition); + Map l_result = StringParseFactory.parseString(l_apacheLogString, l_definitionList); - Map l_result = StringParseFactory.parseString(l_apacheLogString, l_definitionList); + assertThat("We should have a result", l_result, is(notNullValue())); - assertThat("We should have a result", l_result, is(notNullValue())); + assertThat("We should have two entries", l_result.size(), is(equalTo(3))); - assertThat("We should have two entries", l_result.size(), is(equalTo(3))); + assertThat("We should have an entry for logDate", l_result.containsKey("logDate")); + assertThat("We should have the correct value for logDate", l_result.get("logDate"), + is(equalTo("02/Apr/2020:08:08:28 +0200"))); - assertThat("We should have an entry for logDate", l_result.containsKey("logDate")); - assertThat("We should have the correct value for logDate", l_result.get("logDate"), - is(equalTo("02/Apr/2020:08:08:28 +0200"))); + assertThat("We should have an entry for verb", l_result.containsKey("verb")); + assertThat("We should have the correct value for logDate", l_result.get("verb"), is(equalTo("GET"))); - assertThat("We should have an entry for verb", l_result.containsKey("verb")); - assertThat("We should have the correct value for logDate", l_result.get("verb"), is(equalTo("GET"))); + assertThat("We should have an entry for the API", l_result.containsKey("path")); + assertThat("We should have the correct value for logDate", l_result.get("path"), + is(equalTo("rest/head/workflow/WKF193"))); + } - assertThat("We should have an entry for the API", l_result.containsKey("path")); - assertThat("We should have the correct value for logDate", l_result.get("path"), - is(equalTo("rest/head/workflow/WKF193"))); - } + @Test + public void testCreateApacheProfileFile() + throws StringParseException { - @Test - public void testCreateApacheProfileFile() - throws StringParseException { + // Create a parse definition - //Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd(" "); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - 
l_apiDefinition.setEnd(" "); + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); + l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); - l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); + final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; - final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); + assertThat("We should have entries", l_entries.size(), is(lessThan(19))); + assertThat("We should have the key for amcDataSource", + l_entries.containsKey("amcDataSource/AMCDS745177#GET")); - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); - assertThat("We should have entries", l_entries.size(), is(lessThan(19))); - assertThat("We should have the key for amcDataSource", - l_entries.containsKey("amcDataSource/AMCDS745177#GET")); + for (GenericEntry lt_entry : l_entries.values()) { + System.out.println(lt_entry.fetchPrintOut()); + } - for (GenericEntry lt_entry : l_entries.values()) { - System.out.println(lt_entry.fetchPrintOut()); } - } + @Test + public void testCreateApacheProfileFile_storeFile() + throws StringParseException { - @Test - public void testCreateApacheProfileFile_storeFile() - throws StringParseException { + // Create a parse definition - //Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd(" "); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd(" "); + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); + l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); + l_pDefinition.setStoreFileName(true); + l_pDefinition.setStoreFilePath(true); - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); - l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); - l_pDefinition.setStoreFileName(true); - l_pDefinition.setStoreFilePath(true); + final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; - final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; + Map 
l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); + assertThat("We should have entries", l_entries.size(), is(lessThan(19))); + assertThat("We should have the key for amcDataSource", + l_entries.containsKey("amcDataSource/AMCDS745177#GET")); + assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFileName(), + is(equalTo("ssl_accessSmall.log"))); + assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFilePath(), + is(equalTo("src/test/resources/logTests/apache"))); - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); - assertThat("We should have entries", l_entries.size(), is(lessThan(19))); - assertThat("We should have the key for amcDataSource", - l_entries.containsKey("amcDataSource/AMCDS745177#GET")); - assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFileName(), - is(equalTo("ssl_accessSmall.log"))); - assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFilePath(), - is(equalTo("src/test/resources/logTests/apache"))); + l_entries.get("amcDataSource/AMCDS745177#GET").fetchHeaders().stream().forEach(System.out::println); + l_entries.get("amcDataSource/AMCDS745177#GET").fetchValuesAsList().stream() + .forEach(System.out::println); - l_entries.get("amcDataSource/AMCDS745177#GET").fetchHeaders().stream().forEach(System.out::println); - l_entries.get("amcDataSource/AMCDS745177#GET").fetchValuesAsList().stream().forEach(System.out::println); + for (GenericEntry lt_entry : l_entries.values()) { + System.out.println(lt_entry.fetchPrintOut()); + } + } + @Test + public void testCreateApacheProfileFile_storePathStoreFrom() + throws StringParseException { - for (GenericEntry lt_entry : l_entries.values()) { - System.out.println(lt_entry.fetchPrintOut()); - } - } + // Create a parse definition - @Test - public void testCreateApacheProfileFile_storePathStoreFrom() - throws StringParseException { + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - //Create a parse definition + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd(" "); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); + l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); + l_pDefinition.setStoreFileName(true); + l_pDefinition.setStoreFilePath(true); + l_pDefinition.setStorePathFrom("src/test/resources"); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd(" "); + final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - 
l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); - l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); - l_pDefinition.setStoreFileName(true); - l_pDefinition.setStoreFilePath(true); - l_pDefinition.setStorePathFrom("src/test/resources"); + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); - final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); + assertThat("We should have entries", l_entries.size(), is(lessThan(19))); + assertThat("We should have the key for amcDataSource", + l_entries.containsKey("amcDataSource/AMCDS745177#GET")); + assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFileName(), + is(equalTo("ssl_accessSmall.log"))); - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); + assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFilePath(), + is(equalTo("logTests/apache"))); - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); - assertThat("We should have entries", l_entries.size(), is(lessThan(19))); - assertThat("We should have the key for amcDataSource", - l_entries.containsKey("amcDataSource/AMCDS745177#GET")); - assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFileName(), - is(equalTo("ssl_accessSmall.log"))); + l_entries.get("amcDataSource/AMCDS745177#GET").fetchHeaders().stream().forEach(System.out::println); + l_entries.get("amcDataSource/AMCDS745177#GET").fetchValuesAsList().stream() + .forEach(System.out::println); - assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFilePath(), - is(equalTo("logTests/apache"))); + for (GenericEntry lt_entry : l_entries.values()) { + System.out.println(lt_entry.fetchPrintOut()); + } + } - l_entries.get("amcDataSource/AMCDS745177#GET").fetchHeaders().stream().forEach(System.out::println); - l_entries.get("amcDataSource/AMCDS745177#GET").fetchValuesAsList().stream().forEach(System.out::println); + @Test + public void testCreateApacheProfileFile_storePathButNotFile() + throws StringParseException { + // Create a parse definition - for (GenericEntry lt_entry : l_entries.values()) { - System.out.println(lt_entry.fetchPrintOut()); - } - } + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - @Test - public void testCreateApacheProfileFile_storePathButNotFile() - throws StringParseException { + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - //Create a parse definition + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd(" "); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); + l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); + l_pDefinition.setStoreFileName(false); + l_pDefinition.setStoreFilePath(true); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + 
final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd(" "); + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); - l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); - l_pDefinition.setStoreFileName(false); - l_pDefinition.setStoreFilePath(true); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); + assertThat("We should have entries", l_entries.size(), is(lessThan(19))); + assertThat("We should have the key for amcDataSource", + l_entries.containsKey("amcDataSource/AMCDS745177#GET")); + assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFileName(), + nullValue()); + assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFilePath(), + is(equalTo("src/test/resources/logTests/apache"))); - final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; + l_entries.get("amcDataSource/AMCDS745177#GET").fetchHeaders().stream().forEach(System.out::println); + l_entries.get("amcDataSource/AMCDS745177#GET").fetchValuesAsList().stream() + .forEach(System.out::println); - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); + for (GenericEntry lt_entry : l_entries.values()) { + System.out.println(lt_entry.fetchPrintOut()); + } + } - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); - assertThat("We should have entries", l_entries.size(), is(lessThan(19))); - assertThat("We should have the key for amcDataSource", - l_entries.containsKey("amcDataSource/AMCDS745177#GET")); - assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFileName(), - nullValue()); - assertThat(l_entries.get("amcDataSource/AMCDS745177#GET").getFilePath(), - is(equalTo("src/test/resources/logTests/apache"))); + @Test + public void testIncludingTheFileAsAnEntry() + throws StringParseException { - l_entries.get("amcDataSource/AMCDS745177#GET").fetchHeaders().stream().forEach(System.out::println); - l_entries.get("amcDataSource/AMCDS745177#GET").fetchValuesAsList().stream().forEach(System.out::println); + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - for (GenericEntry lt_entry : l_entries.values()) { - System.out.println(lt_entry.fetchPrintOut()); - } - } + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - @Test - public void testIncludingTheFileAsAnEntry() - throws StringParseException { + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart(" /rest/head/"); + l_apiDefinition.setEnd(" "); - //Create a parse definition + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); + l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; - 
l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); + assertThat("We should have entries", l_entries.size(), is(lessThan(19))); + assertThat("We should have the key for amcDataSource", + l_entries.containsKey("amcDataSource/AMCDS745177#GET")); - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart(" /rest/head/"); - l_apiDefinition.setEnd(" "); + for (GenericEntry lt_entry : l_entries.values()) { + System.out.println(lt_entry.fetchPrintOut()); + } - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2, l_apiDefinition)); - l_pDefinition.defineKeys(Arrays.asList(l_apiDefinition, l_verbDefinition2)); + } + + @Test + public void testCreateApacheProfileFile_Negative() + throws StringParseException { + + // Create a parse definition + + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - final String apacheLogFile = "src/test/resources/logTests/apache/ssl_accessSmall.log"; + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2)); - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); + Map l_entries = StringParseFactory.extractLogEntryMap(Arrays.asList(), + l_pDefinition, GenericEntry.class); - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(greaterThan(0))); - assertThat("We should have entries", l_entries.size(), is(lessThan(19))); - assertThat("We should have the key for amcDataSource", - l_entries.containsKey("amcDataSource/AMCDS745177#GET")); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(equalTo(0))); - for (GenericEntry lt_entry : l_entries.values()) { - System.out.println(lt_entry.fetchPrintOut()); } - } + @Test + public void testCreateApacheProfileFile_Negative2() + throws StringParseException { - @Test - public void testCreateApacheProfileFile_Negative() - throws StringParseException { + // Create a parse definition - //Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); + l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2)); - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2)); + final String apacheLogFile = "src/test/resources/logTests/apache/NonExistant.log"; - Map l_entries = StringParseFactory.extractLogEntryMap(Arrays.asList(), - l_pDefinition, GenericEntry.class); + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(apacheLogFile), 
l_pDefinition, GenericEntry.class); - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(equalTo(0))); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(equalTo(0))); - } + } - @Test - public void testCreateApacheProfileFile_Negative2() - throws StringParseException { + @Test + public void testStartPosition() { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - //Create a parse definition + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + assertThat("We should have the correct start position", + l_verbDefinition2.fetchStartPosition(l_apacheLogString), is(equalTo(83))); - ParseDefinition l_pDefinition = new ParseDefinition("SSL Log"); - l_pDefinition.setDefinitionEntries(Arrays.asList(l_verbDefinition2)); + assertThat("We should have -1 for the start position", l_verbDefinition2.fetchStartPosition(" "), + is(equalTo(-1))); - final String apacheLogFile = "src/test/resources/logTests/apache/NonExistant.log"; + } - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(apacheLogFile), l_pDefinition, GenericEntry.class); + @Test + public void testStartPositionCaseInsensitive() { + String logString = "+69802:5ee7914c:15|POST /nl/jsp/soaprouter.jsp HTTP/1.1|X-Security-Token:@tTD6JQ5HcTcxBqbuE1SqEpNNsMbnjOCaV_Kv5ern7fvljTyUK71i9TWy5d6HBrFgCxfzCWt5OkJcJ_BfmC8mfw==|soapaction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|Content-Type:text/plain; charset=ISO-8859-1|Host:rd-dev54.rd.campaign.adobe.com|Connection:Keep-Alive|User-Agent:Apache-HttpClient/4.5.2 (Java/1.8.0_171)|Cookie:__sessiontoken=___6fffa340-b973-4e90-a965-4ae88e713c11|Accept-Encoding:gzip,deflate"; - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(equalTo(0))); + // Create a parse definition + ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); - } + l_definitionCI.setTitle("path"); + l_definitionCI.setStart("soapAction:"); + l_definitionCI.setEnd("#"); + l_definitionCI.setCaseSensitive(false); - @Test - public void testStartPosition() { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + // Create a parse definition + ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + l_definitionCS.setTitle("path"); + l_definitionCS.setStart("soapaction:"); + l_definitionCS.setEnd("#"); + l_definitionCS.setCaseSensitive(true); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + assertThat("A start position should have been found", l_definitionCI.fetchStartPosition(logString), + is(greaterThan(0))); - assertThat("We should have the correct start position", - l_verbDefinition2.fetchStartPosition(l_apacheLogString), 
is(equalTo(83))); + assertThat("We should have the correct start position", l_definitionCS.fetchStartPosition(logString), + is(equalTo(l_definitionCI.fetchStartPosition(logString)))); + } - assertThat("We should have -1 for the start position", l_verbDefinition2.fetchStartPosition(" "), - is(equalTo(-1))); + @Test + public void testStartPositionCaseInsensitive_searchStringDifferentCase() { + String logString = "t5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|Content-Type:text/plain; "; - } + // Create a parse definition + ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); - @Test - public void testStartPositionCaseInsensitive() { - String logString = "+69802:5ee7914c:15|POST /nl/jsp/soaprouter.jsp HTTP/1.1|X-Security-Token:@tTD6JQ5HcTcxBqbuE1SqEpNNsMbnjOCaV_Kv5ern7fvljTyUK71i9TWy5d6HBrFgCxfzCWt5OkJcJ_BfmC8mfw==|soapaction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|Content-Type:text/plain; charset=ISO-8859-1|Host:rd-dev54.rd.campaign.adobe.com|Connection:Keep-Alive|User-Agent:Apache-HttpClient/4.5.2 (Java/1.8.0_171)|Cookie:__sessiontoken=___6fffa340-b973-4e90-a965-4ae88e713c11|Accept-Encoding:gzip,deflate"; + l_definitionCI.setTitle("path"); + l_definitionCI.setStart("soapAction:"); + l_definitionCI.setEnd("#"); + l_definitionCI.setCaseSensitive(false); - //Create a parse definition - ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); + // Create a parse definition + ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); - l_definitionCI.setTitle("path"); - l_definitionCI.setStart("soapAction:"); - l_definitionCI.setEnd("#"); - l_definitionCI.setCaseSensitive(false); + l_definitionCS.setTitle("path"); + l_definitionCS.setStart("SOAPAction:"); + l_definitionCS.setEnd("#"); + l_definitionCS.setCaseSensitive(true); - //Create a parse definition - ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); + assertThat("A start position should have been found", l_definitionCI.fetchStartPosition(logString), + is(greaterThan(0))); - l_definitionCS.setTitle("path"); - l_definitionCS.setStart("soapaction:"); - l_definitionCS.setEnd("#"); - l_definitionCS.setCaseSensitive(true); + assertThat("We should have the correct start position", l_definitionCS.fetchStartPosition(logString), + is(equalTo(l_definitionCI.fetchStartPosition(logString)))); + } - assertThat("A start position should have been found", l_definitionCI.fetchStartPosition(logString), - is(greaterThan(0))); + @Test + public void testEndPosition() { + String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; - assertThat("We should have the correct start position", l_definitionCS.fetchStartPosition(logString), - is(equalTo(l_definitionCI.fetchStartPosition(logString)))); - } + // Create a parse definition + ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); - @Test - public void testStartPositionCaseInsensitive_searchStringDifferentCase() { - String logString = "t5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|Content-Type:text/plain; "; + l_verbDefinition2.setTitle("verb"); + l_verbDefinition2.setStart("\""); + l_verbDefinition2.setEnd(" /"); - //Create a parse definition - ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); + assertThat("We should have the correct start position", + l_verbDefinition2.fetchEndPosition(l_apacheLogString), 
is(equalTo(86))); - l_definitionCI.setTitle("path"); - l_definitionCI.setStart("soapAction:"); - l_definitionCI.setEnd("#"); - l_definitionCI.setCaseSensitive(false); + assertThat("We should have -1 for the start position", l_verbDefinition2.fetchStartPosition(" "), + is(equalTo(-1))); - //Create a parse definition - ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); + } - l_definitionCS.setTitle("path"); - l_definitionCS.setStart("SOAPAction:"); - l_definitionCS.setEnd("#"); - l_definitionCS.setCaseSensitive(true); + @Test + public void testEndPositionCaseInsensitive() { + String logString = "+69802:5ee7914c:15|POST /nl/jsp/soaprouter.jsp HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|soapaction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|Content-Type:text/plain; charset=ISO-8859-1|Host:rd-dev54.rd.campaign.adobe.com|Connection:Keep-Alive|User-Agent:Apache-HttpClient/4.5.2 (Java/1.8.0_171)|Cookie:__sessiontoken=___6fffa340-b973-4e90-a965-4ae88e713c11|Accept-Encoding:gzip,deflate"; - assertThat("A start position should have been found", l_definitionCI.fetchStartPosition(logString), - is(greaterThan(0))); + // Create a parse definition + ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); - assertThat("We should have the correct start position", l_definitionCS.fetchStartPosition(logString), - is(equalTo(l_definitionCI.fetchStartPosition(logString)))); - } + l_definitionCI.setTitle("path"); + l_definitionCI.setStart("X-Security-Token"); + l_definitionCI.setEnd("soapAction:"); - @Test - public void testEndPosition() { - String l_apacheLogString = "afthost32.qa.campaign.adobe.com:443 10.10.247.85 - - [02/Apr/2020:08:08:28 +0200] \"GET /rest/head/workflow/WKF193 HTTP/1.1\" 200 20951 \"-\" \"Apache-HttpClient/4.5.2 (Java/1.8.0_242)\""; + l_definitionCI.setCaseSensitive(false); - //Create a parse definition - ParseDefinitionEntry l_verbDefinition2 = new ParseDefinitionEntry(); + // Create a parse definition + ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); - l_verbDefinition2.setTitle("verb"); - l_verbDefinition2.setStart("\""); - l_verbDefinition2.setEnd(" /"); + l_definitionCS.setTitle("path"); + l_definitionCS.setStart("X-Security-Token"); + l_definitionCS.setEnd("soapaction:"); + l_definitionCS.setCaseSensitive(true); - assertThat("We should have the correct start position", - l_verbDefinition2.fetchEndPosition(l_apacheLogString), is(equalTo(86))); + assertThat("An end position should have been found", l_definitionCI.fetchEndPosition(logString), + is(greaterThan(0))); - assertThat("We should have -1 for the start position", l_verbDefinition2.fetchStartPosition(" "), - is(equalTo(-1))); + assertThat("We should have the correct start position", l_definitionCS.fetchEndPosition(logString), + is(equalTo(l_definitionCI.fetchEndPosition(logString)))); + } - } + @Test + public void testEndPositionCaseInsensitive_searchStringDifferentCase() { + String logString = "HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|"; - @Test - public void testEndPositionCaseInsensitive() { - String logString = "+69802:5ee7914c:15|POST /nl/jsp/soaprouter.jsp HTTP/1.1|X-Security-Token:@tTD6JQ5HcTcxBqbuE1SqEpNNsMbnjOCaV_Kv5ern7fvljTyUK71i9TWy5d6HBrFgCxfzCWt5OkJcJ_BfmC8mfw==|soapaction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|Content-Type:text/plain; charset=ISO-8859-1|Host:rd-dev54.rd.campaign.adobe.com|Connection:Keep-Alive|User-Agent:Apache-HttpClient/4.5.2 
(Java/1.8.0_171)|Cookie:__sessiontoken=___6fffa340-b973-4e90-a965-4ae88e713c11|Accept-Encoding:gzip,deflate"; + // Create a parse definition + ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); + l_definitionCI.setTitle("path"); + l_definitionCI.setStart("HTTP/1.1|"); + l_definitionCI.setEnd("|Content-Length"); + l_definitionCI.setCaseSensitive(false); - l_definitionCI.setTitle("path"); - l_definitionCI.setStart("X-Security-Token"); - l_definitionCI.setEnd("soapAction:"); + // Create a parse definition + ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); - l_definitionCI.setCaseSensitive(false); + l_definitionCS.setTitle("path"); + l_definitionCS.setStart("HTTP/1.1|"); + l_definitionCS.setEnd("|Content-Length"); + l_definitionCS.setCaseSensitive(true); - //Create a parse definition - ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); + assertThat("An end position should have been found", l_definitionCI.fetchEndPosition(logString), + is(greaterThan(0))); - l_definitionCS.setTitle("path"); - l_definitionCS.setStart("X-Security-Token"); - l_definitionCS.setEnd("soapaction:"); - l_definitionCS.setCaseSensitive(true); + assertThat("We should have the correct start position", l_definitionCS.fetchEndPosition(logString), + is(equalTo(l_definitionCI.fetchEndPosition(logString)))); + } - assertThat("An end position should have been found", l_definitionCI.fetchEndPosition(logString), - is(greaterThan(0))); + @Test + public void testApplySensitivity() { + // Create a parse definition Case Insensitive + ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); - assertThat("We should have the correct start position", l_definitionCS.fetchEndPosition(logString), - is(equalTo(l_definitionCI.fetchEndPosition(logString)))); - } + l_definitionCI.setTitle("path"); + l_definitionCI.setStart("X-Security-Token"); - @Test - public void testEndPositionCaseInsensitive_searchStringDifferentCase() { - String logString = "HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|"; + final String l_targetString = "soapAction:"; + l_definitionCI.setEnd(l_targetString); - //Create a parse definition - ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); + l_definitionCI.setCaseSensitive(false); - l_definitionCI.setTitle("path"); - l_definitionCI.setStart("X-Security-Token"); - l_definitionCI.setEnd("soapAction:"); - l_definitionCI.setCaseSensitive(false); + // Create a parse definition Case Sensitive + ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); + l_definitionCS.setTitle("path"); + l_definitionCI.setStart("X-Security-Token"); + l_definitionCS.setEnd(l_targetString); + l_definitionCS.setCaseSensitive(true); - l_definitionCS.setTitle("path"); - l_definitionCS.setStart("X-Security-Token"); - l_definitionCS.setEnd("SOAPAction:"); - l_definitionCS.setCaseSensitive(true); + assertThat("The apply sensitivity for CaseInsensitive failed", + l_definitionCI.fetchAppliedSensitivity(l_definitionCI.getEnd()), + is(equalTo(l_targetString.toLowerCase()))); + } - assertThat("An end position should have been found", l_definitionCI.fetchEndPosition(logString), - is(greaterThan(0))); + @Test + public void testNextString() { + String l_apacheLogString = "2330:DEBUG | 2020-04-03 17:46:38 | [main] 
core.NextTests (NextTests.java:209) - Before driver instantiation"; - assertThat("We should have the correct start position", l_definitionCS.fetchEndPosition(logString), - is(equalTo(l_definitionCI.fetchEndPosition(logString)))); - } + // Create a parse definition + ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); - @Test - public void testApplySensitivity() { - //Create a parse definition Case Insensitive - ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); + l_fileDefinition.setTitle("sourceFile"); + l_fileDefinition.setStart("("); + l_fileDefinition.setEnd(":"); - l_definitionCI.setTitle("path"); - l_definitionCI.setStart("X-Security-Token"); + assertThat("Testing that we can correctly fetch the following substring", + l_fileDefinition.fetchFollowingSubstring(l_apacheLogString), + is(equalTo(":209) - Before driver instantiation"))); - final String l_targetString = "soapAction:"; - l_definitionCI.setEnd(l_targetString); + } - l_definitionCI.setCaseSensitive(false); + @Test + public void testNextStringEOL() { + String l_apacheLogString = "(NextTests.java:209) - Before driver instantiation"; - //Create a parse definition Case Sensitive - ParseDefinitionEntry l_definitionCS = new ParseDefinitionEntry(); + // Create a parse definition + ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); - l_definitionCS.setTitle("path"); - l_definitionCI.setStart("X-Security-Token"); - l_definitionCS.setEnd(l_targetString); - l_definitionCS.setCaseSensitive(true); + l_fileDefinition.setTitle("sourceFile"); + l_fileDefinition.setStart(" - "); + l_fileDefinition.setEnd(null); - assertThat("The apply sensitivity for CaseInsensitive failed", - l_definitionCI.fetchAppliedSensitivity(l_definitionCI.getEnd()), - is(equalTo(l_targetString.toLowerCase()))); - } + assertThat("We shuld return the end of the string", + l_fileDefinition.fetchFollowingSubstring(l_apacheLogString), + is(equalTo(""))); - @Test - public void testNextString() { - String l_apacheLogString = "2330:DEBUG | 2020-04-03 17:46:38 | [main] core.NextTests (NextTests.java:209) - Before driver instantiation"; + } - //Create a parse definition - ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); + @Test(description = "This test should fail because no end string can ve found") + public void testNextStringNegative() { + String l_apacheLogString = "2330:DEBUG | 2020-"; - l_fileDefinition.setTitle("sourceFile"); - l_fileDefinition.setStart("("); - l_fileDefinition.setEnd(":"); + // Create a parse definition + ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); - assertThat("Testing that we can correctly fetch the following substring", - l_fileDefinition.fetchFollowingSubstring(l_apacheLogString), - is(equalTo(":209) - Before driver instantiation"))); + l_fileDefinition.setTitle("sourceFile"); + l_fileDefinition.setStart(":"); + l_fileDefinition.setEnd("9"); - } + assertThrows(IllegalArgumentException.class, + () -> l_fileDefinition.fetchFollowingSubstring(l_apacheLogString)); - @Test - public void testNextStringEOL() { - String l_apacheLogString = "(NextTests.java:209) - Before driver instantiation"; + } - //Create a parse definition - ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); + @Test(description = "Fixing bug where the patterns could be used elsewhere in the line") + public void testFetchValueDuplicatedStartEndPatterns() throws StringParseException { + String l_resultString = "2330:DEBUG | 2020-04-03 17:46:38 | [main] core.NextTests 
(NextTests.java:209) - Before driver instantiation"; - l_fileDefinition.setTitle("sourceFile"); - l_fileDefinition.setStart(" - "); - l_fileDefinition.setEnd(null); + // Create a parse definition - assertThat("We shuld return the end of the string", - l_fileDefinition.fetchFollowingSubstring(l_apacheLogString), - is(equalTo(""))); + ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); - } + l_fileDefinition.setTitle("sourceFile"); + l_fileDefinition.setStart("("); + l_fileDefinition.setEnd(":"); - @Test(description = "This test should fail because no end string can ve found") - public void testNextStringNegative() { - String l_apacheLogString = "2330:DEBUG | 2020-"; + ParseDefinitionEntry l_fileLineDefinition = new ParseDefinitionEntry(); - //Create a parse definition - ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); + l_fileLineDefinition.setTitle("sourceFileLine"); + l_fileLineDefinition.setStart(":"); + l_fileLineDefinition.setEnd(")"); - l_fileDefinition.setTitle("sourceFile"); - l_fileDefinition.setStart(":"); - l_fileDefinition.setEnd("9"); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_fileDefinition); + l_definitionList.add(l_fileLineDefinition); + Map l_result = StringParseFactory.parseString(l_resultString, l_definitionList); - assertThrows(IllegalArgumentException.class, - () -> l_fileDefinition.fetchFollowingSubstring(l_apacheLogString)); + assertThat("We should have a result", l_result, is(notNullValue())); - } + assertThat("We should have two entries", l_result.size(), is(equalTo(2))); - @Test(description = "Fixing bug where the patterns could be used elsewhere in the line") - public void testFetchValueDuplicatedStartEndPatterns() throws StringParseException { - String l_resultString = "2330:DEBUG | 2020-04-03 17:46:38 | [main] core.NextTests (NextTests.java:209) - Before driver instantiation"; + assertThat("We should have an entry for sourceFile", l_result.containsKey("sourceFile")); + assertThat("We should have the correct value for sourceFile", l_result.get("sourceFile"), + is(equalTo("NextTests.java"))); - //Create a parse definition + assertThat("We should have an entry for sourceFileLine", l_result.containsKey("sourceFileLine")); + assertThat("We should have the correct value for sourceFileLine", l_result.get("sourceFileLine"), + is(equalTo("209"))); - ParseDefinitionEntry l_fileDefinition = new ParseDefinitionEntry(); + } - l_fileDefinition.setTitle("sourceFile"); - l_fileDefinition.setStart("("); - l_fileDefinition.setEnd(":"); + ///////////////////////// RESULT PUBLISHER ///////////////////////// - ParseDefinitionEntry l_fileLineDefinition = new ParseDefinitionEntry(); + @Test + public void testIncrementation() { + GenericEntry l_entry = new GenericEntry(); - l_fileLineDefinition.setTitle("sourceFileLine"); - l_fileLineDefinition.setStart(":"); - l_fileLineDefinition.setEnd(")"); + assertThat("Initial value of occurences should be 1", l_entry.getFrequence(), is(equalTo(1))); + l_entry.incrementUsage(); + assertThat(l_entry.getFrequence(), is(equalTo(2))); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_fileDefinition); - l_definitionList.add(l_fileLineDefinition); - Map l_result = StringParseFactory.parseString(l_resultString, l_definitionList); + l_entry.addFrequence(3); + assertThat(l_entry.getFrequence(), is(equalTo(5))); + } - assertThat("We should have a result", l_result, is(notNullValue())); + @Test + public void testProblemContentsWithStringsAroundDefault() throws 
StringParseException { + String logString = "SOAPAction:\"xtk%3aworkflow\"#DeleteResult|Content-Length:"; - assertThat("We should have two entries", l_result.size(), is(equalTo(2))); + // Create a parse definition + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - assertThat("We should have an entry for sourceFile", l_result.containsKey("sourceFile")); - assertThat("We should have the correct value for sourceFile", l_result.get("sourceFile"), - is(equalTo("NextTests.java"))); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart("soapaction:"); + l_apiDefinition.setEnd("#"); + l_apiDefinition.setCaseSensitive(false); - assertThat("We should have an entry for sourceFileLine", l_result.containsKey("sourceFileLine")); - assertThat("We should have the correct value for sourceFileLine", l_result.get("sourceFileLine"), - is(equalTo("209"))); + assertThat("We should be default remove trailing quotes", !l_apiDefinition.isTrimQuotes()); - } + assertThat("", l_apiDefinition.fetchStartPosition(logString), is(equalTo(11))); - ///////////////////////// RESULT PUBLISHER ///////////////////////// + assertThat("", l_apiDefinition.fetchEndPosition(logString), is(equalTo(27))); - @Test - public void testIncrementation() { - GenericEntry l_entry = new GenericEntry(); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_apiDefinition); - assertThat("Initial value of occurences should be 1", l_entry.getFrequence(), is(equalTo(1))); - l_entry.incrementUsage(); - assertThat(l_entry.getFrequence(), is(equalTo(2))); + assertThat("The String should be compatible", + StringParseFactory.isStringCompliant(logString, l_definitionList)); - l_entry.addFrequence(3); - assertThat(l_entry.getFrequence(), is(equalTo(5))); - } + assertThat("We should have found the correct path", + StringParseFactory.fetchValue(logString, l_apiDefinition), + is(equalTo("\"xtk%3aworkflow\""))); - @Test - public void testProblemContentsWithStringsAroundDefault() throws StringParseException { - String logString = "SOAPAction:\"xtk%3aworkflow\"#DeleteResult|Content-Length:"; + Map l_entries = StringParseFactory.parseString(logString, l_definitionList); - //Create a parse definition - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + assertThat("We should have the correct value for api", l_entries.get("path"), + is(equalTo("\"xtk%3aworkflow\""))); + } - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart("soapaction:"); - l_apiDefinition.setEnd("#"); - l_apiDefinition.setCaseSensitive(false); + @Test + public void testProblemContentsWithStringsAroundTrimmed() throws StringParseException { + String logString = "SOAPAction:\"xtk%3aworkflow\"#DeleteResult|Content-Length:"; - assertThat("We should be default remove trailing quotes", !l_apiDefinition.isTrimQuotes()); + // Create a parse definition + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - assertThat("", l_apiDefinition.fetchStartPosition(logString), is(equalTo(11))); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart("soapaction:"); + l_apiDefinition.setEnd("#"); + l_apiDefinition.setCaseSensitive(false); - assertThat("", l_apiDefinition.fetchEndPosition(logString), is(equalTo(27))); + l_apiDefinition.setTrimQuotes(true); + assertThat("We should be default remove trailing quotes", l_apiDefinition.isTrimQuotes()); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_apiDefinition); + assertThat("", l_apiDefinition.fetchStartPosition(logString), is(equalTo(12))); - 
assertThat("The String should be compatible", - StringParseFactory.isStringCompliant(logString, l_definitionList)); + assertThat("", l_apiDefinition.fetchEndPosition(logString), is(equalTo(26))); - assertThat("We should have found the correct path", - StringParseFactory.fetchValue(logString, l_apiDefinition), is(equalTo("\"xtk%3aworkflow\""))); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_apiDefinition); - Map l_entries = StringParseFactory.parseString(logString, l_definitionList); + assertThat("The String should be compatible", + StringParseFactory.isStringCompliant(logString, l_definitionList)); - assertThat("We should have the correct value for api", l_entries.get("path"), - is(equalTo("\"xtk%3aworkflow\""))); - } + assertThat("We should have found the correct path", + StringParseFactory.fetchValue(logString, l_apiDefinition), + is(equalTo("xtk%3aworkflow"))); - @Test - public void testProblemContentsWithStringsAroundTrimmed() throws StringParseException { - String logString = "SOAPAction:\"xtk%3aworkflow\"#DeleteResult|Content-Length:"; + Map l_entries = StringParseFactory.parseString(logString, l_definitionList); - //Create a parse definition - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + assertThat("We should have the correct value for api", l_entries.get("path"), + is(equalTo("xtk%3aworkflow"))); + } - l_apiDefinition.setTitle("path"); - l_apiDefinition.setStart("soapaction:"); - l_apiDefinition.setEnd("#"); - l_apiDefinition.setCaseSensitive(false); + @Test + public void testProblemTrimming() { + String logString = "DEBUG | 2020-06-18 16:06:44 | [main] parser.StringParseFactory (StringParseFactory.java:47) - 46472 - +29043:5eeae354:3a5b|POST /nl/jsp/soaprouter.jsp HTTP/1.1|soapaction:atdvhwetsjlcafl|Content-Length:384|Content-Type:text/plain; charset=ISO-8859-1|Host:rd-dev59.rd.campaign.adobe.com|Connection:Keep-Alive|User-Agent:Apache-HttpClient/4.5.2 (Java/1.8.0_171)|Accept-Encoding:gzip,deflate"; - l_apiDefinition.setTrimQuotes(true); - assertThat("We should be default remove trailing quotes", l_apiDefinition.isTrimQuotes()); + // Create a parse definition + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); - assertThat("", l_apiDefinition.fetchStartPosition(logString), is(equalTo(12))); + l_apiDefinition.setTitle("path"); + l_apiDefinition.setStart("soapaction:"); + l_apiDefinition.setEnd("#"); + l_apiDefinition.setCaseSensitive(false); - assertThat("", l_apiDefinition.fetchEndPosition(logString), is(equalTo(26))); + l_apiDefinition.setTrimQuotes(true); - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_apiDefinition); + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_apiDefinition); - assertThat("The String should be compatible", - StringParseFactory.isStringCompliant(logString, l_definitionList)); + assertThat("The String should not be compatible", + !StringParseFactory.isStringCompliant(logString, l_definitionList)); - assertThat("We should have found the correct path", - StringParseFactory.fetchValue(logString, l_apiDefinition), is(equalTo("xtk%3aworkflow"))); + } - Map l_entries = StringParseFactory.parseString(logString, l_definitionList); + @Test + public void testToPreserve() throws StringParseException { - assertThat("We should have the correct value for api", l_entries.get("path"), - is(equalTo("xtk%3aworkflow"))); - } + ParseDefinitionEntry l_lineFinder = new ParseDefinitionEntry(); - @Test - public void testProblemTrimming() { - String logString = "DEBUG | 
2020-06-18 16:06:44 | [main] parser.StringParseFactory (StringParseFactory.java:47) - 46472 - +29043:5eeae354:3a5b|POST /nl/jsp/soaprouter.jsp HTTP/1.1|soapaction:atdvhwetsjlcafl|Content-Length:384|Content-Type:text/plain; charset=ISO-8859-1|Host:rd-dev59.rd.campaign.adobe.com|Connection:Keep-Alive|User-Agent:Apache-HttpClient/4.5.2 (Java/1.8.0_171)|Accept-Encoding:gzip,deflate"; + l_lineFinder.setTitle("lineBeginning"); + l_lineFinder.setStart(" l_definitionList = new ArrayList<>(); - l_definitionList.add(l_apiDefinition); + ParseDefinitionEntry l_srcSchema = new ParseDefinitionEntry(); - assertThat("The String should not be compatible", - !StringParseFactory.isStringCompliant(logString, l_definitionList)); + l_srcSchema.setTitle("path"); + l_srcSchema.setStart("service=\""); + l_srcSchema.setEnd("\""); + l_srcSchema.setCaseSensitive(false); - } + List l_definitionList = new ArrayList<>(); + l_definitionList.add(l_lineFinder); + l_definitionList.add(l_verb); + l_definitionList.add(l_srcSchema); - @Test - public void testToPreserve() throws StringParseException { + // Check that the definition is correct + String l_line = " "; + Map l_parseResult = StringParseFactory.parseString(l_line, l_definitionList); + assertThat("The given log should be compliant", + StringParseFactory.isStringCompliant(l_line, l_definitionList)); + assertThat("We should have values", l_parseResult.size(), is(equalTo(3))); + } - ParseDefinitionEntry l_lineFinder = new ParseDefinitionEntry(); + @Test(description = "Related to issue #102, where the parsing stops or no reason") + public void testFileInterruption() + throws StringParseException { - l_lineFinder.setTitle("lineBeginning"); - l_lineFinder.setStart(" l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(bugFile), l_parseDefinition, GenericEntry.class); - assertThat("We would by default be preserving entries", l_verb.isToPreserve()); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(19))); - ParseDefinitionEntry l_srcSchema = new ParseDefinitionEntry(); + } - l_srcSchema.setTitle("path"); - l_srcSchema.setStart("service=\""); - l_srcSchema.setEnd("\""); - l_srcSchema.setCaseSensitive(false); + @Test(description = "Related to issue #103, where the parsing stops or no reason") + public void testSkipOnLimit_Issue103() + throws StringParseException { - List l_definitionList = new ArrayList<>(); - l_definitionList.add(l_lineFinder); - l_definitionList.add(l_verb); - l_definitionList.add(l_srcSchema); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("Finding a specific line"); + l_apiDefinition.setStart(" "); + l_apiDefinition.setEnd(null); + ParseDefinition l_parseDefinition = new ParseDefinition("Post Upgrade Logs"); + l_parseDefinition.addEntry(l_apiDefinition); - //Check that the definition is correct - String l_line = " "; - Map l_parseResult = StringParseFactory.parseString(l_line, l_definitionList); - assertThat("The given log should be compliant", - StringParseFactory.isStringCompliant(l_line, l_definitionList)); - assertThat("We should have values", l_parseResult.size(), is(equalTo(3))); - } + final String bugFile = "src/test/resources/bugs/issue102_charSetBadUTFChar.log"; - @Test(description = "Related to issue #102, where the parsing stops or no reason") - public void testFileInterruption() - throws StringParseException { + ParseGuardRails.FILE_ENTRY_LIMIT = 10; - ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); 
- l_apiDefinition.setTitle("Finding a specific line"); - l_apiDefinition.setStart(" "); - l_apiDefinition.setEnd(null); - ParseDefinition l_parseDefinition = new ParseDefinition("Post Upgrade Logs"); - l_parseDefinition.addEntry(l_apiDefinition); + assertThat("We should have the correct value for the file entry limit", + ParseGuardRails.FILE_ENTRY_LIMIT, is(equalTo(10))); - final String bugFile = "src/test/resources/bugs/issue102_charSetBadUTFChar.log"; + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(bugFile), l_parseDefinition, GenericEntry.class); - Map l_entries = StringParseFactory - .extractLogEntryMap(Arrays.asList(bugFile), l_parseDefinition, GenericEntry.class); + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), Matchers.equalTo(10)); - assertThat(l_entries, is(notNullValue())); - assertThat("We should have entries", l_entries.size(), is(greaterThan(19))); + assertThat("We should have an entry for the file size", + ParseGuardRails.fileSizeLimitations.containsKey(bugFile), is(true)); - } + assertThat("We should have an entry for the file size", + ParseGuardRails.fileSizeLimitations.get(bugFile), + is(equalTo(new File(bugFile).length()))); + } - @Test - public void testReplacementEquals() { - String l_candidateString = "string 1 and string 2"; + @Test(description = "Related to issue #102, where the parsing stops or no reason") + public void testFileInterruption2() + throws StringParseException { - assertThat("Both strings should be equal", - StringParseFactory.stringsCorrespond(l_candidateString, l_candidateString)); + ParseDefinitionEntry l_apiDefinition = new ParseDefinitionEntry(); + l_apiDefinition.setTitle("Finding a specific line"); + l_apiDefinition.setStart(" "); + l_apiDefinition.setEnd(null); + ParseDefinition l_parseDefinition = new ParseDefinition("Post Upgrade Logs"); + l_parseDefinition.addEntry(l_apiDefinition); - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_candidateString,l_candidateString), Matchers.equalTo(l_candidateString)); + final String bugFile = "src/test/resources/bugs/issue102_charSetBadUTFChar.log"; - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_candidateString, l_candidateString), - Matchers.equalTo(l_candidateString)); - } + Map l_entries = StringParseFactory + .extractLogEntryMap(Arrays.asList(bugFile), l_parseDefinition, GenericEntry.class); - @Test - public void testReplacementEquals_negative() { - String l_templateString = "string 1 and string 2"; - String l_candidateString = "something completely different"; + assertThat(l_entries, is(notNullValue())); + assertThat("We should have entries", l_entries.size(), is(greaterThan(19))); + } - assertThat("Both strings should be equal", - !StringParseFactory.stringsCorrespond(l_templateString, l_candidateString)); + @Test + public void testReplacementEquals() { + String l_candidateString = "string 1 and string 2"; - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_templateString, l_candidateString), - Matchers.equalTo(l_candidateString)); - } + assertThat("Both strings should be equal", + StringParseFactory.stringsCorrespond(l_candidateString, l_candidateString)); - @Test - public void testReplacementCorresponds_1part() { - String l_storedString = "string {} and string 2"; + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_candidateString,l_candidateString), + // 
Matchers.equalTo(l_candidateString)); - String l_candidateString = "string 1 and string 2"; + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_candidateString, l_candidateString), + Matchers.equalTo(l_candidateString)); + } - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + @Test + public void testReplacementEquals_negative() { + String l_templateString = "string 1 and string 2"; + String l_candidateString = "something completely different"; - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), Matchers.equalTo(l_storedString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_storedString)); - } + assertThat("Both strings should be equal", + !StringParseFactory.stringsCorrespond(l_templateString, l_candidateString)); - @Test - public void testReplacementCorresponds_1part_issueWithDot() { - String l_storedString = "Null domain corresponding to KLIP {}."; + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_templateString, l_candidateString), + Matchers.equalTo(l_candidateString)); + } - String l_candidateString = "DDD-123 Wrong configuration of remote redirection server. Please check the server configuration. (iRc=-55)"; + @Test + public void testReplacementCorresponds_1part() { + String l_storedString = "string {} and string 2"; - assertThat("Both strings should correspond", - !StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + String l_candidateString = "string 1 and string 2"; - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString2), Matchers.equalTo(l_storedString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_candidateString)); - } + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - @Test - public void testReplacementCorresponds_1part_withChars() { - String l_storedString = "string {} and string 2 and {value 8}"; + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), + // Matchers.equalTo(l_storedString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_storedString)); + } - String l_candidateString = "string 1 and string 2 and {value 8}"; + @Test + public void testReplacementCorresponds_1part_issueWithDot() { + String l_storedString = "Null domain corresponding to KLIP {}."; - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + String l_candidateString = "DDD-123 Wrong configuration of remote redirection server. Please check the server configuration. 
(iRc=-55)"; - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), Matchers.equalTo(l_storedString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_storedString)); - } + assertThat("Both strings should correspond", + !StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - @Test - public void testReplacementEquals_negative2() { - String l_templateString = "string {} and string 2 and {value 8}"; + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString2), + // Matchers.equalTo(l_storedString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_candidateString)); + } - String l_candidateString = "something quite different"; + @Test + public void testReplacementCorresponds_1part_withChars() { + String l_storedString = "string {} and string 2 and {value 8}"; + String l_candidateString = "string 1 and string 2 and {value 8}"; - assertThat("Both strings should be equal", - !StringParseFactory.stringsCorrespond(l_templateString, l_candidateString)); + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_templateString, l_candidateString), - Matchers.equalTo(l_candidateString)); - } + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), + // Matchers.equalTo(l_storedString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_storedString)); + } + @Test + public void testReplacementEquals_negative2() { + String l_templateString = "string {} and string 2 and {value 8}"; - @Test - public void testReplacementCorresponds_1part_withChars2() { - String l_storedString = "{\"error\":\"interaction_required\",\"error_description\":\"AADSTS50076: Due to a configuration change made by your administrator, or because you moved to a new location, you must use multi-factor authentication to access '00000007-0000-0000-c000-000000000000'. Trace ID: {} Correlation ID: {} Timestamp: {},\"error_codes\":[50076],\"timestamp\":{},\"trace_id\":{},\"correlation_id\":{},\"error_uri\":\"https://login.microsoftonline.com/error?code=50076\",\"suberror\":\"basic_action\"}"; + String l_candidateString = "something quite different"; - String l_candidateString = "{\"error\":\"interaction_required\",\"error_description\":\"AADSTS50076: Due to a configuration change made by your administrator, or because you moved to a new location, you must use multi-factor authentication to access '00000007-0000-0000-c000-000000000000'. 
Trace ID: 0f86bebe-f5cf-485d-a664-4768d945dd01 Correlation ID: 50c911f0-4c36-4988-b632-837e8c9afd0b Timestamp: 2024-06-26 08:32:18Z\",\"error_codes\":[50076],\"timestamp\":\"2024-06-26 08:32:18Z\",\"trace_id\":\"0f86bebe-f5cf-485d-a664-4768d945dd01\",\"correlation_id\":\"50c911f0-4c36-4988-b632-837e8c9afd0b\",\"error_uri\":\"https://login.microsoftonline.com/error?code=50076\",\"suberror\":\"basic_action\"}"; + assertThat("Both strings should be equal", + !StringParseFactory.stringsCorrespond(l_templateString, l_candidateString)); - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_templateString, l_candidateString), + Matchers.equalTo(l_candidateString)); + } - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), Matchers.equalTo(l_storedString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_storedString)); - } + @Test + public void testReplacementCorresponds_1part_withChars2() { + String l_storedString = "{\"error\":\"interaction_required\",\"error_description\":\"AADSTS50076: Due to a configuration change made by your administrator, or because you moved to a new location, you must use multi-factor authentication to access '00000007-0000-0000-c000-000000000000'. Trace ID: {} Correlation ID: {} Timestamp: {},\"error_codes\":[50076],\"timestamp\":{},\"trace_id\":{},\"correlation_id\":{},\"error_uri\":\"https://login.microsoftonline.com/error?code=50076\",\"suberror\":\"basic_action\"}"; - @Test - public void testReplacementCorresponds_1part_withChars2_negative() { - String l_storedString = "{\"error\":\"interaction_required\",\"error_description\":\"AADSTS50076: Due to a configuration change made by your administrator, or because you moved to a new location, you must use multi-factor authentication to access '00000007-0000-0000-c000-000000000000'. Trace ID: {} Correlation ID: {} Timestamp: {},\"error_codes\":[50076],\"timestamp\":{},\"trace_id\":{},\"correlation_id\":{},\"error_uri\":\"https://login.microsoftonline.com/error?code=50076\",\"suberror\":\"basic_action\"}"; + String l_candidateString = "{\"error\":\"interaction_required\",\"error_description\":\"AADSTS50076: Due to a configuration change made by your administrator, or because you moved to a new location, you must use multi-factor authentication to access '00000007-0000-0000-c000-000000000000'. 
Trace ID: 0f86bebe-f5cf-485d-a664-4768d945dd01 Correlation ID: 50c911f0-4c36-4988-b632-837e8c9afd0b Timestamp: 2024-06-26 08:32:18Z\",\"error_codes\":[50076],\"timestamp\":\"2024-06-26 08:32:18Z\",\"trace_id\":\"0f86bebe-f5cf-485d-a664-4768d945dd01\",\"correlation_id\":\"50c911f0-4c36-4988-b632-837e8c9afd0b\",\"error_uri\":\"https://login.microsoftonline.com/error?code=50076\",\"suberror\":\"basic_action\"}"; - String l_candidateString = "ODB-240000 ODBC error: SQL compilation error: error line 1 at position 7#012invalid identifier '$4' SQLState: 42000 (iRc=-2006)"; -/* - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), + // Matchers.equalTo(l_storedString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_storedString)); + } - */ - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), Matchers.equalTo(l_storedString)); - assertThat("we should get the candidate value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_candidateString)); - } + @Test + public void testReplacementCorresponds_1part_withChars2_negative() { + String l_storedString = "{\"error\":\"interaction_required\",\"error_description\":\"AADSTS50076: Due to a configuration change made by your administrator, or because you moved to a new location, you must use multi-factor authentication to access '00000007-0000-0000-c000-000000000000'. 
Trace ID: {} Correlation ID: {} Timestamp: {},\"error_codes\":[50076],\"timestamp\":{},\"trace_id\":{},\"correlation_id\":{},\"error_uri\":\"https://login.microsoftonline.com/error?code=50076\",\"suberror\":\"basic_action\"}"; + + String l_candidateString = "ODB-240000 ODBC error: SQL compilation error: error line 1 at position 7#012invalid identifier '$4' SQLState: 42000 (iRc=-2006)"; + /* + * assertThat("Both strings should correspond", + * StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + * + * + */ + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), + // Matchers.equalTo(l_storedString)); + assertThat("we should get the candidate value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_candidateString)); + } - @Test - public void testReplacementCorrespondsDoNotReplace_1part() { - String l_storedString = "string [] and string 2"; + @Test + public void testReplacementCorrespondsDoNotReplace_1part() { + String l_storedString = "string [] and string 2"; - String l_candidateString = "string 1 and string 2"; + String l_candidateString = "string 1 and string 2"; - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), Matchers.equalTo(l_candidateString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_candidateString)); + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), + // Matchers.equalTo(l_candidateString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_candidateString)); - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - } + } - @Test - public void testReplacementCorresponds_2parts() { - String l_storedString = "string {} and string {}"; + @Test + public void testReplacementCorresponds_2parts() { + String l_storedString = "string {} and string {}"; - String l_candidateString = "string 1 and string 2"; + String l_candidateString = "string 1 and string 2"; - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - //assertThat("we should get the same value", StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), Matchers.equalTo(l_storedString)); - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_storedString)); + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + // assertThat("we should get the same value", + // StringParseFactory.fetchCorresponding(l_storedString,l_candidateString), + // Matchers.equalTo(l_storedString)); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_storedString)); - String l_storedString2 = "string {} and string {} and"; + String l_storedString2 = "string {} and string {} and"; - String l_candidateString2 = "string 1 and string 2 and many more"; + String l_candidateString2 = "string 1 
and string 2 and many more"; - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString2, l_candidateString2)); - } + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString2, l_candidateString2)); + } - @Test - public void testReplacementCorrespondsDoNotReplace_2partsSame() { - String l_storedString = "string [] and string []"; + @Test + public void testReplacementCorrespondsDoNotReplace_2partsSame() { + String l_storedString = "string [] and string []"; - String l_candidateString = "string 1 and string 2 jkjkj"; + String l_candidateString = "string 1 and string 2 jkjkj"; - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo(l_candidateString)); - } + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_candidateString)); + } - @Test - public void testReplacementCorrespondsDoNotReplace_2differentTypes_1() { - String l_storedString = "string [] and string {}"; + @Test + public void testReplacementCorrespondsDoNotReplace_2differentTypes_1() { + String l_storedString = "string [] and string {}"; - String l_candidateString = "string 1 and string 2"; + String l_candidateString = "string 1 and string 2"; - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo("string 1 and string {}")); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo("string 1 and string {}")); - assertThat("The strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - } + assertThat("The strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + } - @Test - public void testReplacementCorrespondsDoNotReplace_2differentTypes_2() { - String l_storedString = "string [] and string {}"; + @Test + public void testReplacementCorrespondsDoNotReplace_2differentTypes_2() { + String l_storedString = "string [] and string {}"; - String l_candidateString = "string 1 and string 2 and many more"; + String l_candidateString = "string 1 and string 2 and many more"; - assertThat("we should get the same value", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), - Matchers.equalTo("string 1 and string {}")); + assertThat("we should get the same value", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo("string 1 and string {}")); - assertThat("The strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - } + assertThat("The strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + } - @Test - public void testReplacementCorresponds_empty() { - String l_storedString = "string {} and string {}"; + @Test + public void testReplacementCorresponds_empty() { + String l_storedString = "string {} and string {}"; - String l_candidateString = ""; + String l_candidateString = ""; - assertThat("Both strings should NOT correspond", - !StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - } + assertThat("Both strings should NOT correspond", + !StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + } - @Test - public void 
testReplacementCorresponds_universal() { - //This is quite useless - String l_storedString = "{}"; + @Test + public void testReplacementCorresponds_universal() { + // This is quite useless + String l_storedString = "{}"; - String l_candidateString = "lmvcxmkvcxmlkvcx"; + String l_candidateString = "lmvcxmkvcxmlkvcx"; - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - assertThat("Return string should be the template", - StringParseFactory.anonymizeString(l_storedString, l_candidateString), Matchers.equalTo(l_storedString)); - } + assertThat("Return string should be the template", + StringParseFactory.anonymizeString(l_storedString, l_candidateString), + Matchers.equalTo(l_storedString)); + } - @Test - public void testReplacementCorresponds_compareStartAndEnd() { - //This is quite useless - String l_storedString = "{} abs"; + @Test + public void testReplacementCorresponds_compareStartAndEnd() { + // This is quite useless + String l_storedString = "{} abs"; - String l_candidateString = "lmvcxmkv abs"; + String l_candidateString = "lmvcxmkv abs"; - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedString, l_candidateString)); - String l_storedStringEnd = "lmvcxmkv {}"; + String l_storedStringEnd = "lmvcxmkv {}"; - String l_candidateStringEnd = "lmvcxmkv abs"; + String l_candidateStringEnd = "lmvcxmkv abs"; - assertThat("Both strings should correspond", - StringParseFactory.stringsCorrespond(l_storedStringEnd, l_candidateStringEnd)); - } + assertThat("Both strings should correspond", + StringParseFactory.stringsCorrespond(l_storedStringEnd, l_candidateStringEnd)); + } - @Test - public void testAnonymization() throws StringParseException { - String logString = "HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|"; + @Test + public void testAnonymization() throws StringParseException { + String logString = "HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|"; - ParseDefinition pd = new ParseDefinition("Anonymization"); - //Create a parse definition - ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); + ParseDefinition pd = new ParseDefinition("Anonymization"); + // Create a parse definition + ParseDefinitionEntry l_definitionCI = new ParseDefinitionEntry(); - l_definitionCI.setTitle("path"); - l_definitionCI.setStart("HTTP/1.1|"); - l_definitionCI.setEnd("|Content-Length"); - l_definitionCI.setCaseSensitive(false); - l_definitionCI.addAnonymizer("X-Security-Token:{}|SOAPAction:[]"); - pd.addEntry(l_definitionCI); + l_definitionCI.setTitle("path"); + l_definitionCI.setStart("HTTP/1.1|"); + l_definitionCI.setEnd("|Content-Length"); + l_definitionCI.setCaseSensitive(false); + l_definitionCI.addAnonymizer("X-Security-Token:{}|SOAPAction:[]"); + pd.addEntry(l_definitionCI); - assertThat("before parsing the anonization will not work in its current format", - StringParseFactory.anonymizeString("X-Security-Token:{}|SOAPAction:[]", logString), - Matchers.not(Matchers.equalTo(logString))); + assertThat("before parsing the anonization will not work in its current format", + 
StringParseFactory.anonymizeString("X-Security-Token:{}|SOAPAction:[]", logString), + Matchers.not(Matchers.equalTo(logString))); - Map l_entries = StringParseFactory.parseString(logString, pd); + Map l_entries = StringParseFactory.parseString(logString, pd); - assertThat(l_entries.values().stream().findFirst().get(), is(notNullValue())); - assertThat(l_entries.values().stream().findFirst().get(), - Matchers.equalTo("X-Security-Token:{}|SOAPAction:xtk%3aqueryDef#ExecuteQuery")); - ParseDefinitionFactory.exportParseDefinitionToJSON(pd, - "src/test/resources/parseDefinitions/anonymization.json"); - } + assertThat(l_entries.values().stream().findFirst().get(), is(notNullValue())); + assertThat(l_entries.values().stream().findFirst().get(), + Matchers.equalTo("X-Security-Token:{}|SOAPAction:xtk%3aqueryDef#ExecuteQuery")); + ParseDefinitionFactory.exportParseDefinitionToJSON(pd, + "src/test/resources/parseDefinitions/anonymization.json"); + } - @Test - public void testAnonymizationImported() throws StringParseException { - String logString = "HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|"; + @Test + public void testAnonymizationImported() throws StringParseException { + String logString = "HTTP/1.1|X-Security-Token:@tTD6JQ5HcTfzCWt5OkJcJ_BfmC8mfw==|SOAPAction:xtk%3aqueryDef#ExecuteQuery|Content-Length:591|"; - ParseDefinition pd = ParseDefinitionFactory.importParseDefinition( - "src/test/resources/parseDefinitions/anonymization.json"); + ParseDefinition pd = ParseDefinitionFactory.importParseDefinition( + "src/test/resources/parseDefinitions/anonymization.json"); - Map l_entries = StringParseFactory.parseString(logString, pd); + Map l_entries = StringParseFactory.parseString(logString, pd); - assertThat(l_entries.values().stream().findFirst().get(), is(notNullValue())); - assertThat(l_entries.values().stream().findFirst().get(), - Matchers.equalTo("X-Security-Token:{}|SOAPAction:xtk%3aqueryDef#ExecuteQuery")); - } + assertThat(l_entries.values().stream().findFirst().get(), is(notNullValue())); + assertThat(l_entries.values().stream().findFirst().get(), + Matchers.equalTo("X-Security-Token:{}|SOAPAction:xtk%3aqueryDef#ExecuteQuery")); + } } From 08228763c5c3262e25c2d5e5f53d6acf43663345 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:06:09 +0200 Subject: [PATCH 03/26] changing command --- .github/workflows/onPushSimpleTest.yml | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 9a32bed..1040529 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -4,7 +4,7 @@ # © Copyright 2020 Adobe. All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -# +# # The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 
# # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. @@ -16,9 +16,9 @@ name: unit-tests on: push: branches: - - '*' + - "*" pull_request: - branches: [ main ] + branches: [main] jobs: build: @@ -32,7 +32,7 @@ jobs: java-version: 11 distribution: temurin - name: Build with Maven - run: mvn --batch-mode clean test + run: mvn clean test env: OSSRH_ARTIFACTORY_USER: ${{ secrets.OSSRH_ARTIFACTORY_USER }} OSSRH_ARTIFACTORY_API_TOKEN: ${{ secrets.OSSRH_ARTIFACTORY_API_TOKEN }} @@ -53,4 +53,4 @@ jobs: uses: actions/upload-artifact@v4 with: name: jacoco-report - path: target/site/jacoco/ \ No newline at end of file + path: target/site/jacoco/ From b8af2293bd7ece0fef52c890be1c579384b92128 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:10:29 +0200 Subject: [PATCH 04/26] fixing config --- .mvn/maven.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.mvn/maven.config b/.mvn/maven.config index 6857f7b..8825d74 100644 --- a/.mvn/maven.config +++ b/.mvn/maven.config @@ -1 +1 @@ ---settings .mvn/settings.xml \ No newline at end of file +-s .mvn/settings.xml \ No newline at end of file From f7fdd6943e524212eb96ef47f2bedd186f1cf24d Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:11:24 +0200 Subject: [PATCH 05/26] fixing config --- .github/workflows/onPushSimpleTest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 1040529..78bdbe9 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -32,7 +32,7 @@ jobs: java-version: 11 distribution: temurin - name: Build with Maven - run: mvn clean test + run: mvn --batch-mode clean test env: OSSRH_ARTIFACTORY_USER: ${{ secrets.OSSRH_ARTIFACTORY_USER }} OSSRH_ARTIFACTORY_API_TOKEN: ${{ secrets.OSSRH_ARTIFACTORY_API_TOKEN }} From 23b7ebb58658cc47ca196c6300ea1189e37155f4 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:13:18 +0200 Subject: [PATCH 06/26] fixing config --- .mvn/maven.config | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.mvn/maven.config b/.mvn/maven.config index 8825d74..56cdbf9 100644 --- a/.mvn/maven.config +++ b/.mvn/maven.config @@ -1 +1 @@ --s .mvn/settings.xml \ No newline at end of file +-s ../.mvn/settings.xml \ No newline at end of file From dce07e38761e522b50f53e8be1ecbd7edc729ced Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:16:47 +0200 Subject: [PATCH 07/26] Fixing job --- .github/workflows/unit-tests.yml | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) create mode 100644 .github/workflows/unit-tests.yml diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml new file mode 100644 index 0000000..80b1005 --- /dev/null +++ b/.github/workflows/unit-tests.yml @@ -0,0 +1,23 @@ +name: Unit Tests2 + +on: + push: + branches: [main] + pull_request: + branches: [main] + +jobs: + test: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Set up JDK 11 + uses: actions/setup-java@v3 + with: + java-version: "11" + distribution: "temurin" 
+ + - name: Run Tests + run: mvn test From 637afb12f3e47d2ce6323f9cccb7582ac3e288b9 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:33:17 +0200 Subject: [PATCH 08/26] Fixing job --- .github/workflows/unit-tests.yml | 23 ----------------------- .mvn/maven.config | 2 +- 2 files changed, 1 insertion(+), 24 deletions(-) delete mode 100644 .github/workflows/unit-tests.yml diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml deleted file mode 100644 index 80b1005..0000000 --- a/.github/workflows/unit-tests.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Unit Tests2 - -on: - push: - branches: [main] - pull_request: - branches: [main] - -jobs: - test: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Set up JDK 11 - uses: actions/setup-java@v3 - with: - java-version: "11" - distribution: "temurin" - - - name: Run Tests - run: mvn test diff --git a/.mvn/maven.config b/.mvn/maven.config index 56cdbf9..a6940c4 100644 --- a/.mvn/maven.config +++ b/.mvn/maven.config @@ -1 +1 @@ --s ../.mvn/settings.xml \ No newline at end of file +-s ./.mvn/settings.xml \ No newline at end of file From 9925f1573eea55313f5a96e8660c4a2dd00d66e6 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:38:16 +0200 Subject: [PATCH 09/26] Fixing job --- .github/workflows/onPushSimpleTest.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 78bdbe9..018a4e4 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -33,6 +33,7 @@ jobs: distribution: temurin - name: Build with Maven run: mvn --batch-mode clean test + working-directory: ./ env: OSSRH_ARTIFACTORY_USER: ${{ secrets.OSSRH_ARTIFACTORY_USER }} OSSRH_ARTIFACTORY_API_TOKEN: ${{ secrets.OSSRH_ARTIFACTORY_API_TOKEN }} From c7eced66ef631af92334b47f3d98b2e46874c892 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:38:59 +0200 Subject: [PATCH 10/26] Fixing job --- .github/workflows/onPushSimpleTest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 018a4e4..785d1ae 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -33,7 +33,7 @@ jobs: distribution: temurin - name: Build with Maven run: mvn --batch-mode clean test - working-directory: ./ + working-directory: ../ env: OSSRH_ARTIFACTORY_USER: ${{ secrets.OSSRH_ARTIFACTORY_USER }} OSSRH_ARTIFACTORY_API_TOKEN: ${{ secrets.OSSRH_ARTIFACTORY_API_TOKEN }} From 48ea7b3d4f4d40ae2459db3c76bf01bf3d6f9a0c Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:40:38 +0200 Subject: [PATCH 11/26] Fixing job --- .github/workflows/onPushSimpleTest.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 785d1ae..626d250 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -31,9 +31,10 @@ jobs: with: java-version: 11 distribution: temurin + - name: list directory + run: ls -la - name: Build with Maven run: mvn --batch-mode clean test - working-directory: ../ env: OSSRH_ARTIFACTORY_USER: ${{ secrets.OSSRH_ARTIFACTORY_USER }} OSSRH_ARTIFACTORY_API_TOKEN: ${{ secrets.OSSRH_ARTIFACTORY_API_TOKEN }} From 539df93ec67a70a80578384799d32a664daeb717 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:41:37 +0200 
Subject: [PATCH 12/26] Fixing job --- .github/workflows/onPushSimpleTest.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 626d250..5afa11b 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -33,6 +33,8 @@ jobs: distribution: temurin - name: list directory run: ls -la + - name: show path + run: pwd - name: Build with Maven run: mvn --batch-mode clean test env: From 63b323a6b465e9b23e93d403c542ffeff45340a4 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 21:43:44 +0200 Subject: [PATCH 13/26] Fixing job --- .github/workflows/onPushSimpleTest.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 5afa11b..5151f9b 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -36,7 +36,7 @@ jobs: - name: show path run: pwd - name: Build with Maven - run: mvn --batch-mode clean test + run: mvn --batch-mode clean test --settings .mvn/settings.xml env: OSSRH_ARTIFACTORY_USER: ${{ secrets.OSSRH_ARTIFACTORY_USER }} OSSRH_ARTIFACTORY_API_TOKEN: ${{ secrets.OSSRH_ARTIFACTORY_API_TOKEN }} From bcff8d88e2a3a11b72fdeffe605d16159d25ba98 Mon Sep 17 00:00:00 2001 From: baubakg Date: Wed, 23 Apr 2025 22:40:26 +0200 Subject: [PATCH 14/26] Fixing job --- .github/workflows/codeql-analysis.yml | 58 +++++++++++++------------- .github/workflows/onPushSimpleTest.yml | 4 -- 2 files changed, 29 insertions(+), 33 deletions(-) diff --git a/.github/workflows/codeql-analysis.yml b/.github/workflows/codeql-analysis.yml index 65a0538..e755469 100644 --- a/.github/workflows/codeql-analysis.yml +++ b/.github/workflows/codeql-analysis.yml @@ -13,12 +13,12 @@ name: "CodeQL" on: push: - branches: [ main ] + branches: [main] pull_request: # The branches below must be a subset of the branches above - branches: [ main ] + branches: [main] schedule: - - cron: '38 3 * * 1' + - cron: "38 3 * * 1" jobs: analyze: @@ -32,39 +32,39 @@ jobs: strategy: fail-fast: false matrix: - language: [ 'java' ] + language: ["java"] # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] # Learn more about CodeQL language support at https://git.io/codeql-language-support steps: - - name: Checkout repository - uses: actions/checkout@v4 + - name: Checkout repository + uses: actions/checkout@v4 - # Initializes the CodeQL tools for scanning. - - name: Initialize CodeQL - uses: github/codeql-action/init@v3 - with: - languages: ${{ matrix.language }} - # If you wish to specify custom queries, you can do so here or in a config file. - # By default, queries listed here will override any specified in a config file. - # Prefix the list here with "+" to use these queries and those in the config file. - # queries: ./path/to/local/query, your-org/your-repo/queries@main + # Initializes the CodeQL tools for scanning. + - name: Initialize CodeQL + uses: github/codeql-action/init@v3 + with: + languages: ${{ matrix.language }} + # If you wish to specify custom queries, you can do so here or in a config file. + # By default, queries listed here will override any specified in a config file. + # Prefix the list here with "+" to use these queries and those in the config file. + # queries: ./path/to/local/query, your-org/your-repo/queries@main - # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 
- # If this step fails, then you should remove it and run the build manually (see below) - - name: Autobuild - uses: github/codeql-action/autobuild@v3 + # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). + # If this step fails, then you should remove it and run the build manually (see below) + #- name: Autobuild + # uses: github/codeql-action/autobuild@v3 - # ℹ️ Command-line programs to run using the OS shell. - # 📚 https://git.io/JvXDl + # ℹ️ Command-line programs to run using the OS shell. + # 📚 https://git.io/JvXDl - # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines - # and modify them (or add more) to build your code if your project - # uses a compiled language + # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines + # and modify them (or add more) to build your code if your project + # uses a compiled language - #- run: | - # make bootstrap - # make release + - run: | + mvn --batch-mode clean test --settings .mvn/settings.xml - - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + # make release + - name: Perform CodeQL Analysis + uses: github/codeql-action/analyze@v3 diff --git a/.github/workflows/onPushSimpleTest.yml b/.github/workflows/onPushSimpleTest.yml index 5151f9b..2c50066 100644 --- a/.github/workflows/onPushSimpleTest.yml +++ b/.github/workflows/onPushSimpleTest.yml @@ -31,10 +31,6 @@ jobs: with: java-version: 11 distribution: temurin - - name: list directory - run: ls -la - - name: show path - run: pwd - name: Build with Maven run: mvn --batch-mode clean test --settings .mvn/settings.xml env: From 481d7b14cd9fe6ff1c39fd99baf94b89d10ae1d1 Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 09:32:10 +0200 Subject: [PATCH 15/26] Adding tests for Gurad Rails --- .../tests/logparser/utils/MemoryUtils.java | 14 ++++ .../logparser/utils/ParseGuardRails.java | 62 +++++++++++++++++ .../logparser/utils/ParseGuardRailsTest.java | 67 +++++++++++++++++++ 3 files changed, 143 insertions(+) create mode 100644 src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java create mode 100644 src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java index 75e07ac..672590e 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/MemoryUtils.java @@ -21,4 +21,18 @@ public class MemoryUtils { public static long getCurrentHeapSizeMB() { return Runtime.getRuntime().totalMemory() / (1024 * 1024); } + + /** + * Gets the remaining system memory as a percentage + * + * @return the remaining system memory as a percentage (0-100) + */ + public static double getRemainingMemoryPercentage() { + Runtime runtime = Runtime.getRuntime(); + long maxMemory = runtime.maxMemory(); + long allocatedMemory = runtime.totalMemory(); + long freeMemory = runtime.freeMemory(); + long usedMemory = allocatedMemory - freeMemory; + return ((double) (maxMemory - usedMemory) / maxMemory) * 100; + } } \ No newline at end of file diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java new file mode 100644 index 0000000..d62fdf5 --- /dev/null +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java 
@@ -0,0 +1,62 @@ +/* + * Copyright 2022 Adobe + * All Rights Reserved. + * + * NOTICE: Adobe permits you to use, modify, and distribute this file in + * accordance with the terms of the Adobe license agreement accompanying + * it. + */ +package com.adobe.campaign.tests.logparser.utils; + +import java.util.HashMap; +import java.util.Map; + +/** + * Class to store guard rails for parsing operations + */ +public class ParseGuardRails { + public static final Map fileSizeLimitations = new HashMap<>(); + public static long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); + public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_FILEENTRY_LIMIT", "-1")); + public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_HEAP_LIMIT", "-1")); + public static double MEMORY_LIMIT_PERCENTAGE = Double + .parseDouble(System.getProperty("PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE", "-1")); + + public static void reset() { + fileSizeLimitations.clear(); + FILE_ENTRY_LIMIT = -1; + HEAP_LIMIT = -1; + MEMORY_LIMIT_PERCENTAGE = -1; + } + + /** + * Check if the current count has reached the entry limit + * + * @param currentCount the current count of entries + * @return true if the current count has reached the entry limit, false + * otherwise + */ + public static boolean hasReachedEntryLimit(int currentCount) { + return FILE_ENTRY_LIMIT > -1 && currentCount >= FILE_ENTRY_LIMIT; + } + + /** + * Check if the current memory usage has reached the memory limit + * + * @return true if the current memory usage has reached the memory limit, false + * otherwise + */ + public static boolean hasReachedHeapLimit() { + return HEAP_LIMIT > -1 && (MemoryUtils.getCurrentHeapSizeMB() - HEAP_SIZE_AT_START) >= HEAP_LIMIT; + } + + /** + * Check if the remaining memory percentage has fallen below the set limit + * + * @return true if the remaining memory percentage is below the limit, false + * otherwise + */ + public static boolean hasReachedMemoryPercentageLimit() { + return MEMORY_LIMIT_PERCENTAGE > -1 && MemoryUtils.getRemainingMemoryPercentage() <= MEMORY_LIMIT_PERCENTAGE; + } +} \ No newline at end of file diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java new file mode 100644 index 0000000..372bc57 --- /dev/null +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -0,0 +1,67 @@ +/* + * Copyright 2022 Adobe + * All Rights Reserved. + * + * NOTICE: Adobe permits you to use, modify, and distribute this file in + * accordance with the terms of the Adobe license agreement accompanying + * it. 
+ */ +package com.adobe.campaign.tests.logparser.utils; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.is; + +import org.testng.annotations.AfterMethod; +import org.testng.annotations.BeforeMethod; +import org.testng.annotations.Test; + +public class ParseGuardRailsTest { + + @BeforeMethod + @AfterMethod + public void reset() { + ParseGuardRails.reset(); + } + + @Test + public void testHasReachedHeapLimit_WhenLimitNotSet() { + assertThat("Should not reach limit when not set", + ParseGuardRails.hasReachedHeapLimit(), is(false)); + } + + @Test + public void testHasReachedHeapLimit_WhenBelowLimit() { + ParseGuardRails.HEAP_LIMIT = 1000; // Set a high limit + assertThat("Should not reach limit when below threshold", + ParseGuardRails.hasReachedHeapLimit(), is(false)); + } + + @Test + public void testHasReachedHeapLimit_WhenAboveLimit() { + ParseGuardRails.HEAP_LIMIT = 1; // Set limit to 1 to force exceeding it + ParseGuardRails.HEAP_SIZE_AT_START = -20; // Set heap size to -20 to force exceeding it + assertThat("Should reach limit when above threshold", + ParseGuardRails.hasReachedHeapLimit(), is(true)); + } + + @Test + public void testHasReachedMemoryPercentageLimit_WhenLimitNotSet() { + assertThat("Should not reach limit when not set", + ParseGuardRails.hasReachedMemoryPercentageLimit(), is(false)); + } + + @Test + public void testHasReachedMemoryPercentageLimit_WhenBelowLimit() { + ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 20.0; // Set limit to 20% + assertThat("Should not reach limit when below threshold", + ParseGuardRails.hasReachedMemoryPercentageLimit(), is(false)); + } + + @Test + public void testHasReachedMemoryPercentageLimit_WhenAtLimit() { + + ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 100.0; // Set limit to 0% + assertThat("Should reach limit when at threshold", + ParseGuardRails.hasReachedMemoryPercentageLimit(), is(true)); + } +} \ No newline at end of file From 818e703b8145f425f443ad8847847e5bf60112c1 Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 13:39:17 +0200 Subject: [PATCH 16/26] Fixed #203 added simple checks fr the memory --- README.md | 357 ++++++++++++------ .../tests/logparser/core/ParseGuardRails.java | 51 --- .../logparser/core/StringParseFactory.java | 15 +- .../MemoryLimitExceededException.java | 19 + .../tests/logparser/utils/MemoryUtils.java | 13 +- .../logparser/utils/ParseGuardRails.java | 91 ++++- .../tests/logparser/core/ParseTesting.java | 8 +- .../logparser/utils/ParseGuardRailsTest.java | 104 ++++- 8 files changed, 464 insertions(+), 194 deletions(-) delete mode 100644 src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java create mode 100644 src/main/java/com/adobe/campaign/tests/logparser/exceptions/MemoryLimitExceededException.java diff --git a/README.md b/README.md index b3ca636..588dbb2 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,9 @@ # log-parser -[![unit-tests](https://github.com/adobe/log-parser/actions/workflows/onPushSimpleTest.yml/badge.svg)](https://github.com/adobe/log-parser/actions/workflows/onPushSimpleTest.yml) + +[![unit-tests](https://github.com/adobe/log-parser/actions/workflows/onPushSimpleTest.yml/badge.svg)](https://github.com/adobe/log-parser/actions/workflows/onPushSimpleTest.yml) [![codecov](https://codecov.io/gh/adobe/log-parser/branch/main/graph/badge.svg?token=T94S3VFEUU)](https://codecov.io/gh/adobe/log-parser) 
[![javadoc](https://javadoc.io/badge2/com.adobe.campaign.tests/log-parser/javadoc.svg)](https://javadoc.io/doc/com.adobe.campaign.tests/log-parser) -[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=adobe_log-parser&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=adobe_log-parser) +[![Quality Gate Status](https://sonarcloud.io/api/project_badges/measure?project=adobe_log-parser&metric=alert_status)](https://sonarcloud.io/summary/new_code?id=adobe_log-parser) The log parser is designed to help include log results in tests, reports and general applicative processes. It allows you to parse and analyze log files in order to extract relevant data. It can be used as is or as an SDK, where you can define your own parsing. @@ -11,61 +12,64 @@ The basic method for using this library is, that you create a definition for you ![The Processes](diagrams/Log_Parser-Processes.drawio.png) ## Table of contents + - * [Installation](#installation) - * [Maven](#maven) - * [Running the Log Parser](#running-the-log-parser) - * [Parse Definitions](#parse-definitions) - * [Defining a Parsing](#defining-a-parsing) - * [Defining an Entry](#defining-an-entry) - * [How parsing works](#how-parsing-works) - * [Anonymizing Data](#anonymizing-data) - * [Code Example](#code-example) - * [Import and Export of Parse Definitions](#import-and-export-of-parse-definitions) - * [Importing a JSON File](#importing-a-json-file) - * [Extracting Data from Logs](#extracting-data-from-logs) - * [Using the Standard Method](#using-the-standard-method) - * [Using the Log-Parser as an SDK](#using-the-log-parser-as-an-sdk) - * [Writing your own SDK](#writing-your-own-sdk) - * [Declaring a Default and Copy Constructor](#declaring-a-default-and-copy-constructor) - * [Declaring the transformation Rules in setValuesFromMap](#declaring-the-transformation-rules-in-setvaluesfrommap) - * [Declaring the Key](#declaring-the-key) - * [Declare the HeaderMap, and ValueMap](#declare-the-headermap-and-valuemap) - * [Assisting Exports](#assisting-exports) - * [Code Structure](#code-structure) - * [Searching and organizing log data](#searching-and-organizing-log-data) - * [Search and Filter Mechanisms](#search-and-filter-mechanisms) - * [Defining a Search Term](#defining-a-search-term) - * [Enriching Log Data](#enriching-log-data) - * [GroupBy Mechanisms](#groupby-mechanisms) - * [Passing a list](#passing-a-list) - * [Chaining GroupBy](#chaining-groupby) - * [Comparing Log Data](#comparing-log-data) - * [Creating a Differentiation Report](#creating-a-differentiation-report) - * [Assertions and LogDataAssertions](#assertions-and-logdataassertions) - * [Exporting Parse Results](#exporting-parse-results) - * [Exporting Results to a CSV File](#exporting-results-to-a-csv-file) - * [Exporting Results to an HTML File](#exporting-results-to-an-html-file) - * [Exporting Results to an JSON File](#exporting-results-to-an-json-file) - * [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) - * [Changelog](#changelog) - * [1.11.2](#1112) - * [1.11.0](#1110) - * [1.0.10](#1010) - * [1.0.8.2](#1082) - * [1.0.8](#108) - * [1.0.7](#107) - * [1.0.6](#106) - * [1.0.5](#105) - * [1.0.4](#104) - * [1.0.3](#103) - * [1.0.1](#101) + +- [Installation](#installation) + - [Maven](#maven) +- [Running the Log Parser](#running-the-log-parser) +- [Parse Definitions](#parse-definitions) + - [Defining a Parsing](#defining-a-parsing) + - [Defining an Entry](#defining-an-entry) + - [How parsing 
works](#how-parsing-works) + - [Anonymizing Data](#anonymizing-data) + - [Code Example](#code-example) + - [Import and Export of Parse Definitions](#import-and-export-of-parse-definitions) + - [Importing a JSON File](#importing-a-json-file) +- [Extracting Data from Logs](#extracting-data-from-logs) + - [Using the Standard Method](#using-the-standard-method) + - [Using the Log-Parser as an SDK](#using-the-log-parser-as-an-sdk) + - [Writing your own SDK](#writing-your-own-sdk) + - [Declaring a Default and Copy Constructor](#declaring-a-default-and-copy-constructor) + - [Declaring the transformation Rules in setValuesFromMap](#declaring-the-transformation-rules-in-setvaluesfrommap) + - [Declaring the Key](#declaring-the-key) + - [Declare the HeaderMap, and ValueMap](#declare-the-headermap-and-valuemap) + - [Assisting Exports](#assisting-exports) +- [Code Structure](#code-structure) +- [Searching and organizing log data](#searching-and-organizing-log-data) + - [Search and Filter Mechanisms](#search-and-filter-mechanisms) + - [Defining a Search Term](#defining-a-search-term) + - [Enriching Log Data](#enriching-log-data) + - [GroupBy Mechanisms](#groupby-mechanisms) + - [Passing a list](#passing-a-list) + - [Chaining GroupBy](#chaining-groupby) + - [Comparing Log Data](#comparing-log-data) + - [Creating a Differentiation Report](#creating-a-differentiation-report) +- [Assertions and LogDataAssertions](#assertions-and-logdataassertions) +- [Exporting Parse Results](#exporting-parse-results) + - [Exporting Results to a CSV File](#exporting-results-to-a-csv-file) + - [Exporting Results to an HTML File](#exporting-results-to-an-html-file) + - [Exporting Results to an JSON File](#exporting-results-to-an-json-file) +- [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) +- [Changelog](#changelog) +_ [1.11.2](#1112) +_ [1.11.0](#1110) +_ [1.0.10](#1010) +_ [1.0.8.2](#1082) +_ [1.0.8](#108) +_ [1.0.7](#107) +_ [1.0.6](#106) +_ [1.0.5](#105) +_ [1.0.4](#104) +_ [1.0.3](#103) \* [1.0.1](#101) ## Installation + For now, we are using this library with maven, in later iteration we will publish other build system examples: ### Maven + The following dependency needs to be added to your pom file: ``` @@ -75,16 +79,22 @@ The following dependency needs to be added to your pom file: 1.11.2 ``` + ## Running the Log Parser + We have two ways of running the log parser: + 1. Programmatically, as a library and in your test you can simply use the log-parser to analyse your log files. 2. Command-Line, as of version 1.11.0, we allow you to run your log-parsing from the command-line. Further details can be found in the section [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser). ## Parse Definitions + In order to parse logs you need to define a ParseDefinition. A ParseDefinition contains a set of ordered ParseDefinition Entries. While parsing a line of logs, the LogParser will see if all entries can be found in the line of logs. If that is the case, the line is stored according to the definitions. ### Defining a Parsing + Each Parse Definition consists of : + - Title - A set of entries - A Padding allowing us to create a legible key @@ -92,7 +102,9 @@ Each Parse Definition consists of : - If you want the result to include the log file name and path ### Defining an Entry + Each entry for a Parse Definition allows us to define: + - A title for the value which will be found. 
- The start pattern of the string that will contain the value (null if in the start of a line) - The end pattern of the string that will contain the value (null if in the end of a line) @@ -101,7 +113,9 @@ Each entry for a Parse Definition allows us to define: - Anonymizers, we can provide a set of anonymizers so that some values are skipped when parsing a line. ### How parsing works + When you have defined your parsing you use the LogDataFactory by passing it: + 1. The log files it should parse 2. The ParseDefinition @@ -109,43 +123,49 @@ By using the StringParseFactory we get a LogData object with allows us to manage ![Parsing a log line](diagrams/Log_Parser-log-parsing.drawio.png) -As mentioned in the chapter [Defining an Entry](#defining-an-entry), each Parse Defnition Entry contains a start and end pattern. We extract and store the values between these two points, and continue with the rest of the line until there is no more data to parse. +As mentioned in the chapter [Defining an Entry](#defining-an-entry), each Parse Defnition Entry contains a start and end pattern. We extract and store the values between these two points, and continue with the rest of the line until there is no more data to parse. A line is only considered if all the Parse Definition Entries can be matched in the order they have been defined. -**Note:** Once we have extracted the data corresponding to an entry, the following string will include the end pattern of that entry. This is because sometimes the end pattern may be part of a different data to store. +**Note:** Once we have extracted the data corresponding to an entry, the following string will include the end pattern of that entry. This is because sometimes the end pattern may be part of a different data to store. #### Anonymizing Data + We have discovered that it would be useful to anonymize data. This will aloow you to group some log data that contains variables. Anonymization has two features: -* Replacing Data using `{}`, -* Ignoring Data using `[]`. + +- Replacing Data using `{}`, +- Ignoring Data using `[]`. For example if you store an anonymizer with the value: + ``` Storing key '{}' in the system ``` the log-parser will merge all lines that contain the same text, but with different values for the key. For example: -* Storing key 'F' in the system -* Storing key 'B' in the system -* Storing key 'G' in the system -will all be stored as `Storing key '{}' in the system`. +- Storing key 'F' in the system +- Storing key 'B' in the system +- Storing key 'G' in the system + +will all be stored as `Storing key '{}' in the system`. Sometimes we just want to anonymize part of a line. This is useful if you want to do post-treatment. For example in our previous example as explained `Storing key 'G' in the system`, would be merged, however `NEO-1234 : Storing key 'G' in the system` would not be merged. In this cas we can do a partial anonymization using the `[]` notation. 
For example if we enrich our original template: + ``` []Storing key '{}' in the system ``` In this case the lines: -* `NEO-1234 : Storing key 'G' in the system` will be stored as `NEO-1234 : Storing key '{}' in the system` -* `NEO-1234 : Storing key 'H' in the system` will be stored as `NEO-1234 : Storing key '{}' in the system` -* `EXA-1234 : Storing key 'Z' in the system` will be stored as `EXA-1234 : Storing key '{}' in the system` -* `EXA-1234 : Storing key 'X' in the system` will be stored as `EXA-1234 : Storing key '{}' in the system` +- `NEO-1234 : Storing key 'G' in the system` will be stored as `NEO-1234 : Storing key '{}' in the system` +- `NEO-1234 : Storing key 'H' in the system` will be stored as `NEO-1234 : Storing key '{}' in the system` +- `EXA-1234 : Storing key 'Z' in the system` will be stored as `EXA-1234 : Storing key '{}' in the system` +- `EXA-1234 : Storing key 'X' in the system` will be stored as `EXA-1234 : Storing key '{}' in the system` ### Code Example + Here is an example of how we can parse a string. The method is leveraged to perform the same parsing in one or many files. ```java @@ -178,17 +198,20 @@ public void parseAStringDemo() throws StringParseException { is(equalTo("rest/head/workflow/WKF193"))); } ``` -In the code above we want to parse the log line below, and want to find the REST call "GET /rest/head/workflow/WKF193", and to extract the verb "GET", and the api "/rest/head/workflow/WKF193". + +In the code above we want to parse the log line below, and want to find the REST call "GET /rest/head/workflow/WKF193", and to extract the verb "GET", and the api "/rest/head/workflow/WKF193". `afthostXX.qa.campaign.adobe.com:443 - - [02/Apr/2022:08:08:28 +0200] "GET /rest/head/workflow/WKF193 HTTP/1.1" 200` The code starts with the creation a parse definition with at least two parse definitions that tell us between which markers should each data be extracted. The parse difinition is then handed to the StringParseFactory so that the data can be extracted. At the end we can see that each data is stored in a map with the parse definition entry title as a key. ### Import and Export of Parse Definitions + You can import or store a Parse Definition to or from a JSON file. ### Importing a JSON File -You can define a Parse Definition in a JSON file. + +You can define a Parse Definition in a JSON file. This can then be imported and used for parsing using the method `ParseDefinitionFactory.importParseDefinition`. Here is small example of how the JSON would look like: @@ -217,64 +240,80 @@ This can then be imported and used for parsing using the method `ParseDefinition ``` ## Extracting Data from Logs + By default, the Log-Parser will generate a standardized key-value extraction of the log you generate. All values are then stored as Strings. For more advanced transformations we suggest you write your own Log SDK. We will describe each in detail in this chapter. ### Using the Standard Method + By default, each entry for your lag parsing will be stored as a Generic entry. This means that all values will be stored as Strings. Each entry will have a : + - Key - A set of values - The frequency of the key as found in the logs ### Using the Log-Parser as an SDK + Using the log parser as an SDK allow you to define your own transformations and also to override many of the behaviors. By fefault we can look at the SDK mode as a second parsing, where we first parse the logs using the generic ParseDefinitions, and then a second treatment is performed with the SDK you write. 
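As an illustration, a minimal, hypothetical SDK entry could look like the sketch below. The field names are made up, and the exact signatures of `setValuesFromMap()`, `makeKey()`, `fetchHeaders()` and `fetchValueMap()` are assumptions based on the subsections that follow rather than a copy of the shipped examples; adapt them to the actual `StdLogEntry` API.

```java
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import com.adobe.campaign.tests.logparser.core.StdLogEntry;

// Hypothetical SDK entry that turns the parsed "verb" and "path" strings into dedicated fields
public class AccessLogEntry extends StdLogEntry {
    private String verb;
    private String path;

    // Default constructor
    public AccessLogEntry() {
    }

    // Copy constructor: copies the values from one entry to another
    public AccessLogEntry(AccessLogEntry in_origin) {
        this.verb = in_origin.verb;
        this.path = in_origin.path;
    }

    // Transformation rules: how the parsed strings are mapped onto our own fields (assumed signature)
    public void setValuesFromMap(Map<String, String> in_valueMap) {
        verb = in_valueMap.get("verb");
        path = in_valueMap.get("path");
    }

    // The key that identifies a unique line
    public String makeKey() {
        return verb + "#" + path;
    }

    // Header names used when presenting or exporting the results (assumed method name)
    public List<String> fetchHeaders() {
        return Arrays.asList("verb", "path");
    }

    // The value map that extracts the values of our fields (assumed return type)
    public Map<String, Object> fetchValueMap() {
        Map<String, Object> lr_values = new HashMap<>();
        lr_values.put("verb", verb);
        lr_values.put("path", path);
        return lr_values;
    }
}
```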
Typical use cases are: -* Transformation of parts of the parsed log data into non-string types. -* Additional parsing of the parsed data. + +- Transformation of parts of the parsed log data into non-string types. +- Additional parsing of the parsed data. #### Writing your own SDK + In order to use this feature you need to define a class that extends the class StdLogEntry. You will often want to transform the parsed information into a more manageable object by defining your own fields in the SDK class. In the project we have two examples of SDKs (under `src/test/java`): -* `com.adobe.campaign.tests.logparser.data.SDKCaseSTD` where we perform additional parsing of the log data. -* `com.adobe.campaign.tests.logparser.data.SDKCase2` where we transform the time into a date object. + +- `com.adobe.campaign.tests.logparser.data.SDKCaseSTD` where we perform additional parsing of the log data. +- `com.adobe.campaign.tests.logparser.data.SDKCase2` where we transform the time into a date object. ##### Declaring a Default and Copy Constructor + You will need to declare a default constructor and a copy constructor. The copy constructor will allow you to copy the values from one object to another. ##### Declaring the transformation Rules in setValuesFromMap + You will need to declare how the parsed variables are transformed into your SDK. This is done in the method `setValuesFromMap()`. In there you can define a fine-grained extraction of the variables. This could be extracting hidden data in strings of the extracted data, or simple data transformations such as integers or dates. ##### Declaring the Key + You will need to define what a unique line will look like. Although this is already done in the Definition Rules, you may want to provide more precision. This is done in the method `makeKey()`. ##### Declare the HeaderMap, and ValueMap + Depending on the fields you have defined, you will want to define how the results are represented when they are stored in your system. You will need to give names to the headers, and provide a map that extracts the values. ##### Assisting Exports + One of the added values of writing your own log data is the possibility of using non-String objects, and of performing additional operations on the data. This has the drawback that we can have odd behaviors when exporting the log data. For this we, by default, transform all data in an entry to a map of Strings. -In some cases the default String transformation may not be to your liking. In this case you will have to override the method `Map fetchValueMapPrintable()`. To do this the method needs to call perform your own transformation to the results of the `fetchValueMap()` method. +In some cases the default String transformation may not be to your liking. In this case you will have to override the method `Map fetchValueMapPrintable()`. To do this the method needs to perform your own transformation on the results of the `fetchValueMap()` method. ## Code Structure + Below is a diagram representing the class structure: ![The Class relationship](diagrams/Log_Parser-Classes.drawio.png) ## Searching and organizing log data -We have a series of search and organizing the log data. These by general use Hamcrest Matchers to allow you to define different querires. + +We have a series of methods for searching and organizing the log data. These generally use Hamcrest Matchers to allow you to define different queries. ### Search and Filter Mechanisms + We have introduced the filter and search mechanisms.
These allow you to search the LogData for values for a given ParseDefinitionEntry. For this we have introduced the following methods: -* isElementPresent -* searchEntries -* filterBy + +- isElementPresent +- searchEntries +- filterBy We currently have the following signatures: @@ -287,6 +326,7 @@ public LogData filterBy(Map in_filterKeyValues) ``` #### Defining a Search Term + When we define a search term, we do this by defining it as a map of ParseDefining Entry Name and a Matcher. The Matcher we use is a Hamcrest matcher which provides great flexibility in defining the search terms. ```java @@ -294,64 +334,70 @@ Map l_filterProperties = new HashMap<>(); l_filterProperties.put("Definition 1", Matchers.equalTo("14")); l_filterProperties.put("Definition 2", Matchers.startsWith("13")); -LogData l_foundEntries = l_logData.searchEntries(l_filterProperties)); +LogData l_foundEntries = l_logData.searchEntries(l_filterProperties)); ``` In versions prior to 1.11.0 we used a map of key and Objects for search terms. In these queries it was implicitly an equality check. Because of that these search terms can be replaced with `Matchers.equalTo` or `Matchers.is`. Example of a search term in version 1.10.0: + ```java Map l_filterProperties = new HashMap<>(); l_filterProperties.put("Definition 1", "14"); ``` In version 1.11.0 the same search term would look like this: + ```java Map l_filterProperties = new HashMap<>(); l_filterProperties.put("Definition 1", Matchers.equalTo("14")); ``` ### Enriching Log Data + We have the capability to enrich log data with additional information. This is done by using the method `LogData#enrichData(Map, String, String)`. This method accepts: -* A search term (as defined in the section [Defining a Search Term](#defining-a-search-term)) -* The title of the entry to be added -* The value for the new entry in the search lines + +- A search term (as defined in the section [Defining a Search Term](#defining-a-search-term)) +- The title of the entry to be added +- The value for the new entry in the search lines If you want to add multiple values for the enrichment, you can run this method several times, or using an other method which is more suitable which is the method `LogData#enrichData(Map, Map keyValueToEnrich)`.This method accepts: -* A search term (as defined in the section [Defining a Search Term](#defining-a-search-term)) -* a key value map with The title of the entry as the key and the value for the new entry as the value -We have also introduced a method called `LogData#enrichEmpty(String, String)`, which sets a value for the entries which have not yet have a value set for them. +- A search term (as defined in the section [Defining a Search Term](#defining-a-search-term)) +- a key value map with The title of the entry as the key and the value for the new entry as the value + +We have also introduced a method called `LogData#enrichEmpty(String, String)`, which sets a value for the entries which have not yet have a value set for them. ### GroupBy Mechanisms + We have introduced the groupBy mechanism. This functionality allows you to organize your results with more detail. Given a log data object, and an array of ParseDefinitionEntry names, we generate a new LogData Object containing groups made by the passed ParseDeinitionEnries and and number of entries for each group. 
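As a short, hedged sketch of the enrichment methods described above: `l_logData` is assumed to be an already-parsed `LogData` object, and the entry titles (`Definition 1`, `environment`, `errorCode`) are made-up examples.

```java
Map l_searchTerms = new HashMap<>();
l_searchTerms.put("Definition 1", Matchers.equalTo("14"));

// Add a single new entry ("environment" = "qa") to every line matching the search term
l_logData.enrichData(l_searchTerms, "environment", "qa");

// Add several new entries in one call
Map<String, String> l_newValues = new HashMap<>();
l_newValues.put("environment", "qa");
l_newValues.put("errorCode", "none");
l_logData.enrichData(l_searchTerms, l_newValues);

// Give the lines that were not matched a default value
l_logData.enrichEmpty("environment", "unknown");
```

The enriched entry titles should then be usable like any other parse definition entry, for example in the grouping shown below.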
Let's take the following case: -Definition 1 | Definition 2 | Definition 3 | Definition 4 ------------- | ------------ | ------------ | ------------ -12 | 14 | 13 | AA -112 | 114 | 113 | AAA -120 | 14 | 13 | AA - +| Definition 1 | Definition 2 | Definition 3 | Definition 4 | +| ------------ | ------------ | ------------ | ------------ | +| 12 | 14 | 13 | AA | +| 112 | 114 | 113 | AAA | +| 120 | 14 | 13 | AA | If we perform groupBy with the parseDefinition `Definition 2`, we will be getting a new LogData object with two entries: -Definition 2 | Frequence ------------- | ------------ -14 | 2 -114 | 1 +| Definition 2 | Frequence | +| ------------ | --------- | +| 14 | 2 | +| 114 | 1 | We can also pass a list of group by items, or even perform a chaining of the group by predicates. #### Passing a list + We can create a sub group of the LogData by creating group by function: ```java LogData l_myGroupedData = logData.groupBy(Arrays.asList("Definition 1", "Definition 4")); -//or +//or LogData l_myGroupedData = logData.groupBy(Arrays.asList("Definition 1", "Definition 4"), MyImplementationOfStdLogEntry.class); ``` @@ -359,13 +405,13 @@ LogData l_myGroupedData = logData.groupBy(Arrays. In this case we get : | Definition 1 | Definition 4 | Frequence | -|--------------|--------------|-----------| +| ------------ | ------------ | --------- | | 12 | AA | 1 | | 112 | AAA | 1 | | 120 | AA | 1 | - #### Chaining GroupBy + The GroupBy can also be chained. Example: ```java @@ -375,29 +421,33 @@ LogData l_myGroupedData = logData.groupBy(Arrays.asList("Definitio In this case we get : | Definition 4 | Frequence | -| ------------ |-----------| -| AA | 2 | -| AAA | 1 | +| ------------ | --------- | +| AA | 2 | +| AAA | 1 | ### Comparing Log Data + As of version 1.11.0 we have introduced the possibility to compare two LogData objects. This is a light compare that checks that for a given key, if it is absent, added or changes in frequency. The method `compare` returns a `LogDataComparison` object that contains the results of the comparison. A comparison can be of three types: -* NEW : The entry has been added -* Removed : The entry has been removed -* Changed : The entry has changed in frequency + +- NEW : The entry has been added +- Removed : The entry has been removed +- Changed : The entry has changed in frequency Apart from this we return the : -* delta : The difference in frequency -* deltaRatio : The difference in frequency as a ratio in % + +- delta : The difference in frequency +- deltaRatio : The difference in frequency as a ratio in % These values are negative if the values have decreased. Creating a differentiation report is done with the method `LogData.compare(LogData in_logData)`. This method returns a `LogDataComparison` object that contains the results of the comparison. #### Creating a Differentiation Report -We can generate an HTML Report where the differences are high-lighted. This is done with the method `LogDataFactory.generateComparisonReport(LogData reference, LogData target, String filename)`. This method will generate an HTML Report detailing the found differences. +We can generate an HTML Report where the differences are high-lighted. This is done with the method `LogDataFactory.generateComparisonReport(LogData reference, LogData target, String filename)`. This method will generate an HTML Report detailing the found differences. ## Assertions and LogDataAssertions + As of version 1.0.5 we have introduced the notion of assertions. 
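For the comparison described in the previous section, a hedged sketch could look as follows; `l_reference` and `l_target` are assumed to be two already-parsed `LogData` objects, the return type of `compare` is taken from the description above, and the report file name is an arbitrary example.

```java
// Light comparison: for each key we check whether it is absent, added, or changed in frequency
LogDataComparison l_comparison = l_reference.compare(l_target);

// Generate an HTML report detailing the found differences
LogDataFactory.generateComparisonReport(l_reference, l_target, "comparison-report.html");
```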
Assertions can either take a LogData object or a set of files as input. We currently have the following assertions: @@ -413,69 +463,125 @@ We currently have the following assertions: You have two types of assertions. A simple one where you give an entry key and a matcher, and a more complex one where you give a map of parse Definition Entry keys entries and corresponding matchers. An assertion will only work if: -* The log data is not empty -* There is a Parse Definition entry with the given title. + +- The log data is not empty +- There is a Parse Definition entry with the given title. Otherwise, you will get a failed assertion for these causes. ## Exporting Parse Results + We have the possibility to export the log data results into files. Currently the following formats are supported: -* CSV -* HTML + +- CSV +- HTML All reports are stored in the directory `log-parser-reports/export/`. If you are using an SDK to control the log parsing, you may want to override the method `fetchValueMapPrintable` to provide a more suitable export of the data. For mor information on this please refer to the chapter describing this topic. ### Exporting Results to a CSV File + We have the possibility to export the log data results into a CSV file. This is done by calling the methods `LogData#exportLogDataToCSV`. You have the possibility to define the data, and order to be exported as well as the file name. ### Exporting Results to an HTML File + We have the possibility to export the log data results into an HTML file. This is done by calling the methods `LogData#exportLogDataToHTML`. You have the possibility to define the data, and order to be exported, the file name and the title of the report. ### Exporting Results to an JSON File + We have the possibility to export the log data results into an JSON file. This is done by calling the methods `LogData#exportLogDataToJSON`. You have the possibility to define the data, and order to be exported, the file name and the title of the report. ## Command-line Execution of the Log-Parser -As of version 1.11.0 we have introduced the possibility of running the log-parser from the command line. This is done by using the executable jar file or executing the main method in maven. + +As of version 1.11.0 we have introduced the possibility of running the log-parser from the command line. This is done by using the executable jar file or executing the main method in maven. The results will currently be stored as a CSV or HTML file. The command line requires you to at least provide the following information: -* `--startDir` : The root path from which the logs should be searched. -* `--parseDefinition` : The path to the parse definition file. + +- `--startDir` : The root path from which the logs should be searched. +- `--parseDefinition` : The path to the parse definition file. The typical command line would look like this: + ``` mvn exec:java -Dexec.args="--startDir=src/test/resources/nestedDirs/ --parseDefinition=src/test/resources/parseDefinition.json" ``` + or + ``` java -jar log-parser-1.11.0.jar --startDir=/path/to/logs --parseDefinition=/path/to/parseDefinition.json -``` +``` You can provide additional information such as: -* `--fileFilter` : The wildcard used for selecting the log files. The default value is *.log -* `--reportType` : The format of the report. The allowed values are currently HTML, JSON & CSV. The default value is HTML -* `--reportFileName` : The name of the report file. 
By default, this is the name of the Parse Definition name suffixed with '-export' -* `--reportName` : The report title as show in an HTML report. By default, the title includes the Parse Definition name -You can get a print out of the command line options by running the command with the `--help` flag. +- `--fileFilter` : The wildcard used for selecting the log files. The default value is \*.log +- `--reportType` : The format of the report. The allowed values are currently HTML, JSON & CSV. The default value is HTML +- `--reportFileName` : The name of the report file. By default, this is the name of the Parse Definition name suffixed with '-export' +- `--reportName` : The report title as shown in an HTML report. By default, the title includes the Parse Definition name + +You can get a print out of the command line options by running the command with the `--help` flag. All reports are stored in the directory `log-parser-reports/export/`. +## Memory Guard Rails + +As of 1.11.3 we have introduced a series of guard rails. These allow you to control how the parser reacts to very large data. + +**NOTE** : All Guard Rails are disabled by default. + +### Guard Rail Properties + +The following table lists all available guard rail properties and their default values: + +| Property | Description | Default Value | | ---------------------------------------- | ------------------------------------------------------ | ------------- | | PROP_LOGPARSER_FILEENTRY_LIMIT | Maximum number of entries to parse per file | -1 (disabled) | | PROP_LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | -1 (disabled) | | PROP_LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | -1 (disabled) | | PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE | Maximum percentage of memory usage before warning | -1 (disabled) | | PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | false | + +### File Entry Limitations + +For whatever reason, you may want to set a limit on the number of entries you can extract from a file. This can be done by setting the system property _PROP_LOGPARSER_FILEENTRY_LIMIT_. + +When set, the log parser stops parsing after reaching the limit in a file, and moves to the next file. Whenever this happens we log a WARNING and add the skipped file to our internal list of issues. + +### File Size Limitations + +For whatever reason, you may want to set a limit on the size of the files you parse. This can be done by setting the system property _PROP_LOGPARSER_FILESIZE_LIMIT_. + +When set, we create a warning regarding the file size, and store it among the file size issues. + +### Memory Limitations + +Although we will not stop a process from executing due to memory issues, we provide mechanisms for you to have control over the memory when running the log parser programmatically. + +These limitations are set with the following System properties: + +- _PROP_LOGPARSER_HEAP_LIMIT_ : Setting a limit above which we log these occurrences. +- _PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE_ : A percentage of the occupied memory in reference to the max memory. + +We also have the possibility of throwing an exception in the case of exceeding the memory limits. This is activated by setting the System property _PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT_ to true. + ## Changelog + ### 1.11.2 + - [#188](https://github.com/adobe/log-parser/issues/188) We solved problems with exporting when the directory hierarchy is not present.
- [#189](https://github.com/adobe/log-parser/issues/189) The generated JSON when exporting results included unnecessary escape characters. ### 1.11.0 + - **(new feature)** [#10](https://github.com/adobe/log-parser/issues/10) We now have an executable for the log-parser. You can perform a log parsing using the command line. For more information please read the section on [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser). - **(new feature)** [#127](https://github.com/adobe/log-parser/issues/127) You can now compare two LogData Objects. This is a light compare that checks that for a given key, if it is absent, added or changes in frequency. - **(new feature)** [#154](https://github.com/adobe/log-parser/issues/154) We have a data enrichment feature, where you can enrich the log data with additional information. For further details please refer to the section on [Enriching Log Data](#enriching-log-data). @@ -493,6 +599,7 @@ All reports are stored in the directory `log-parser-reports/export/`. - [#185](https://github.com/adobe/log-parser/issues/185) Resolved issue with deserializing unexpected objects in SDK Log entries.. ### 1.0.10 + - Moved main code and tests to the package "core" - [#67](https://github.com/adobe/log-parser/issues/67) We can now select the files using a wild card. Given a directory we can now look for files in the sub-directory given a wild-card. The wildcards are implemented using Adobe Commons IO. You can read more on this in the [WildcardFilter JavaDoc](https://commons.apache.org/proper/commons-io/apidocs/org/apache/commons/io/filefilter/WildcardFilter.html) - [#68](https://github.com/adobe/log-parser/issues/68) We now present a report of the findings at the end of the analysis. @@ -501,35 +608,51 @@ All reports are stored in the directory `log-parser-reports/export/`. - [#120](https://github.com/adobe/log-parser/issues/120) Corrected the export system as it did not work well with SDK defined entries. - [#148](https://github.com/adobe/log-parser/issues/148) The LogData#groupBy method did not work well when it is based on an SDK. We now look at the headers and values of the SDK. Also the target for a groupBy will have to be a GenricEntry as cannot guarantee that the target class can support a groupBy. - Removed ambiguities in the methods for StdLogEntry. For example "fetchValueMap" is no longer abstract, but it can be overriden. + ### 1.0.8.2 -- Building with java8. + +- Building with java8. - Upgraded Jackson XML to remove critical version - Setting system to work in both java8 and java11. (Java11 used for sonar) + ### 1.0.8 + - Moving back to Java 8 as our clients are still using Java8 + ### 1.0.7 + - #39 updated the log4J library to 2.17.1 to avoid the PSIRT vulnerability + ### 1.0.6 + - #38 Resolved some issues with HashCode - #37 Upgraded the build to Java11 - #34 Activated sonar in the build process + ### 1.0.5 + - #23 Added the searchEntries, and the isEntryPresent methods. - #20 Adding log data assertions - keyOrder is now a List - #32 we have solved an issue with exporting and importing the key orders - #30 Allowing for the LogDataFactory to accept a JSON file as input for the ParseDefinitions - #31 Solved bug with importing the JSON file + ### 1.0.4 + - #6 We Can now import a definition from a JSON file. You can also export a ParseDefinition into a JSON file. -- #8 & #18 Added the filter function. +- #8 & #18 Added the filter function. - #13 Added copy constructors. - #13 Added a copy method in the StdLogEntry (#13). 
- #14 Added a set method to LogData. This allows you to change a Log data given a key value and ParseDefinition entry title - Renamed exception IncorrectParseDefinitionTitleException to IncorrectParseDefinitionException. + ### 1.0.3 + - Introduced the LogData Top Class. This encapsulates all results. - Introduced the LogDataFactory - Added the groupBy method to extract data from the results + ### 1.0.1 + - Open source release. diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java deleted file mode 100644 index 5a40af8..0000000 --- a/src/main/java/com/adobe/campaign/tests/logparser/core/ParseGuardRails.java +++ /dev/null @@ -1,51 +0,0 @@ -/* - * Copyright 2022 Adobe - * All Rights Reserved. - * - * NOTICE: Adobe permits you to use, modify, and distribute this file in - * accordance with the terms of the Adobe license agreement accompanying - * it. - */ -package com.adobe.campaign.tests.logparser.core; - -import java.util.HashMap; -import java.util.Map; - -import com.adobe.campaign.tests.logparser.utils.MemoryUtils; - -/** - * Class to store guard rails for parsing operations - */ -public class ParseGuardRails { - public static final Map fileSizeLimitations = new HashMap<>(); - public static final long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); - public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_FILEENTRY_LIMIT", "-1")); - public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_HEAP_LIMIT", "-1")); - - public static void reset() { - fileSizeLimitations.clear(); - FILE_ENTRY_LIMIT = -1; - HEAP_LIMIT = -1; - } - - /** - * Check if the current count has reached the entry limit - * - * @param currentCount the current count of entries - * @return true if the current count has reached the entry limit, false - * otherwise - */ - public static boolean hasReachedEntryLimit(int currentCount) { - return FILE_ENTRY_LIMIT > -1 && currentCount >= FILE_ENTRY_LIMIT; - } - - /** - * Check if the current memory usage has reached the memory limit - * - * @return true if the current memory usage has reached the memory limit, false - * otherwise - */ - public static boolean hasReachedHeapLimit() { - return HEAP_LIMIT > -1 && (MemoryUtils.getCurrentHeapSizeMB() - HEAP_SIZE_AT_START) >= HEAP_LIMIT; - } -} \ No newline at end of file diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java b/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java index 3146698..74c26d9 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java @@ -11,6 +11,7 @@ import com.adobe.campaign.tests.logparser.exceptions.LogParserSDKDefinitionException; import com.adobe.campaign.tests.logparser.exceptions.StringParseException; import com.adobe.campaign.tests.logparser.utils.MemoryUtils; +import com.adobe.campaign.tests.logparser.utils.ParseGuardRails; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -75,8 +76,10 @@ public static > Map> Map fileSizeLimitations = new HashMap<>(); + public static final Map entryLimitations = new HashMap<>(); + public static long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_FILEENTRY_LIMIT", "-1")); public static 
long HEAP_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_HEAP_LIMIT", "-1")); public static double MEMORY_LIMIT_PERCENTAGE = Double .parseDouble(System.getProperty("PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE", "-1")); + protected static boolean EXCEPTION_ON_MEMORY_LIMIT = Boolean + .parseBoolean(System.getProperty("PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT", "false")); + + protected static long FILE_SIZE_LIMIT = Long + .parseLong(System.getProperty("PROP_LOGPARSER_FILESIZE_LIMIT", "-1")); + protected static int MEASUREMENT_SCALE = 1024 * 1024; public static void reset() { fileSizeLimitations.clear(); + entryLimitations.clear(); FILE_ENTRY_LIMIT = -1; HEAP_LIMIT = -1; MEMORY_LIMIT_PERCENTAGE = -1; + MEASUREMENT_SCALE = 1024 * 1024; + EXCEPTION_ON_MEMORY_LIMIT = false; } /** @@ -51,12 +69,77 @@ public static boolean hasReachedHeapLimit() { } /** - * Check if the remaining memory percentage has fallen below the set limit + * Check if the occupied memory is reaching the set limit * - * @return true if the remaining memory percentage is below the limit, false + * @return true if the used memory has gone beyong the accepted threshhold, + * false * otherwise */ - public static boolean hasReachedMemoryPercentageLimit() { - return MEMORY_LIMIT_PERCENTAGE > -1 && MemoryUtils.getRemainingMemoryPercentage() <= MEMORY_LIMIT_PERCENTAGE; + public static boolean hasReachedMemoryLimit() { + return MEMORY_LIMIT_PERCENTAGE > -1 && MemoryUtils.getUsedMemoryPercentage() >= MEMORY_LIMIT_PERCENTAGE; + } + + /** + * Check if any memory limits have been reached + * + * @return true if any memory limit has been reached, false otherwise + * @throws MemoryLimitExceededException if EXCEPTION_ON_MEMORY_LIMIT is true and + * a limit is reached + */ + public static boolean checkMemoryLimits() { + if (hasReachedHeapLimit()) { + String message = "Heap limit of " + HEAP_LIMIT + " MB has been reached"; + log.warn(message); + if (EXCEPTION_ON_MEMORY_LIMIT) { + throw new MemoryLimitExceededException(message); + } + return true; + } + if (hasReachedMemoryLimit()) { + String message = "Memory usage limit of " + MEMORY_LIMIT_PERCENTAGE + "% has been reached"; + log.warn(message); + if (EXCEPTION_ON_MEMORY_LIMIT) { + throw new MemoryLimitExceededException(message); + } + return true; + } + return false; + } + + /** + * Manages the Guard Rails for the file parsing + * + * @param in_fileName the file to check + * @param currentCount the current count of entries + * @return true if a warning was generated + */ + public static boolean checkEntryLimits(File in_fileName, int currentCount) { + if (ParseGuardRails.hasReachedEntryLimit(currentCount)) { + log.warn("Reached entry limit of {} for file {}. Skipping the remaining lines.", + ParseGuardRails.FILE_ENTRY_LIMIT, in_fileName); + + // Add file size info to map + ParseGuardRails.entryLimitations.put(in_fileName.getAbsolutePath(), in_fileName.length()); + + // Force garbage collection + System.gc(); + return true; + } + + return false; + } + + public static void checkFileSizeLimits(File in_fileName) { + // Large files should not cause a problem, but we should log it + if (ParseGuardRails.hasReachedFileSizeLimit(in_fileName.length())) { + log.warn("Reached file size limit of {} for file {}. 
Skipping the remaining lines.", + ParseGuardRails.FILE_SIZE_LIMIT, in_fileName); + + ParseGuardRails.fileSizeLimitations.put(in_fileName.getAbsolutePath(), in_fileName.length()); + } + } + + private static boolean hasReachedFileSizeLimit(long length) { + return FILE_SIZE_LIMIT > -1 && (length / MEASUREMENT_SCALE) >= FILE_SIZE_LIMIT; } } \ No newline at end of file diff --git a/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java b/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java index 05b4edf..bf59863 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java @@ -21,6 +21,7 @@ import org.testng.annotations.Test; import com.adobe.campaign.tests.logparser.exceptions.StringParseException; +import com.adobe.campaign.tests.logparser.utils.ParseGuardRails; public class ParseTesting { // private final String STD_GREP_STRING = @@ -1256,12 +1257,13 @@ public void testSkipOnLimit_Issue103() assertThat(l_entries, is(notNullValue())); assertThat("We should have entries", l_entries.size(), Matchers.equalTo(10)); + File l_file = new File(bugFile); assertThat("We should have an entry for the file size", - ParseGuardRails.fileSizeLimitations.containsKey(bugFile), is(true)); + ParseGuardRails.entryLimitations.containsKey(l_file.getAbsolutePath())); assertThat("We should have an entry for the file size", - ParseGuardRails.fileSizeLimitations.get(bugFile), - is(equalTo(new File(bugFile).length()))); + ParseGuardRails.entryLimitations.get(l_file.getAbsolutePath()), + is(equalTo(l_file.length()))); } @Test(description = "Related to issue #102, where the parsing stops or no reason") diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index 372bc57..32b3f49 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -10,17 +10,71 @@ import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.is; +import static org.testng.Assert.assertThrows; + +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; +import com.adobe.campaign.tests.logparser.exceptions.MemoryLimitExceededException; + public class ParseGuardRailsTest { + private Path tempFile; + @BeforeMethod + public void setup() throws IOException { + ParseGuardRails.reset(); + + tempFile = Files.createTempFile("test", ".log"); + } + @AfterMethod - public void reset() { + public void cleanup() throws IOException { ParseGuardRails.reset(); + Files.deleteIfExists(tempFile); + } + + @Test + public void testCheckFileSizeLimits_WhenFileSizeNotSet() { + ParseGuardRails.checkFileSizeLimits(tempFile.toFile()); + assertThat("Should not have file size limitations when not set", + ParseGuardRails.fileSizeLimitations.isEmpty(), is(true)); + } + + @Test + public void testCheckFileSizeLimits_WhenFileSizeBelowLimit() throws IOException { + ParseGuardRails.MEASUREMENT_SCALE = 1; + ParseGuardRails.FILE_SIZE_LIMIT = 1000; // Set a high limit + Files.write(tempFile, "small content".getBytes()); + ParseGuardRails.checkFileSizeLimits(tempFile.toFile()); + assertThat("Should not have recorded file size", + 
ParseGuardRails.fileSizeLimitations.isEmpty()); + } + + @Test + public void testCheckFileSizeLimits_WhenFileSizeAtLimit() throws IOException { + ParseGuardRails.MEASUREMENT_SCALE = 1; + Files.write(tempFile, "content".getBytes()); + ParseGuardRails.FILE_SIZE_LIMIT = tempFile.toFile().length(); // Set limit to 0 + ParseGuardRails.checkFileSizeLimits(tempFile.toFile()); + assertThat("Should have recorded file size", + ParseGuardRails.fileSizeLimitations.containsKey(tempFile.toString()), is(true)); + } + + @Test + public void testCheckFileSizeLimits_WhenFileSizeAboveLimit() throws IOException { + ParseGuardRails.MEASUREMENT_SCALE = 1; + + ParseGuardRails.FILE_SIZE_LIMIT = 1; // Set a small limit + Files.write(tempFile, "large content that exceeds the limit".getBytes()); + ParseGuardRails.checkFileSizeLimits(tempFile.toFile()); + assertThat("Should have recorded file size", + ParseGuardRails.fileSizeLimitations.containsKey(tempFile.toString()), is(true)); } @Test @@ -47,21 +101,61 @@ public void testHasReachedHeapLimit_WhenAboveLimit() { @Test public void testHasReachedMemoryPercentageLimit_WhenLimitNotSet() { assertThat("Should not reach limit when not set", - ParseGuardRails.hasReachedMemoryPercentageLimit(), is(false)); + ParseGuardRails.hasReachedMemoryLimit(), is(false)); } @Test public void testHasReachedMemoryPercentageLimit_WhenBelowLimit() { ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 20.0; // Set limit to 20% assertThat("Should not reach limit when below threshold", - ParseGuardRails.hasReachedMemoryPercentageLimit(), is(false)); + ParseGuardRails.hasReachedMemoryLimit(), is(false)); } @Test public void testHasReachedMemoryPercentageLimit_WhenAtLimit() { - ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 100.0; // Set limit to 0% + ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.1; // Set limit to 0% assertThat("Should reach limit when at threshold", - ParseGuardRails.hasReachedMemoryPercentageLimit(), is(true)); + ParseGuardRails.hasReachedMemoryLimit(), is(true)); + } + + @Test + public void testCheckMemoryLimits_WhenNoLimitsSet() { + assertThat("Should not reach limit when not set", + ParseGuardRails.checkMemoryLimits(), is(false)); + } + + @Test + public void testCheckMemoryLimits_WhenHeapLimitReached() { + ParseGuardRails.HEAP_LIMIT = 0; // Set limit to 0 to force reaching it + assertThat("Should reach limit when heap limit is reached", + ParseGuardRails.checkMemoryLimits(), is(true)); + } + + @Test + public void testCheckMemoryLimits_WhenMemoryLimitReached() { + ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.0; // Set limit to 0% to force reaching it + assertThat("Should reach limit when memory limit is reached", + ParseGuardRails.checkMemoryLimits(), is(true)); + } + + @Test + public void testCheckMemoryLimits_WhenHeapLimitReachedWithException() { + ParseGuardRails.EXCEPTION_ON_MEMORY_LIMIT = true; + + ParseGuardRails.HEAP_LIMIT = 1; // Set limit to 1 to force exceeding it + ParseGuardRails.HEAP_SIZE_AT_START = -20; // Set heap size to -20 to force exceeding it + + assertThat("We should have reached the heap limit", ParseGuardRails.hasReachedHeapLimit()); + + assertThrows(MemoryLimitExceededException.class, () -> ParseGuardRails.checkMemoryLimits()); + } + + @Test + public void testCheckMemoryLimits_WhenMemoryLimitReachedWithException() { + ParseGuardRails.EXCEPTION_ON_MEMORY_LIMIT = true; + + ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.0; // Set limit to 0% to force reaching it + assertThrows(MemoryLimitExceededException.class, () -> ParseGuardRails.checkMemoryLimits()); } } \ No newline at 
end of file From 0ec451cadb38cb4279bf51a92827e9331810feb1 Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 14:22:33 +0200 Subject: [PATCH 17/26] #203 Added guard rails --- .../tests/logparser/core/LogData.java | 8 +++- .../logparser/core/StringParseFactory.java | 3 +- .../tests/logparser/utils/MemoryUtils.java | 5 --- .../logparser/utils/ParseGuardRails.java | 43 +++++++++++++++++-- .../logparser/utils/ParseGuardRailsTest.java | 18 ++++++++ 5 files changed, 66 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java b/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java index 737d401..3b26eda 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java @@ -13,6 +13,7 @@ import com.adobe.campaign.tests.logparser.exceptions.LogParserPostManipulationException; import com.adobe.campaign.tests.logparser.utils.HTMLReportUtils; import com.adobe.campaign.tests.logparser.utils.LogParserFileUtils; +import com.adobe.campaign.tests.logparser.utils.ParseGuardRails; import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.commons.csv.CSVFormat; import org.apache.commons.csv.CSVPrinter; @@ -276,7 +277,7 @@ LogData groupBy(List in_parseDefinitionEntryK lr_cubeData.addEntry(lt_cubeEntry); } - + ParseGuardRails.checkMemoryLimits("Grouping data"); return lr_cubeData; } @@ -335,7 +336,7 @@ public LogData filterBy(Map in_filterKeyValues) { lr_filteredLogData.addEntry(this.get(lt_logDataKey)); } } - + ParseGuardRails.checkMemoryLimits("Filtering data"); return lr_filteredLogData; } @@ -695,6 +696,9 @@ public void enrichData(Map in_queryMap, Map key e.getValue().put(in_entryName, in_entryValue); }); }); + + ParseGuardRails.checkMemoryLimits("Enriching data"); + } /** diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java b/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java index 74c26d9..1dfbdb2 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/core/StringParseFactory.java @@ -102,11 +102,12 @@ public static > Map heapLimitations = new HashMap<>(); + protected static Map memoryLimitations = new HashMap<>(); public static void reset() { fileSizeLimitations.clear(); entryLimitations.clear(); + heapLimitations.clear(); + memoryLimitations.clear(); FILE_ENTRY_LIMIT = -1; HEAP_LIMIT = -1; MEMORY_LIMIT_PERCENTAGE = -1; @@ -86,18 +92,28 @@ public static boolean hasReachedMemoryLimit() { * @throws MemoryLimitExceededException if EXCEPTION_ON_MEMORY_LIMIT is true and * a limit is reached */ - public static boolean checkMemoryLimits() { + public static boolean checkMemoryLimits(String location) { + if (location == null) { + location = String.valueOf(System.currentTimeMillis()); + } + if (hasReachedHeapLimit()) { - String message = "Heap limit of " + HEAP_LIMIT + " MB has been reached"; + String message = "Heap limit of " + HEAP_LIMIT + " MB has been reached at " + location; log.warn(message); + heapLimitations.put(location, Runtime.getRuntime().totalMemory()); + System.gc(); + if (EXCEPTION_ON_MEMORY_LIMIT) { throw new MemoryLimitExceededException(message); } return true; } if (hasReachedMemoryLimit()) { - String message = "Memory usage limit of " + MEMORY_LIMIT_PERCENTAGE + "% has been reached"; + String message = "Memory usage limit of " + MEMORY_LIMIT_PERCENTAGE + "% has been 
reached at " + location; log.warn(message); + memoryLimitations.put(location, MemoryUtils.getUsedMemoryPercentage()); + System.gc(); + if (EXCEPTION_ON_MEMORY_LIMIT) { throw new MemoryLimitExceededException(message); } @@ -106,6 +122,10 @@ public static boolean checkMemoryLimits() { return false; } + public static boolean checkMemoryLimits() { + return checkMemoryLimits(null); + } + /** * Manages the Guard Rails for the file parsing * @@ -142,4 +162,21 @@ public static void checkFileSizeLimits(File in_fileName) { private static boolean hasReachedFileSizeLimit(long length) { return FILE_SIZE_LIMIT > -1 && (length / MEASUREMENT_SCALE) >= FILE_SIZE_LIMIT; } + + /** + * Returns a map containing all anomalies discovered during parsing + * + * @return Map containing all limitation reports + */ + public static Map> getAnomalyReport() { + Map> report = new HashMap<>(); + + report.put("heapLimitations", heapLimitations.keySet()); + report.put("memoryLimitations", memoryLimitations.keySet()); + report.put("fileSizeLimitations", fileSizeLimitations.keySet()); + report.put("entryLimitations", entryLimitations.keySet()); + + return report; + } + } \ No newline at end of file diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index 32b3f49..45afe4a 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -123,6 +123,7 @@ public void testHasReachedMemoryPercentageLimit_WhenAtLimit() { public void testCheckMemoryLimits_WhenNoLimitsSet() { assertThat("Should not reach limit when not set", ParseGuardRails.checkMemoryLimits(), is(false)); + } @Test @@ -130,6 +131,20 @@ public void testCheckMemoryLimits_WhenHeapLimitReached() { ParseGuardRails.HEAP_LIMIT = 0; // Set limit to 0 to force reaching it assertThat("Should reach limit when heap limit is reached", ParseGuardRails.checkMemoryLimits(), is(true)); + + assertThat("Should have anomaly report", ParseGuardRails.getAnomalyReport().size(), is(4)); + assertThat("Should have heap limitation", ParseGuardRails.getAnomalyReport().get("heapLimitations").size(), + is(1)); + + assertThat("Should have memory limitation", ParseGuardRails.getAnomalyReport().get("memoryLimitations").size(), + is(0)); + + assertThat("Should have file size limitation", + ParseGuardRails.getAnomalyReport().get("fileSizeLimitations").size(), + is(0)); + + assertThat("Should have entry limitation", ParseGuardRails.getAnomalyReport().get("entryLimitations").size(), + is(0)); } @Test @@ -137,6 +152,9 @@ public void testCheckMemoryLimits_WhenMemoryLimitReached() { ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.0; // Set limit to 0% to force reaching it assertThat("Should reach limit when memory limit is reached", ParseGuardRails.checkMemoryLimits(), is(true)); + + assertThat("Should have memory limitations", ParseGuardRails.memoryLimitations.size(), is(1)); + assertThat("Should no have heap limitations", ParseGuardRails.heapLimitations.isEmpty()); } @Test From 415aa247f6b4f2f6e9fbdaf7be6aae988ed37e18 Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 14:27:48 +0200 Subject: [PATCH 18/26] #203 Added guard rails --- README.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 588dbb2..0441c4f 100644 --- a/README.md +++ b/README.md @@ -542,13 +542,13 @@ As of 1.11.3 we have introduced a 
series of guard rails. These allow you to cont The following table lists all available guard rail properties and their default values: -| Property | Description | Default Value | -| ---------------------------------------- | ------------------------------------------------------ | ------------- | -| PROP_LOGPARSER_FILEENTRY_LIMIT | Maximum number of entries to parse per file | -1 (disabled) | -| PROP_LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | -1 (disabled) | -| PROP_LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | -1 (disabled) | -| PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE | Maximum percentage of memory usage before warning | -1 (disabled) | -| PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | false | +| Property | Description | Affects | Scale | Default Value | +| ---------------------------------------- | ------------------------------------------------------ | ----------------------------------------------- | ---------- | ------------- | +| PROP_LOGPARSER_FILEENTRY_LIMIT | Maximum number of entries to parse per file | File parsing | Count | -1 (disabled) | +| PROP_LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | File parsing | Megabytes | -1 (disabled) | +| PROP_LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | File parsing, FilterBy, Search, enrich, groupBy | Megabytes | -1 (disabled) | +| PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE | Maximum percentage of memory usage before warning | File parsing, FilterBy, Search, enrich, groupBy | Percentage | -1 (disabled) | +| PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | Error handling | Boolean | false | ### File Entry Limitations From dd10689f7e7be52ef9eafb1db61aeb2765ca99eb Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 14:47:59 +0200 Subject: [PATCH 19/26] Updating TOC --- README.md | 37 +++++++++++++++++++++++++------------ 1 file changed, 25 insertions(+), 12 deletions(-) diff --git a/README.md b/README.md index 0441c4f..9efb108 100644 --- a/README.md +++ b/README.md @@ -51,18 +51,25 @@ The basic method for using this library is, that you create a definition for you - [Exporting Results to an HTML File](#exporting-results-to-an-html-file) - [Exporting Results to an JSON File](#exporting-results-to-an-json-file) - [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) +- [Memory Guard Rails](#memory-guard-rails) + - [Guard Rail Properties](#guard-rail-properties) + - [File Entry Limitations](#file-entry-limitations) + - [File Size Limitations](#file-size-limitations) + - [Memory Limitations](#memory-limitations) - [Changelog](#changelog) -_ [1.11.2](#1112) -_ [1.11.0](#1110) -_ [1.0.10](#1010) -_ [1.0.8.2](#1082) -_ [1.0.8](#108) -_ [1.0.7](#107) -_ [1.0.6](#106) -_ [1.0.5](#105) -_ [1.0.4](#104) -_ [1.0.3](#103) \* [1.0.1](#101) - + - [1.11.3 (In-Progress)](#1113-in-progress) + - [1.11.2](#1112) + - [1.11.0](#1110) + - [1.0.10](#1010) + - [1.0.8.2](#1082) + - [1.0.8](#108) + - [1.0.7](#107) + - [1.0.6](#106) + - [1.0.5](#105) + - [1.0.4](#104) + - [1.0.3](#103) + - [1.0.1](#101) + ## Installation @@ -548,7 +555,7 @@ The following table lists all available guard rail properties and their default | PROP_LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | File parsing | Megabytes | -1 (disabled) | | PROP_LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | File parsing, FilterBy, Search, enrich, 
groupBy | Megabytes | -1 (disabled) | | PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE | Maximum percentage of memory usage before warning | File parsing, FilterBy, Search, enrich, groupBy | Percentage | -1 (disabled) | -| PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | Error handling | Boolean | false | +| PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | Memory Checks | Boolean | false | ### File Entry Limitations @@ -573,8 +580,14 @@ These limitations are set with the following System properties: We also have the possibility of throwing an exception iin the case of surpassing the memory rules. This is activated by setting the System property _PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT_ to true. +You can also call the memory guard rails in your own implementation by calling `ParseGuardRails.checkMemoryLimits()`. This will check both heap and memory percentage limits. + ## Changelog +### 1.11.3 (In-Progress) + +- [#203](https://github.com/adobe/log-parser/issues/203) Have added possibilities, to control, log memory consumption. + ### 1.11.2 - [#188](https://github.com/adobe/log-parser/issues/188) We solved problems with exporting when the directory hierarchy is not present. From 943db9adba68dfd76d108cb0decf1e5322d0dbf1 Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 15:58:56 +0200 Subject: [PATCH 20/26] Fixing unit test --- .../tests/logparser/utils/ParseGuardRails.java | 11 ++++++----- .../campaign/tests/logparser/core/ParseTesting.java | 7 ++----- .../tests/logparser/utils/ParseGuardRailsTest.java | 2 +- 3 files changed, 9 insertions(+), 11 deletions(-) diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java index 6728f77..2e45521 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java @@ -24,8 +24,11 @@ */ public class ParseGuardRails { private static final Logger log = LogManager.getLogger(); - public static final Map fileSizeLimitations = new HashMap<>(); - public static final Map entryLimitations = new HashMap<>(); + protected static final Map fileSizeLimitations = new HashMap<>(); + protected static final Map entryLimitations = new HashMap<>(); + protected static Map heapLimitations = new HashMap<>(); + protected static Map memoryLimitations = new HashMap<>(); + public static long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_FILEENTRY_LIMIT", "-1")); @@ -38,9 +41,7 @@ public class ParseGuardRails { protected static long FILE_SIZE_LIMIT = Long .parseLong(System.getProperty("PROP_LOGPARSER_FILESIZE_LIMIT", "-1")); protected static int MEASUREMENT_SCALE = 1024 * 1024; - protected static Map heapLimitations = new HashMap<>(); - protected static Map memoryLimitations = new HashMap<>(); - + public static void reset() { fileSizeLimitations.clear(); entryLimitations.clear(); diff --git a/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java b/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java index bf59863..9290651 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/core/ParseTesting.java @@ -1259,11 +1259,8 @@ public void 
testSkipOnLimit_Issue103() File l_file = new File(bugFile); assertThat("We should have an entry for the file size", - ParseGuardRails.entryLimitations.containsKey(l_file.getAbsolutePath())); - - assertThat("We should have an entry for the file size", - ParseGuardRails.entryLimitations.get(l_file.getAbsolutePath()), - is(equalTo(l_file.length()))); + ParseGuardRails.getAnomalyReport().get("entryLimitations") + .contains(l_file.getAbsolutePath())); } @Test(description = "Related to issue #102, where the parsing stops or no reason") diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index 45afe4a..d3ff116 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -114,7 +114,7 @@ public void testHasReachedMemoryPercentageLimit_WhenBelowLimit() { @Test public void testHasReachedMemoryPercentageLimit_WhenAtLimit() { - ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.1; // Set limit to 0% + ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.0; // Set limit to 0% assertThat("Should reach limit when at threshold", ParseGuardRails.hasReachedMemoryLimit(), is(true)); } From 82f144136c82c5bb1c589ddbb2190f0dd99b6ffa Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 16:02:51 +0200 Subject: [PATCH 21/26] Hardening unit tests --- .../campaign/tests/logparser/utils/ParseGuardRailsTest.java | 3 +++ 1 file changed, 3 insertions(+) diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index d3ff116..79fcc36 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -128,6 +128,9 @@ public void testCheckMemoryLimits_WhenNoLimitsSet() { @Test public void testCheckMemoryLimits_WhenHeapLimitReached() { + + ParseGuardRails.HEAP_SIZE_AT_START = -20; // Set heap size to -20 to force exceeding it + ParseGuardRails.HEAP_LIMIT = 0; // Set limit to 0 to force reaching it assertThat("Should reach limit when heap limit is reached", ParseGuardRails.checkMemoryLimits(), is(true)); From 9671256a65cb7520803f2c85f25bb144c228cd47 Mon Sep 17 00:00:00 2001 From: baubakg Date: Thu, 24 Apr 2025 22:17:32 +0200 Subject: [PATCH 22/26] #203 renaming properties, removing the 'PROP_' prefix --- README.md | 20 +++++++++---------- .../logparser/utils/ParseGuardRails.java | 10 +++++----- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index 9efb108..ee2baa4 100644 --- a/README.md +++ b/README.md @@ -551,21 +551,21 @@ The following table lists all available guard rail properties and their default | Property | Description | Affects | Scale | Default Value | | ---------------------------------------- | ------------------------------------------------------ | ----------------------------------------------- | ---------- | ------------- | -| PROP_LOGPARSER_FILEENTRY_LIMIT | Maximum number of entries to parse per file | File parsing | Count | -1 (disabled) | -| PROP_LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | File parsing | Megabytes | -1 (disabled) | -| PROP_LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | File parsing, FilterBy, Search, enrich, groupBy | Megabytes | -1 (disabled) | -| 
PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE | Maximum percentage of memory usage before warning | File parsing, FilterBy, Search, enrich, groupBy | Percentage | -1 (disabled) | -| PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | Memory Checks | Boolean | false | +| LOGPARSER_FILEENTRY_LIMIT | Maximum number of entries to parse per file | File parsing | Count | -1 (disabled) | +| LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | File parsing | Megabytes | -1 (disabled) | +| LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | File parsing, FilterBy, Search, enrich, groupBy | Megabytes | -1 (disabled) | +| LOGPARSER_MEMORY_LIMIT_PERCENTAGE | Maximum percentage of memory usage before warning | File parsing, FilterBy, Search, enrich, groupBy | Percentage | -1 (disabled) | +| LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT | Whether to throw exception when memory limits exceeded | Memory Checks | Boolean | false | ### File Entry Limitations -For whatever reason, you may want to set a limit on the number of entries you can extract from a file. This cab be done by setting the system property _PROP_LOGPARSER_FILEENTRY_LIMIT_ . +For whatever reason, you may want to set a limit on the number of entries you can extract from a file. This cab be done by setting the system property _LOGPARSER_FILEENTRY_LIMIT_ . When set, the log parser stops parsing after reaching the limit in a file, and moves to the next file. Whenever this happens we log a WARNING and add the skipped file to our internal list of issues. ### File Size Limitations -For whatever reason, you may want to set a limit on the number of entries you can extract from a file. This cab be done by setting the system property _PROP_LOGPARSER_FILESIZE_LIMIT_ . +For whatever reason, you may want to set a limit on the number of entries you can extract from a file. This cab be done by setting the system property _LOGPARSER_FILESIZE_LIMIT_ . When set, the we create a warning regarding the file size, and store it among the file size issues. @@ -575,10 +575,10 @@ Although we will not stop a process from executing due to memory issues, we prov These limitations are set with the following System properties: -- _PROP_LOGPARSER_HEAP_LIMIT_ : Setting a limit above which we log these occurences. -- _PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE_ : A percentage of the occupied memory in reference to the max memory. +- _LOGPARSER_HEAP_LIMIT_ : Setting a limit above which we log these occurences. +- _LOGPARSER_MEMORY_LIMIT_PERCENTAGE_ : A percentage of the occupied memory in reference to the max memory. -We also have the possibility of throwing an exception iin the case of surpassing the memory rules. This is activated by setting the System property _PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT_ to true. +We also have the possibility of throwing an exception iin the case of surpassing the memory rules. This is activated by setting the System property _LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT_ to true. You can also call the memory guard rails in your own implementation by calling `ParseGuardRails.checkMemoryLimits()`. This will check both heap and memory percentage limits. 
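
A minimal sketch of how these guard rails might be used from calling code, assuming the `ParseGuardRails` API introduced in this patch series; the class name, the placeholder `runMyParsingJob()` method and the example `-D` values are illustrative only and not part of the library.

```java
import com.adobe.campaign.tests.logparser.exceptions.MemoryLimitExceededException;
import com.adobe.campaign.tests.logparser.utils.ParseGuardRails;

public class GuardRailUsageSketch {

    // The limits are read from system properties when ParseGuardRails is loaded,
    // so they are typically supplied at JVM launch, e.g.:
    //   java -DLOGPARSER_FILEENTRY_LIMIT=10000 \
    //        -DLOGPARSER_MEMORY_LIMIT_PERCENTAGE=80 \
    //        -DLOGPARSER_EXCEPTION_ON_MEMORY_LIMIT=true \
    //        -jar my-parsing-job.jar
    public static void main(String[] args) {
        try {
            runMyParsingJob(); // placeholder for your own LogDataFactory / LogData work

            // The same check the parser performs internally; it returns true when a
            // heap or memory-percentage threshold has been crossed and records the
            // given location in the anomaly report.
            if (ParseGuardRails.checkMemoryLimits("after my parsing job")) {
                ParseGuardRails.getAnomalyReport()
                        .forEach((category, locations) -> System.out.println(category + " -> " + locations));
            }
        } catch (MemoryLimitExceededException e) {
            // Only thrown when LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT is set to true.
            System.err.println("Aborting parse: " + e.getMessage());
        }
    }

    private static void runMyParsingJob() {
        // Placeholder: parse, filter, enrich or group your log data here.
    }
}
```

Note that the unit tests in this patch series bypass the system properties and set the public static fields (for example `ParseGuardRails.MEMORY_LIMIT_PERCENTAGE`) directly, which can be a convenient alternative in test code.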
diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java index 2b8fd82..3e76dc9 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java @@ -31,15 +31,15 @@ public class ParseGuardRails { public static long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); - public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_FILEENTRY_LIMIT", "-1")); - public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("PROP_LOGPARSER_HEAP_LIMIT", "-1")); + public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("LOGPARSER_FILEENTRY_LIMIT", "-1")); + public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("LOGPARSER_HEAP_LIMIT", "-1")); public static double MEMORY_LIMIT_PERCENTAGE = Double - .parseDouble(System.getProperty("PROP_LOGPARSER_MEMORY_LIMIT_PERCENTAGE", "-1")); + .parseDouble(System.getProperty("LOGPARSER_MEMORY_LIMIT_PERCENTAGE", "-1")); protected static boolean EXCEPTION_ON_MEMORY_LIMIT = Boolean - .parseBoolean(System.getProperty("PROP_LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT", "false")); + .parseBoolean(System.getProperty("LOGPARSER_EXCEPTION_ON_MEMORY_LIMIT", "false")); protected static long FILE_SIZE_LIMIT = Long - .parseLong(System.getProperty("PROP_LOGPARSER_FILESIZE_LIMIT", "-1")); + .parseLong(System.getProperty("LOGPARSER_FILESIZE_LIMIT", "-1")); protected static int MEASUREMENT_SCALE = 1024 * 1024; public static void reset() { From 676edf40b189206ee61221beae25dbff4f3d9b13 Mon Sep 17 00:00:00 2001 From: baubakg Date: Mon, 28 Apr 2025 14:56:15 +0200 Subject: [PATCH 23/26] Adding a report --- README.md | 120 +++++++++--------- .../tests/logparser/core/LogData.java | 11 +- .../logparser/utils/ParseGuardRails.java | 47 ++++++- .../logparser/utils/ParseGuardRailsTest.java | 79 ++++++++++-- 4 files changed, 181 insertions(+), 76 deletions(-) diff --git a/README.md b/README.md index ee2baa4..46b4315 100644 --- a/README.md +++ b/README.md @@ -14,62 +14,62 @@ The basic method for using this library is, that you create a definition for you ## Table of contents - -- [Installation](#installation) - - [Maven](#maven) -- [Running the Log Parser](#running-the-log-parser) -- [Parse Definitions](#parse-definitions) - - [Defining a Parsing](#defining-a-parsing) - - [Defining an Entry](#defining-an-entry) - - [How parsing works](#how-parsing-works) - - [Anonymizing Data](#anonymizing-data) - - [Code Example](#code-example) - - [Import and Export of Parse Definitions](#import-and-export-of-parse-definitions) - - [Importing a JSON File](#importing-a-json-file) -- [Extracting Data from Logs](#extracting-data-from-logs) - - [Using the Standard Method](#using-the-standard-method) - - [Using the Log-Parser as an SDK](#using-the-log-parser-as-an-sdk) - - [Writing your own SDK](#writing-your-own-sdk) - - [Declaring a Default and Copy Constructor](#declaring-a-default-and-copy-constructor) - - [Declaring the transformation Rules in setValuesFromMap](#declaring-the-transformation-rules-in-setvaluesfrommap) - - [Declaring the Key](#declaring-the-key) - - [Declare the HeaderMap, and ValueMap](#declare-the-headermap-and-valuemap) - - [Assisting Exports](#assisting-exports) -- [Code Structure](#code-structure) -- [Searching and organizing log data](#searching-and-organizing-log-data) - - [Search and Filter 
Mechanisms](#search-and-filter-mechanisms) - - [Defining a Search Term](#defining-a-search-term) - - [Enriching Log Data](#enriching-log-data) - - [GroupBy Mechanisms](#groupby-mechanisms) - - [Passing a list](#passing-a-list) - - [Chaining GroupBy](#chaining-groupby) - - [Comparing Log Data](#comparing-log-data) - - [Creating a Differentiation Report](#creating-a-differentiation-report) -- [Assertions and LogDataAssertions](#assertions-and-logdataassertions) -- [Exporting Parse Results](#exporting-parse-results) - - [Exporting Results to a CSV File](#exporting-results-to-a-csv-file) - - [Exporting Results to an HTML File](#exporting-results-to-an-html-file) - - [Exporting Results to an JSON File](#exporting-results-to-an-json-file) -- [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) -- [Memory Guard Rails](#memory-guard-rails) - - [Guard Rail Properties](#guard-rail-properties) - - [File Entry Limitations](#file-entry-limitations) - - [File Size Limitations](#file-size-limitations) - - [Memory Limitations](#memory-limitations) -- [Changelog](#changelog) - - [1.11.3 (In-Progress)](#1113-in-progress) - - [1.11.2](#1112) - - [1.11.0](#1110) - - [1.0.10](#1010) - - [1.0.8.2](#1082) - - [1.0.8](#108) - - [1.0.7](#107) - - [1.0.6](#106) - - [1.0.5](#105) - - [1.0.4](#104) - - [1.0.3](#103) - - [1.0.1](#101) - + * [Installation](#installation) + * [Maven](#maven) + * [Running the Log Parser](#running-the-log-parser) + * [Parse Definitions](#parse-definitions) + * [Defining a Parsing](#defining-a-parsing) + * [Defining an Entry](#defining-an-entry) + * [How parsing works](#how-parsing-works) + * [Anonymizing Data](#anonymizing-data) + * [Code Example](#code-example) + * [Import and Export of Parse Definitions](#import-and-export-of-parse-definitions) + * [Importing a JSON File](#importing-a-json-file) + * [Extracting Data from Logs](#extracting-data-from-logs) + * [Using the Standard Method](#using-the-standard-method) + * [Using the Log-Parser as an SDK](#using-the-log-parser-as-an-sdk) + * [Writing your own SDK](#writing-your-own-sdk) + * [Declaring a Default and Copy Constructor](#declaring-a-default-and-copy-constructor) + * [Declaring the transformation Rules in setValuesFromMap](#declaring-the-transformation-rules-in-setvaluesfrommap) + * [Declaring the Key](#declaring-the-key) + * [Declare the HeaderMap, and ValueMap](#declare-the-headermap-and-valuemap) + * [Assisting Exports](#assisting-exports) + * [Code Structure](#code-structure) + * [Searching and organizing log data](#searching-and-organizing-log-data) + * [Search and Filter Mechanisms](#search-and-filter-mechanisms) + * [Defining a Search Term](#defining-a-search-term) + * [Enriching Log Data](#enriching-log-data) + * [GroupBy Mechanisms](#groupby-mechanisms) + * [Passing a list](#passing-a-list) + * [Chaining GroupBy](#chaining-groupby) + * [Comparing Log Data](#comparing-log-data) + * [Creating a Differentiation Report](#creating-a-differentiation-report) + * [Assertions and LogDataAssertions](#assertions-and-logdataassertions) + * [Exporting Parse Results](#exporting-parse-results) + * [Exporting Results to a CSV File](#exporting-results-to-a-csv-file) + * [Exporting Results to an HTML File](#exporting-results-to-an-html-file) + * [Exporting Results to an JSON File](#exporting-results-to-an-json-file) + * [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) + * [Memory Guard Rails](#memory-guard-rails) + * [Guard Rail Properties](#guard-rail-properties) + * 
[File Entry Limitations](#file-entry-limitations) + * [File Size Limitations](#file-size-limitations) + * [Memory Limitations](#memory-limitations) + * [Exporting Anomalies Report](#exporting-anomalies-report) + * [Changelog](#changelog) + * [1.11.3 (In-Progress)](#1113--in-progress-) + * [1.11.2](#1112) + * [1.11.0](#1110) + * [1.0.10](#1010) + * [1.0.8.2](#1082) + * [1.0.8](#108) + * [1.0.7](#107) + * [1.0.6](#106) + * [1.0.5](#105) + * [1.0.4](#104) + * [1.0.3](#103) + * [1.0.1](#101) + ## Installation @@ -549,8 +549,8 @@ As of 1.11.3 we have introduced a series of guard rails. These allow you to cont The following table lists all available guard rail properties and their default values: -| Property | Description | Affects | Scale | Default Value | -| ---------------------------------------- | ------------------------------------------------------ | ----------------------------------------------- | ---------- | ------------- | +| Property | Description | Affects | Scale | Default Value | +| ----------------------------------- | ------------------------------------------------------ | ----------------------------------------------- | ---------- | ------------- | | LOGPARSER_FILEENTRY_LIMIT | Maximum number of entries to parse per file | File parsing | Count | -1 (disabled) | | LOGPARSER_FILESIZE_LIMIT | Maximum file size in MB to parse | File parsing | Megabytes | -1 (disabled) | | LOGPARSER_HEAP_LIMIT | Maximum heap size increase in MB before warning | File parsing, FilterBy, Search, enrich, groupBy | Megabytes | -1 (disabled) | @@ -582,6 +582,10 @@ We also have the possibility of throwing an exception iin the case of surpassing You can also call the memory guard rails in your own implementation by calling `ParseGuardRails.checkMemoryLimits()`. This will check both heap and memory percentage limits. +### Exporting Anomalies Report + +We have the possibility of exporting the anomalies report. This is done by calling the method `LogData#exportAnomaliesReport(String fileName)`. If you do not give an argument `LogData#exportAnomaliesReport()` will export the anomalies to a file called anomalies.json. + ## Changelog ### 1.11.3 (In-Progress) diff --git a/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java b/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java index 3b26eda..0a4d936 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/core/LogData.java @@ -37,6 +37,7 @@ */ public class LogData { + public static final String STD_LOG_ERROR_ON_EMPTY_LOG_DATA = "No Log data to export. Please load the log data before re-attempting"; protected static Logger log = LogManager.getLogger(); /** @@ -418,7 +419,7 @@ public File exportLogDataToCSV() throws LogDataExportToFileException { .fetchEscapedTitle() + "-export.csv"); } else { - log.warn("No Log data to export. Please load the log data before re-attempting"); + log.warn(STD_LOG_ERROR_ON_EMPTY_LOG_DATA); return null; } @@ -437,7 +438,7 @@ public File exportLogDataToCSV(String in_fileName) { if (l_firstEntry != null) { return exportLogDataToCSV(l_firstEntry.fetchHeaders(), in_fileName); } else { - log.warn("No Log data to export. Please load the log data before re-attempting"); + log.warn(STD_LOG_ERROR_ON_EMPTY_LOG_DATA); return null; } } @@ -484,7 +485,7 @@ public File exportLogDataToHTML(String in_reportTitle, String in_htmlFileName) { T l_firstEntry = this.fetchFirst(); if (l_firstEntry == null) { - log.error("No Log data to export. 
Please load the log data before re-attempting"); + log.error(STD_LOG_ERROR_ON_EMPTY_LOG_DATA); return null; } return exportLogDataToHTML(l_firstEntry.fetchHeaders(), in_reportTitle, @@ -548,7 +549,7 @@ public File exportLogDataToJSON() throws LogDataExportToFileException { return exportLogDataToJSON(l_firstEntry.fetchHeaders(), l_firstEntry.getParseDefinition().fetchEscapedTitle() + "-export.json"); } else { - log.warn("No Log data to export. Please load the log data before re-attempting"); + log.warn(STD_LOG_ERROR_ON_EMPTY_LOG_DATA); return null; } } @@ -565,7 +566,7 @@ public File exportLogDataToJSON(String in_jsonFileName) throws LogDataExportToFi if (l_firstEntry != null) { return exportLogDataToJSON(l_firstEntry.fetchHeaders(), in_jsonFileName); } else { - log.warn("No Log data to export. Please load the log data before re-attempting"); + log.warn(STD_LOG_ERROR_ON_EMPTY_LOG_DATA); return null; } } diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java index 3e76dc9..7cc19a7 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java @@ -9,11 +9,13 @@ package com.adobe.campaign.tests.logparser.utils; import java.io.File; +import java.io.IOException; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.Set; +import com.fasterxml.jackson.databind.ObjectMapper; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; @@ -29,7 +31,6 @@ public class ParseGuardRails { protected static Map heapLimitations = new HashMap<>(); protected static Map memoryLimitations = new HashMap<>(); - public static long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("LOGPARSER_FILEENTRY_LIMIT", "-1")); public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("LOGPARSER_HEAP_LIMIT", "-1")); @@ -172,12 +173,48 @@ private static boolean hasReachedFileSizeLimit(long length) { public static Map> getAnomalyReport() { Map> report = new HashMap<>(); - report.put("heapLimitations", heapLimitations.keySet()); - report.put("memoryLimitations", memoryLimitations.keySet()); - report.put("fileSizeLimitations", fileSizeLimitations.keySet()); - report.put("entryLimitations", entryLimitations.keySet()); + if (heapLimitations.size() > 0) { + report.put("heapLimitations", heapLimitations.keySet()); + } + + if (memoryLimitations.size() > 0) { + report.put("memoryLimitations", memoryLimitations.keySet()); + } + + if (fileSizeLimitations.size() > 0) { + report.put("fileSizeLimitations", fileSizeLimitations.keySet()); + } + + if (entryLimitations.size() > 0) { + report.put("entryLimitations", entryLimitations.keySet()); + } return report; } + /** + * Exports the anomaly report to a JSON file + * The file will be created if it doesn't exist, or replaced if it does + * Only exports if there are anomalies to report + */ + public static void exportAnomalyReport() { + exportAnomalyReport("./anomalies.json"); + } + + /** + * Exports the anomaly report to a JSON file at the specified path + * + * @param filePath The path where to save the anomaly report + */ + public static void exportAnomalyReport(String filePath) { + Map> report = getAnomalyReport(); + if (!report.isEmpty()) { + try { + ObjectMapper mapper = new ObjectMapper(); + mapper.writeValue(new File(filePath), 
report); + } catch (IOException e) { + log.error("Failed to export anomaly report to {}", filePath, e); + } + } + } } \ No newline at end of file diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index 79fcc36..3166c52 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -12,31 +12,43 @@ import static org.hamcrest.Matchers.is; import static org.testng.Assert.assertThrows; +import java.io.File; import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.util.List; +import java.util.Map; +import java.util.Set; +import org.hamcrest.Matchers; import org.testng.annotations.AfterMethod; import org.testng.annotations.BeforeMethod; import org.testng.annotations.Test; import com.adobe.campaign.tests.logparser.exceptions.MemoryLimitExceededException; +import com.fasterxml.jackson.databind.ObjectMapper; public class ParseGuardRailsTest { private Path tempFile; + private static final String ANOMALY_REPORT_PATH = "anomalies.json"; @BeforeMethod public void setup() throws IOException { ParseGuardRails.reset(); - + System.clearProperty("EXCEPTION_ON_MEMORY_LIMIT"); tempFile = Files.createTempFile("test", ".log"); + // Clean up any existing anomaly report + new File(ANOMALY_REPORT_PATH).delete(); } @AfterMethod public void cleanup() throws IOException { ParseGuardRails.reset(); + System.clearProperty("EXCEPTION_ON_MEMORY_LIMIT"); Files.deleteIfExists(tempFile); + // Clean up the anomaly report + new File(ANOMALY_REPORT_PATH).delete(); } @Test @@ -135,19 +147,18 @@ public void testCheckMemoryLimits_WhenHeapLimitReached() { assertThat("Should reach limit when heap limit is reached", ParseGuardRails.checkMemoryLimits(), is(true)); - assertThat("Should have anomaly report", ParseGuardRails.getAnomalyReport().size(), is(4)); + assertThat("Should have anomaly report", ParseGuardRails.getAnomalyReport().size(), is(1)); assertThat("Should have heap limitation", ParseGuardRails.getAnomalyReport().get("heapLimitations").size(), is(1)); - assertThat("Should have memory limitation", ParseGuardRails.getAnomalyReport().get("memoryLimitations").size(), - is(0)); + assertThat("Should not have a memory limitation", + !ParseGuardRails.getAnomalyReport().containsKey("memoryLimitations")); assertThat("Should have file size limitation", - ParseGuardRails.getAnomalyReport().get("fileSizeLimitations").size(), - is(0)); + !ParseGuardRails.getAnomalyReport().containsKey("fileSizeLimitations")); - assertThat("Should have entry limitation", ParseGuardRails.getAnomalyReport().get("entryLimitations").size(), - is(0)); + assertThat("Should have entry limitation", + !ParseGuardRails.getAnomalyReport().containsKey("entryLimitations")); } @Test @@ -179,4 +190,56 @@ public void testCheckMemoryLimits_WhenMemoryLimitReachedWithException() { ParseGuardRails.MEMORY_LIMIT_PERCENTAGE = 0.0; // Set limit to 0% to force reaching it assertThrows(MemoryLimitExceededException.class, () -> ParseGuardRails.checkMemoryLimits()); } + + @Test + public void testExportAnomalyReport_WhenNoAnomalies() { + ParseGuardRails.exportAnomalyReport(); + assertThat("Should not create file when no anomalies", + (new File(ANOMALY_REPORT_PATH)).exists(), is(false)); + } + + @Test + public void testExportAnomalyReport_WhenHasAnomalies() throws IOException { + // Create some anomalies + 
ParseGuardRails.HEAP_LIMIT = 1; + ParseGuardRails.HEAP_SIZE_AT_START = -20; + ParseGuardRails.checkMemoryLimits(); + + ParseGuardRails.exportAnomalyReport(); + + // Verify file exists + File reportFile = new File(ANOMALY_REPORT_PATH); + assertThat("Should create file when anomalies exist", + reportFile.exists(), is(true)); + + // Verify content + ObjectMapper mapper = new ObjectMapper(); + Map> report = mapper.readValue(reportFile, Map.class); + assertThat("Should have heap limitations", + report.containsKey("heapLimitations")); + assertThat("Should have at least one heap limitation", + report.get("heapLimitations").size(), Matchers.greaterThan(0)); + } + + @Test + public void testExportAnomalyReport_WhenFileExists() throws IOException { + // Create initial file + ObjectMapper mapper = new ObjectMapper(); + Map> initialData = Map.of("test", Set.of("data")); + mapper.writeValue(new File(ANOMALY_REPORT_PATH), initialData); + + // Create some anomalies + ParseGuardRails.HEAP_LIMIT = 1; + ParseGuardRails.HEAP_SIZE_AT_START = -20; + ParseGuardRails.checkMemoryLimits(); + + ParseGuardRails.exportAnomalyReport(); + + // Verify file was replaced + Map> report = mapper.readValue(new File(ANOMALY_REPORT_PATH), Map.class); + assertThat("Should have replaced old content", + report.containsKey("heapLimitations"), is(true)); + assertThat("Should not have old content", + report.containsKey("test"), is(false)); + } } \ No newline at end of file From 62a4845a44e3509b64b6a0d767dd205ea45ad30c Mon Sep 17 00:00:00 2001 From: baubakg Date: Mon, 28 Apr 2025 15:35:08 +0200 Subject: [PATCH 24/26] Simplifying code --- README.md | 111 +++++++++--------- .../logparser/utils/ParseGuardRails.java | 24 ++-- 2 files changed, 65 insertions(+), 70 deletions(-) diff --git a/README.md b/README.md index 46b4315..d0c2a75 100644 --- a/README.md +++ b/README.md @@ -14,61 +14,62 @@ The basic method for using this library is, that you create a definition for you ## Table of contents - * [Installation](#installation) - * [Maven](#maven) - * [Running the Log Parser](#running-the-log-parser) - * [Parse Definitions](#parse-definitions) - * [Defining a Parsing](#defining-a-parsing) - * [Defining an Entry](#defining-an-entry) - * [How parsing works](#how-parsing-works) - * [Anonymizing Data](#anonymizing-data) - * [Code Example](#code-example) - * [Import and Export of Parse Definitions](#import-and-export-of-parse-definitions) - * [Importing a JSON File](#importing-a-json-file) - * [Extracting Data from Logs](#extracting-data-from-logs) - * [Using the Standard Method](#using-the-standard-method) - * [Using the Log-Parser as an SDK](#using-the-log-parser-as-an-sdk) - * [Writing your own SDK](#writing-your-own-sdk) - * [Declaring a Default and Copy Constructor](#declaring-a-default-and-copy-constructor) - * [Declaring the transformation Rules in setValuesFromMap](#declaring-the-transformation-rules-in-setvaluesfrommap) - * [Declaring the Key](#declaring-the-key) - * [Declare the HeaderMap, and ValueMap](#declare-the-headermap-and-valuemap) - * [Assisting Exports](#assisting-exports) - * [Code Structure](#code-structure) - * [Searching and organizing log data](#searching-and-organizing-log-data) - * [Search and Filter Mechanisms](#search-and-filter-mechanisms) - * [Defining a Search Term](#defining-a-search-term) - * [Enriching Log Data](#enriching-log-data) - * [GroupBy Mechanisms](#groupby-mechanisms) - * [Passing a list](#passing-a-list) - * [Chaining GroupBy](#chaining-groupby) - * [Comparing Log Data](#comparing-log-data) - * 
[Creating a Differentiation Report](#creating-a-differentiation-report) - * [Assertions and LogDataAssertions](#assertions-and-logdataassertions) - * [Exporting Parse Results](#exporting-parse-results) - * [Exporting Results to a CSV File](#exporting-results-to-a-csv-file) - * [Exporting Results to an HTML File](#exporting-results-to-an-html-file) - * [Exporting Results to an JSON File](#exporting-results-to-an-json-file) - * [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) - * [Memory Guard Rails](#memory-guard-rails) - * [Guard Rail Properties](#guard-rail-properties) - * [File Entry Limitations](#file-entry-limitations) - * [File Size Limitations](#file-size-limitations) - * [Memory Limitations](#memory-limitations) - * [Exporting Anomalies Report](#exporting-anomalies-report) - * [Changelog](#changelog) - * [1.11.3 (In-Progress)](#1113--in-progress-) - * [1.11.2](#1112) - * [1.11.0](#1110) - * [1.0.10](#1010) - * [1.0.8.2](#1082) - * [1.0.8](#108) - * [1.0.7](#107) - * [1.0.6](#106) - * [1.0.5](#105) - * [1.0.4](#104) - * [1.0.3](#103) - * [1.0.1](#101) + +- [Installation](#installation) + - [Maven](#maven) +- [Running the Log Parser](#running-the-log-parser) +- [Parse Definitions](#parse-definitions) + - [Defining a Parsing](#defining-a-parsing) + - [Defining an Entry](#defining-an-entry) + - [How parsing works](#how-parsing-works) + - [Anonymizing Data](#anonymizing-data) + - [Code Example](#code-example) + - [Import and Export of Parse Definitions](#import-and-export-of-parse-definitions) + - [Importing a JSON File](#importing-a-json-file) +- [Extracting Data from Logs](#extracting-data-from-logs) + - [Using the Standard Method](#using-the-standard-method) + - [Using the Log-Parser as an SDK](#using-the-log-parser-as-an-sdk) + - [Writing your own SDK](#writing-your-own-sdk) + - [Declaring a Default and Copy Constructor](#declaring-a-default-and-copy-constructor) + - [Declaring the transformation Rules in setValuesFromMap](#declaring-the-transformation-rules-in-setvaluesfrommap) + - [Declaring the Key](#declaring-the-key) + - [Declare the HeaderMap, and ValueMap](#declare-the-headermap-and-valuemap) + - [Assisting Exports](#assisting-exports) +- [Code Structure](#code-structure) +- [Searching and organizing log data](#searching-and-organizing-log-data) + - [Search and Filter Mechanisms](#search-and-filter-mechanisms) + - [Defining a Search Term](#defining-a-search-term) + - [Enriching Log Data](#enriching-log-data) + - [GroupBy Mechanisms](#groupby-mechanisms) + - [Passing a list](#passing-a-list) + - [Chaining GroupBy](#chaining-groupby) + - [Comparing Log Data](#comparing-log-data) + - [Creating a Differentiation Report](#creating-a-differentiation-report) +- [Assertions and LogDataAssertions](#assertions-and-logdataassertions) +- [Exporting Parse Results](#exporting-parse-results) + - [Exporting Results to a CSV File](#exporting-results-to-a-csv-file) + - [Exporting Results to an HTML File](#exporting-results-to-an-html-file) + - [Exporting Results to an JSON File](#exporting-results-to-an-json-file) +- [Command-line Execution of the Log-Parser](#command-line-execution-of-the-log-parser) +- [Memory Guard Rails](#memory-guard-rails) + - [Guard Rail Properties](#guard-rail-properties) + - [File Entry Limitations](#file-entry-limitations) + - [File Size Limitations](#file-size-limitations) + - [Memory Limitations](#memory-limitations) + - [Exporting Anomalies Report](#exporting-anomalies-report) +- [Changelog](#changelog) +_ [1.11.3 
(In-Progress)](#1113--in-progress-) +_ [1.11.2](#1112) +_ [1.11.0](#1110) +_ [1.0.10](#1010) +_ [1.0.8.2](#1082) +_ [1.0.8](#108) +_ [1.0.7](#107) +_ [1.0.6](#106) +_ [1.0.5](#105) +_ [1.0.4](#104) +_ [1.0.3](#103) +_ [1.0.1](#101) ## Installation diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java index 7cc19a7..c5fd84f 100644 --- a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java @@ -173,21 +173,15 @@ private static boolean hasReachedFileSizeLimit(long length) { public static Map> getAnomalyReport() { Map> report = new HashMap<>(); - if (heapLimitations.size() > 0) { - report.put("heapLimitations", heapLimitations.keySet()); - } - - if (memoryLimitations.size() > 0) { - report.put("memoryLimitations", memoryLimitations.keySet()); - } - - if (fileSizeLimitations.size() > 0) { - report.put("fileSizeLimitations", fileSizeLimitations.keySet()); - } - - if (entryLimitations.size() > 0) { - report.put("entryLimitations", entryLimitations.keySet()); - } + Map.of( + "heapLimitations", heapLimitations, + "memoryLimitations", memoryLimitations, + "fileSizeLimitations", fileSizeLimitations, + "entryLimitations", entryLimitations).forEach((key, map) -> { + if (!map.isEmpty()) { + report.put(key, map.keySet()); + } + }); return report; } From 7de2a26c111d77d3694164f69708fb1449a2bd4c Mon Sep 17 00:00:00 2001 From: baubakg Date: Mon, 28 Apr 2025 15:43:00 +0200 Subject: [PATCH 25/26] Adding ioException test --- .../logparser/utils/ParseGuardRailsTest.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index 3166c52..c1dbb1d 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -242,4 +242,24 @@ public void testExportAnomalyReport_WhenFileExists() throws IOException { assertThat("Should not have old content", report.containsKey("test"), is(false)); } + + @Test + public void testExportAnomalyReport_WhenIOExceptionOccurs() throws IOException { + // Create some anomalies + ParseGuardRails.HEAP_LIMIT = 1; + ParseGuardRails.HEAP_SIZE_AT_START = -20; + ParseGuardRails.checkMemoryLimits(); + + // Create a file that will cause an IOException when trying to write + File reportFile = new File(ANOMALY_REPORT_PATH); + reportFile.createNewFile(); + reportFile.setReadOnly(); + + // This should log an error but not throw an exception + ParseGuardRails.exportAnomalyReport(); + + // Clean up + reportFile.setWritable(true); + reportFile.delete(); + } } \ No newline at end of file From b0be04ac873b2686c974c5dc2d364853b6cddaa8 Mon Sep 17 00:00:00 2001 From: baubakg Date: Mon, 28 Apr 2025 17:01:47 +0200 Subject: [PATCH 26/26] Renamed the error file and generalized it as a constant --- .../adobe/campaign/tests/logparser/utils/ParseGuardRails.java | 3 ++- .../campaign/tests/logparser/utils/ParseGuardRailsTest.java | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java index c5fd84f..952be20 100644 --- 
a/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java +++ b/src/main/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRails.java @@ -31,6 +31,7 @@ public class ParseGuardRails { protected static Map heapLimitations = new HashMap<>(); protected static Map memoryLimitations = new HashMap<>(); + public static final String ANOMALY_REPORT_PATH = "./logParserAnomalies.json"; public static long HEAP_SIZE_AT_START = MemoryUtils.getCurrentHeapSizeMB(); public static int FILE_ENTRY_LIMIT = Integer.parseInt(System.getProperty("LOGPARSER_FILEENTRY_LIMIT", "-1")); public static long HEAP_LIMIT = Integer.parseInt(System.getProperty("LOGPARSER_HEAP_LIMIT", "-1")); @@ -192,7 +193,7 @@ public static Map> getAnomalyReport() { * Only exports if there are anomalies to report */ public static void exportAnomalyReport() { - exportAnomalyReport("./anomalies.json"); + exportAnomalyReport(ANOMALY_REPORT_PATH); } /** diff --git a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java index c1dbb1d..4bf47c8 100644 --- a/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java +++ b/src/test/java/com/adobe/campaign/tests/logparser/utils/ParseGuardRailsTest.java @@ -31,7 +31,7 @@ public class ParseGuardRailsTest { private Path tempFile; - private static final String ANOMALY_REPORT_PATH = "anomalies.json"; + private static final String ANOMALY_REPORT_PATH = ParseGuardRails.ANOMALY_REPORT_PATH; @BeforeMethod public void setup() throws IOException {