in_logMap) {
}
/**
- * This method adds an entry to the log data. If the entry already exists we just increment the frequence
+ * This method adds an entry to the log data. If the entry already exists we
+ * just increment the frequency
*
* Author : gandomi
*
@@ -96,7 +98,8 @@ public void addEntry(T lt_cubeEntry) {
}
/**
- * This method allows you to access an entry in the log data. For this you need the key of the Data
+ * This method allows you to access an entry in the log data. For this you need
+ * the key of the Data
*
* Author : gandomi
*
@@ -108,7 +111,8 @@ public T get(String in_dataEntryKey) {
}
/**
- * This method allows you to access a value within the cube map. For this you need the key of the Data and the title
+ * This method allows you to access a value within the cube map. For this you
+ * need the key of the Data and the title
* of the value
*
* Author : gandomi
@@ -116,7 +120,8 @@ public T get(String in_dataEntryKey) {
* @param in_dataEntryKey The key with which the data has been stored
* @param in_valueKey The identity of the value.
* @return The key value for the given entry. null if not found
- * @throws IncorrectParseDefinitionException If the given valueKey was not found in the definition
+ * @throws IncorrectParseDefinitionException If the given valueKey was not found
+ * in the definition
*/
public Object get(String in_dataEntryKey, String in_valueKey) throws IncorrectParseDefinitionException {
@@ -136,7 +141,8 @@ public Object get(String in_dataEntryKey, String in_valueKey) throws IncorrectPa
}
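// Illustrative usage sketch for the two get() overloads: it assumes a LogData<GenericEntry>
// named l_logData that was already populated by the parser, that "20200101-0001" and "verb"
// are a hypothetical entry key and value title, and that the surrounding method declares
// throws IncorrectParseDefinitionException.
GenericEntry l_entry = l_logData.get("20200101-0001");
Object l_verb = l_logData.get("20200101-0001", "verb");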
/**
- * This method allows you to change a specific value in the log data. For this, you need the key and the parse
+ * This method allows you to change a specific value in the log data. For this,
+ * you need the key and the parse
* definition title to find the value
*
* Author : gandomi
@@ -144,7 +150,9 @@ public Object get(String in_dataEntryKey, String in_valueKey) throws IncorrectPa
* @param in_dataEntryKey The key with which the data has been stored
* @param in_valueKey The identity of the value.
* @param in_newValue The new value of the entry value
- * @throws IncorrectParseDefinitionException When there is no entry for the given in_dataEntryKey and in_valueKey
+ * @throws IncorrectParseDefinitionException When there is no entry for the
+ * given in_dataEntryKey and
+ * in_valueKey
*/
public void put(String in_dataEntryKey, String in_valueKey, Object in_newValue)
throws IncorrectParseDefinitionException {
@@ -188,18 +196,23 @@ public boolean equals(Object obj) {
}
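// Illustrative sketch for put(), with the same hypothetical l_logData and entry key as above:
// it overwrites the value stored under the title "verb" and throws
// IncorrectParseDefinitionException if that title is not part of the parse definition.
l_logData.put("20200101-0001", "verb", "POST");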
/**
- * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the
+ * Here we create a new LogDataObject with the given ParseDefinitionEntry. This
+ * method performs a groupby for the
* given value. The frequence will also take into account the original frequence
*
* Author : gandomi
*
- * @param in_parseDefinitionEntryKey The key name of the parse definition perform the GroupBy on
- * @param in_transformationClass The class to which we should transform the cube data
+ * @param in_parseDefinitionEntryKey The key name of the parse definition
+ * to perform the GroupBy on
+ * @param in_transformationClass The class to which we should transform the
+ * cube data
* @param <U> The return type of the group by cube.
* @return a new LogData Object containing the groupBy values
- * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry
+ * @throws IncorrectParseDefinitionException If the key is not in the
+ * ParseDefinitions of the Log data
+ * entry
*/
- <U extends StdLogEntry> LogData<U> groupBy(String in_parseDefinitionEntryKey,
+ <U extends StdLogEntry> LogData<U> groupBy(String in_parseDefinitionEntryKey,
Class<U> in_transformationClass)
throws IncorrectParseDefinitionException {
@@ -207,23 +220,28 @@ LogData groupBy(String in_parseDefinitionEntryKey,
}
/**
- * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the
+ * Here we create a new LogDataObject with the given ParseDefinitionEntry. This
+ * method performs a groupby for the
* given value. The frequence will also take into account the original frequence
*
* Author : gandomi
*
- * @param in_parseDefinitionEntryKeyList The list of key names of the parse definition perform the GroupBy on
- * @param in_transformationClass The class to which we should transform the cube data
+ * @param in_parseDefinitionEntryKeyList The list of key names of the parse
+ * definition to perform the GroupBy on
+ * @param in_transformationClass The class to which we should transform
+ * the cube data
* @param The return type of the group by cube.
* @return a new LogData Object containing the groupBy values
- * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry
+ * @throws IncorrectParseDefinitionException If the key is not in the
+ * ParseDefinitions of the Log data
+ * entry
*/
<U extends StdLogEntry> LogData<U> groupBy(List<String> in_parseDefinitionEntryKeyList,
Class<U> in_transformationClass)
throws IncorrectParseDefinitionException {
LogData<U> lr_cubeData = new LogData<>();
- //Creating new Definition
+ // Creating new Definition
ParseDefinition l_cubeDefinition = new ParseDefinition(
"cube " + String.join("-", in_parseDefinitionEntryKeyList));
@@ -231,19 +249,20 @@ LogData groupBy(List in_parseDefinitionEntryK
l_cubeDefinition.addEntry(new ParseDefinitionEntry(lt_keyName));
}
- //Filling STDLogData
+ // Filling STDLogData
for (T lt_entry : getEntries().values()) {
Map lt_cubeEntryValues = new HashMap<>();
U lt_cubeEntry = null;
try {
lt_cubeEntry = in_transformationClass.getDeclaredConstructor().newInstance();
- } catch (InstantiationException | IllegalAccessException | InvocationTargetException | NoSuchMethodException e) {
+ } catch (InstantiationException | IllegalAccessException | InvocationTargetException
+ | NoSuchMethodException e) {
throw new LogParserPostManipulationException("Problem creating new host for our new grouping.", e);
}
lt_cubeEntry.setParseDefinition(l_cubeDefinition);
for (String lt_parseDefinitionEntryKey : in_parseDefinitionEntryKeyList) {
- //Merge with original headers
+ // Merge with original headers
if (!lt_entry.fetchHeaders().contains(lt_parseDefinitionEntryKey)) {
throw new IncorrectParseDefinitionException("The given header name "
+ lt_parseDefinitionEntryKey + " was not among the stored data");
@@ -258,18 +277,23 @@ LogData groupBy(List in_parseDefinitionEntryK
lr_cubeData.addEntry(lt_cubeEntry);
}
+ ParseGuardRails.checkMemoryLimits("Grouping data");
return lr_cubeData;
}
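// Illustrative sketch for the transforming groupBy above, assuming "verb" and "path" are
// titles defined in l_logData's parse definition and that GenericEntry is an acceptable
// target cube type; requires java.util.Arrays and is called from code that can see this
// package-visible method.
LogData<GenericEntry> l_cube = l_logData.groupBy(Arrays.asList("verb", "path"), GenericEntry.class);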
/**
- * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the
+ * Here we create a new LogDataObject with the given ParseDefinitionEntry. This
+ * method performs a groupby for the
* given value. The frequence will also take into account the original frequence
*
* Author : gandomi
*
- * @param in_parseDefinitionEntryKeyList The list of key names of the parse definition perform the GroupBy on
+ * @param in_parseDefinitionEntryKeyList The list of key names of the parse
+ * definition to perform the GroupBy on
* @return a new LogData Object containing the groupBy values
- * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry
+ * @throws IncorrectParseDefinitionException If the key is not in the
+ * ParseDefinitions of the Log data
+ * entry
*/
public LogData<GenericEntry> groupBy(List<String> in_parseDefinitionEntryKeyList)
throws IncorrectParseDefinitionException {
@@ -277,14 +301,18 @@ public LogData groupBy(List in_parseDefinitionEntryKeyList
}
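// Illustrative sketch for the public overload above, with the same assumptions; it
// presumably delegates to the transforming variant using GenericEntry as the cube type.
LogData<GenericEntry> l_byVerb = l_logData.groupBy(Arrays.asList("verb"));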
/**
- * Here we create a new LogDataObject with the given ParseDefinitionEntry. This method performs a groupby for the
+ * Here we create a new LogDataObject with the given ParseDefinitionEntry. This
+ * method performs a groupby for the
* given value. The frequence will also take into account the original frequence
*
* Author : gandomi
*
- * @param in_parseDefinitionEntryKey The key name of the parse definition perform the GroupBy on
+ * @param in_parseDefinitionEntryKey The key name of the parse definition
+ * to perform the GroupBy on
* @return a new LogData Object containing the groupBy values
- * @throws IncorrectParseDefinitionException If the key is not in the ParseDefinitions of the Log data entry
+ * @throws IncorrectParseDefinitionException If the key is not in the
+ * ParseDefinitions of the Log data
+ * entry
*/
public LogData<GenericEntry> groupBy(String in_parseDefinitionEntryKey)
throws IncorrectParseDefinitionException {
@@ -296,7 +324,8 @@ public LogData groupBy(String in_parseDefinitionEntryKey)
*
* Author : gandomi
*
- * @param in_filterKeyValues A map of <String,Matcher> representation the values we want to find
+ * @param in_filterKeyValues A map of <String,Matcher> representing the
+ * values we want to find
* @return a new LogDataObject containing only the filtered values
*/
public LogData<T> filterBy(Map<String, Matcher> in_filterKeyValues) {
@@ -307,16 +336,18 @@ public LogData filterBy(Map in_filterKeyValues) {
lr_filteredLogData.addEntry(this.get(lt_logDataKey));
}
}
-
+ ParseGuardRails.checkMemoryLimits("Filtering data");
return lr_filteredLogData;
}
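// Illustrative sketch for filterBy(), assuming the Matcher type expected here is
// org.hamcrest.Matcher (an assumption) and that "errorCode" is a hypothetical entry title;
// requires java.util.HashMap, java.util.Map and org.hamcrest.Matchers.
Map<String, Matcher> l_filter = new HashMap<>();
l_filter.put("errorCode", Matchers.startsWith("5"));
LogData<GenericEntry> l_serverErrors = l_logData.filterBy(l_filter);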
/**
- * This method searches the LogData for an entry with a specific value for a parse definition entry name
+ * This method searches the LogData for an entry with a specific value for a
+ * parse definition entry name
*
* Author : gandomi
*
- * @param in_parseDefinitionName The name of the parse definition entry under which we search for a value
+ * @param in_parseDefinitionName The name of the parse definition entry under
+ * which we search for a value
* @param in_searchValue The matcher
* @return a new LogDataObject containing only the searched values
*/
@@ -332,7 +363,8 @@ public LogData searchEntries(String in_parseDefinitionName, Matcher in_search
*
* Author : gandomi
*
- * @param in_searchKeyValues A map of <String,Matcher> representation the values we want to find
+ * @param in_searchKeyValues A map of <String,Matcher> representing the
+ * values we want to find
* @return a new LogDataObject containing only the filtered values
*/
public LogData<T> searchEntries(Map<String, Matcher> in_searchKeyValues) {
@@ -345,7 +377,8 @@ public LogData searchEntries(Map in_searchKeyValues) {
*
* Author : gandomi
*
- * @param in_parseDefinitionName The name of the parse definition entry under which we search for a value
+ * @param in_parseDefinitionName The name of the parse definition entry under
+ * which we search for a value
* @param in_searchValue The search value
* @return true if the search terms could be found. Otherwise false
*/
@@ -361,7 +394,8 @@ public boolean isEntryPresent(String in_parseDefinitionName, String in_searchVal
*
* Author : gandomi
*
- * @param in_searchKeyValues A map of <String,Object> representation the values we want to find
+ * @param in_searchKeyValues A map of <String,Object> representing the
+ * values we want to find
* @return true if the search terms could be found. Otherwise false
*/
public boolean isEntryPresent(Map<String, Object> in_searchKeyValues) {
@@ -369,7 +403,8 @@ public boolean isEntryPresent(Map in_searchKeyValues) {
}
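// Illustrative sketch for the presence check, with the same hypothetical l_logData: it
// returns true as soon as one stored entry has the value "POST" under the title "verb".
boolean l_hasPosts = l_logData.isEntryPresent("verb", "POST");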
/**
- * Exports the current LogData to a standard CSV file. By default the file will have an escaped version of the Parse
+ * Exports the current LogData to a standard CSV file. By default the file will
+ * have an escaped version of the Parse
* Definition as the name
*
* @return a CSV file containing the LogData
@@ -390,7 +425,9 @@ public File exportLogDataToCSV() throws LogDataExportToFileException {
}
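// Illustrative sketch for the default CSV export, with the same hypothetical l_logData;
// the file name is derived from the escaped parse definition title, and the checked
// LogDataExportToFileException must be handled (requires java.io.File).
try {
File l_csvExport = l_logData.exportLogDataToCSV();
} catch (LogDataExportToFileException e) {
// handle or rethrow the failed export
}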
/**
- * Exports the current LogData to a standard CSV file with a name you give. By default the file will have an escaped version of the Parse
+ * Exports the current LogData to a standard CSV file with a name you give.
+ *
* @param in_fileName a filename to store the CSV export
* @return a CSV file containing the LogData
*/
@@ -432,15 +469,14 @@ public File exportLogDataToCSV(Collection in_headerSet, String in_csvFil
throw new LogDataExportToFileException("Encountered error while exporting the log data to a CSV file.", ex);
}
-
-
return l_exportFile;
}
/**
- * Exports the current LogData to an HTML file as a table. The headers will be extracted directly from the entries.
+ * Exports the current LogData to an HTML file as a table. The headers will be
+ * extracted directly from the entries.
*
- * @param in_reportTitle The title of the report
+ * @param in_reportTitle The title of the report
* @param in_htmlFileName The file name to export
* @return an HTML file containing the LogData as a table
*/
@@ -458,8 +494,8 @@ public File exportLogDataToHTML(String in_reportTitle, String in_htmlFileName) {
/**
* Exports the current LogData to an HTML file as a table.
*
- * @param in_headerSet A set of headers to be used as keys for exporting
- * @param in_reportTitle The title of the report
+ * @param in_headerSet A set of headers to be used as keys for exporting
+ * @param in_reportTitle The title of the report
* @param in_htmlFileName The file name to export
* @return an HTML file containing the LogData as a table
*/
@@ -470,7 +506,7 @@ public File exportLogDataToHTML(Collection in_headerSet, String in_repor
l_exportFile = LogParserFileUtils.createNewFile(in_htmlFileName);
StringBuilder sb = new StringBuilder();
sb.append(HTMLReportUtils.fetchSTDPageStart("diffTable.css"));
- //Creating the overview report
+ // Creating the overview report
sb.append(HTMLReportUtils.fetchHeader(1, in_reportTitle));
sb.append("Here is an listing of out findings.");
sb.append(HTMLReportUtils.fetchTableStartBracket());
@@ -480,7 +516,8 @@ public File exportLogDataToHTML(Collection in_headerSet, String in_repor
for (StdLogEntry lt_entry : this.getEntries().values()) {
Map lt_values = lt_entry.fetchValueMapPrintable();
sb.append(HTMLReportUtils.ROW_START);
- in_headerSet.stream().map(h -> lt_values.get(h)).forEach(j -> sb.append(HTMLReportUtils.fetchCell_TD(j)));
+ in_headerSet.stream().map(h -> lt_values.get(h))
+ .forEach(j -> sb.append(HTMLReportUtils.fetchCell_TD(j)));
sb.append(HTMLReportUtils.ROW_END);
}
@@ -498,7 +535,8 @@ public File exportLogDataToHTML(Collection in_headerSet, String in_repor
}
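// Illustrative sketch for the HTML export, with the same hypothetical l_logData; the report
// title and target file name are illustration values only.
File l_htmlReport = l_logData.exportLogDataToHTML("Access log overview", "access-log-report.html");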
/**
- * Exports the current LogData to a standard JSON file. By default, the file will have an escape version of the Parse
+ * Exports the current LogData to a standard JSON file. By default, the file
+ * will have an escaped version of the Parse
* Definition as the name
*
* @return a JSON file containing the LogData
@@ -507,7 +545,8 @@ public File exportLogDataToJSON() throws LogDataExportToFileException {
T l_firstEntry = this.fetchFirst();
if (l_firstEntry != null) {
- return exportLogDataToJSON(l_firstEntry.fetchHeaders(), l_firstEntry.getParseDefinition().fetchEscapedTitle() + "-export.json");
+ return exportLogDataToJSON(l_firstEntry.fetchHeaders(),
+ l_firstEntry.getParseDefinition().fetchEscapedTitle() + "-export.json");
} else {
log.warn("No Log data to export. Please load the log data before re-attempting");
return null;
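// Illustrative sketch for the default JSON export, with the same assumptions as the CSV
// sketch; a null return means there was no log data to export.
try {
File l_jsonExport = l_logData.exportLogDataToJSON();
} catch (LogDataExportToFileException e) {
// handle or rethrow the failed export
}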
@@ -534,7 +573,7 @@ public File exportLogDataToJSON(String in_jsonFileName) throws LogDataExportToFi
/**
* Exports the current LogData to an JSON file
*
- * @param in_headerSet A set of headers to be used as keys for exporting
+ * @param in_headerSet A set of headers to be used as keys for exporting
* @param in_jsonFileName The file name to export
* @return a JSON file containing the LogData
* @throws LogDataExportToFileException If the file could not be exported
@@ -544,13 +583,13 @@ public File exportLogDataToJSON(Collection in_headerSet, String in_jsonF
File l_exportFile;
try {
- l_exportFile= LogParserFileUtils.createNewFile(in_jsonFileName);
+ l_exportFile = LogParserFileUtils.createNewFile(in_jsonFileName);
List