diff --git a/engine/src/main/java/org/apache/hop/pipeline/transform/BaseTransform.java b/engine/src/main/java/org/apache/hop/pipeline/transform/BaseTransform.java index 622cc93e407..0fe8a432767 100644 --- a/engine/src/main/java/org/apache/hop/pipeline/transform/BaseTransform.java +++ b/engine/src/main/java/org/apache/hop/pipeline/transform/BaseTransform.java @@ -145,7 +145,7 @@ public class BaseTransform parentWorkflow; /** The log channel interface object, used for logging */ - protected ILogChannel log; + private ILogChannel log; /** The log level */ private LogLevel logLevel = DefaultLogLevel.getLogLevel(); diff --git a/plugins/actions/checkfilelocked/src/main/java/org/apache/hop/workflow/actions/checkfilelocked/ActionCheckFilesLocked.java b/plugins/actions/checkfilelocked/src/main/java/org/apache/hop/workflow/actions/checkfilelocked/ActionCheckFilesLocked.java index ff09e496c4e..ba823e38bc1 100644 --- a/plugins/actions/checkfilelocked/src/main/java/org/apache/hop/workflow/actions/checkfilelocked/ActionCheckFilesLocked.java +++ b/plugins/actions/checkfilelocked/src/main/java/org/apache/hop/workflow/actions/checkfilelocked/ActionCheckFilesLocked.java @@ -114,7 +114,7 @@ public Result execute(Result previousResult, int nr) { } else if (!checkedFiles.isEmpty()) { oneFileLocked = isOneSpecifiedFileLocked(); } else { - log.logBasic( + logBasic( "This action didn't execute any locking checks " + "as there were no lines to check and no arguments provided."); } diff --git a/plugins/actions/createfile/src/main/java/org/apache/hop/workflow/actions/createfile/ActionCreateFile.java b/plugins/actions/createfile/src/main/java/org/apache/hop/workflow/actions/createfile/ActionCreateFile.java index 411495df660..231f2bea0c9 100644 --- a/plugins/actions/createfile/src/main/java/org/apache/hop/workflow/actions/createfile/ActionCreateFile.java +++ b/plugins/actions/createfile/src/main/java/org/apache/hop/workflow/actions/createfile/ActionCreateFile.java @@ -193,7 +193,7 @@ private void addFilenameToResult( resultFile.setComment(""); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionCreateFile.FileAddedToResult", targetFilename)); } diff --git a/plugins/actions/createfolder/src/main/java/org/apache/hop/workflow/actions/createfolder/ActionCreateFolder.java b/plugins/actions/createfolder/src/main/java/org/apache/hop/workflow/actions/createfolder/ActionCreateFolder.java index 100d5ff3273..5160d4fcfa4 100644 --- a/plugins/actions/createfolder/src/main/java/org/apache/hop/workflow/actions/createfolder/ActionCreateFolder.java +++ b/plugins/actions/createfolder/src/main/java/org/apache/hop/workflow/actions/createfolder/ActionCreateFolder.java @@ -109,7 +109,7 @@ public Result execute(Result previousResult, int nr) { // Folder already exists: there is no reason to try and create it. // result.setResult(true); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(CONST_FOLDER + realFolderName + "] already exists, not recreating."); } } @@ -118,7 +118,7 @@ public Result execute(Result previousResult, int nr) { // No Folder yet, create an empty Folder. 
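Note on the pattern running through this patch: with BaseTransform's ILogChannel field now private, actions and transforms stop dereferencing log directly and instead call the level checks and log methods they inherit from their base class (isBasic()/isDetailed()/isDebug(), logBasic()/logDetailed()/logError(), and getLogChannel() when the channel itself is needed). The sketch below is a minimal, self-contained illustration of that shape; SketchLogChannel, SketchActionBase and CreateFolderSketch are hypothetical stand-ins, not Hop classes, and only mirror the helper names visible in the hunks above.

// Hypothetical stand-ins illustrating the refactoring pattern in this patch.
interface SketchLogChannel {
  boolean isDetailed();
  void logDetailed(String message);
  void logBasic(String message);
  void logError(String message);
}

abstract class SketchActionBase {
  private final SketchLogChannel log; // private, as BaseTransform's field becomes in this patch

  protected SketchActionBase(SketchLogChannel log) {
    this.log = log;
  }

  // Subclasses go through these helpers instead of touching the field.
  protected boolean isDetailed() { return log.isDetailed(); }
  protected void logDetailed(String message) { log.logDetailed(message); }
  protected void logBasic(String message) { log.logBasic(message); }
  protected void logError(String message) { log.logError(message); }
  protected SketchLogChannel getLogChannel() { return log; }
}

class CreateFolderSketch extends SketchActionBase {
  CreateFolderSketch(SketchLogChannel log) { super(log); }

  void execute(String realFolderName, boolean alreadyExists) {
    if (alreadyExists) {
      // Level check guards the string concatenation, exactly like the hunks above.
      if (isDetailed()) {
        logDetailed("Folder [" + realFolderName + "] already exists, not recreating.");
      }
    } else {
      // ... a real action would create the folder here ...
      if (isDetailed()) {
        logDetailed("Folder [" + realFolderName + "] created!");
      }
    }
  }

  public static void main(String[] args) {
    SketchLogChannel console = new SketchLogChannel() {
      @Override public boolean isDetailed() { return true; }
      @Override public void logDetailed(String m) { System.out.println("DETAILED " + m); }
      @Override public void logBasic(String m) { System.out.println("BASIC " + m); }
      @Override public void logError(String m) { System.err.println("ERROR " + m); }
    };
    new CreateFolderSketch(console).execute("/tmp/demo", false);
  }
}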
folderObject.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(CONST_FOLDER + realFolderName + "] created!"); } result.setResult(true); diff --git a/plugins/actions/delay/src/main/java/org/apache/hop/workflow/actions/delay/ActionDelay.java b/plugins/actions/delay/src/main/java/org/apache/hop/workflow/actions/delay/ActionDelay.java index 54d431fd250..c86b12980e4 100644 --- a/plugins/actions/delay/src/main/java/org/apache/hop/workflow/actions/delay/ActionDelay.java +++ b/plugins/actions/delay/src/main/java/org/apache/hop/workflow/actions/delay/ActionDelay.java @@ -141,7 +141,7 @@ public Result execute(Result previousResult, int nr) { // Let's check the limit time if ((iMaximumTimeout >= 0) && (now >= (timeStart + iMaximumTimeout))) { // We have reached the time limit - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionDelay.WaitTimeIsElapsed.Label")); } continueLoop = false; diff --git a/plugins/actions/deletefile/src/main/java/org/apache/hop/workflow/actions/deletefile/ActionDeleteFile.java b/plugins/actions/deletefile/src/main/java/org/apache/hop/workflow/actions/deletefile/ActionDeleteFile.java index 6155f6afdab..cb677319528 100644 --- a/plugins/actions/deletefile/src/main/java/org/apache/hop/workflow/actions/deletefile/ActionDeleteFile.java +++ b/plugins/actions/deletefile/src/main/java/org/apache/hop/workflow/actions/deletefile/ActionDeleteFile.java @@ -119,7 +119,7 @@ public Result execute(Result previousResult, int nr) { } else { // File already deleted, no reason to try to delete it result.setResult(true); - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, "ActionDeleteFile.File_Already_Deleted", realFilename)); @@ -134,7 +134,7 @@ public Result execute(Result previousResult, int nr) { result.setResult(false); result.setNrErrors(1); } - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ActionDeleteFile.File_Deleted", realFilename)); } result.setResult(true); diff --git a/plugins/actions/deletefiles/src/main/java/org/apache/hop/workflow/actions/deletefiles/ActionDeleteFiles.java b/plugins/actions/deletefiles/src/main/java/org/apache/hop/workflow/actions/deletefiles/ActionDeleteFiles.java index a8f53c4dfa8..5aff2d77cf1 100644 --- a/plugins/actions/deletefiles/src/main/java/org/apache/hop/workflow/actions/deletefiles/ActionDeleteFiles.java +++ b/plugins/actions/deletefiles/src/main/java/org/apache/hop/workflow/actions/deletefiles/ActionDeleteFiles.java @@ -162,7 +162,7 @@ public Result execute(Result result, int nr) throws HopException { result.setResult(false); result.setNrErrors(1); - if (argFromPrevious && log.isDetailed()) { + if (argFromPrevious && isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -179,7 +179,7 @@ public Result execute(Result result, int nr) throws HopException { // a root pdi-folder. 
// It is much more likely to be a mistake than a desirable action, so we don't delete // anything - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionDeleteFiles.NoPathProvided")); } } else { @@ -233,7 +233,7 @@ private Multimap populateDataForJobExecution( String pathToFile = resultRow.getString(0, null); String fileMask = resultRow.getString(1, null); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionDeleteFiles.ProcessingRow", pathToFile, fileMask)); } @@ -242,7 +242,7 @@ private Multimap populateDataForJobExecution( } } else if (arguments != null) { for (int i = 0; i < arguments.length; i++) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionDeleteFiles.ProcessingArg", arguments[i], filemasks[i])); @@ -264,7 +264,7 @@ boolean processFile(String path, String wildcard, IWorkflowEngine if (fileFolder.exists()) { if (fileFolder.getType() == FileType.FOLDER) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionDeleteFiles.ProcessingFolder", path)); } @@ -272,7 +272,7 @@ boolean processFile(String path, String wildcard, IWorkflowEngine fileFolder.delete( new TextFileSelector(fileFolder.toString(), wildcard, parentWorkflow)); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionDeleteFiles.TotalDeleted", String.valueOf(totalDeleted))); @@ -280,21 +280,21 @@ boolean processFile(String path, String wildcard, IWorkflowEngine isDeleted = true; } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionDeleteFiles.ProcessingFile", path)); } isDeleted = fileFolder.delete(); if (!isDeleted) { logError(BaseMessages.getString(PKG, "ActionDeleteFiles.CouldNotDeleteFile", path)); } else { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ActionDeleteFiles.FileDeleted", path)); } } } } else { // File already deleted, no reason to try to delete it - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ActionDeleteFiles.FileAlreadyDeleted", path)); } isDeleted = true; @@ -348,7 +348,7 @@ public boolean includeFile(FileSelectInfo info) { if (includeSubfolders && (info.getFile().getType() == FileType.FILE) && getFileWildcard(shortFilename, fileWildcard)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionDeleteFiles.DeletingFile", info.getFile().toString())); @@ -359,7 +359,7 @@ && getFileWildcard(shortFilename, fileWildcard)) { // In the Base Folder... 
if ((info.getFile().getType() == FileType.FILE) && getFileWildcard(shortFilename, fileWildcard)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionDeleteFiles.DeletingFile", info.getFile().toString())); @@ -369,7 +369,7 @@ && getFileWildcard(shortFilename, fileWildcard)) { } } } catch (Exception e) { - log.logError( + logError( BaseMessages.getString(PKG, "ActionDeleteFiles.Error.Exception.DeleteProcessError"), BaseMessages.getString( PKG, diff --git a/plugins/actions/deletefolders/src/main/java/org/apache/hop/workflow/actions/deletefolders/ActionDeleteFolders.java b/plugins/actions/deletefolders/src/main/java/org/apache/hop/workflow/actions/deletefolders/ActionDeleteFolders.java index 9b5d1cae876..6ddba42474e 100644 --- a/plugins/actions/deletefolders/src/main/java/org/apache/hop/workflow/actions/deletefolders/ActionDeleteFolders.java +++ b/plugins/actions/deletefolders/src/main/java/org/apache/hop/workflow/actions/deletefolders/ActionDeleteFolders.java @@ -174,7 +174,7 @@ public Result execute(Result result, int nr) throws HopException { successConditionBrokenExit = false; nrLimitFolders = Const.toInt(resolve(getLimitFolders()), 10); - if (argFromPrevious && log.isDetailed()) { + if (argFromPrevious && isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -229,7 +229,7 @@ public Result execute(Result result, int nr) throws HopException { } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("======================================="); logDetailed( BaseMessages.getString(PKG, "ActionDeleteFolders.Log.Info.NrError", "" + nrErrors)); @@ -283,14 +283,14 @@ private boolean deleteFolder(String folderName) { // the file or folder exists if (filefolder.getType() == FileType.FOLDER) { // It's a folder - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionDeleteFolders.ProcessingFolder", folderName)); } // Delete Files int count = filefolder.delete(new TextFileSelector()); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionDeleteFolders.TotalDeleted", folderName, String.valueOf(count))); @@ -302,7 +302,7 @@ private boolean deleteFolder(String folderName) { } } else { // File already deleted, no reason to try to delete it - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "ActionDeleteFolders.FolderAlreadyDeleted", folderName)); } diff --git a/plugins/actions/deleteresultfilenames/src/main/java/org/apache/hop/workflow/actions/deleteresultfilenames/ActionDeleteResultFilenames.java b/plugins/actions/deleteresultfilenames/src/main/java/org/apache/hop/workflow/actions/deleteresultfilenames/ActionDeleteResultFilenames.java index 481aa7eac29..88647f025da 100644 --- a/plugins/actions/deleteresultfilenames/src/main/java/org/apache/hop/workflow/actions/deleteresultfilenames/ActionDeleteResultFilenames.java +++ b/plugins/actions/deleteresultfilenames/src/main/java/org/apache/hop/workflow/actions/deleteresultfilenames/ActionDeleteResultFilenames.java @@ -156,14 +156,14 @@ public Result execute(Result previousResult, int nr) { if (previousResult != null) { try { int size = previousResult.getResultFiles().size(); - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "ActionDeleteResultFilenames.log.FilesFound", "" + size)); } if (!specifyWildcard) { // Delete all files previousResult.getResultFiles().clear(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, 
"ActionDeleteResultFilenames.log.DeletedFiles", "" + size)); @@ -183,7 +183,7 @@ public Result execute(Result previousResult, int nr) { // Remove file from result files list result.getResultFiles().remove(resultFile.getFile().toString()); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionDeleteResultFilenames.log.DeletedFile", file.toString())); diff --git a/plugins/actions/evalfilesmetrics/src/main/java/org/apache/hop/workflow/actions/evalfilesmetrics/ActionEvalFilesMetrics.java b/plugins/actions/evalfilesmetrics/src/main/java/org/apache/hop/workflow/actions/evalfilesmetrics/ActionEvalFilesMetrics.java index d012cf7989b..8bc5348e6ae 100644 --- a/plugins/actions/evalfilesmetrics/src/main/java/org/apache/hop/workflow/actions/evalfilesmetrics/ActionEvalFilesMetrics.java +++ b/plugins/actions/evalfilesmetrics/src/main/java/org/apache/hop/workflow/actions/evalfilesmetrics/ActionEvalFilesMetrics.java @@ -381,7 +381,7 @@ public Result execute(Result previousResult, int nr) throws HopException { int indexOfResultFieldIncludeSubfolders = -1; // as such we must get rows - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -456,7 +456,7 @@ public Result execute(Result previousResult, int nr) throws HopException { break; case SOURCE_FILES_FILENAMES_RESULT: List resultFiles = result.getResultFilesList(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, diff --git a/plugins/actions/evaluatetablecontent/src/main/java/org/apache/hop/workflow/actions/evaluatetablecontent/ActionEvalTableContent.java b/plugins/actions/evaluatetablecontent/src/main/java/org/apache/hop/workflow/actions/evaluatetablecontent/ActionEvalTableContent.java index e27378e801d..45183d089b1 100644 --- a/plugins/actions/evaluatetablecontent/src/main/java/org/apache/hop/workflow/actions/evaluatetablecontent/ActionEvalTableContent.java +++ b/plugins/actions/evaluatetablecontent/src/main/java/org/apache/hop/workflow/actions/evaluatetablecontent/ActionEvalTableContent.java @@ -265,7 +265,7 @@ public Result execute(Result previousResult, int nr) { boolean successOK = false; int nrRowsLimit = Const.toInt(resolve(limit), 0); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionEvalTableContent.Log.nrRowsLimit", "" + nrRowsLimit)); } @@ -279,7 +279,7 @@ public Result execute(Result previousResult, int nr) { if (useVars) { realCustomSql = resolve(realCustomSql); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionEvalTableContent.Log.EnteredCustomSQL", realCustomSql)); @@ -312,7 +312,7 @@ public Result execute(Result previousResult, int nr) { } if (countSqlStatement != null) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionEvalTableContent.Log.RunSQLStatement", countSqlStatement)); @@ -340,7 +340,7 @@ public Result execute(Result previousResult, int nr) { } } } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -355,7 +355,7 @@ public Result execute(Result previousResult, int nr) { rowsCount = row.getInteger(0); } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionEvalTableContent.Log.NrRowsReturned", "" + rowsCount)); diff --git a/plugins/actions/filesexist/src/main/java/org/apache/hop/workflow/actions/filesexist/ActionFilesExist.java 
b/plugins/actions/filesexist/src/main/java/org/apache/hop/workflow/actions/filesexist/ActionFilesExist.java index 34430facf2f..222ac70c7eb 100644 --- a/plugins/actions/filesexist/src/main/java/org/apache/hop/workflow/actions/filesexist/ActionFilesExist.java +++ b/plugins/actions/filesexist/src/main/java/org/apache/hop/workflow/actions/filesexist/ActionFilesExist.java @@ -159,13 +159,13 @@ public Result execute(Result previousResult, int nr) { if (file.exists() && file.isReadable()) { // TODO: is it needed to check file for readability? - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionFilesExist.File_Exists", realFilefoldername)); } } else { missingfiles++; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionFilesExist.File_Does_Not_Exist", realFilefoldername)); diff --git a/plugins/actions/folderisempty/src/main/java/org/apache/hop/workflow/actions/folderisempty/ActionFolderIsEmpty.java b/plugins/actions/folderisempty/src/main/java/org/apache/hop/workflow/actions/folderisempty/ActionFolderIsEmpty.java index d680650274a..fc5f8997044 100644 --- a/plugins/actions/folderisempty/src/main/java/org/apache/hop/workflow/actions/folderisempty/ActionFolderIsEmpty.java +++ b/plugins/actions/folderisempty/src/main/java/org/apache/hop/workflow/actions/folderisempty/ActionFolderIsEmpty.java @@ -166,8 +166,8 @@ public Result execute(Result previousResult, int nr) { throw ex; } } - if (log.isBasic()) { - log.logBasic("Total files", "We found : " + filescount + " file(s)"); + if (isBasic()) { + logBasic("Total files", "We found : " + filescount + " file(s)"); } if (filescount == 0) { result.setResult(true); @@ -175,12 +175,12 @@ public Result execute(Result previousResult, int nr) { } } else { // Not a folder, fail - log.logError("[" + realFoldername + "] is not a folder, failing."); + logError("[" + realFoldername + "] is not a folder, failing."); result.setNrErrors(1); } } else { // No Folder found - if (log.isBasic()) { + if (isBasic()) { logBasic("we can not find [" + realFoldername + "] !"); } result.setNrErrors(1); @@ -233,8 +233,8 @@ public boolean includeFile(FileSelectInfo info) throws ExpectedException { // We are in the Base folder if ((isSpecifyWildcard() && GetFileWildcard(info.getFile().getName().getBaseName())) || !isSpecifyWildcard()) { - if (log.isDetailed()) { - log.logDetailed("We found file : " + info.getFile().toString()); + if (isDetailed()) { + logDetailed("We found file : " + info.getFile().toString()); } filescount++; } @@ -244,8 +244,8 @@ public boolean includeFile(FileSelectInfo info) throws ExpectedException { if (isIncludeSubFolders()) { if ((isSpecifyWildcard() && GetFileWildcard(info.getFile().getName().getBaseName())) || !isSpecifyWildcard()) { - if (log.isDetailed()) { - log.logDetailed("We found file : " + info.getFile().toString()); + if (isDetailed()) { + logDetailed("We found file : " + info.getFile().toString()); } filescount++; } @@ -263,7 +263,7 @@ public boolean includeFile(FileSelectInfo info) throws ExpectedException { } catch (Exception e) { if (!rethrow) { - log.logError( + logError( BaseMessages.getString(PKG, "ActionFolderIsEmpty.Error"), BaseMessages.getString( PKG, diff --git a/plugins/actions/folderscompare/src/main/java/org/apache/hop/workflow/actions/folderscompare/ActionFoldersCompare.java b/plugins/actions/folderscompare/src/main/java/org/apache/hop/workflow/actions/folderscompare/ActionFoldersCompare.java index 4b9144a0b0b..af74d6a6b8e 100644 --- 
a/plugins/actions/folderscompare/src/main/java/org/apache/hop/workflow/actions/folderscompare/ActionFoldersCompare.java +++ b/plugins/actions/folderscompare/src/main/java/org/apache/hop/workflow/actions/folderscompare/ActionFoldersCompare.java @@ -298,7 +298,7 @@ public Result execute(Result previousResult, int nr) { int lenList1 = list1.length; int lenList2 = list2.length; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -338,7 +338,7 @@ public Result execute(Result previousResult, int nr) { Map.Entry entree = iterateur.next(); if (!collection2.containsKey(entree.getKey())) { ok = false; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -347,7 +347,7 @@ public Result execute(Result previousResult, int nr) { realFilename2)); } } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -363,7 +363,7 @@ public Result execute(Result previousResult, int nr) { if (!filefolder2.getType().equals(filefolder1.getType())) { // The file1 exist in the folder2..but they don't have the same type ok = false; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -405,7 +405,7 @@ public Result execute(Result previousResult, int nr) { long filefolder2Size = filefolder2.getContent().getSize(); if (filefolder1Size != filefolder2Size) { ok = false; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -433,7 +433,7 @@ public Result execute(Result previousResult, int nr) { if (comparefilecontent) { if (!equalFileContents(filefolder1, filefolder2)) { ok = false; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -452,7 +452,7 @@ public Result execute(Result previousResult, int nr) { result.setResult(ok); } else { // The 2 folders don't have the same files number - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, diff --git a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftp/ActionFtp.java b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftp/ActionFtp.java index c91b5a9326f..1960ec637d5 100644 --- a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftp/ActionFtp.java +++ b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftp/ActionFtp.java @@ -323,7 +323,7 @@ protected InetAddress getInetAddress(String realServername) throws UnknownHostEx @Override public Result execute(Result previousResult, int nr) { - log.logBasic(BaseMessages.getString(PKG, "ActionFTP.Started", serverName)); + logBasic(BaseMessages.getString(PKG, "ActionFTP.Started", serverName)); Result result = previousResult; result.setNrErrors(1); @@ -350,7 +350,7 @@ public Result execute(Result previousResult, int nr) { String realMoveToFolder = null; try { - ftpClient = FtpClientUtil.connectAndLogin(log, this, this, getName()); + ftpClient = FtpClientUtil.connectAndLogin(getLogChannel(), this, this, getName()); // move to spool dir ... 
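Where a helper needs the whole channel rather than a single message — FtpClientUtil.connectAndLogin(...) here, and later GPG, MailConnection, MailValidation and StreamLogger — the patch hands it over explicitly with getLogChannel() instead of exposing the field. A short continuation of the stand-in sketch above; FtpClientUtilSketch and FtpSketchAction are hypothetical and only illustrate the hand-off.

// Continues the hypothetical stand-in types from the sketch above.
class FtpClientUtilSketch {
  // A utility that needs the channel handed to it, the way the patch passes
  // getLogChannel() to FtpClientUtil.connectAndLogin().
  static String connectAndLogin(SketchLogChannel log, String server, String actionName) {
    log.logBasic("Connecting to " + server + " for action " + actionName);
    return "client:" + server; // placeholder for a real FTP client handle
  }
}

class FtpSketchAction extends SketchActionBase {
  FtpSketchAction(SketchLogChannel log) { super(log); }

  void execute(String server) {
    // The action no longer exposes its private field; it hands the channel over explicitly.
    String client = FtpClientUtilSketch.connectAndLogin(getLogChannel(), server, "ftp-get");
    if (isDetailed()) {
      logDetailed("Connected: " + client);
    }
  }
}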
if (!Utils.isEmpty(remoteDirectory)) { @@ -752,7 +752,7 @@ protected boolean needsDownload(String filename) { return true; } else if (ifFileExists == ifFileExistsFail) { - log.logError(BaseMessages.getString(PKG, CONST_LOCAL_FILE_EXISTS), filename); + logError(BaseMessages.getString(PKG, CONST_LOCAL_FILE_EXISTS), filename); updateErrors(); } else { if (isDebug()) { diff --git a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpdelete/ActionFtpDelete.java b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpdelete/ActionFtpDelete.java index 99a7d912657..f35285364a5 100644 --- a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpdelete/ActionFtpDelete.java +++ b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpdelete/ActionFtpDelete.java @@ -501,7 +501,7 @@ public void setProxyUsername(String proxyUsername) { /** Needed for the Vector coming from sshclient.ls() * */ @Override public Result execute(Result previousResult, int nr) { - log.logBasic(BaseMessages.getString(PKG, "ActionFTPDelete.Started", serverName)); + logBasic(BaseMessages.getString(PKG, "ActionFTPDelete.Started", serverName)); RowMetaAndData resultRow = null; Result result = previousResult; List rows = result.getRows(); @@ -727,7 +727,7 @@ private void sftpConnect( private void ftpConnect(String realFtpDirectory) throws Exception { // Create ftp client to host:port ... - ftpclient = FtpClientUtil.connectAndLogin(log, this, this, getName()); + ftpclient = FtpClientUtil.connectAndLogin(getLogChannel(), this, this, getName()); // move to spool dir ... if (!Utils.isEmpty(realFtpDirectory)) { diff --git a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpput/ActionFtpPut.java b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpput/ActionFtpPut.java index bea94301135..11dd1269d0d 100644 --- a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpput/ActionFtpPut.java +++ b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/ftpput/ActionFtpPut.java @@ -527,7 +527,7 @@ public Result execute(Result previousResult, int nr) { result.setResult(false); long filesPut = 0; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionFtpPut.Log.Starting")); } @@ -540,7 +540,7 @@ public Result execute(Result previousResult, int nr) { String realRemoteDirectory = resolve(remoteDirectory); if (!Utils.isEmpty(realRemoteDirectory)) { ftpclient.changeWorkingDirectory(realRemoteDirectory); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionFtpPut.Log.ChangedDirectory", realRemoteDirectory)); @@ -571,7 +571,7 @@ public Result execute(Result previousResult, int nr) { } } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -611,7 +611,7 @@ public Result execute(Result previousResult, int nr) { // Assume file does not exist !! 
} - if (log.isDebug()) { + if (isDebug()) { if (fileExist) { logDebug(BaseMessages.getString(PKG, "ActionFtpPut.Log.FileExists", file)); } else { @@ -620,7 +620,7 @@ public Result execute(Result previousResult, int nr) { } if (!fileExist || !onlyPuttingNewFiles) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -634,7 +634,7 @@ public Result execute(Result previousResult, int nr) { if (fileExist) { boolean deleted = ftpclient.deleteFile(file); if (!deleted) { - log.logError( + logError( "Deletion of (existing) file '" + file + "' on the FTP server was not successful with reply string: " @@ -648,7 +648,7 @@ public Result execute(Result previousResult, int nr) { if (success) { filesPut++; } else { - log.logError( + logError( "Transfer of file '" + localFilename + "' to the FTP server was not successful with reply string: " @@ -659,7 +659,7 @@ public Result execute(Result previousResult, int nr) { // Delete the file if this is needed! if (remove) { new File(localFilename).delete(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionFtpPut.Log.DeletedFile", localFilename)); } @@ -669,7 +669,7 @@ public Result execute(Result previousResult, int nr) { } result.setResult(true); - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ActionFtpPut.Log.WeHavePut", "" + filesPut)); } } catch (Exception e) { @@ -693,7 +693,7 @@ public Result execute(Result previousResult, int nr) { // package-local visibility for testing purposes FTPClient createAndSetUpFtpClient() throws HopException { - return FtpClientUtil.connectAndLogin(log, this, this, getName()); + return FtpClientUtil.connectAndLogin(getLogChannel(), this, this, getName()); } @Override diff --git a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftp/ActionSftp.java b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftp/ActionSftp.java index f1c7bf9c141..4a96c3fec04 100644 --- a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftp/ActionSftp.java +++ b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftp/ActionSftp.java @@ -427,14 +427,14 @@ public Result execute(Result previousResult, int nr) { result.setResult(false); long filesRetrieved = 0; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftp.Log.StartAction")); } HashSet listPreviousFilenames = new HashSet<>(); if (copyprevious) { if (rows.isEmpty()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftp.ArgsFromPreviousNothing")); } result.setResult(true); @@ -450,7 +450,7 @@ public Result execute(Result previousResult, int nr) { String filePrevious = resultRow.getString(0, null); if (!Utils.isEmpty(filePrevious)) { listPreviousFilenames.add(filePrevious); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionSftp.Log.FilenameFromResult", filePrevious)); } @@ -502,7 +502,7 @@ public Result execute(Result previousResult, int nr) { targetFolder = HopVfs.getFileObject(realTargetDirectory); boolean targetFolderExists = targetFolder.exists(); if (targetFolderExists) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSftp.Log.TargetFolderExists", realTargetDirectory)); @@ -518,7 +518,7 @@ public Result execute(Result previousResult, int nr) { } else { // create target folder targetFolder.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) 
{ logDetailed( BaseMessages.getString( PKG, "ActionSftp.Log.TargetFolderCreated", realTargetDirectory)); @@ -540,7 +540,7 @@ public Result execute(Result previousResult, int nr) { realUsername, realKeyFilename, realPassPhrase); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -582,7 +582,7 @@ public Result execute(Result previousResult, int nr) { PKG, "ActionSftp.Error.CanNotFindRemoteFolder", realSftpDirString)); throw new Exception(e); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionSftp.Log.ChangedDirectory", realSftpDirString)); } @@ -593,12 +593,12 @@ public Result execute(Result previousResult, int nr) { if (filelist == null) { // Nothing was found !!! exit result.setResult(true); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftp.Log.Found", "" + 0)); } return result; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftp.Log.Found", "" + filelist.length)); } @@ -626,7 +626,7 @@ public Result execute(Result previousResult, int nr) { } if (getIt) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionSftp.Log.GettingFiles", filelist[i], realTargetDirectory)); @@ -646,20 +646,20 @@ public Result execute(Result previousResult, int nr) { parentWorkflow.getWorkflowName(), toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSftp.Log.FilenameAddedToResultFilenames", filelist[i])); } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftp.Log.TransferedFile", filelist[i])); } // Delete the file if this is needed! 
if (remove) { sftpclient.delete(filelist[i]); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftp.Log.DeletedFile", filelist[i])); } } diff --git a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftpput/ActionSftpPut.java b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftpput/ActionSftpPut.java index de770706e75..e53a124f50f 100644 --- a/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftpput/ActionSftpPut.java +++ b/plugins/actions/ftp/src/main/java/org/apache/hop/workflow/actions/sftpput/ActionSftpPut.java @@ -561,14 +561,14 @@ public Result execute(Result previousResult, int nr) throws HopException { List rows = result.getRows(); result.setResult(false); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftpPut.Log.StartAction")); } ArrayList myFileList = new ArrayList<>(); if (copyingPrevious) { if (rows.isEmpty()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftpPut.ArgsFromPreviousNothing")); } result.setResult(true); @@ -591,7 +591,7 @@ public Result execute(Result previousResult, int nr) throws HopException { PKG, "ActionSftpPut.Log.FilefromPreviousNotFound", filePrevious)); } else { myFileList.add(file); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionSftpPut.Log.FilenameFromResult", filePrevious)); @@ -611,7 +611,7 @@ public Result execute(Result previousResult, int nr) throws HopException { if (copyingPreviousFiles) { List resultFiles = result.getResultFilesList(); if (resultFiles == null || resultFiles.isEmpty()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionSftpPut.ArgsFromPreviousNothingFiles")); } result.setResult(true); @@ -630,7 +630,7 @@ public Result execute(Result previousResult, int nr) throws HopException { PKG, "ActionSftpPut.Log.FilefromPreviousNotFound", file.toString())); } else { myFileList.add(file); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionSftpPut.Log.FilenameFromResult", file.toString())); @@ -728,7 +728,7 @@ public Result execute(Result previousResult, int nr) throws HopException { realUsername, realKeyFilename, realPassPhrase); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -768,7 +768,7 @@ public Result execute(Result previousResult, int nr) throws HopException { BaseMessages.getString( PKG, "ActionSftpPut.Error.CanNotFindRemoteFolder", realSftpDirString)); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSftpPut.Error.CanNotFindRemoteFolder", realSftpDirString)); @@ -776,14 +776,14 @@ public Result execute(Result previousResult, int nr) throws HopException { // Let's create folder sftpclient.createFolder(realSftpDirString); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSftpPut.Log.RemoteFolderCreated", realSftpDirString)); } } sftpclient.chdir(realSftpDirString); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionSftpPut.Log.ChangedDirectory", realSftpDirString)); } @@ -805,7 +805,7 @@ public Result execute(Result previousResult, int nr) throws HopException { } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSftpPut.Log.RowsFromPreviousResult", myFileList.size())); @@ -839,7 +839,7 
@@ public Result execute(Result previousResult, int nr) throws HopException { if (getIt) { nrFilesMatched++; - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionSftpPut.Log.PuttingFile", localFilename, realSftpDirString)); @@ -848,7 +848,7 @@ public Result execute(Result previousResult, int nr) throws HopException { sftpclient.put(myFile, destinationFilename); nrFilesSent++; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionSftpPut.Log.TransferredFile", localFilename)); } @@ -858,7 +858,7 @@ public Result execute(Result previousResult, int nr) throws HopException { switch (getAfterFtps()) { case AFTER_FTPSPUT_DELETE: myFile.delete(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionSftpPut.Log.DeletedFile", localFilename)); } @@ -872,7 +872,7 @@ public Result execute(Result previousResult, int nr) throws HopException { + Const.FILE_SEPARATOR + myFile.getName().getBaseName()); myFile.moveTo(destination); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSftpPut.Log.FileMoved", myFile, destination)); @@ -893,7 +893,7 @@ public Result execute(Result previousResult, int nr) throws HopException { parentWorkflow.getWorkflowName(), toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, diff --git a/plugins/actions/getpop/src/main/java/org/apache/hop/workflow/actions/getpop/ActionGetPOP.java b/plugins/actions/getpop/src/main/java/org/apache/hop/workflow/actions/getpop/ActionGetPOP.java index 7b57ec39017..0ccc49cefac 100644 --- a/plugins/actions/getpop/src/main/java/org/apache/hop/workflow/actions/getpop/ActionGetPOP.java +++ b/plugins/actions/getpop/src/main/java/org/apache/hop/workflow/actions/getpop/ActionGetPOP.java @@ -854,7 +854,7 @@ && getAfterGetIMAP() == MailConnectionMeta.AFTER_GET_IMAP_MOVE)) { // create a mail connection object mailConn = new MailConnection( - log, + getLogChannel(), MailConnectionMeta.getProtocolFromString( getProtocol(), MailConnectionMeta.PROTOCOL_IMAP), realserver, diff --git a/plugins/actions/http/src/main/java/org/apache/hop/workflow/actions/http/ActionHttp.java b/plugins/actions/http/src/main/java/org/apache/hop/workflow/actions/http/ActionHttp.java index 6d48ba334c7..e8e268ddb74 100644 --- a/plugins/actions/http/src/main/java/org/apache/hop/workflow/actions/http/ActionHttp.java +++ b/plugins/actions/http/src/main/java/org/apache/hop/workflow/actions/http/ActionHttp.java @@ -450,19 +450,20 @@ protected PasswordAuthentication getPasswordAuthentication() { HttpsURLConnection httpsConn = (HttpsURLConnection) connection; httpsConn.setSSLSocketFactory( HttpClientManager.getTrustAllSslContext().getSocketFactory()); - httpsConn.setHostnameVerifier(HttpClientManager.getHostnameVerifier(isDebug(), log)); + httpsConn.setHostnameVerifier( + HttpClientManager.getHostnameVerifier(isDebug(), getLogChannel())); } // if we have HTTP headers, add them if (!Utils.isEmpty(headerName)) { - if (log.isDebug()) { - log.logDebug(BaseMessages.getString(PKG, "ActionHTTP.Log.HeadersProvided")); + if (isDebug()) { + logDebug(BaseMessages.getString(PKG, "ActionHTTP.Log.HeadersProvided")); } for (int j = 0; j < headerName.length; j++) { if (!Utils.isEmpty(headerValue[j])) { connection.setRequestProperty(resolve(headerName[j]), resolve(headerValue[j])); - if (log.isDebug()) { - log.logDebug( + if 
(isDebug()) { + logDebug( BaseMessages.getString( PKG, "ActionHTTP.Log.HeaderSet", @@ -477,7 +478,7 @@ protected PasswordAuthentication getPasswordAuthentication() { // See if we need to send a file over? if (!Utils.isEmpty(realUploadFile)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionHTTP.Log.SendingFile", realUploadFile)); } @@ -500,12 +501,12 @@ protected PasswordAuthentication getPasswordAuthentication() { fileStream = null; } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionHTTP.Log.FinishedSendingFile")); } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionHTTP.Log.StartReadingReply")); } diff --git a/plugins/actions/mail/src/main/java/org/apache/hop/workflow/actions/mail/ActionMail.java b/plugins/actions/mail/src/main/java/org/apache/hop/workflow/actions/mail/ActionMail.java index a72534afda5..139e64705ed 100644 --- a/plugins/actions/mail/src/main/java/org/apache/hop/workflow/actions/mail/ActionMail.java +++ b/plugins/actions/mail/src/main/java/org/apache/hop/workflow/actions/mail/ActionMail.java @@ -684,7 +684,7 @@ public Result execute(Result result, int nr) { props.put(CONST_MAIL + protocol + ".port", resolve(port)); } - if (log.isDebug()) { + if (isDebug()) { props.put("mail.debug", "true"); } @@ -693,7 +693,7 @@ public Result execute(Result result, int nr) { } Session session = Session.getInstance(props); - session.setDebug(log.isDebug()); + session.setDebug(isDebug()); try { // create a message @@ -1041,8 +1041,8 @@ public Result execute(Result result, int nr) { String realImageFile = resolve(embeddedimages[i]); String realcontenID = resolve(contentids[i]); if (messageText.indexOf("cid:" + realcontenID) < 0) { - if (log.isDebug()) { - log.logDebug("Image [" + realImageFile + "] is not used in message body!"); + if (isDebug()) { + logDebug("Image [" + realImageFile + "] is not used in message body!"); } } else { try { @@ -1051,7 +1051,7 @@ public Result execute(Result result, int nr) { if (imageFile.exists() && imageFile.getType() == FileType.FILE) { found = true; } else { - log.logError("We can not find [" + realImageFile + "] or it is not a file"); + logError("We can not find [" + realImageFile + "] or it is not a file"); } if (found) { // Create part for the image @@ -1064,12 +1064,12 @@ public Result execute(Result result, int nr) { // Add part to multi-part parts.addBodyPart(messageBodyPart); nrEmbeddedImages++; - log.logBasic("Image '" + fds.getName() + "' was embedded in message."); + logBasic("Image '" + fds.getName() + "' was embedded in message."); } } catch (Exception e) { - log.logError( + logError( "Error embedding image [" + realImageFile + "] in message : " + e.toString()); - log.logError(Const.getStackTracker(e)); + logError(Const.getStackTracker(e)); result.setNrErrors(1); } finally { if (imageFile != null) { diff --git a/plugins/actions/mailvalidator/src/main/java/org/apache/hop/workflow/actions/mailvalidator/ActionMailValidator.java b/plugins/actions/mailvalidator/src/main/java/org/apache/hop/workflow/actions/mailvalidator/ActionMailValidator.java index 0a76857f288..14134ac5fd6 100644 --- a/plugins/actions/mailvalidator/src/main/java/org/apache/hop/workflow/actions/mailvalidator/ActionMailValidator.java +++ b/plugins/actions/mailvalidator/src/main/java/org/apache/hop/workflow/actions/mailvalidator/ActionMailValidator.java @@ -155,19 +155,19 @@ public Result execute(Result previousResult, int nr) { String mailError 
= null; for (int i = 0; i < mailsCheck.length && !exitloop; i++) { String email = mailsCheck[i]; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionMailValidator.CheckingMail", email)); } // Check if address is valid MailValidationResult resultValidator = MailValidation.isAddressValid( - log, email, realSender, realDefaultSMTP, timeOut, smtpCheck); + getLogChannel(), email, realSender, realDefaultSMTP, timeOut, smtpCheck); mailIsValid = resultValidator.isValide(); mailError = resultValidator.getErrorMessage(); - if (log.isDetailed()) { + if (isDetailed()) { if (mailIsValid) { logDetailed(BaseMessages.getString(PKG, "ActionMailValidator.MailValid", email)); } else { diff --git a/plugins/actions/movefiles/src/main/java/org/apache/hop/workflow/actions/movefiles/ActionMoveFiles.java b/plugins/actions/movefiles/src/main/java/org/apache/hop/workflow/actions/movefiles/ActionMoveFiles.java index 427f9597aef..25a73b434a0 100644 --- a/plugins/actions/movefiles/src/main/java/org/apache/hop/workflow/actions/movefiles/ActionMoveFiles.java +++ b/plugins/actions/movefiles/src/main/java/org/apache/hop/workflow/actions/movefiles/ActionMoveFiles.java @@ -323,7 +323,7 @@ public Result execute(Result previousResult, int nr) throws HopException { successConditionBrokenExit = false; limitFiles = Const.toInt(resolve(getNrErrorsLessThan()), 10); - if (log.isDetailed()) { + if (isDetailed()) { if (simulate) { logDetailed(BaseMessages.getString(PKG, "ActionMoveFiles.Log.SimulationOn")); } @@ -347,7 +347,7 @@ public Result execute(Result previousResult, int nr) throws HopException { try { folder = HopVfs.getFileObject(moveToFolder, getVariables()); if (!folder.exists()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionMoveFiles.Log.Error.FolderMissing", moveToFolder)); @@ -386,7 +386,7 @@ public Result execute(Result previousResult, int nr) throws HopException { } if (argFromPrevious) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -418,7 +418,7 @@ public Result execute(Result previousResult, int nr) throws HopException { if (!Utils.isEmpty(vSourceFileFolderPrevious) && !Utils.isEmpty(vDestinationFileFolderPrevious)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -440,7 +440,7 @@ public Result execute(Result previousResult, int nr) throws HopException { updateErrors(); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -468,7 +468,7 @@ public Result execute(Result previousResult, int nr) throws HopException { if (!Utils.isEmpty(vSourceFileFolder[i]) && !Utils.isEmpty(vDestinationFileFolder[i])) { // ok we can process this file/folder - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -489,7 +489,7 @@ public Result execute(Result previousResult, int nr) throws HopException { updateErrors(); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -515,7 +515,7 @@ public Result execute(Result previousResult, int nr) throws HopException { } private void displayResults() { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("======================================="); logDetailed( BaseMessages.getString(PKG, "ActionMoveFiles.Log.Info.FilesInError", "" + nrErrors)); @@ -575,7 +575,7 @@ private boolean processFileFolder( // Source is a folder, destination is a file // WARNING !!! 
CAN NOT MOVE FOLDER TO FILE !!! - log.logError( + logError( BaseMessages.getString(PKG, "ActionMoveFiles.Log.Forbidden"), BaseMessages.getString( PKG, @@ -670,7 +670,7 @@ private boolean processFileFolder( return entrystatus; } else { // Both source and destination are folders - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(" "); logDetailed( BaseMessages.getString( @@ -821,7 +821,7 @@ private boolean moveFile( sourcefilename.moveTo(destinationfilename); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -841,7 +841,7 @@ private boolean moveFile( retval = true; } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionMoveFiles.Log.FileExists", destinationfilename.toString())); @@ -850,7 +850,7 @@ private boolean moveFile( if (!simulate) { sourcefilename.moveTo(destinationfilename); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -891,7 +891,7 @@ private boolean moveFile( if (!simulate) { sourcefilename.moveTo(destinationfile); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -913,7 +913,7 @@ private boolean moveFile( if (!simulate) { sourcefilename.delete(); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -944,7 +944,7 @@ private boolean moveFile( if (!simulate) { sourcefilename.moveTo(destinationfile); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -965,7 +965,7 @@ private boolean moveFile( if (!simulate) { sourcefilename.moveTo(destinationfile); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -996,7 +996,7 @@ private boolean moveFile( if (!simulate) { sourcefilename.moveTo(destinationfile); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -1205,7 +1205,7 @@ private void addFileToResultFilenames( toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); - if (log.isDebug()) { + if (isDebug()) { logDebug(" ------ "); logDebug( BaseMessages.getString( @@ -1213,7 +1213,7 @@ private void addFileToResultFilenames( } } catch (Exception e) { - log.logError( + logError( BaseMessages.getString(PKG, "ActionMoveFiles.Error.AddingToFilenameResult"), fileaddentry + "" + e.getMessage()); } @@ -1230,13 +1230,13 @@ private boolean createDestinationFolder(FileObject filefolder) { if (!folder.exists()) { if (createDestinationFolder) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionMoveFiles.Log.FolderNotExist", folder.getName().toString())); } folder.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionMoveFiles.Log.FolderWasCreated", folder.getName().toString())); diff --git a/plugins/actions/mssqlbulkload/src/main/java/org/apache/hop/workflow/actions/mssqlbulkload/ActionMssqlBulkLoad.java b/plugins/actions/mssqlbulkload/src/main/java/org/apache/hop/workflow/actions/mssqlbulkload/ActionMssqlBulkLoad.java index f59c043529b..bdbebcb50ca 100644 --- a/plugins/actions/mssqlbulkload/src/main/java/org/apache/hop/workflow/actions/mssqlbulkload/ActionMssqlBulkLoad.java +++ b/plugins/actions/mssqlbulkload/src/main/java/org/apache/hop/workflow/actions/mssqlbulkload/ActionMssqlBulkLoad.java @@ -326,7 +326,7 @@ public Result execute(Result previousResult, int nr) { File file = new File(realFilename); if 
(file.exists() && file.canRead()) { // User has specified an existing file, We can continue ... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionMssqlBulkLoad.FileExists.Label", realFilename)); } @@ -351,7 +351,7 @@ public Result execute(Result previousResult, int nr) { if (db.checkTableExists(realSchemaname, realTablename)) { // The table existe, We can continue ... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionMssqlBulkLoad.TableExists.Label", realTablename)); diff --git a/plugins/actions/mysqlbulkfile/src/main/java/org/apache/hop/workflow/actions/mysqlbulkfile/ActionMysqlBulkFile.java b/plugins/actions/mysqlbulkfile/src/main/java/org/apache/hop/workflow/actions/mysqlbulkfile/ActionMysqlBulkFile.java index 5173c5331fc..b2882287175 100644 --- a/plugins/actions/mysqlbulkfile/src/main/java/org/apache/hop/workflow/actions/mysqlbulkfile/ActionMysqlBulkFile.java +++ b/plugins/actions/mysqlbulkfile/src/main/java/org/apache/hop/workflow/actions/mysqlbulkfile/ActionMysqlBulkFile.java @@ -229,7 +229,7 @@ public Result execute(Result previousResult, int nr) { } else if (file.exists() && ifFileExists == 1) { // the file exists and user want to do nothing result.setResult(true); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, CONST_ACTION_MYSQL_BULK_FILE_FILE_EXISTS_1_LABEL) + realFilename @@ -265,7 +265,7 @@ public Result execute(Result previousResult, int nr) { } // User has specified an existing file, We can continue ... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, CONST_ACTION_MYSQL_BULK_FILE_FILE_EXISTS_1_LABEL) + realFilename @@ -283,7 +283,7 @@ public Result execute(Result previousResult, int nr) { if (db.checkTableExists(realSchemaname, realTablename)) { // The table existe, We can continue ... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionMysqlBulkFile.TableExists1.Label") + realTablename @@ -364,7 +364,7 @@ public Result execute(Result previousResult, int nr) { + " LOCK IN SHARE MODE"; try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(fileBulkFile); } // Run the SQL @@ -401,7 +401,7 @@ public Result execute(Result previousResult, int nr) { // Of course, the table should have been created already before the bulk load // operation result.setNrErrors(1); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionMysqlBulkFile.TableNotExists1.Label") + realTablename diff --git a/plugins/actions/mysqlbulkload/src/main/java/org/apache/hop/workflow/actions/mysqlbulkload/ActionMysqlBulkLoad.java b/plugins/actions/mysqlbulkload/src/main/java/org/apache/hop/workflow/actions/mysqlbulkload/ActionMysqlBulkLoad.java index 243ba9044a8..54e518e92a3 100644 --- a/plugins/actions/mysqlbulkload/src/main/java/org/apache/hop/workflow/actions/mysqlbulkload/ActionMysqlBulkLoad.java +++ b/plugins/actions/mysqlbulkload/src/main/java/org/apache/hop/workflow/actions/mysqlbulkload/ActionMysqlBulkLoad.java @@ -247,7 +247,7 @@ public Result execute(Result previousResult, int nr) { File file = new File(realFilename); if ((file.exists() && file.canRead()) || isLocalInfile() == false) { // User has specified an existing file, We can continue ... 
- if (log.isDetailed()) { + if (isDetailed()) { logDetailed("File [" + realFilename + "] exists."); } @@ -262,7 +262,7 @@ public Result execute(Result previousResult, int nr) { if (db.checkTableExists(realSchemaname, realTablename)) { // The table existe, We can continue ... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Table [" + realTablename + "] exists."); } @@ -403,7 +403,7 @@ public Result execute(Result previousResult, int nr) { // Of course, the table should have been created already before the bulk load // operation result.setNrErrors(1); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Table [" + realTablename + "] doesn't exist!"); } } diff --git a/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpdecryptfiles/ActionPGPDecryptFiles.java b/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpdecryptfiles/ActionPGPDecryptFiles.java index 52abb1207d3..1974eb70d38 100644 --- a/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpdecryptfiles/ActionPGPDecryptFiles.java +++ b/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpdecryptfiles/ActionPGPDecryptFiles.java @@ -393,7 +393,7 @@ public Result execute(Result previousResult, int nr) throws HopException { } } - gpg = new GPG(resolve(gpgLocation), log, getVariables()); + gpg = new GPG(resolve(gpgLocation), getLogChannel(), getVariables()); if (argFromPrevious && isDetailed()) { logDetailed( diff --git a/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpencryptfiles/ActionPGPEncryptFiles.java b/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpencryptfiles/ActionPGPEncryptFiles.java index e7b68d99d32..c9af524ebdf 100644 --- a/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpencryptfiles/ActionPGPEncryptFiles.java +++ b/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpencryptfiles/ActionPGPEncryptFiles.java @@ -432,7 +432,7 @@ public Result execute(Result previousResult, int nr) { } } - gpg = new GPG(resolve(gpgLocation), log, getVariables()); + gpg = new GPG(resolve(gpgLocation), getLogChannel(), getVariables()); if (argFromPrevious && isDetailed()) { logDetailed( diff --git a/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpverify/ActionPGPVerify.java b/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpverify/ActionPGPVerify.java index 60c4a992b26..23094452164 100644 --- a/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpverify/ActionPGPVerify.java +++ b/plugins/actions/pgpfiles/src/main/java/org/apache/hop/workflow/actions/pgpverify/ActionPGPVerify.java @@ -166,7 +166,7 @@ public Result execute(Result previousResult, int nr) { } file = HopVfs.getFileObject(realFilename, getVariables()); - GPG gpg = new GPG(resolve(getGPGLocation()), log, getVariables()); + GPG gpg = new GPG(resolve(getGPGLocation()), getLogChannel(), getVariables()); if (useDetachedfilename()) { String signature = resolve(getDetachedfilename()); diff --git a/plugins/actions/ping/src/main/java/org/apache/hop/workflow/actions/ping/ActionPing.java b/plugins/actions/ping/src/main/java/org/apache/hop/workflow/actions/ping/ActionPing.java index 5b3b3f2e44e..0a8b9ca6110 100644 --- a/plugins/actions/ping/src/main/java/org/apache/hop/workflow/actions/ping/ActionPing.java +++ b/plugins/actions/ping/src/main/java/org/apache/hop/workflow/actions/ping/ActionPing.java @@ -205,13 +205,13 @@ public 
Result execute(Result previousResult, int nr) { // Perform a system (Java) ping ... status = systemPing(hostname, timeoutInt); if (status) { - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString(PKG, "ActionPing.SystemPing"), BaseMessages.getString(PKG, CONST_ACTION_PING_OK_LABEL, hostname)); } } else { - log.logError( + logError( BaseMessages.getString(PKG, "ActionPing.SystemPing"), BaseMessages.getString(PKG, CONST_ACTION_PING_NOK_LABEL, hostname)); } @@ -220,13 +220,13 @@ public Result execute(Result previousResult, int nr) { // Perform a classic ping .. status = classicPing(hostname, packets); if (status) { - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString(PKG, "ActionPing.ClassicPing"), BaseMessages.getString(PKG, CONST_ACTION_PING_OK_LABEL, hostname)); } } else { - log.logError( + logError( BaseMessages.getString(PKG, "ActionPing.ClassicPing"), BaseMessages.getString(PKG, CONST_ACTION_PING_NOK_LABEL, hostname)); } @@ -235,7 +235,7 @@ public Result execute(Result previousResult, int nr) { logError(BaseMessages.getString(PKG, "ActionPing.Error.Label") + ex.getMessage()); } if (status) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, CONST_ACTION_PING_OK_LABEL, hostname)); } result.setNrErrors(0); @@ -262,7 +262,7 @@ private boolean systemPing(String hostname, int timeout) { return retval; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionPing.HostName", address.getHostName())); logDetailed( BaseMessages.getString(PKG, "ActionPing.HostAddress", address.getHostAddress())); @@ -286,7 +286,7 @@ private boolean classicPing(String hostname, int nrpackets) { cmdPing += hostname + " " + NIX_CHAR + " " + nrpackets; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionPing.NbrPackets.Label", "" + nrpackets)); logDetailed(BaseMessages.getString(PKG, "ActionPing.ExecClassicPing.Label", cmdPing)); } @@ -296,7 +296,7 @@ private boolean classicPing(String hostname, int nrpackets) { } catch (InterruptedException e) { logDetailed(BaseMessages.getString(PKG, "ActionPing.ClassicPingInterrupted")); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionPing.Gettingresponse.Label", hostname)); } // Get ping response @@ -304,7 +304,7 @@ private boolean classicPing(String hostname, int nrpackets) { // Read response lines while ((lignePing = br.readLine()) != null) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(lignePing); } } diff --git a/plugins/actions/pipeline/src/main/java/org/apache/hop/workflow/actions/pipeline/ActionPipeline.java b/plugins/actions/pipeline/src/main/java/org/apache/hop/workflow/actions/pipeline/ActionPipeline.java index 5e810e7919d..f2a8f06806a 100644 --- a/plugins/actions/pipeline/src/main/java/org/apache/hop/workflow/actions/pipeline/ActionPipeline.java +++ b/plugins/actions/pipeline/src/main/java/org/apache/hop/workflow/actions/pipeline/ActionPipeline.java @@ -493,8 +493,7 @@ public Result execute(Result result, int nr) throws HopException { } runConfiguration = resolve(runConfiguration); - log.logBasic( - BaseMessages.getString(PKG, "ActionPipeline.RunConfig.Message", runConfiguration)); + logBasic(BaseMessages.getString(PKG, "ActionPipeline.RunConfig.Message", runConfiguration)); // Create the pipeline from meta-data // diff --git 
a/plugins/actions/repeat/src/main/java/org/apache/hop/workflow/actions/repeat/Repeat.java b/plugins/actions/repeat/src/main/java/org/apache/hop/workflow/actions/repeat/Repeat.java index 313d09f52a6..88253131cc3 100644 --- a/plugins/actions/repeat/src/main/java/org/apache/hop/workflow/actions/repeat/Repeat.java +++ b/plugins/actions/repeat/src/main/java/org/apache/hop/workflow/actions/repeat/Repeat.java @@ -175,8 +175,7 @@ public Result execute(Result prevResult, int nr) throws HopException { executionResult = executePipelineOrWorkflow(realFilename, nr, executionResult, repetitionNr); Result result = executionResult.result; if (!result.getResult() || result.getNrErrors() > 0 || result.isStopped()) { - log.logError( - "The repeating work encountered and error or was stopped. This ends the loop."); + logError("The repeating work encountered and error or was stopped. This ends the loop."); // On an false result, stop the loop // @@ -393,7 +392,7 @@ private ExecutionResult executeWorkflow( boolean flagSet = workflow.getExtensionDataMap().get(REPEAT_END_LOOP) != null; if (flagSet) { - log.logBasic("End loop flag found, stopping loop."); + logBasic("End loop flag found, stopping loop."); } return new ExecutionResult(result, workflow, flagSet); diff --git a/plugins/actions/sendnagiospassivecheck/src/main/java/org/apache/hop/workflow/actions/sendnagiospassivecheck/ActionSendNagiosPassiveCheck.java b/plugins/actions/sendnagiospassivecheck/src/main/java/org/apache/hop/workflow/actions/sendnagiospassivecheck/ActionSendNagiosPassiveCheck.java index 4e5939875b1..9c9a7b2edff 100644 --- a/plugins/actions/sendnagiospassivecheck/src/main/java/org/apache/hop/workflow/actions/sendnagiospassivecheck/ActionSendNagiosPassiveCheck.java +++ b/plugins/actions/sendnagiospassivecheck/src/main/java/org/apache/hop/workflow/actions/sendnagiospassivecheck/ActionSendNagiosPassiveCheck.java @@ -398,7 +398,7 @@ public String getConnectionTimeOut() { @Override public Result execute(Result previousResult, int nr) { - log.logBasic(BaseMessages.getString(PKG, "ActionSendNagiosPassiveCheck.Started", serverName)); + logBasic(BaseMessages.getString(PKG, "ActionSendNagiosPassiveCheck.Started", serverName)); Result result = previousResult; result.setNrErrors(1); @@ -491,7 +491,7 @@ public Result execute(Result previousResult, int nr) { result.setResult(true); } catch (Exception e) { - log.logError( + logError( BaseMessages.getString(PKG, "ActionSendNagiosPassiveCheck.ErrorGetting", e.toString())); } diff --git a/plugins/actions/setvariables/src/main/java/org/apache/hop/workflow/actions/setvariables/ActionSetVariables.java b/plugins/actions/setvariables/src/main/java/org/apache/hop/workflow/actions/setvariables/ActionSetVariables.java index dc879ddb653..3acf62a2e5d 100644 --- a/plugins/actions/setvariables/src/main/java/org/apache/hop/workflow/actions/setvariables/ActionSetVariables.java +++ b/plugins/actions/setvariables/src/main/java/org/apache/hop/workflow/actions/setvariables/ActionSetVariables.java @@ -313,7 +313,7 @@ public Result execute(Result result, int nr) throws HopException { } // ok we can process this line - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionSetVariables.Log.SetVariableToValue", name, value)); diff --git a/plugins/actions/shell/src/main/java/org/apache/hop/workflow/actions/shell/ActionShell.java b/plugins/actions/shell/src/main/java/org/apache/hop/workflow/actions/shell/ActionShell.java index a771c9b0e42..d7eab685f99 100644 --- 
a/plugins/actions/shell/src/main/java/org/apache/hop/workflow/actions/shell/ActionShell.java +++ b/plugins/actions/shell/src/main/java/org/apache/hop/workflow/actions/shell/ActionShell.java @@ -310,7 +310,7 @@ public Result execute(Result result, int nr) throws HopException { shellLogLevel = logFileLevel; } - log.setLogLevel(shellLogLevel); + setLogLevel(shellLogLevel); result.setEntryNr(nr); @@ -329,7 +329,7 @@ public Result execute(Result result, int nr) throws HopException { boolean first = true; List rows = result.getRows(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionShell.Log.FoundPreviousRows", "" + (rows != null ? rows.size() : 0))); @@ -413,7 +413,7 @@ private void executeShell(Result result, List cmdRows, String[] String[] base = null; List cmds = new ArrayList<>(); - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ActionShell.RunningOn", Const.getSystemOs())); } @@ -529,7 +529,7 @@ private void executeShell(Result result, List cmdRows, String[] } command.append(it.next()); } - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ActionShell.ExecCommand", command.toString())); } @@ -551,10 +551,12 @@ private void executeShell(Result result, List cmdRows, String[] Process proc = procBuilder.start(); // any error message? - StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), "(stderr)", true); + StreamLogger errorLogger = + new StreamLogger(getLogChannel(), proc.getErrorStream(), "(stderr)", true); // any output? - StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), "(stdout)"); + StreamLogger outputLogger = + new StreamLogger(getLogChannel(), proc.getInputStream(), "(stdout)"); // kick them off Thread errorLoggerThread = new Thread(errorLogger); @@ -563,14 +565,14 @@ private void executeShell(Result result, List cmdRows, String[] outputLoggerThread.start(); proc.waitFor(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ActionShell.CommandFinished", command.toString())); } // What's the exit status? result.setExitStatus(proc.exitValue()); if (result.getExitStatus() != 0) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -654,9 +656,9 @@ private FileObject createTemporaryShellFile(FileObject tempFile, String fileCont Process proc = procBuilder.start(); // Eat/log stderr/stdout all messages in a different thread... 
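For reference, a minimal standalone sketch of the StreamLogger wiring used in these ActionShell hunks. The org.apache.hop import paths and the LogChannel(String) constructor are assumptions; the StreamLogger constructor shapes are the ones visible in the hunks, where an action would pass getLogChannel() instead of building its own channel.

import org.apache.hop.core.logging.ILogChannel;
import org.apache.hop.core.logging.LogChannel;
import org.apache.hop.core.util.StreamLogger;

public class ProcessLogSketch {
  public static void main(String[] args) throws Exception {
    // Standalone channel for the sketch; inside an action/transform you would pass getLogChannel().
    ILogChannel log = new LogChannel("ProcessLogSketch");
    Process proc = new ProcessBuilder("hostname").start();

    // stderr entries are flagged as errors (boolean true), stdout is logged as regular output.
    StreamLogger errorLogger = new StreamLogger(log, proc.getErrorStream(), "(stderr)", true);
    StreamLogger outputLogger = new StreamLogger(log, proc.getInputStream(), "(stdout)");

    new Thread(errorLogger).start();
    new Thread(outputLogger).start();
    proc.waitFor();
  }
}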
StreamLogger errorLogger = - new StreamLogger(log, proc.getErrorStream(), toString() + " (stderr)"); + new StreamLogger(getLogChannel(), proc.getErrorStream(), toString() + " (stderr)"); StreamLogger outputLogger = - new StreamLogger(log, proc.getInputStream(), toString() + " (stdout)"); + new StreamLogger(getLogChannel(), proc.getInputStream(), toString() + " (stdout)"); new Thread(errorLogger).start(); new Thread(outputLogger).start(); proc.waitFor(); diff --git a/plugins/actions/snmptrap/src/main/java/org/apache/hop/workflow/actions/snmptrap/ActionSNMPTrap.java b/plugins/actions/snmptrap/src/main/java/org/apache/hop/workflow/actions/snmptrap/ActionSNMPTrap.java index 5186c965fea..fc2816ba772 100644 --- a/plugins/actions/snmptrap/src/main/java/org/apache/hop/workflow/actions/snmptrap/ActionSNMPTrap.java +++ b/plugins/actions/snmptrap/src/main/java/org/apache/hop/workflow/actions/snmptrap/ActionSNMPTrap.java @@ -332,7 +332,7 @@ public Result execute(Result previousResult, int nr) { target.setVersion(SnmpConstants.version1); target.setAddress(udpAddress); if (target.getAddress().isValid()) { - if (log.isDebug()) { + if (isDebug()) { logDebug("Valid IP address"); } } else { @@ -359,7 +359,7 @@ public Result execute(Result previousResult, int nr) { transMap.listen(); usertarget.setAddress(udpAddress); if (usertarget.getAddress().isValid()) { - if (log.isDebug()) { + if (isDebug()) { logDebug("Valid IP address"); } } else { @@ -392,7 +392,7 @@ public Result execute(Result previousResult, int nr) { new USM( SecurityProtocols.getInstance(), new OctetString(MPv3.createLocalEngineID()), 0); usm.addUser(new OctetString(userName), uu); - if (log.isDebug()) { + if (isDebug()) { logDebug("Valid Usm"); } } @@ -409,7 +409,7 @@ public Result execute(Result previousResult, int nr) { response = snmp.send(pdu, usertarget); } - if (response != null && log.isDebug()) { + if (response != null && isDebug()) { logDebug("Received response from: " + response.getPeerAddress() + response.toString()); } diff --git a/plugins/actions/tableexists/src/main/java/org/apache/hop/workflow/actions/tableexists/ActionTableExists.java b/plugins/actions/tableexists/src/main/java/org/apache/hop/workflow/actions/tableexists/ActionTableExists.java index 5bf4ce99aa0..61faf721044 100644 --- a/plugins/actions/tableexists/src/main/java/org/apache/hop/workflow/actions/tableexists/ActionTableExists.java +++ b/plugins/actions/tableexists/src/main/java/org/apache/hop/workflow/actions/tableexists/ActionTableExists.java @@ -131,13 +131,13 @@ public Result execute(Result previousResult, int nr) { String realSchemaName = resolve(schemaName); if (db.checkTableExists(realSchemaName, realTableName)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "TableExists.Log.TableExists", realTableName)); } result.setResult(true); } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "TableExists.Log.TableNotExists", realTableName)); } diff --git a/plugins/actions/truncatetables/src/main/java/org/apache/hop/workflow/actions/truncatetables/ActionTruncateTables.java b/plugins/actions/truncatetables/src/main/java/org/apache/hop/workflow/actions/truncatetables/ActionTruncateTables.java index 9239db6fd7d..4d5c345f7b3 100644 --- a/plugins/actions/truncatetables/src/main/java/org/apache/hop/workflow/actions/truncatetables/ActionTruncateTables.java +++ b/plugins/actions/truncatetables/src/main/java/org/apache/hop/workflow/actions/truncatetables/ActionTruncateTables.java @@ -106,7 
+106,7 @@ private boolean truncateTables(String tableName, String schemaName, Database db) db.truncateTable(tableName); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionTruncateTables.Log.TableTruncated", tableName)); } @@ -136,7 +136,7 @@ public Result execute(Result previousResult, int nr) { nrSuccess = 0; if (argFromPrevious) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -165,7 +165,7 @@ this, this, getParentWorkflowMeta().findDatabase(connection, getVariables()))) { String schemaNamePrevious = resultRow.getString(1, null); if (!Utils.isEmpty(tableNamePrevious)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -194,7 +194,7 @@ this, this, getParentWorkflowMeta().findDatabase(connection, getVariables()))) { String realTableName = resolve(tableItem.getTableName()); String realSchemaName = resolve(tableItem.getSchemaName()); if (!Utils.isEmpty(realTableName)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionTruncateTables.ProcessingArg", realTableName, realSchemaName)); diff --git a/plugins/actions/unzip/src/main/java/org/apache/hop/workflow/actions/unzip/ActionUnZip.java b/plugins/actions/unzip/src/main/java/org/apache/hop/workflow/actions/unzip/ActionUnZip.java index 643936df8a0..8de237dc077 100644 --- a/plugins/actions/unzip/src/main/java/org/apache/hop/workflow/actions/unzip/ActionUnZip.java +++ b/plugins/actions/unzip/src/main/java/org/apache/hop/workflow/actions/unzip/ActionUnZip.java @@ -280,7 +280,7 @@ public Result execute(Result previousResult, int nr) { successConditionBrokenExit = false; if (isfromprevious) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -320,24 +320,24 @@ public Result execute(Result previousResult, int nr) { if (!targetdir.exists()) { if (createfolder) { targetdir.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionUnZip.Log.TargetFolderCreated", realTargetdirectory)); } } else { - log.logError(BaseMessages.getString(PKG, "ActionUnZip.TargetFolderNotFound.Label")); + logError(BaseMessages.getString(PKG, "ActionUnZip.TargetFolderNotFound.Label")); exitaction = true; } } else { if (targetdir.getType() != FileType.FOLDER) { - log.logError( + logError( BaseMessages.getString( PKG, "ActionUnZip.TargetFolderNotFolder.Label", realTargetdirectory)); exitaction = true; } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionUnZip.TargetFolderExists.Label", realTargetdirectory)); @@ -349,21 +349,20 @@ public Result execute(Result previousResult, int nr) { // movetodirectory must be provided if (afterunzip == 2) { if (Utils.isEmpty(movetodirectory)) { - log.logError(BaseMessages.getString(PKG, "ActionUnZip.MoveToDirectoryEmpty.Label")); + logError(BaseMessages.getString(PKG, "ActionUnZip.MoveToDirectoryEmpty.Label")); exitaction = true; } else { movetodir = HopVfs.getFileObject(realMovetodirectory, getVariables()); if (!(movetodir.exists()) || movetodir.getType() != FileType.FOLDER) { if (createMoveToDirectory) { movetodir.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionUnZip.Log.MoveToFolderCreated", realMovetodirectory)); } } else { - log.logError( - BaseMessages.getString(PKG, "ActionUnZip.MoveToDirectoryNotExists.Label")); + 
logError(BaseMessages.getString(PKG, "ActionUnZip.MoveToDirectoryNotExists.Label")); exitaction = true; } } @@ -420,18 +419,18 @@ public Result execute(Result previousResult, int nr) { } else { fileObject = HopVfs.getFileObject(realFilenameSource, getVariables()); if (!fileObject.exists()) { - log.logError( + logError( BaseMessages.getString( PKG, "ActionUnZip.ZipFile.NotExists.Label", realFilenameSource)); return result; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionUnZip.Zip_FileExists.Label", realFilenameSource)); } if (Utils.isEmpty(sourcedirectory)) { - log.logError(BaseMessages.getString(PKG, "ActionUnZip.SourceFolderNotFound.Label")); + logError(BaseMessages.getString(PKG, "ActionUnZip.SourceFolderNotFound.Label")); return result; } @@ -447,7 +446,7 @@ public Result execute(Result previousResult, int nr) { realWildcardSource); } } catch (Exception e) { - log.logError( + logError( BaseMessages.getString( PKG, "ActionUnZip.ErrorUnzip.Label", realFilenameSource, e.getMessage())); updateErrors(); @@ -486,7 +485,7 @@ public Result execute(Result previousResult, int nr) { } private void displayResults() { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("======================================="); logDetailed(BaseMessages.getString(PKG, "ActionUnZip.Log.Info.FilesInError", "" + nrErrors)); logDetailed( @@ -600,7 +599,7 @@ private boolean unzipFile( String unzipToFolder = realTargetdirectory; try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionUnZip.Log.ProcessingFile", sourceFileObject.toString())); @@ -622,7 +621,7 @@ private boolean unzipFile( if (!rootfolder.exists()) { try { rootfolder.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionUnZip.Log.RootFolderCreated", folderName)); } @@ -683,7 +682,7 @@ public boolean includeFile(FileSelectInfo info) { synchronized (HopVfs.getFileSystemManager(getVariables())) { FileObject newFileObject = null; try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -700,7 +699,7 @@ public boolean includeFile(FileSelectInfo info) { if (item.getType().equals(FileType.FOLDER)) { // Directory // - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionUnZip.CreatingDirectory.Label", newFileName)); @@ -732,7 +731,7 @@ public boolean includeFile(FileSelectInfo info) { boolean take = takeThisFile(item, newFileName); if (getIt && !getItexclude && take) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -755,7 +754,7 @@ public boolean includeFile(FileSelectInfo info) { + StringUtil.getFormattedDateTimeNow(true) + newFileName.substring(lastindexOfDot, lenstring); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionUnZip.Log.CreatingUniqFile", newFileName)); @@ -840,7 +839,7 @@ public boolean includeFile(FileSelectInfo info) { retval = true; } catch (Exception e) { updateErrors(); - log.logError( + logError( BaseMessages.getString( PKG, "ActionUnZip.ErrorUnzip.Label", sourceFileObject.toString(), e.getMessage()), e); @@ -863,7 +862,7 @@ private void doUnzipPostProcessing( PKG, "ActionUnZip.Cant_Delete_File.Label", sourceFileObject.toString())); } // File deleted - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionUnZip.File_Deleted.Label", sourceFileObject.toString())); @@ -879,7 +878,7 @@ private 
void doUnzipPostProcessing( sourceFileObject.moveTo(destFile); // File moved - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -962,16 +961,16 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) boolean retval = false; File destination = new File(destinationFile); if (!destination.exists()) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "ActionUnZip.Log.CanNotFindFile", destinationFile)); } return true; } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "ActionUnZip.Log.FileExists", destinationFile)); } if (iffileexist == IF_FILE_EXISTS_SKIP) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "ActionUnZip.Log.FileSkip", destinationFile)); } return false; @@ -984,7 +983,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } if (iffileexist == IF_FILE_EXISTS_OVERWRITE) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "ActionUnZip.Log.FileOverwrite", destinationFile)); } return true; @@ -995,7 +994,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) if (iffileexist == IF_FILE_EXISTS_OVERWRITE_DIFF_SIZE) { if (entrySize != destinationSize) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1007,7 +1006,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } return true; } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1022,7 +1021,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_EQUAL_SIZE) { if (entrySize == destinationSize) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1034,7 +1033,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } return true; } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1049,7 +1048,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_BIG) { if (entrySize > destinationSize) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1061,7 +1060,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } return true; } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1076,7 +1075,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_BIG_EQUAL) { if (entrySize >= destinationSize) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1088,7 +1087,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } return true; } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1103,7 +1102,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_SMALL) { if (entrySize < destinationSize) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1115,7 +1114,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } return true; } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1130,7 +1129,7 @@ private boolean 
takeThisFile(FileObject sourceFile, String destinationFile) } if (iffileexist == IF_FILE_EXISTS_OVERWRITE_ZIP_SMALL_EQUAL) { if (entrySize <= destinationSize) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -1142,7 +1141,7 @@ private boolean takeThisFile(FileObject sourceFile, String destinationFile) } return true; } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, diff --git a/plugins/actions/waitforfile/src/main/java/org/apache/hop/workflow/actions/waitforfile/ActionWaitForFile.java b/plugins/actions/waitforfile/src/main/java/org/apache/hop/workflow/actions/waitforfile/ActionWaitForFile.java index 4e10ecedefc..7d57fa9a944 100644 --- a/plugins/actions/waitforfile/src/main/java/org/apache/hop/workflow/actions/waitforfile/ActionWaitForFile.java +++ b/plugins/actions/waitforfile/src/main/java/org/apache/hop/workflow/actions/waitforfile/ActionWaitForFile.java @@ -138,7 +138,7 @@ public Result execute(Result previousResult, int nr) { // if (iMaximumTimeout < 0) { iMaximumTimeout = Const.toInt(DEFAULT_MAXIMUM_TIMEOUT, 0); - if (log.isBasic()) { + if (isBasic()) { logBasic("Maximum timeout invalid, reset to " + iMaximumTimeout); } } @@ -146,17 +146,17 @@ public Result execute(Result previousResult, int nr) { if (iCycleTime < 1) { // If lower than 1 set to the default iCycleTime = Const.toInt(DEFAULT_CHECK_CYCLE_TIME, 1); - if (log.isBasic()) { + if (isBasic()) { logBasic("Check cycle time invalid, reset to " + iCycleTime); } } if (iMaximumTimeout == 0) { - if (log.isBasic()) { + if (isBasic()) { logBasic("Waiting indefinitely for file [" + realFilename + "]"); } } else { - if (log.isBasic()) { + if (isBasic()) { logBasic("Waiting " + iMaximumTimeout + " seconds for file [" + realFilename + "]"); } } @@ -167,7 +167,7 @@ public Result execute(Result previousResult, int nr) { if (fileObject.exists()) { // file exists, we're happy to exit - if (log.isBasic()) { + if (isBasic()) { logBasic("Detected file [" + realFilename + "] within timeout"); } result.setResult(true); @@ -192,12 +192,12 @@ public Result execute(Result previousResult, int nr) { // file doesn't exist after timeout, either true or false if (isSuccessOnTimeout()) { - if (log.isBasic()) { + if (isBasic()) { logBasic("Didn't detect file [" + realFilename + "] before timeout, success"); } result.setResult(true); } else { - if (log.isBasic()) { + if (isBasic()) { logBasic("Didn't detect file [" + realFilename + "] before timeout, failure"); } result.setResult(false); @@ -219,7 +219,7 @@ public Result execute(Result previousResult, int nr) { try { if (sleepTime > 0) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Sleeping " + sleepTime @@ -241,10 +241,10 @@ public Result execute(Result previousResult, int nr) { long oldSize = -1; long newSize = fileObject.getContent().getSize(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("File [" + realFilename + "] is " + newSize + " bytes long"); } - if (log.isBasic()) { + if (isBasic()) { logBasic( "Waiting until file [" + realFilename @@ -254,7 +254,7 @@ public Result execute(Result previousResult, int nr) { } while (oldSize != newSize && !parentWorkflow.isStopped()) { try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Sleeping " + iCycleTime @@ -270,11 +270,11 @@ public Result execute(Result previousResult, int nr) { } oldSize = newSize; newSize = fileObject.getContent().getSize(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("File [" + realFilename + "] is " + 
newSize + " bytes long"); } } - if (log.isBasic()) { + if (isBasic()) { logBasic("Stopped waiting for file [" + realFilename + CONST_TO_STOP_GROWING); } } diff --git a/plugins/actions/waitforsql/src/main/java/org/apache/hop/workflow/actions/waitforsql/ActionWaitForSql.java b/plugins/actions/waitforsql/src/main/java/org/apache/hop/workflow/actions/waitforsql/ActionWaitForSql.java index 8d460d2a1d0..a314e1c96e8 100644 --- a/plugins/actions/waitforsql/src/main/java/org/apache/hop/workflow/actions/waitforsql/ActionWaitForSql.java +++ b/plugins/actions/waitforsql/src/main/java/org/apache/hop/workflow/actions/waitforsql/ActionWaitForSql.java @@ -251,7 +251,7 @@ public Result execute(Result previousResult, int nr) { if (useVars) { realCustomSql = resolve(realCustomSql); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionWaitForSQL.Log.EnteredCustomSQL", realCustomSql)); } @@ -277,7 +277,7 @@ public Result execute(Result previousResult, int nr) { long timeStart = System.currentTimeMillis() / 1000; int nrRowsLimit = Const.toInt(resolve(rowsCountValue), 0); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionWaitForSQL.Log.nrRowsLimit", "" + nrRowsLimit)); } @@ -343,7 +343,7 @@ public Result execute(Result previousResult, int nr) { } try { if (sleepTime > 0) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Sleeping " + sleepTime + " seconds before next check for SQL data"); } Thread.sleep(sleepTime * 1000); @@ -398,7 +398,7 @@ protected boolean sqlDataOK( } if (countStatement != null) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionWaitForSQL.Log.RunSQLStatement", countStatement)); } @@ -408,7 +408,7 @@ protected boolean sqlDataOK( if (ar != null) { rowsCount = ar.size(); } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ActionWaitForSQL.Log.customSQLreturnedNothing", countStatement)); @@ -421,7 +421,7 @@ protected boolean sqlDataOK( rowsCount = row.getInteger(0); } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionWaitForSQL.Log.NrRowsReturned", "" + rowsCount)); } diff --git a/plugins/actions/workflow/src/main/java/org/apache/hop/workflow/actions/workflow/ActionWorkflow.java b/plugins/actions/workflow/src/main/java/org/apache/hop/workflow/actions/workflow/ActionWorkflow.java index 91da5147eb8..c7121c8ca1e 100644 --- a/plugins/actions/workflow/src/main/java/org/apache/hop/workflow/actions/workflow/ActionWorkflow.java +++ b/plugins/actions/workflow/src/main/java/org/apache/hop/workflow/actions/workflow/ActionWorkflow.java @@ -492,7 +492,8 @@ public Result execute(Result result, int nr) throws HopException { // Link both ways! 
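A hedged sketch of handing an action's log channel to a worker that runs on its own thread, the same shape as the ActionWorkflowRunner construction just below; the import paths and the LogChannel(String) constructor are assumptions, while the ILogChannel calls are the ones appearing in these hunks.

import org.apache.hop.core.logging.ILogChannel;
import org.apache.hop.core.logging.LogChannel;

public class ChannelAwareRunner implements Runnable {
  private final ILogChannel log;

  public ChannelAwareRunner(ILogChannel log) {
    // The caller chooses the channel; an action would hand over getLogChannel().
    this.log = log;
  }

  @Override
  public void run() {
    if (log.isDetailed()) {
      log.logDetailed("Worker started on thread " + Thread.currentThread().getName());
    }
  }

  public static void main(String[] args) throws InterruptedException {
    Thread worker = new Thread(new ChannelAwareRunner(new LogChannel("ChannelAwareRunner")));
    worker.start();
    worker.join();
  }
}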
workflow.getWorkflowTracker().setParentWorkflowTracker(parentWorkflow.getWorkflowTracker()); - ActionWorkflowRunner runner = new ActionWorkflowRunner(workflow, result, nr, log); + ActionWorkflowRunner runner = + new ActionWorkflowRunner(workflow, result, nr, getLogChannel()); Thread workflowRunnerThread = new Thread(runner); // added UUID to thread name, otherwise threads do share names if workflows actions are // executed in parallel in a @@ -590,23 +591,23 @@ private boolean createParentFolder(String filename) { parentfolder = HopVfs.getFileObject(filename).getParent(); if (!parentfolder.exists()) { if (createParentFolder) { - if (log.isDebug()) { - log.logDebug( + if (isDebug()) { + logDebug( BaseMessages.getString( PKG, "ActionWorkflow.Log.ParentLogFolderNotExist", parentfolder.getName().toString())); } parentfolder.createFolder(); - if (log.isDebug()) { - log.logDebug( + if (isDebug()) { + logDebug( BaseMessages.getString( PKG, "ActionWorkflow.Log.ParentLogFolderCreated", parentfolder.getName().toString())); } } else { - log.logError( + logError( BaseMessages.getString( PKG, "ActionWorkflow.Log.ParentLogFolderNotExist", @@ -614,8 +615,8 @@ private boolean createParentFolder(String filename) { resultat = false; } } else { - if (log.isDebug()) { - log.logDebug( + if (isDebug()) { + logDebug( BaseMessages.getString( PKG, "ActionWorkflow.Log.ParentLogFolderExists", @@ -624,7 +625,7 @@ private boolean createParentFolder(String filename) { } } catch (Exception e) { resultat = false; - log.logError( + logError( BaseMessages.getString(PKG, "ActionWorkflow.Error.ChekingParentLogFolderTitle"), BaseMessages.getString( PKG, diff --git a/plugins/actions/writetolog/src/main/java/org/apache/hop/workflow/actions/writetolog/ActionWriteToLog.java b/plugins/actions/writetolog/src/main/java/org/apache/hop/workflow/actions/writetolog/ActionWriteToLog.java index 61670bdf804..cd206145cfd 100644 --- a/plugins/actions/writetolog/src/main/java/org/apache/hop/workflow/actions/writetolog/ActionWriteToLog.java +++ b/plugins/actions/writetolog/src/main/java/org/apache/hop/workflow/actions/writetolog/ActionWriteToLog.java @@ -67,13 +67,13 @@ public ActionWriteToLog() { @Override public Object clone() { - ActionWriteToLog je = (ActionWriteToLog) super.clone(); - return je; + ActionWriteToLog action = (ActionWriteToLog) super.clone(); + return action; } private class LogWriterObject implements ILoggingObject { - private ILogChannel writerLog; + private ILogChannel log; private LogLevel logLevel; private ILoggingObject parent; private String subject; @@ -83,8 +83,8 @@ public LogWriterObject(String subject, ILoggingObject parent, LogLevel logLevel) this.subject = subject; this.parent = parent; this.logLevel = logLevel; - this.writerLog = new LogChannel(this, parent); - this.containerObjectId = writerLog.getContainerObjectId(); + this.log = new LogChannel(this, parent); + this.containerObjectId = log.getContainerObjectId(); } @Override @@ -94,7 +94,7 @@ public String getFilename() { @Override public String getLogChannelId() { - return writerLog.getLogChannelId(); + return log.getLogChannelId(); } @Override @@ -109,7 +109,7 @@ public String getObjectName() { @Override public LoggingObjectType getObjectType() { - return LoggingObjectType.TRANSFORM; + return LoggingObjectType.ACTION; } @Override @@ -118,7 +118,7 @@ public ILoggingObject getParent() { } public ILogChannel getLogChannel() { - return writerLog; + return log; } @Override @@ -142,22 +142,22 @@ public Date getRegistrationDate() { @Override public boolean 
isGatheringMetrics() { - return log.isGatheringMetrics(); + return parent.isGatheringMetrics(); } @Override public void setGatheringMetrics(boolean gatheringMetrics) { - log.setGatheringMetrics(gatheringMetrics); + parent.setGatheringMetrics(gatheringMetrics); } @Override public boolean isForcingSeparateLogging() { - return log.isForcingSeparateLogging(); + return parent.isForcingSeparateLogging(); } @Override public void setForcingSeparateLogging(boolean forcingSeparateLogging) { - log.setForcingSeparateLogging(forcingSeparateLogging); + parent.setForcingSeparateLogging(forcingSeparateLogging); } } @@ -180,32 +180,19 @@ public boolean evaluate(Result result) { try { switch (getActionLogLevel()) { - case ERROR: - logChannel.logError(message + Const.CR); - break; - case MINIMAL: - logChannel.logMinimal(message + Const.CR); - break; - case BASIC: - logChannel.logBasic(message + Const.CR); - break; - case DETAILED: - logChannel.logDetailed(message + Const.CR); - break; - case DEBUG: - logChannel.logDebug(message + Const.CR); - break; - case ROWLEVEL: - logChannel.logRowlevel(message + Const.CR); - break; - default: // NOTHING - break; + case ERROR -> logChannel.logError(message + Const.CR); + case MINIMAL -> logChannel.logMinimal(message + Const.CR); + case BASIC -> logChannel.logBasic(message + Const.CR); + case DETAILED -> logChannel.logDetailed(message + Const.CR); + case DEBUG -> logChannel.logDebug(message + Const.CR); + case ROWLEVEL -> logChannel.logRowlevel(message + Const.CR); + case NOTHING -> {} } return true; } catch (Exception e) { result.setNrErrors(1); - log.logError( + logError( BaseMessages.getString(PKG, "WriteToLog.Error.Label"), BaseMessages.getString(PKG, "WriteToLog.Error.Description") + " : " + e.toString()); return false; diff --git a/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/dtdvalidator/DtdValidator.java b/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/dtdvalidator/DtdValidator.java index 2d4b63b3c59..25689275a91 100644 --- a/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/dtdvalidator/DtdValidator.java +++ b/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/dtdvalidator/DtdValidator.java @@ -115,7 +115,7 @@ public Result execute(Result previousResult, int nr) { String realDTDfilename = getRealDTDfilename(); // Define a new DTD validator instance - DtdValidatorUtil validator = new DtdValidatorUtil(log); + DtdValidatorUtil validator = new DtdValidatorUtil(getLogChannel()); // Set XML filename validator.setXMLFilename(realxmlfilename); if (dtdintern) { @@ -130,7 +130,7 @@ public Result execute(Result previousResult, int nr) { boolean status = validator.validate(); if (!status) { // The XML file is invalid! 
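As an aside, the level dispatch in the ActionWriteToLog hunk above reads as a plain switch over LogLevel; here is a compact sketch, assuming the org.apache.hop.core.logging import paths, using the same enum constants and ILogChannel methods that appear in the hunk.

import org.apache.hop.core.logging.ILogChannel;
import org.apache.hop.core.logging.LogLevel;

public final class LogDispatch {
  private LogDispatch() {}

  // Route one message to the channel at the requested level; NOTHING writes nothing.
  public static void write(ILogChannel channel, LogLevel level, String message) {
    switch (level) {
      case ERROR -> channel.logError(message);
      case MINIMAL -> channel.logMinimal(message);
      case BASIC -> channel.logBasic(message);
      case DETAILED -> channel.logDetailed(message);
      case DEBUG -> channel.logDebug(message);
      case ROWLEVEL -> channel.logRowlevel(message);
      case NOTHING -> {}
    }
  }
}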
- log.logError(validator.getErrorMessage()); + logError(validator.getErrorMessage()); result.setResult(false); result.setNrErrors(validator.getNrErrors()); result.setLogText(validator.getErrorMessage()); diff --git a/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xmlwellformed/XmlWellFormed.java b/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xmlwellformed/XmlWellFormed.java index 2bad348818e..e9c3888ed8c 100644 --- a/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xmlwellformed/XmlWellFormed.java +++ b/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xmlwellformed/XmlWellFormed.java @@ -217,7 +217,7 @@ public Result execute(Result previousResult, int nr) throws HopException { String[] vSourceFileFolder = sourceFileFolders; String[] vwildcard = wildcard; - if (argFromPrevious && log.isDetailed()) { + if (argFromPrevious && isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -248,7 +248,7 @@ public Result execute(Result previousResult, int nr) throws HopException { String vSourceFileFolderPrevious = resultRow.getString(0, null); String vWildcardPrevious = resultRow.getString(1, null); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -276,7 +276,7 @@ public Result execute(Result previousResult, int nr) throws HopException { return result; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -304,7 +304,7 @@ public Result execute(Result previousResult, int nr) throws HopException { } private void displayResults() { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("======================================="); logDetailed( BaseMessages.getString(PKG, "ActionXMLWellFormed.Log.Info.FilesInError", "" + nrErrors)); @@ -393,7 +393,7 @@ private boolean processFileFolder( sourcefilefolder = HopVfs.getFileObject(realSourceFilefoldername); if (sourcefilefolder.exists()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionXMLWellFormed.Log.FileExists", sourcefilefolder.toString())); @@ -523,7 +523,7 @@ private boolean checkOneFile(FileObject file, Result result, IWorkflowEngine par addFileToResultFilenames(HopVfs.getFilename(file), result, parentWorkflow); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("---------------------------"); logDetailed( BaseMessages.getString( @@ -563,7 +563,7 @@ private void addFileToResultFilenames( toString()); result.getResultFiles().put(resultFile.getFile().toString(), resultFile); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionXMLWellFormed.Log.FileAddedToResultFilesName", fileaddentry)); diff --git a/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xslt/Xslt.java b/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xslt/Xslt.java index ddce4c51817..a5739565601 100644 --- a/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xslt/Xslt.java +++ b/plugins/actions/xml/src/main/java/org/apache/hop/workflow/actions/xml/xslt/Xslt.java @@ -302,7 +302,7 @@ public Result execute(Result previousResult, int nr) throws HopException { } List rows = result.getRows(); - if (isFilenamesFromPrevious() && log.isDetailed()) { + if (isFilenamesFromPrevious() && isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionXSLT.Log.ArgFromPrevious.Found", (rows != null ? 
rows.size() : 0) + "")); @@ -393,7 +393,7 @@ private boolean processOneXMLFile( } else if (outputfile.exists() && ifFileExists == 1) { // Do nothing - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, CONST_ACTION_XSLT_OUPUT_FILE_EXISTS_1_LABEL) + outputfilename @@ -421,7 +421,7 @@ private boolean processOneXMLFile( // did not find wildcard outputfilename = outputfilename + "_" + StringUtil.getFormattedDateTimeNow(true); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, CONST_ACTION_XSLT_OUPUT_FILE_EXISTS_1_LABEL) + outputfilename @@ -441,8 +441,8 @@ private boolean processOneXMLFile( factory = new net.sf.saxon.TransformerFactoryImpl(); } - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString(PKG, "ActionXSL.Log.TransformerFactoryInfos"), BaseMessages.getString( PKG, "ActionXSL.Log.TransformerFactory", factory.getClass().getName())); @@ -458,8 +458,8 @@ private boolean processOneXMLFile( // Use the template to create a transformer Transformer xformer = template.newTransformer(); - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString(PKG, "ActionXSL.Log.TransformerClassInfos"), BaseMessages.getString( PKG, "ActionXSL.Log.TransformerClass", xformer.getClass().getName())); diff --git a/plugins/actions/zipfile/src/main/java/org/apache/hop/workflow/actions/zipfile/ActionZipFile.java b/plugins/actions/zipfile/src/main/java/org/apache/hop/workflow/actions/zipfile/ActionZipFile.java index 79eb1f8f22c..40040ba3599 100644 --- a/plugins/actions/zipfile/src/main/java/org/apache/hop/workflow/actions/zipfile/ActionZipFile.java +++ b/plugins/actions/zipfile/src/main/java/org/apache/hop/workflow/actions/zipfile/ActionZipFile.java @@ -183,19 +183,19 @@ private boolean createParentFolder(String filename) { parentfolder = HopVfs.getFileObject(filename, getVariables()).getParent(); if (!parentfolder.exists()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.CanNotFindFolder", "" + parentfolder.getName())); } parentfolder.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.FolderCreated", "" + parentfolder.getName())); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.FolderExists", "" + parentfolder.getName())); @@ -255,7 +255,7 @@ public boolean processRowFile( // Check if Zip File exists if (fileObject.exists()) { fileExists = true; - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionZipFile.Zip_FileExists1.Label") + localrealZipfilename @@ -356,7 +356,7 @@ public boolean processRowFile( } localrealZipfilename += "_" + StringUtil.getFormattedDateTimeNow(true) + ".zip"; - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionZipFile.Zip_FileNameChange1.Label") + localrealZipfilename @@ -383,7 +383,7 @@ public boolean processRowFile( + tempFile.getAbsolutePath() + BaseMessages.getString(PKG, "ActionZipFile.Cant_Rename_Temp3.Label")); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionZipFile.Zip_FileAppend1.Label") + localrealZipfilename @@ -391,7 +391,7 @@ public boolean processRowFile( } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ActionZipFile.Files_Found1.Label") + fileList.length @@ -485,7 +485,7 @@ public 
boolean processRowFile( && !isTargetDirectory && !fileSet.contains(targetFilename)) { // We can add the file to the Zip Archive - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionZipFile.Add_FilesToZip1.Label") + fileList[i] @@ -510,7 +510,7 @@ public boolean processRowFile( } } - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, "ActionZipFile.Log.TotalZippedFiles", "" + zippedFiles.size())); @@ -668,7 +668,7 @@ private boolean moveFile( success = false; } // File moved - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionZipFile.File_Moved1.Label") + fileObjectd @@ -693,7 +693,7 @@ private boolean deleteFile( + BaseMessages.getString(PKG, "ActionZipFile.Cant_Delete_File2.Label")); } // File deleted - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ActionZipFile.File_Deleted1.Label") + localSourceFilename @@ -813,7 +813,7 @@ public Result execute(Result previousResult, int nr) { moveToDirectory = HopVfs.getFileObject(realMovetodirectory, getVariables()); if (moveToDirectory.exists()) { if (moveToDirectory.getType() == FileType.FOLDER) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.Log.MoveToFolderExist", realMovetodirectory)); @@ -825,14 +825,14 @@ public Result execute(Result previousResult, int nr) { PKG, "ActionZipFile.Log.MoveToFolderNotFolder", realMovetodirectory)); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.Log.MoveToFolderNotNotExist", realMovetodirectory)); } if (createMoveToDirectory) { moveToDirectory.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.Log.MoveToFolderCreaterd", realMovetodirectory)); @@ -871,7 +871,7 @@ public Result execute(Result previousResult, int nr) { // arguments from previous if (fromPrevious) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ActionZipFile.ArgFromPrevious.Found", (rows != null ? rows.size() : 0) + "")); diff --git a/plugins/databases/access/src/main/java/org/apache/hop/pipeline/transforms/accessoutput/AccessOutput.java b/plugins/databases/access/src/main/java/org/apache/hop/pipeline/transforms/accessoutput/AccessOutput.java index 8b508043244..5c3cf428483 100644 --- a/plugins/databases/access/src/main/java/org/apache/hop/pipeline/transforms/accessoutput/AccessOutput.java +++ b/plugins/databases/access/src/main/java/org/apache/hop/pipeline/transforms/accessoutput/AccessOutput.java @@ -82,7 +82,7 @@ public boolean processRow() throws HopException { writeToTable(row); putRow(data.outputRowMeta, row); // in case we want it go further... 
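A small hedged sketch of the feedback idiom used in the AccessOutput rows just below: check a cheap row-count condition and the log level before building the message. checkFeedback() is simulated here with a modulo on a hypothetical interval; only the ILogChannel calls come from the hunks.

import org.apache.hop.core.logging.ILogChannel;
import org.apache.hop.core.logging.LogChannel;

public class FeedbackLogSketch {
  public static void main(String[] args) {
    ILogChannel log = new LogChannel("FeedbackLogSketch");
    long feedbackSize = 50000; // hypothetical feedback interval, normally taken from pipeline settings

    for (long linesOutput = 1; linesOutput <= 200_000; linesOutput++) {
      // Same shape as checkFeedback(getLinesOutput()) && isBasic(): guard first, log second.
      if (linesOutput % feedbackSize == 0 && log.isBasic()) {
        log.logBasic("linenr " + linesOutput);
      }
    }
  }
}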
- if (checkFeedback(getLinesOutput()) && log.isBasic()) { + if (checkFeedback(getLinesOutput()) && isBasic()) { logBasic("linenr " + getLinesOutput()); } } catch (HopException e) { @@ -99,7 +99,7 @@ public boolean processRow() throws HopException { protected boolean writeToTable(Object[] row) throws HopValueException { if (row == null) { // Stop: last line or error encountered - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Last line inserted: stop"); } return false; @@ -194,7 +194,7 @@ public boolean init() { protected boolean openFile() throws Exception { data.oneFileOpened = true; String fileName = resolve(meta.getFileName()); - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "AccessOutput.log.WritingToFile", fileName)); } FileObject fileObject = HopVfs.getFileObject(fileName); diff --git a/plugins/misc/testing/src/main/java/org/apache/hop/testing/transforms/exectests/ExecuteTests.java b/plugins/misc/testing/src/main/java/org/apache/hop/testing/transforms/exectests/ExecuteTests.java index b4605be8261..08514db99df 100644 --- a/plugins/misc/testing/src/main/java/org/apache/hop/testing/transforms/exectests/ExecuteTests.java +++ b/plugins/misc/testing/src/main/java/org/apache/hop/testing/transforms/exectests/ExecuteTests.java @@ -56,14 +56,14 @@ public boolean init() { data.hasPrevious = true; if (StringUtils.isEmpty(meta.getTestNameInputField())) { - log.logError( + logError( "When this transform receives input it wants the name of a field to get the unit test name from to determine which transforms to execute"); setErrors(1); return false; } } } catch (Exception e) { - log.logError("Error analyzing ", e); + logError("Error analyzing ", e); setErrors(1); return false; } diff --git a/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avroencode/AvroEncode.java b/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avroencode/AvroEncode.java index ed5b49c4f8c..aa16433c3f2 100644 --- a/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avroencode/AvroEncode.java +++ b/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avroencode/AvroEncode.java @@ -78,8 +78,8 @@ public boolean processRow() throws HopException { AvroEncodeMeta.createAvroSchema( schemaName, namespace, documentation, getInputRowMeta(), meta.getSourceFields()); - if (log.isDetailed()) { - log.logDetailed("Schema: " + data.avroSchema.toString(true)); + if (isDetailed()) { + logDetailed("Schema: " + data.avroSchema.toString(true)); } } diff --git a/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avrooutput/AvroOutput.java b/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avrooutput/AvroOutput.java index d36e6be37f8..025bf96c186 100644 --- a/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avrooutput/AvroOutput.java +++ b/plugins/tech/avro/src/main/java/org/apache/hop/avro/transforms/avrooutput/AvroOutput.java @@ -251,12 +251,12 @@ public void writeSchemaFile() throws HopException { fields.add(avroField); } data.avroSchema = createAvroSchema(fields, ""); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Automatically generated Avro schema."); } if (meta.isWriteSchemaFile()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Writing schema file."); } try { @@ -267,18 +267,18 @@ public void writeSchemaFile() throws HopException { } OutputStream outputStream = getOutputStream(schemaFileName, false); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opening output stream in 
default encoding"); } OutputStream schemaWriter = new BufferedOutputStream(outputStream, 5000); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opened new file with name [" + schemaFileName + "]"); } schemaWriter.write(data.avroSchema.toString(true).getBytes()); schemaWriter.close(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Closed schema file with name [" + schemaFileName + "]"); } @@ -499,7 +499,7 @@ public String buildFilename(String filename) { getCopy(), getPartitionId(), data.isBeamContext(), - log.getLogChannelId(), + getLogChannelId(), data.getBeamBundleNr()); } @@ -521,12 +521,12 @@ public void openNewFile(String baseFilename) throws HopException { OutputStream outputStream = getOutputStream(filename, false); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opening output stream in default encoding"); } data.writer = new BufferedOutputStream(outputStream, 5000); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opened new file with name [" + filename + "]"); } } catch (Exception e) { @@ -553,7 +553,7 @@ private boolean closeFile() { if (data.writer != null) { data.writer.flush(); - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing output stream"); } if (data.dataFileWriter != null) { @@ -564,7 +564,7 @@ private boolean closeFile() { // also. data.writer = null; data.dataFileWriter = null; - if (log.isDebug()) { + if (isDebug()) { logDebug("Closed output stream"); } } diff --git a/plugins/tech/aws/src/main/java/org/apache/hop/pipeline/transforms/redshift/bulkloader/RedshiftBulkLoader.java b/plugins/tech/aws/src/main/java/org/apache/hop/pipeline/transforms/redshift/bulkloader/RedshiftBulkLoader.java index c556d0823df..c58afb5ca2d 100644 --- a/plugins/tech/aws/src/main/java/org/apache/hop/pipeline/transforms/redshift/bulkloader/RedshiftBulkLoader.java +++ b/plugins/tech/aws/src/main/java/org/apache/hop/pipeline/transforms/redshift/bulkloader/RedshiftBulkLoader.java @@ -79,7 +79,7 @@ public boolean init() { data.db.connect(); getDbFields(); - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, "RedshiftBulkLoader.Connection.Connected", data.db.getDatabaseMeta())); @@ -256,7 +256,7 @@ private boolean closeFile() { data.writer.close(); } data.writer = null; - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing normal file ..."); } diff --git a/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/listen/AzureListener.java b/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/listen/AzureListener.java index 6c71c4f939f..6ba8b12f088 100644 --- a/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/listen/AzureListener.java +++ b/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/listen/AzureListener.java @@ -155,7 +155,7 @@ public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) data.stt = false; } - log.logDetailed("Creating connection string builder"); + logDetailed("Creating connection string builder"); data.connectionStringBuilder = new ConnectionStringBuilder() .setNamespaceName(namespace) @@ -163,11 +163,11 @@ public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) .setSasKeyName(sasKeyName) .setSasKey(sasKey); - log.logDetailed("Opening new executor service"); + logDetailed("Opening new executor service"); data.executorService = Executors.newSingleThreadScheduledExecutor(); - log.logDetailed("Creating event hub client"); + logDetailed("Creating event hub client"); try { 
data.eventHubClient = EventHubClient.createFromConnectionStringSync( @@ -189,7 +189,7 @@ public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) } catch (Exception e) { throw new HopException("Unable to set up events host processor", e); } - log.logDetailed("Set up events host named " + host.getHostName()); + logDetailed("Set up events host named " + host.getHostName()); EventProcessorOptions options = new EventProcessorOptions(); options.setExceptionNotification(new AzureListenerErrorNotificationHandler(AzureListener.this)); diff --git a/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/write/AzureWrite.java b/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/write/AzureWrite.java index 708541d967e..50d5f1a78c1 100644 --- a/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/write/AzureWrite.java +++ b/plugins/tech/azure/src/main/java/org/apache/hop/pipeline/transforms/eventhubs/write/AzureWrite.java @@ -100,7 +100,7 @@ public boolean processRow() throws HopException { "Unable to find field '" + meta.getMessageField() + "' in the Transform input"); } - log.logBasic("Creating connection string"); + logBasic("Creating connection string"); String namespace = resolve(meta.getNamespace()); String eventHubName = resolve(meta.getEventHubName()); @@ -114,9 +114,9 @@ public boolean processRow() throws HopException { .setSasKeyName(sasKeyName) .setSasKey(sasKey); - log.logBasic("Opening new executor service"); + logBasic("Opening new executor service"); data.executorService = Executors.newSingleThreadScheduledExecutor(); - log.logBasic("Creating event hub client"); + logBasic("Creating event hub client"); try { data.eventHubClient = EventHubClient.createFromConnectionStringSync( diff --git a/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrainput/CassandraInput.java b/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrainput/CassandraInput.java index c0b34780b0b..3423478caa3 100644 --- a/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrainput/CassandraInput.java +++ b/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrainput/CassandraInput.java @@ -202,8 +202,8 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, r); } - if (log.isRowLevel()) { - log.logRowlevel(toString(), "Outputted row #" + getProcessed() + " : " + outRowData); + if (isRowLevel()) { + logRowlevel(toString(), "Outputted row #" + getProcessed() + " : " + outRowData); } if (checkFeedback(getProcessed())) { @@ -249,7 +249,7 @@ protected void initQuery() throws HopException { if (cqlHandler == null) { cqlHandler = data.keyspace.getCQLRowHandler(); } - cqlHandler.newRowQuery(this, tableName, queryS, compression.name(), "", log); + cqlHandler.newRowQuery(this, tableName, queryS, compression.name(), "", getLogChannel()); } catch (Exception e) { closeConnection(); diff --git a/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandraoutput/CassandraOutput.java b/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandraoutput/CassandraOutput.java index 23fe12b6d76..59033f78580 100644 --- a/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandraoutput/CassandraOutput.java +++ b/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandraoutput/CassandraOutput.java @@ -206,7 +206,7 @@ protected void initialize() throws 
HopException { getInputRowMeta(), keyIndexes, resolve(getMeta().getCreateTableWithClause()), - log); + getLogChannel()); if (!result) { throw new HopException( @@ -231,7 +231,7 @@ protected void initialize() throws HopException { if (getMeta().isUpdateCassandraMeta()) { // Update cassandra meta data for unknown incoming fields? - keyspace.updateTableCQL3(tableName, getInputRowMeta(), keyIndexes, log); + keyspace.updateTableCQL3(tableName, getInputRowMeta(), keyIndexes, getLogChannel()); } // get the table meta data @@ -262,7 +262,7 @@ protected void initialize() throws HopException { // Truncate (remove all data from) table first? if (getMeta().isTruncateTable()) { - keyspace.truncateTable(tableName, log); + keyspace.truncateTable(tableName, getLogChannel()); } } finally { if (connection != null) { diff --git a/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrasstableoutput/SSTableOutput.java b/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrasstableoutput/SSTableOutput.java index c45fdb51046..0a3088d617c 100644 --- a/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrasstableoutput/SSTableOutput.java +++ b/plugins/tech/cassandra/src/main/java/org/apache/hop/pipeline/transforms/cassandrasstableoutput/SSTableOutput.java @@ -137,7 +137,7 @@ private void initialize() throws Exception { writer = builder.build(); try { - disableSystemExit(sm, log); + disableSystemExit(sm, getLogChannel()); writer.init(); } catch (Exception e) { throw new RuntimeException( diff --git a/plugins/tech/cassandra/src/main/java/org/apache/hop/workflow/actions/execcql/ExecCql.java b/plugins/tech/cassandra/src/main/java/org/apache/hop/workflow/actions/execcql/ExecCql.java index a0f2bd7bd52..77d5c14820c 100644 --- a/plugins/tech/cassandra/src/main/java/org/apache/hop/workflow/actions/execcql/ExecCql.java +++ b/plugins/tech/cassandra/src/main/java/org/apache/hop/workflow/actions/execcql/ExecCql.java @@ -136,7 +136,8 @@ public Result execute(Result result, int nr) throws HopException { cqlStatements = script; } - int nrExecuted = executeCqlStatements(this, log, result, cassandraConnection, cqlStatements); + int nrExecuted = + executeCqlStatements(this, getLogChannel(), result, cassandraConnection, cqlStatements); if (result.getNrErrors() == 0) { logBasic("Cassandra executed " + nrExecuted + " CQL commands without error"); diff --git a/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsInput.java b/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsInput.java index 4cc49382908..e55f07b5a88 100644 --- a/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsInput.java +++ b/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsInput.java @@ -71,7 +71,7 @@ public boolean init() { jsonFactory = JacksonFactory.getDefaultInstance(); httpTransport = GoogleNetHttpTransport.newTrustedTransport(); } catch (Exception e) { - log.logError("cannot initiate HTTP transport" + e.getMessage()); + logError("cannot initiate HTTP transport" + e.getMessage()); return false; } @@ -94,7 +94,7 @@ public boolean init() { ValueRange response = service.spreadsheets().values().get(resolve(meta.getSpreadsheetKey()), range).execute(); if (response == null) { - log.logError( + logError( "No data found for worksheet : " + resolve(meta.getWorksheetId()) + CONST_IN_SPREADSHEET @@ -102,7 +102,7 @@ public 
boolean init() { return false; } else { List> values = response.getValues(); - log.logBasic("Reading Sheet, found: " + values.size() + " rows"); + logBasic("Reading Sheet, found: " + values.size() + " rows"); if (values == null || values.isEmpty()) { throw new HopTransformException( "No response found for worksheet : " @@ -114,7 +114,7 @@ public boolean init() { } } } catch (Exception e) { - log.logError( + logError( "Error: for worksheet : " + resolve(meta.getWorksheetId()) + CONST_IN_SPREADSHEET @@ -247,7 +247,7 @@ private Object[] readRow() { } } } else { - log.logBasic( + logBasic( "Finished reading last row " + Integer.toString(data.currentRow) + " / " @@ -256,7 +256,7 @@ private Object[] readRow() { } return outputRowData; } catch (Exception e) { - log.logError("Exception reading value :" + e.getMessage()); + logError("Exception reading value :" + e.getMessage()); return null; } } diff --git a/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsOutput.java b/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsOutput.java index 0b145fca0ad..feb71d48414 100644 --- a/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsOutput.java +++ b/plugins/tech/google/src/main/java/org/apache/hop/pipeline/transforms/googlesheets/GoogleSheetsOutput.java @@ -118,7 +118,7 @@ public boolean init() { // log.logBasic(wsID+" VS "+spreadsheet.getId()); if (spreadsheetID.equals(spreadsheet.getId())) { exists = true; // file exists - log.logBasic("Spreadsheet:" + spreadsheetID + " exists"); + logBasic("Spreadsheet:" + spreadsheetID + " exists"); } } @@ -195,7 +195,7 @@ credential, resolve(meta.getTimeout()))) new UpdateSheetPropertiesRequest().setProperties(title); // set fields you want to update rename.setFields("title"); - log.logBasic("Changing worksheet title to:" + resolve(meta.getWorksheetId())); + logBasic("Changing worksheet title to:" + resolve(meta.getWorksheetId())); List requests = new ArrayList<>(); Request request1 = new Request().setUpdateSheetProperties(rename); requests.add(request1); @@ -205,7 +205,7 @@ credential, resolve(meta.getTimeout()))) data.service.spreadsheets().batchUpdate(spreadsheetID, requestBody).execute(); } } else { - log.logError("Append and Create options cannot be activated altogether"); + logError("Append and Create options cannot be activated altogether"); return false; } @@ -221,18 +221,18 @@ credential, resolve(meta.getTimeout()))) public void onFailure(GoogleJsonError e, HttpHeaders responseHeaders) throws IOException { // Handle error - log.logError("Failed sharing file" + e.getMessage()); + logError("Failed sharing file" + e.getMessage()); } @Override public void onSuccess(Permission permission, HttpHeaders responseHeaders) throws IOException { - log.logBasic("Shared successfully : Permission ID: " + permission.getId()); + logBasic("Shared successfully : Permission ID: " + permission.getId()); } }; BatchRequest batch = service.batch(); if (resolve(meta.getShareEmail()) != null && !resolve(meta.getShareEmail()).isEmpty()) { - log.logBasic("Sharing sheet with:" + resolve(meta.getShareEmail())); + logBasic("Sharing sheet with:" + resolve(meta.getShareEmail())); Permission userPermission = new Permission() .setType("user") @@ -247,7 +247,7 @@ public void onSuccess(Permission permission, HttpHeaders responseHeaders) } if (resolve(meta.getShareDomain()) != null && !resolve(meta.getShareDomain()).isEmpty()) { - log.logBasic("Sharing sheet with 
domain:" + resolve(meta.getShareDomain())); + logBasic("Sharing sheet with domain:" + resolve(meta.getShareDomain())); Permission domainPermission = new Permission() .setType("domain") @@ -264,12 +264,12 @@ public void onSuccess(Permission permission, HttpHeaders responseHeaders) } if (!exists && !meta.isCreate()) { - log.logError("File does not Exist"); + logError("File does not Exist"); return false; } } catch (Exception e) { - log.logError( + logError( "Error: for worksheet : " + resolve(meta.getWorksheetId()) + " in spreadsheet :" diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/constraint/Neo4jConstraint.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/constraint/Neo4jConstraint.java index b15bc999b8b..65b634d6bab 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/constraint/Neo4jConstraint.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/constraint/Neo4jConstraint.java @@ -115,17 +115,17 @@ private void dropConstraint(final ConstraintUpdate constraintUpdate) throws HopE // Run this cypher statement... // final String _cypher = cypher; - try (Driver driver = connection.getDriver(log, this)) { - try (Session session = connection.getSession(log, driver, this)) { + try (Driver driver = connection.getDriver(getLogChannel(), this)) { + try (Session session = connection.getSession(getLogChannel(), driver, this)) { session.writeTransaction( tx -> { try { - log.logDetailed("Dropping constraint with cypher: " + _cypher); + logDetailed("Dropping constraint with cypher: " + _cypher); org.neo4j.driver.Result result = tx.run(_cypher); result.consume(); return true; } catch (Throwable e) { - log.logError("Error dropping constraint with cypher [" + _cypher + "]", e); + logError("Error dropping constraint with cypher [" + _cypher + "]", e); return false; } }); @@ -180,17 +180,17 @@ private void createConstraint(ConstraintUpdate constraintUpdate) throws HopExcep // Run this cypher statement... 
// final String _cypher = cypher; - try (Driver driver = connection.getDriver(log, this)) { - try (Session session = connection.getSession(log, driver, this)) { + try (Driver driver = connection.getDriver(getLogChannel(), this)) { + try (Session session = connection.getSession(getLogChannel(), driver, this)) { session.writeTransaction( tx -> { try { - log.logDetailed("Creating constraint with cypher: " + _cypher); + logDetailed("Creating constraint with cypher: " + _cypher); org.neo4j.driver.Result result = tx.run(_cypher); result.consume(); return true; } catch (Throwable e) { - log.logError("Error creating constraint with cypher [" + _cypher + "]", e); + logError("Error creating constraint with cypher [" + _cypher + "]", e); return false; } }); diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/cypherscript/CypherScript.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/cypherscript/CypherScript.java index 81f361bcd6b..46344d29595 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/cypherscript/CypherScript.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/cypherscript/CypherScript.java @@ -106,11 +106,11 @@ public Result execute(Result result, int nr) throws HopException { int nrExecuted; - try (Driver driver = connection.getDriver(log, this)) { + try (Driver driver = connection.getDriver(getLogChannel(), this)) { // Connect to the database // - try (Session session = connection.getSession(log, driver, this)) { + try (Session session = connection.getSession(getLogChannel(), driver, this)) { TransactionWork transactionWork = transaction -> { @@ -130,14 +130,14 @@ public Result execute(Result result, int nr) throws HopException { if (StringUtils.isNotEmpty(cypher)) { transaction.run(cypher); executed++; - log.logDetailed("Executed cypher statement: " + cypher); + logDetailed("Executed cypher statement: " + cypher); } } // All statements executed successfully so commit // transaction.commit(); } catch (Exception e) { - log.logError("Error executing cypher statements...", e); + logError("Error executing cypher statements...", e); result.increaseErrors(1L); transaction.rollback(); result.setResult(false); diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/index/Neo4jIndex.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/index/Neo4jIndex.java index 351b2a34ba4..d93b6d6842a 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/index/Neo4jIndex.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/actions/index/Neo4jIndex.java @@ -124,17 +124,17 @@ private void dropIndex(final IndexUpdate indexUpdate) throws HopException { // Run this cypher statement... 
// final String _cypher = cypher; - try (Driver driver = connection.getDriver(log, this)) { - try (Session session = connection.getSession(log, driver, this)) { + try (Driver driver = connection.getDriver(getLogChannel(), this)) { + try (Session session = connection.getSession(getLogChannel(), driver, this)) { session.writeTransaction( tx -> { try { - log.logDetailed("Dropping index with cypher: " + _cypher); + logDetailed("Dropping index with cypher: " + _cypher); org.neo4j.driver.Result result = tx.run(_cypher); result.consume(); return true; } catch (Throwable e) { - log.logError("Error dropping index with cypher [" + _cypher + "]", e); + logError("Error dropping index with cypher [" + _cypher + "]", e); return false; } }); @@ -178,17 +178,17 @@ private void createIndex(IndexUpdate indexUpdate) throws HopException { // Run this cypher statement... // final String _cypher = cypher; - try (Driver driver = connection.getDriver(log, this)) { - try (Session session = connection.getSession(log, driver, this)) { + try (Driver driver = connection.getDriver(getLogChannel(), this)) { + try (Session session = connection.getSession(getLogChannel(), driver, this)) { session.writeTransaction( tx -> { try { - log.logDetailed("Creating index with cypher: " + _cypher); + logDetailed("Creating index with cypher: " + _cypher); org.neo4j.driver.Result result = tx.run(_cypher); result.consume(); return true; } catch (Throwable e) { - log.logError("Error creating index with cypher [" + _cypher + "]", e); + logError("Error creating index with cypher [" + _cypher + "]", e); return false; } }); diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypher/Cypher.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypher/Cypher.java index 77c2571e332..f18388ef294 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypher/Cypher.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypher/Cypher.java @@ -70,7 +70,7 @@ public boolean init() { // Connect to Neo4j // if (StringUtils.isEmpty(resolve(meta.getConnectionName()))) { - log.logError("You need to specify a Neo4j connection to use in this transform"); + logError("You need to specify a Neo4j connection to use in this transform"); return false; } try { @@ -79,7 +79,7 @@ public boolean init() { .getSerializer(NeoConnection.class) .load(resolve(meta.getConnectionName())); if (data.neoConnection == null) { - log.logError( + logError( "Connection '" + resolve(meta.getConnectionName()) + "' could not be found in the metadata: " @@ -87,7 +87,7 @@ public boolean init() { return false; } } catch (HopException e) { - log.logError( + logError( "Could not gencsv Neo4j connection '" + resolve(meta.getConnectionName()) + "' from the metastore", @@ -101,7 +101,7 @@ public boolean init() { // int retries = Const.toInt(resolve(meta.getNrRetriesOnError()), 0); if (retries < 0) { - log.logError("The number of retries on an error should be larger than 0, not " + retries); + logError("The number of retries on an error should be larger than 0, not " + retries); return false; } data.attempts = 1 + retries; @@ -109,7 +109,7 @@ public boolean init() { try { createDriverSession(); } catch (Exception e) { - log.logError( + logError( "Unable to get or create Neo4j database driver for database '" + data.neoConnection.getName() + "'", @@ -139,14 +139,14 @@ private void closeSessionDriver() { } private void createDriverSession() throws HopConfigException { - data.driver = 
data.neoConnection.getDriver(log, this); - data.session = data.neoConnection.getSession(log, data.driver, this); + data.driver = data.neoConnection.getDriver(getLogChannel(), this); + data.session = data.neoConnection.getSession(getLogChannel(), data.driver, this); } private void reconnect() throws HopConfigException { closeSessionDriver(); - log.logBasic("RECONNECTING to database"); + logBasic("RECONNECTING to database"); // Wait for 30 seconds before reconnecting. // Let's give the server a breath of fresh air. @@ -226,7 +226,7 @@ public boolean processRow() throws HopException { if (meta.isCypherFromField()) { data.cypher = getInputRowMeta().getString(row, data.cypherFieldIndex); - log.logDetailed("Cypher statement from field is: " + data.cypher); + logDetailed("Cypher statement from field is: " + data.cypher); } // Do the value mapping and conversion to the parameters @@ -356,7 +356,7 @@ private void runCypherStatementsBatch() throws HopException { } } - if (log.isDebug()) { + if (isDebug()) { logDebug("Processed " + nrProcessed + " statements"); } @@ -380,7 +380,7 @@ private List writeUnwindList() throws HopException { try { for (int attempt = 0; attempt < data.attempts; attempt++) { if (attempt > 0) { - log.logBasic("Attempt #" + (attempt + 1) + "/" + data.attempts + " on Neo4j transaction"); + logBasic("Attempt #" + (attempt + 1) + "/" + data.attempts + " on Neo4j transaction"); } try { if (meta.isReadOnly()) { @@ -395,7 +395,7 @@ private List writeUnwindList() throws HopException { if (attempt + 1 >= data.attempts) { throw e; } else { - log.logBasic( + logBasic( "Retrying transaction after attempt #" + (attempt + 1) + " with error : " @@ -591,7 +591,7 @@ private boolean processSummary(Result result) { for (Notification notification : summary.notifications()) { if ("WARNING".equalsIgnoreCase(notification.severity())) { // Log it - log.logBasic( + logBasic( notification.severity() + " : " + notification.title() @@ -604,8 +604,8 @@ private boolean processSummary(Result result) { } else { // This is an error // - log.logError(notification.severity() + " : " + notification.title()); - log.logError( + logError(notification.severity() + " : " + notification.title()); + logError( notification.code() + " : " + notification.description() diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypherbuilder/CypherBuilder.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypherbuilder/CypherBuilder.java index ca6096caf7e..5915d83a00f 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypherbuilder/CypherBuilder.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/cypherbuilder/CypherBuilder.java @@ -58,9 +58,9 @@ public boolean init() { String connectionName = resolve(meta.getConnectionName()); try { data.connection = metadataProvider.getSerializer(NeoConnection.class).load(connectionName); - data.driver = data.connection.getDriver(log, this); + data.driver = data.connection.getDriver(getLogChannel(), this); data.driver.verifyConnectivity(); - data.session = data.connection.getSession(log, data.driver, this); + data.session = data.connection.getSession(getLogChannel(), data.driver, this); } catch (Exception e) { setErrors(1); logError("Error connecting to Neo4j", e); @@ -80,7 +80,7 @@ public void dispose() { data.driver.close(); } } catch (Exception e) { - log.logError("Error closing Neo4j connection", e); + logError("Error closing Neo4j connection", e); } super.dispose(); diff --git 
a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/gencsv/GenerateCsv.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/gencsv/GenerateCsv.java index 1448c6745bb..0085ad33a10 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/gencsv/GenerateCsv.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/gencsv/GenerateCsv.java @@ -86,7 +86,7 @@ public boolean processRow() throws HopException { csvFile.closeFile(); } catch (Exception e) { setErrors(1L); - log.logError("Error flushing/closing file '" + csvFile.getFilename() + "'", e); + logError("Error flushing/closing file '" + csvFile.getFilename() + "'", e); } Object[] nodeFileRow = RowDataUtil.allocateRowData(data.outputRowMeta.size()); diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/graph/GraphOutput.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/graph/GraphOutput.java index 55d5f3eda7f..f64e8fb5526 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/graph/GraphOutput.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/graph/GraphOutput.java @@ -79,7 +79,7 @@ public boolean init() { // Verify some extra metadata... // if (StringUtils.isEmpty(meta.getConnectionName())) { - log.logError("You need to specify a Neo4j connection to use in this transform"); + logError("You need to specify a Neo4j connection to use in this transform"); return false; } @@ -87,7 +87,7 @@ public boolean init() { metadataProvider.getSerializer(NeoConnection.class); data.neoConnection = serializer.load(meta.getConnectionName()); if (data.neoConnection == null) { - log.logError( + logError( "Connection '" + meta.getConnectionName() + "' could not be found in the metadata : " @@ -96,11 +96,11 @@ public boolean init() { } try { - data.driver = data.neoConnection.getDriver(log, this); - data.session = data.neoConnection.getSession(log, data.driver, this); + data.driver = data.neoConnection.getDriver(getLogChannel(), this); + data.session = data.neoConnection.getSession(getLogChannel(), data.driver, this); data.version4 = data.neoConnection.isVersion4(); } catch (Exception e) { - log.logError( + logError( "Unable to get or create Neo4j database driver for database '" + data.neoConnection.getName() + "'", @@ -133,10 +133,10 @@ public boolean init() { // List usedNodeProperties = findUsedNodeProperties(); data.modelValidator = new ModelValidator(data.graphModel, usedNodeProperties); - int nrErrors = data.modelValidator.validateBeforeLoad(log, data.session); + int nrErrors = data.modelValidator.validateBeforeLoad(getLogChannel(), data.session); if (nrErrors > 0) { // There were validation errors, we can stop here... 
- log.logError( + logError( "Validation against graph model '" + data.graphModel.getName() + "' failed with " @@ -144,12 +144,12 @@ public boolean init() { + " errors."); return false; } else { - log.logBasic( + logBasic( "Validation against graph model '" + data.graphModel.getName() + "' was successful."); } } } catch (HopException e) { - log.logError("Could not find Neo4j connection'" + meta.getConnectionName() + "'", e); + logError("Could not find Neo4j connection'" + meta.getConnectionName() + "'", e); return false; } @@ -450,7 +450,7 @@ public boolean processRow() throws HopException { // Map parameters = new HashMap<>(); String cypher = getCypher(row, getInputRowMeta(), parameters); - if (log.isDebug()) { + if (isDebug()) { logDebug("Parameters found : " + parameters.size()); logDebug("Merge statement : " + cypher); } @@ -522,7 +522,7 @@ private void createNodePropertyIndexes(GraphOutputMeta meta, GraphOutputData dat // for (GraphNode node : nodePropertiesMap.keySet()) { NeoConnectionUtils.createNodeIndex( - log, data.session, node.getLabels(), nodePropertiesMap.get(node)); + getLogChannel(), data.session, node.getLabels(), nodePropertiesMap.get(node)); } } @@ -627,8 +627,8 @@ private boolean processSummary(Result result) { boolean errors = false; ResultSummary summary = result.consume(); for (Notification notification : summary.notifications()) { - log.logError(notification.title() + " (" + notification.severity() + ")"); - log.logError( + logError(notification.title() + " (" + notification.severity() + ")"); + logError( notification.code() + " : " + notification.description() @@ -734,7 +734,7 @@ protected String getCypher(Object[] row, IRowMeta rowMeta, Map p "We didn't find a node to write to. Did you specify a field mapping to node properties?"); } if (nar.nodes.size() > 1) { - log.logBasic("Warning: writing to multiple nodes but not to any relationships"); + logBasic("Warning: writing to multiple nodes but not to any relationships"); } for (SelectedNode node : nar.nodes) { addNodeCypher( @@ -762,7 +762,7 @@ protected String getCypher(Object[] row, IRowMeta rowMeta, Map p GraphRelationship relationship = selectedRelationship.getRelationship(); relationshipIndex++; - if (log.isDebug()) { + if (isDebug()) { logDebug("Handling relationship : " + relationship.getName()); } // Add the source node to the cypher @@ -898,7 +898,7 @@ private SelectedNodesAndRelationships selectNodesAndRelationships(IRowMeta rowMe // Null value? 
// if (nodeProperty.sourceValueMeta.isNull(nodeProperty.sourceValueData)) { - if (log.isDebug()) { + if (isDebug()) { logDebug( "Detected primary null property for node " + nodeProperty.node @@ -1012,7 +1012,7 @@ private SelectedNodesAndRelationships selectNodesAndRelationships(IRowMeta rowMe } } - if (log.isDebug()) { + if (isDebug()) { logDebug( "Found " + selectedRelationships.size() @@ -1252,8 +1252,8 @@ private void addNodeCypher( updateUsageMap(node.getLabels(), GraphUsage.NODE_UPDATE); - if (log.isDebug()) { - logBasic(" - node merge : " + node.getName()); + if (isDebug()) { + logDebug(" - node merge : " + node.getName()); } // Look up the properties to update in the node @@ -1281,8 +1281,8 @@ private void addNodeCypher( firstPrimary = false; - if (log.isDebug()) { - logBasic( + if (isDebug()) { + logDebug( " * property match/create : " + napd.property.getName() + " with value " @@ -1309,8 +1309,8 @@ private void addNodeCypher( matchCypher.append(buildParameterClause(parameterName)).append(" "); } - if (log.isDebug()) { - logBasic( + if (isDebug()) { + logDebug( " * property update : " + napd.property.getName() + " with value " @@ -1441,7 +1441,7 @@ protected GraphData getGraphData(Object[] row, IRowMeta rowMeta) throws HopExcep "We didn't find a node to write to. Did you specify a field mapping to node properties?"); } if (nar.nodes.size() > 1) { - log.logBasic("Warning: writing to multiple nodes but not to any relationships"); + logBasic("Warning: writing to multiple nodes but not to any relationships"); } for (SelectedNode node : nar.nodes) { @@ -1547,7 +1547,7 @@ private Set getIgnoredNodesWithPkNull(List no // Null value? // if (nodeProperty.sourceValueMeta.isNull(nodeProperty.sourceValueData)) { - if (log.isDebug()) { + if (isDebug()) { logDebug( "Detected primary null property for node " + nodeProperty.node diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/importer/Importer.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/importer/Importer.java index fb5e5422d94..95933fac6a3 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/importer/Importer.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/importer/Importer.java @@ -138,7 +138,7 @@ private void runImport() throws HopException { String targetDbFolder = data.baseFolder + "data/databases/" + data.databaseFilename; try { if (new File(targetDbFolder).exists()) { - log.logBasic("Removing exsting folder: " + targetDbFolder); + logBasic("Removing exsting folder: " + targetDbFolder); FileUtils.deleteDirectory(new File(targetDbFolder)); } } catch (Exception e) { @@ -192,8 +192,8 @@ private void runImport() throws HopException { for (String argument : arguments) { command.append(argument).append(" "); } - log.logBasic("Running command : " + command); - log.logBasic("Running from base folder: " + data.baseFolder); + logBasic("Running command : " + command); + logBasic("Running from base folder: " + data.baseFolder); ProcessBuilder pb = new ProcessBuilder(arguments); pb.directory(new File(data.baseFolder)); diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/loginfo/GetLoggingInfo.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/loginfo/GetLoggingInfo.java index ad174d18356..d826c92a218 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/loginfo/GetLoggingInfo.java +++ 
b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/loginfo/GetLoggingInfo.java @@ -161,7 +161,7 @@ public boolean processRow() throws HopException { throw new HopException("Error getting Neo4j logging information", e); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("System info returned: " + data.outputRowMeta.getString(row)); } @@ -218,7 +218,7 @@ private Date getPreviousPipelineExecution(String pipelineName) throws Exception + "ORDER BY startDate DESC " + "LIMIT 1 "; - return getResultStartDate(log, connection, cypher, parameters); + return getResultStartDate(getLogChannel(), connection, cypher, parameters); } private Date getPreviousPipelineSuccess(String pipelineName) throws Exception { @@ -245,7 +245,7 @@ private Date getPreviousPipelineSuccess(String pipelineName) throws Exception { + "ORDER BY startDate DESC " + "LIMIT 1 "; - return getResultStartDate(log, connection, cypher, parameters); + return getResultStartDate(getLogChannel(), connection, cypher, parameters); } private Date getPreviousWorkflowExecution(String jobName) throws Exception { @@ -271,7 +271,7 @@ private Date getPreviousWorkflowExecution(String jobName) throws Exception { + "ORDER BY startDate DESC " + "LIMIT 1 "; - return getResultStartDate(log, connection, cypher, parameters); + return getResultStartDate(getLogChannel(), connection, cypher, parameters); } private Date getPreviousWorkflowSuccess(String jobName) throws Exception { @@ -298,7 +298,7 @@ private Date getPreviousWorkflowSuccess(String jobName) throws Exception { + "ORDER BY startDate DESC " + "LIMIT 1 "; - return getResultStartDate(log, connection, cypher, parameters); + return getResultStartDate(getLogChannel(), connection, cypher, parameters); } private Date getResultStartDate( diff --git a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/output/Neo4JOutput.java b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/output/Neo4JOutput.java index b3794f7475a..fc3a46aa9c5 100644 --- a/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/output/Neo4JOutput.java +++ b/plugins/tech/neo4j/src/main/java/org/apache/hop/neo4j/transforms/output/Neo4JOutput.java @@ -185,10 +185,10 @@ public boolean processRow() throws HopException { // Create a session // if (meta.isReturningGraph()) { - log.logBasic("Writing to output graph field, not to Neo4j"); + logBasic("Writing to output graph field, not to Neo4j"); } else { - data.driver = data.neoConnection.getDriver(log, this); - data.session = data.neoConnection.getSession(log, data.driver, this); + data.driver = data.neoConnection.getDriver(getLogChannel(), this); + data.session = data.neoConnection.getSession(getLogChannel(), data.driver, this); // Create indexes for the primary properties of the From and To nodes // @@ -196,7 +196,7 @@ public boolean processRow() throws HopException { try { createNodePropertyIndexes(meta, data, getInputRowMeta(), row); } catch (HopException e) { - log.logError("Unable to create indexes", e); + logError("Unable to create indexes", e); return false; } } @@ -796,7 +796,7 @@ public boolean init() { // Connect to Neo4j using info metastore Neo4j Connection metadata // if (StringUtils.isEmpty(resolve(meta.getConnection()))) { - log.logError("You need to specify a Neo4j connection to use in this transform"); + logError("You need to specify a Neo4j connection to use in this transform"); return false; } @@ -804,7 +804,7 @@ public boolean init() { data.neoConnection = 
metadataProvider.getSerializer(NeoConnection.class).load(resolve(meta.getConnection())); if (data.neoConnection == null) { - log.logError( + logError( "Connection '" + resolve(meta.getConnection()) + "' could not be found in the metastore : " @@ -813,7 +813,7 @@ public boolean init() { } data.version4 = data.neoConnection.isVersion4(); } catch (HopException e) { - log.logError( + logError( "Could not gencsv Neo4j connection '" + resolve(meta.getConnection()) + "' from the metastore", @@ -867,8 +867,8 @@ private void processSummary(Result result) throws HopException { boolean error = false; ResultSummary summary = result.consume(); for (Notification notification : summary.notifications()) { - log.logError(notification.title() + " (" + notification.severity() + ")"); - log.logError( + logError(notification.title() + " (" + notification.severity() + ")"); + logError( notification.code() + " : " + notification.description() @@ -983,7 +983,7 @@ private void createIndexForNode( if (label != null && !primaryProperties.isEmpty()) { NeoConnectionUtils.createNodeIndex( - log, data.session, Collections.singletonList(label), primaryProperties); + getLogChannel(), data.session, Collections.singletonList(label), primaryProperties); } } } diff --git a/plugins/tech/parquet/src/main/java/org/apache/hop/parquet/transforms/output/ParquetOutput.java b/plugins/tech/parquet/src/main/java/org/apache/hop/parquet/transforms/output/ParquetOutput.java index a34b403be24..eb0232e3782 100644 --- a/plugins/tech/parquet/src/main/java/org/apache/hop/parquet/transforms/output/ParquetOutput.java +++ b/plugins/tech/parquet/src/main/java/org/apache/hop/parquet/transforms/output/ParquetOutput.java @@ -268,7 +268,7 @@ private String buildFilename(Date date) { filename += "-" + new DecimalFormat("0000").format(data.split); } if (data.isBeamContext()) { - filename += "_" + log.getLogChannelId() + "_" + data.getBeamBundleNr(); + filename += "_" + getLogChannelId() + "_" + data.getBeamBundleNr(); } filename += "." 
+ Const.NVL(resolve(meta.getFilenameExtension()), "parquet"); filename += meta.getCompressionCodec().getExtension(); diff --git a/plugins/transforms/abort/src/main/java/org/apache/hop/pipeline/transforms/abort/Abort.java b/plugins/transforms/abort/src/main/java/org/apache/hop/pipeline/transforms/abort/Abort.java index 3e440de1d1e..1a64b6b52c2 100644 --- a/plugins/transforms/abort/src/main/java/org/apache/hop/pipeline/transforms/abort/Abort.java +++ b/plugins/transforms/abort/src/main/java/org/apache/hop/pipeline/transforms/abort/Abort.java @@ -111,7 +111,7 @@ public boolean processRow() throws HopException { Long.toString(nrInputRows), getInputRowMeta().getString(r))); } else { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, diff --git a/plugins/transforms/addsequence/src/main/java/org/apache/hop/pipeline/transforms/addsequence/AddSequence.java b/plugins/transforms/addsequence/src/main/java/org/apache/hop/pipeline/transforms/addsequence/AddSequence.java index 915f9b0f5f0..7d75cce0bb9 100644 --- a/plugins/transforms/addsequence/src/main/java/org/apache/hop/pipeline/transforms/addsequence/AddSequence.java +++ b/plugins/transforms/addsequence/src/main/java/org/apache/hop/pipeline/transforms/addsequence/AddSequence.java @@ -104,7 +104,7 @@ public boolean processRow() throws HopException { meta.getFields(data.outputRowMeta, getTransformName(), null, null, this, metadataProvider); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "AddSequence.Log.ReadRow") + getLinesRead() @@ -115,14 +115,14 @@ public boolean processRow() throws HopException { try { putRow(data.outputRowMeta, addSequence(getInputRowMeta(), r)); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "AddSequence.Log.WriteRow") + getLinesWritten() + " : " + getInputRowMeta().getString(r)); } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "AddSequence.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -156,7 +156,7 @@ public boolean init() { data.setDb(db); try { data.getDb().connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "AddSequence.Log.ConnectedDB")); } return true; diff --git a/plugins/transforms/analyticquery/src/main/java/org/apache/hop/pipeline/transforms/analyticquery/AnalyticQuery.java b/plugins/transforms/analyticquery/src/main/java/org/apache/hop/pipeline/transforms/analyticquery/AnalyticQuery.java index 40e234bac43..4695e84781a 100644 --- a/plugins/transforms/analyticquery/src/main/java/org/apache/hop/pipeline/transforms/analyticquery/AnalyticQuery.java +++ b/plugins/transforms/analyticquery/src/main/java/org/apache/hop/pipeline/transforms/analyticquery/AnalyticQuery.java @@ -121,7 +121,7 @@ public boolean processRow() throws HopException { processQueue(); } - if (log.isBasic() && checkFeedback(getLinesRead())) { + if (isBasic() && checkFeedback(getLinesRead())) { logBasic(BaseMessages.getString(PKG, "LineNr", getLinesRead())); } diff --git a/plugins/transforms/append/src/main/java/org/apache/hop/pipeline/transforms/append/Append.java b/plugins/transforms/append/src/main/java/org/apache/hop/pipeline/transforms/append/Append.java index 395f7568db2..a7f9e4e86e3 100644 --- a/plugins/transforms/append/src/main/java/org/apache/hop/pipeline/transforms/append/Append.java +++ 
b/plugins/transforms/append/src/main/java/org/apache/hop/pipeline/transforms/append/Append.java @@ -90,7 +90,7 @@ public boolean processRow() throws HopException { } if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "Append.Log.LineNumber") + getLinesRead()); } } diff --git a/plugins/transforms/blockingtransform/src/main/java/org/apache/hop/pipeline/transforms/blockingtransform/BlockingTransform.java b/plugins/transforms/blockingtransform/src/main/java/org/apache/hop/pipeline/transforms/blockingtransform/BlockingTransform.java index 4ebb691f0f8..2a8e6084172 100644 --- a/plugins/transforms/blockingtransform/src/main/java/org/apache/hop/pipeline/transforms/blockingtransform/BlockingTransform.java +++ b/plugins/transforms/blockingtransform/src/main/java/org/apache/hop/pipeline/transforms/blockingtransform/BlockingTransform.java @@ -119,14 +119,14 @@ private Object[] getBuffer() { // Open all files at once and read one row from each file... if (!data.files.isEmpty() && (data.dis.isEmpty() || data.fis.isEmpty())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "BlockingTransform.Log.Openfiles")); } try { FileObject fileObject = data.files.get(0); String filename = HopVfs.getFilename(fileObject); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "BlockingTransform.Log.Openfilename1") + filename @@ -147,7 +147,7 @@ private Object[] getBuffer() { // How long is the buffer? int buffersize = di.readInt(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "BlockingTransform.Log.BufferSize1") + filename @@ -295,7 +295,7 @@ public boolean processRow() throws HopException { // Now we can start the output! r = getBuffer(); while (r != null && !isStopped()) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row: " + getInputRowMeta().getString(r)); } diff --git a/plugins/transforms/blockuntiltransformsfinish/src/main/java/org/apache/hop/pipeline/transforms/blockuntiltransformsfinish/BlockUntilTransformsFinish.java b/plugins/transforms/blockuntiltransformsfinish/src/main/java/org/apache/hop/pipeline/transforms/blockuntiltransformsfinish/BlockUntilTransformsFinish.java index 39f78580dd0..1060dde557b 100644 --- a/plugins/transforms/blockuntiltransformsfinish/src/main/java/org/apache/hop/pipeline/transforms/blockuntiltransformsfinish/BlockUntilTransformsFinish.java +++ b/plugins/transforms/blockuntiltransformsfinish/src/main/java/org/apache/hop/pipeline/transforms/blockuntiltransformsfinish/BlockUntilTransformsFinish.java @@ -109,12 +109,12 @@ public boolean processRow() throws HopException { // This transform is still running... data.continueLoop = true; } else { - log.logBasic( + logBasic( "Transform " + transform.getName() + " status: " + transform.getStatusDescription()); // We have done with this transform. 
// remove it from the map data.componentMap.remove(e.getKey()); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Finished running transform [" + transform.getName() diff --git a/plugins/transforms/calculator/src/main/java/org/apache/hop/pipeline/transforms/calculator/Calculator.java b/plugins/transforms/calculator/src/main/java/org/apache/hop/pipeline/transforms/calculator/Calculator.java index 8f25b170722..65156fd47a0 100644 --- a/plugins/transforms/calculator/src/main/java/org/apache/hop/pipeline/transforms/calculator/Calculator.java +++ b/plugins/transforms/calculator/src/main/java/org/apache/hop/pipeline/transforms/calculator/Calculator.java @@ -164,7 +164,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "Calculator.Log.ReadRow") + getLinesRead() @@ -176,10 +176,10 @@ public boolean processRow() throws HopException { Object[] row = calcFields(getInputRowMeta(), r); putRow(data.getOutputRowMeta(), row); // copy row to possible alternate rowset(s). - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Wrote row #" + getLinesWritten() + " : " + getInputRowMeta().getString(r)); } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "Calculator.Log.Linenr", "" + getLinesRead())); } } catch (HopFileNotFoundException e) { diff --git a/plugins/transforms/checksum/src/main/java/org/apache/hop/pipeline/transforms/checksum/CheckSum.java b/plugins/transforms/checksum/src/main/java/org/apache/hop/pipeline/transforms/checksum/CheckSum.java index d50357c26d6..d2d074330d9 100644 --- a/plugins/transforms/checksum/src/main/java/org/apache/hop/pipeline/transforms/checksum/CheckSum.java +++ b/plugins/transforms/checksum/src/main/java/org/apache/hop/pipeline/transforms/checksum/CheckSum.java @@ -126,7 +126,7 @@ public boolean processRow() throws HopException { } } - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "CheckSum.Log.LineNumber", Long.toString(getLinesRead()))); } diff --git a/plugins/transforms/clonerow/src/main/java/org/apache/hop/pipeline/transforms/clonerow/CloneRow.java b/plugins/transforms/clonerow/src/main/java/org/apache/hop/pipeline/transforms/clonerow/CloneRow.java index 1976942630b..7a622ff1b79 100644 --- a/plugins/transforms/clonerow/src/main/java/org/apache/hop/pipeline/transforms/clonerow/CloneRow.java +++ b/plugins/transforms/clonerow/src/main/java/org/apache/hop/pipeline/transforms/clonerow/CloneRow.java @@ -102,7 +102,7 @@ public boolean processRow() throws HopException { } else { String nrclonesString = resolve(meta.getNrClones()); data.nrclones = Const.toInt(nrclonesString, 0); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "CloneRow.Log.NrClones", "" + data.nrclones)); } } @@ -134,7 +134,7 @@ public boolean processRow() throws HopException { throw new HopException(BaseMessages.getString(PKG, "CloneRow.Log.NrClonesIsNull")); } else { data.nrclones = nrCloneFieldValue; - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "CloneRow.Log.NrClones", "" + data.nrclones)); } } @@ -161,7 +161,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRowData); // copy row to output rowset(s) } - if (log.isDetailed() && checkFeedback(getLinesRead())) { + if (isDetailed() && 
checkFeedback(getLinesRead())) { logDetailed(BaseMessages.getString(PKG, "CloneRow.Log.LineNumber", "" + getLinesRead())); } diff --git a/plugins/transforms/coalesce/src/main/java/org/apache/hop/pipeline/transforms/coalesce/CoalesceTransform.java b/plugins/transforms/coalesce/src/main/java/org/apache/hop/pipeline/transforms/coalesce/CoalesceTransform.java index 355be7dd39c..604a903a6c6 100644 --- a/plugins/transforms/coalesce/src/main/java/org/apache/hop/pipeline/transforms/coalesce/CoalesceTransform.java +++ b/plugins/transforms/coalesce/src/main/java/org/apache/hop/pipeline/transforms/coalesce/CoalesceTransform.java @@ -68,7 +68,7 @@ public boolean processRow() throws HopException { // indexes // in the row structure that only need to be done once if (first) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "CoalesceTransform.Log.StartedProcessing")); } @@ -143,7 +143,7 @@ public boolean processRow() throws HopException { // put the row to the output row stream putRow(data.outputRowMeta, outputRowValues); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "CoalesceTransform.Log.WroteRowToNextTransform", outputRowValues)); diff --git a/plugins/transforms/columnexists/src/main/java/org/apache/hop/pipeline/transforms/columnexists/ColumnExists.java b/plugins/transforms/columnexists/src/main/java/org/apache/hop/pipeline/transforms/columnexists/ColumnExists.java index c0c40fe7d11..50240da44fd 100644 --- a/plugins/transforms/columnexists/src/main/java/org/apache/hop/pipeline/transforms/columnexists/ColumnExists.java +++ b/plugins/transforms/columnexists/src/main/java/org/apache/hop/pipeline/transforms/columnexists/ColumnExists.java @@ -141,7 +141,7 @@ public boolean processRow() throws HopException { // add new values to the row. putRow(data.outputRowMeta, outputRowData); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -196,7 +196,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ColumnExists.Log.ConnectedToDB")); } diff --git a/plugins/transforms/combinationlookup/src/main/java/org/apache/hop/pipeline/transforms/combinationlookup/CombinationLookup.java b/plugins/transforms/combinationlookup/src/main/java/org/apache/hop/pipeline/transforms/combinationlookup/CombinationLookup.java index 11e4cb89cfb..791403097ed 100644 --- a/plugins/transforms/combinationlookup/src/main/java/org/apache/hop/pipeline/transforms/combinationlookup/CombinationLookup.java +++ b/plugins/transforms/combinationlookup/src/main/java/org/apache/hop/pipeline/transforms/combinationlookup/CombinationLookup.java @@ -402,7 +402,7 @@ public boolean processRow() throws HopException { lookupValues(getInputRowMeta(), r); // add new values to the row in rowset[0]. 
putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "CombinationLookup.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -487,7 +487,7 @@ public void setCombiLookup(IRowMeta inputRowMeta) throws HopDatabaseException { } try { - if (log.isDebug()) { + if (isDebug()) { logDebug("preparing combi-lookup statement:" + Const.CR + sql); } data.prepStatementLookup = @@ -696,11 +696,6 @@ public Long combiInsert(IRowMeta rowMeta, Object[] row, Long valKey, Long valCrc return valKey; } - @Override - public boolean isRowLevel() { - return log.isRowLevel(); - } - @Override public boolean init() { if (super.init()) { @@ -722,7 +717,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "CombinationLookup.Log.ConnectedToDB")); } data.db.setCommit(meta.getCommitSize()); @@ -830,7 +825,7 @@ private void preloadCache(IRowMeta hashRowMeta) sql += "GROUP BY" + Const.CR; sql += lookupKeys + Const.CR; - if (log.isDebug()) { + if (isDebug()) { logDebug("Using preload cache statement:" + Const.CR + sql); } cacheValues = data.db.getRows(databaseMeta.stripCR(sql), meta.getCacheSize()); diff --git a/plugins/transforms/concatfields/src/main/java/org/apache/hop/pipeline/transforms/concatfields/ConcatFields.java b/plugins/transforms/concatfields/src/main/java/org/apache/hop/pipeline/transforms/concatfields/ConcatFields.java index 4876fe20a3f..aac55e7ef65 100644 --- a/plugins/transforms/concatfields/src/main/java/org/apache/hop/pipeline/transforms/concatfields/ConcatFields.java +++ b/plugins/transforms/concatfields/src/main/java/org/apache/hop/pipeline/transforms/concatfields/ConcatFields.java @@ -117,7 +117,7 @@ public synchronized boolean processRow() throws HopException { Object[] outputRowData = concatFields(row); putRow(data.outputRowMeta, outputRowData); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "ConcatFields.Log.WriteRow") + getLinesWritten() @@ -125,7 +125,7 @@ public synchronized boolean processRow() throws HopException { + data.outputRowMeta.getString(row)); } if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ConcatFields.Log.LineNumber") + getLinesRead()); } } diff --git a/plugins/transforms/constant/src/main/java/org/apache/hop/pipeline/transforms/constant/Constant.java b/plugins/transforms/constant/src/main/java/org/apache/hop/pipeline/transforms/constant/Constant.java index 17b0a830230..a24ce60acd9 100644 --- a/plugins/transforms/constant/src/main/java/org/apache/hop/pipeline/transforms/constant/Constant.java +++ b/plugins/transforms/constant/src/main/java/org/apache/hop/pipeline/transforms/constant/Constant.java @@ -262,7 +262,7 @@ public boolean processRow() throws HopException { putRow(data.outputMeta, r); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -271,7 +271,7 @@ public boolean processRow() throws HopException { getInputRowMeta().getString(r))); } - if (checkFeedback(getLinesWritten()) && log.isBasic()) { + if (checkFeedback(getLinesWritten()) && isBasic()) { logBasic( BaseMessages.getString(PKG, "Constant.Log.LineNr", Long.toString(getLinesWritten()))); } diff --git 
a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java index abd4c0c8381..f97c6fc4ede 100644 --- a/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java +++ b/plugins/transforms/cratedbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/cratedbbulkloader/CrateDBBulkLoader.java @@ -101,7 +101,7 @@ public boolean init() { data.db.connect(); getDbFields(); - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, "CrateDBBulkLoader.Connection.Connected", data.db.getDatabaseMeta())); @@ -486,7 +486,7 @@ private boolean closeFile() { data.writer.close(); } data.writer = null; - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing normal file ..."); } diff --git a/plugins/transforms/creditcardvalidator/src/main/java/org/apache/hop/pipeline/transforms/creditcardvalidator/CreditCardValidator.java b/plugins/transforms/creditcardvalidator/src/main/java/org/apache/hop/pipeline/transforms/creditcardvalidator/CreditCardValidator.java index c28e399cf32..eca99ed0618 100644 --- a/plugins/transforms/creditcardvalidator/src/main/java/org/apache/hop/pipeline/transforms/creditcardvalidator/CreditCardValidator.java +++ b/plugins/transforms/creditcardvalidator/src/main/java/org/apache/hop/pipeline/transforms/creditcardvalidator/CreditCardValidator.java @@ -131,7 +131,7 @@ public boolean processRow() throws HopException { // add new values to the row. putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, diff --git a/plugins/transforms/cubeinput/src/main/java/org/apache/hop/pipeline/transforms/cubeinput/CubeInput.java b/plugins/transforms/cubeinput/src/main/java/org/apache/hop/pipeline/transforms/cubeinput/CubeInput.java index 10955912a49..a262b5978f8 100644 --- a/plugins/transforms/cubeinput/src/main/java/org/apache/hop/pipeline/transforms/cubeinput/CubeInput.java +++ b/plugins/transforms/cubeinput/src/main/java/org/apache/hop/pipeline/transforms/cubeinput/CubeInput.java @@ -74,7 +74,7 @@ public boolean processRow() throws HopException { throw new HopException(e); // shouldn't happen on files } - if (checkFeedback(getLinesInput()) && log.isBasic()) { + if (checkFeedback(getLinesInput()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "CubeInput.Log.LineNumber") + getLinesInput()); } diff --git a/plugins/transforms/cubeoutput/src/main/java/org/apache/hop/pipeline/transforms/cubeoutput/CubeOutput.java b/plugins/transforms/cubeoutput/src/main/java/org/apache/hop/pipeline/transforms/cubeoutput/CubeOutput.java index a558d82a34c..e0a7a929a76 100644 --- a/plugins/transforms/cubeoutput/src/main/java/org/apache/hop/pipeline/transforms/cubeoutput/CubeOutput.java +++ b/plugins/transforms/cubeoutput/src/main/java/org/apache/hop/pipeline/transforms/cubeoutput/CubeOutput.java @@ -114,7 +114,7 @@ public boolean processRow() throws HopException { putRow(data.outputMeta, r); // in case we want it to go further... 
- if (checkFeedback(getLinesOutput()) && log.isBasic()) { + if (checkFeedback(getLinesOutput()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "CubeOutput.Log.LineNumber") + getLinesOutput()); } @@ -209,7 +209,7 @@ private void createParentFolder(FileObject parentFolder) throws HopTransformExce // Try to create the parent folder... parentFolder.createFolder(); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "CubeOutput.Log.ParentFolderCreated", parentFolder.getName())); diff --git a/plugins/transforms/databasejoin/src/main/java/org/apache/hop/pipeline/transforms/databasejoin/DatabaseJoin.java b/plugins/transforms/databasejoin/src/main/java/org/apache/hop/pipeline/transforms/databasejoin/DatabaseJoin.java index 4d134ed6256..1490c15cca2 100644 --- a/plugins/transforms/databasejoin/src/main/java/org/apache/hop/pipeline/transforms/databasejoin/DatabaseJoin.java +++ b/plugins/transforms/databasejoin/src/main/java/org/apache/hop/pipeline/transforms/databasejoin/DatabaseJoin.java @@ -77,7 +77,7 @@ private void lookupValues(IRowMeta rowMeta, Object[] rowData) throws HopExceptio data.lookupRowMeta = new RowMeta(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "DatabaseJoin.Log.CheckingRow") + rowMeta.getString(rowData)); @@ -121,7 +121,7 @@ private void lookupValues(IRowMeta rowMeta, Object[] rowData) throws HopExceptio // we have to clone, otherwise we only get the last new value putRow(data.outputRowMeta, data.outputRowMeta.cloneRow(newRow)); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DatabaseJoin.Log.PutoutRow") + data.outputRowMeta.getString(newRow)); @@ -193,7 +193,7 @@ public boolean processRow() throws HopException { lookupValues(getInputRowMeta(), r); // add new values to the row in rowset[0]. 
if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "DatabaseJoin.Log.LineNumber") + getLinesRead()); } } @@ -272,7 +272,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "DatabaseJoin.Log.ConnectedToDB")); } @@ -282,7 +282,7 @@ public boolean init() { } // Prepare the SQL statement data.pstmt = data.db.prepareSql(sql); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "DatabaseJoin.Log.SQLStatement", sql)); } data.db.setQueryLimit(meta.getRowLimit()); diff --git a/plugins/transforms/databaselookup/src/main/java/org/apache/hop/pipeline/transforms/databaselookup/DatabaseLookup.java b/plugins/transforms/databaselookup/src/main/java/org/apache/hop/pipeline/transforms/databaselookup/DatabaseLookup.java index db1d103f7f0..3f7d977144c 100644 --- a/plugins/transforms/databaselookup/src/main/java/org/apache/hop/pipeline/transforms/databaselookup/DatabaseLookup.java +++ b/plugins/transforms/databaselookup/src/main/java/org/apache/hop/pipeline/transforms/databaselookup/DatabaseLookup.java @@ -121,7 +121,7 @@ synchronized Object[] lookupValues(IRowMeta inputRowMeta, Object[] row) throws H // database when all rows // are in (exception LIKE // operator) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DatabaseLookup.Log.AddedValuesToLookupRow1") + meta.getLookup().getKeyFields().size() @@ -146,7 +146,7 @@ synchronized Object[] lookupValues(IRowMeta inputRowMeta, Object[] row) throws H return null; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(BaseMessages.getString(PKG, "DatabaseLookup.Log.NoResultsFoundAfterLookup")); } @@ -159,7 +159,7 @@ synchronized Object[] lookupValues(IRowMeta inputRowMeta, Object[] row) throws H } } } else { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DatabaseLookup.Log.FoundResultsAfterLookup") + Arrays.toString(add)); @@ -359,7 +359,7 @@ public boolean processRow() throws HopException { lookup.isFailingOnMultipleResults()); // lookup the values! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "DatabaseLookup.Log.CheckingRow") + getInputRowMeta().getString(r)); @@ -394,7 +394,7 @@ public boolean processRow() throws HopException { + BaseMessages.getString( PKG, "DatabaseLookup.ERROR0001.FieldRequired4.Exception")); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "DatabaseLookup.Log.FieldHasIndex1") + field.getStreamField1() @@ -433,7 +433,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DatabaseLookup.Log.GotRowFromPreviousTransform") + getInputRowMeta().getString(r)); @@ -447,7 +447,7 @@ public boolean processRow() throws HopException { // copy row to output rowset(s) putRow(data.outputRowMeta, outputRow); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DatabaseLookup.Log.WroteRowToNextTransform") + getInputRowMeta().getString(r)); @@ -687,7 +687,7 @@ private void connectDatabase(Database database) throws HopDatabaseException { database.setCommit(100); // we never get a commit, but it just turns off auto-commit. 
- if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "DatabaseLookup.Log.ConnectedToDatabase")); } } diff --git a/plugins/transforms/datagrid/src/main/java/org/apache/hop/pipeline/transforms/datagrid/DataGrid.java b/plugins/transforms/datagrid/src/main/java/org/apache/hop/pipeline/transforms/datagrid/DataGrid.java index b8e5490c89e..253765c35b1 100644 --- a/plugins/transforms/datagrid/src/main/java/org/apache/hop/pipeline/transforms/datagrid/DataGrid.java +++ b/plugins/transforms/datagrid/src/main/java/org/apache/hop/pipeline/transforms/datagrid/DataGrid.java @@ -89,8 +89,8 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRowData); data.linesWritten++; - if (log.isRowLevel()) { - log.logRowlevel( + if (isRowLevel()) { + logRowlevel( toString(), BaseMessages.getString( PKG, @@ -99,7 +99,7 @@ public boolean processRow() throws HopException { data.outputRowMeta.getString(outputRowData))); } - if (checkFeedback(getLinesWritten()) && log.isBasic()) { + if (checkFeedback(getLinesWritten()) && isBasic()) { logBasic( BaseMessages.getString(PKG, "DataGrid.Log.LineNr", Long.toString(getLinesWritten()))); } diff --git a/plugins/transforms/dbproc/src/main/java/org/apache/hop/pipeline/transforms/dbproc/DBProc.java b/plugins/transforms/dbproc/src/main/java/org/apache/hop/pipeline/transforms/dbproc/DBProc.java index c20809c6404..5ca7d5d874e 100644 --- a/plugins/transforms/dbproc/src/main/java/org/apache/hop/pipeline/transforms/dbproc/DBProc.java +++ b/plugins/transforms/dbproc/src/main/java/org/apache/hop/pipeline/transforms/dbproc/DBProc.java @@ -160,7 +160,7 @@ public boolean processRow() throws HopException { runProc(data.inputRowMeta, r); // add new values to the row in rowset[0]. putRow(data.outputMeta, outputRowData); // copy row to output rowset(s) - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "DBProc.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -211,12 +211,12 @@ public boolean init() { data.db.connect(); if (!meta.isAutoCommit()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "DBProc.Log.AutoCommit")); } data.db.setCommit(9999); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "DBProc.Log.ConnectedToDB")); } diff --git a/plugins/transforms/delay/src/main/java/org/apache/hop/pipeline/transforms/delay/Delay.java b/plugins/transforms/delay/src/main/java/org/apache/hop/pipeline/transforms/delay/Delay.java index 34a2e327769..7c6793f7a1e 100644 --- a/plugins/transforms/delay/src/main/java/org/apache/hop/pipeline/transforms/delay/Delay.java +++ b/plugins/transforms/delay/src/main/java/org/apache/hop/pipeline/transforms/delay/Delay.java @@ -80,7 +80,7 @@ public boolean processRow() throws HopException { String timeOut = resolve(meta.getTimeout()); data.timeout = Const.toInt(timeOut, 0); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "Delay.Log.TimeOut", "" + data.timeout, msgScale)); } } @@ -115,13 +115,13 @@ public boolean processRow() throws HopException { } } } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "Delay.WaitTimeIsElapsed.Label")); } putRow(getInputRowMeta(), r); // copy row to possible alternate rowset(s). 
- if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed(BaseMessages.getString(PKG, "Delay.Log.LineNumber", "" + getLinesRead())); } diff --git a/plugins/transforms/delete/src/main/java/org/apache/hop/pipeline/transforms/delete/Delete.java b/plugins/transforms/delete/src/main/java/org/apache/hop/pipeline/transforms/delete/Delete.java index 7ef14855b98..87cd36d8a6d 100644 --- a/plugins/transforms/delete/src/main/java/org/apache/hop/pipeline/transforms/delete/Delete.java +++ b/plugins/transforms/delete/src/main/java/org/apache/hop/pipeline/transforms/delete/Delete.java @@ -69,7 +69,7 @@ private synchronized void deleteValues(IRowMeta rowMeta, Object[] row) throws Ho data.db.setValues(data.deleteParameterRowMeta, deleteRow, data.prepStatementDelete); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -107,7 +107,7 @@ public boolean processRow() throws HopException { this, meta.getLookup().getSchemaName(), meta.getLookup().getTableName()); // lookup the values! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "Delete.Log.CheckingRow") + getInputRowMeta().getString(r)); } @@ -140,7 +140,7 @@ public boolean processRow() throws HopException { BaseMessages.getString(PKG, "Delete.Exception.FieldRequired", dlf.getKeyStream2())); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "Delete.Log.FieldInfo", dlf.getKeyStream()) + data.keynrs[i]); @@ -155,7 +155,7 @@ public boolean processRow() throws HopException { putRow( data.outputRowMeta, r); // output the same rows of data, but with a copy of the metadata - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "Delete.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -211,7 +211,7 @@ public void prepareDelete(IRowMeta rowMeta) throws HopDatabaseException { } try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Setting delete preparedStatement to [" + sql + "]"); } data.prepStatementDelete = @@ -242,7 +242,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "Delete.Log.ConnectedToDB")); } diff --git a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java index 49cc9606e5c..b49e9d9ef33 100644 --- a/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java +++ b/plugins/transforms/denormaliser/src/main/java/org/apache/hop/pipeline/transforms/denormaliser/Denormaliser.java @@ -104,7 +104,7 @@ public boolean processRow() throws HopException { data.previous = r; - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "Denormaliser.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/detectemptystream/src/main/java/org/apache/hop/pipeline/transforms/detectemptystream/DetectEmptyStream.java b/plugins/transforms/detectemptystream/src/main/java/org/apache/hop/pipeline/transforms/detectemptystream/DetectEmptyStream.java index 44b3d7d8de5..767578a4538 100644 --- 
a/plugins/transforms/detectemptystream/src/main/java/org/apache/hop/pipeline/transforms/detectemptystream/DetectEmptyStream.java +++ b/plugins/transforms/detectemptystream/src/main/java/org/apache/hop/pipeline/transforms/detectemptystream/DetectEmptyStream.java @@ -62,7 +62,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, buildOneRow()); // copy row to possible alternate rowset(s). if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "DetectEmptyStream.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/detectlastrow/src/main/java/org/apache/hop/pipeline/transforms/detectlastrow/DetectLastRow.java b/plugins/transforms/detectlastrow/src/main/java/org/apache/hop/pipeline/transforms/detectlastrow/DetectLastRow.java index 755df12ecfd..64f404806f8 100644 --- a/plugins/transforms/detectlastrow/src/main/java/org/apache/hop/pipeline/transforms/detectlastrow/DetectLastRow.java +++ b/plugins/transforms/detectlastrow/src/main/java/org/apache/hop/pipeline/transforms/detectlastrow/DetectLastRow.java @@ -79,7 +79,7 @@ public boolean processRow() throws HopException { // copy row to output rowset(s) putRow(data.outputRowMeta, outputRow); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DetectLastRow.Log.WroteRowToNextTransform") + data.outputRowMeta.getString(outputRow)); @@ -100,7 +100,7 @@ public boolean processRow() throws HopException { // copy row to output rowset(s) putRow(data.outputRowMeta, outputRow); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DetectLastRow.Log.WroteRowToNextTransform") + data.outputRowMeta.getString(outputRow)); diff --git a/plugins/transforms/dimensionlookup/src/main/java/org/apache/hop/pipeline/transforms/dimensionlookup/DimensionLookup.java b/plugins/transforms/dimensionlookup/src/main/java/org/apache/hop/pipeline/transforms/dimensionlookup/DimensionLookup.java index f191647adcf..f91e1ea02e2 100644 --- a/plugins/transforms/dimensionlookup/src/main/java/org/apache/hop/pipeline/transforms/dimensionlookup/DimensionLookup.java +++ b/plugins/transforms/dimensionlookup/src/main/java/org/apache/hop/pipeline/transforms/dimensionlookup/DimensionLookup.java @@ -212,7 +212,7 @@ public boolean processRow() throws HopException { lookupValues(data.inputRowMeta, r); // add new values to the row in rowset[0]. 
putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "DimensionLookup.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -311,7 +311,7 @@ private void preloadCache() throws HopException { sql += " FROM " + data.schemaTable; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Pre-loading cache by reading from database with: " + Const.CR + sql + Const.CR); } @@ -821,8 +821,7 @@ private synchronized Object[] lookupValues(IRowMeta rowMeta, Object[] row) throw // if (data.returnRowMeta.getValueMeta(0).isBigNumber() && returnRow[0] instanceof Long) { if (isDebug()) { - log.logDebug( - "Changing the type of the technical key from TYPE_BIGNUMBER to an TYPE_INTEGER"); + logDebug("Changing the type of the technical key from TYPE_BIGNUMBER to an TYPE_INTEGER"); } IValueMeta tkValueMeta = data.returnRowMeta.getValueMeta(0); data.returnRowMeta.setValueMeta( @@ -1312,16 +1311,6 @@ public Long dimInsert( return technicalKey; } - @Override - public boolean isRowLevel() { - return log.isRowLevel(); - } - - @Override - public boolean isDebug() { - return log.isDebug(); - } - public void dimUpdate(IRowMeta rowMeta, Object[] row, Long dimkey, Date valueDate) throws HopDatabaseException { DLFields f = meta.getFields(); @@ -1806,7 +1795,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "DimensionLookup.Log.ConnectedToDB")); } data.db.setCommit(meta.getCommitSize()); diff --git a/plugins/transforms/dorisbulkloader/src/main/java/org/apache/hop/pipeline/transforms/dorisbulkloader/DorisBulkLoader.java b/plugins/transforms/dorisbulkloader/src/main/java/org/apache/hop/pipeline/transforms/dorisbulkloader/DorisBulkLoader.java index 4f8a8ffc0f4..45058a3b30f 100644 --- a/plugins/transforms/dorisbulkloader/src/main/java/org/apache/hop/pipeline/transforms/dorisbulkloader/DorisBulkLoader.java +++ b/plugins/transforms/dorisbulkloader/src/main/java/org/apache/hop/pipeline/transforms/dorisbulkloader/DorisBulkLoader.java @@ -133,8 +133,8 @@ public void processStreamLoad(String streamLoadRow, boolean first) // stop to load buffer data into doris by http api data.dorisStreamLoad.endWritingIntoBuffer(); ResponseContent responseContent = data.dorisStreamLoad.executeDorisStreamLoad(); - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString( PKG, "DorisBulkLoader.Log.StreamLoadResult", responseContent.toString())); } @@ -160,8 +160,8 @@ public void processStreamLoad(String streamLoadRow, boolean first) // stream load current buffer data into doris, and then write data into buffer again data.dorisStreamLoad.endWritingIntoBuffer(); ResponseContent responseContent = data.dorisStreamLoad.executeDorisStreamLoad(); - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString( PKG, "DorisBulkLoader.Log.StreamLoadResult", responseContent.toString())); } @@ -188,8 +188,8 @@ public void processStreamLoad(String streamLoadRow, boolean first) * @throws IOException */ private void initStreamLoad() throws HopException { - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( BaseMessages.getString( PKG, "DorisBulkLoader.Log.StreamLoadParameter", diff --git 
a/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesAccumulator.java b/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesAccumulator.java index f69ef094ffb..c2df7116fe3 100644 --- a/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesAccumulator.java +++ b/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesAccumulator.java @@ -58,7 +58,7 @@ public boolean runtimeInit() throws HopTransformException { } catch (RuleValidationException e) { for (String message : e.getMessages()) { - log.logError(message); + logError(message); } throw new HopTransformException(BaseMessages.getString(PKG, "RulesData.Error.CompileDRL")); } diff --git a/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesExecutor.java b/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesExecutor.java index e0c1a98b606..efabb13c9d7 100644 --- a/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesExecutor.java +++ b/plugins/transforms/drools/src/main/java/org/apache/hop/pipeline/transforms/drools/RulesExecutor.java @@ -57,7 +57,7 @@ public boolean runtimeInit() throws HopTransformException { } catch (RuleValidationException e) { for (String message : e.getMessages()) { - log.logError(message); + logError(message); } throw new HopTransformException(BaseMessages.getString(PKG, "RulesData.Error.CompileDRL")); } diff --git a/plugins/transforms/dynamicsqlrow/src/main/java/org/apache/hop/pipeline/transforms/dynamicsqlrow/DynamicSqlRow.java b/plugins/transforms/dynamicsqlrow/src/main/java/org/apache/hop/pipeline/transforms/dynamicsqlrow/DynamicSqlRow.java index b7d15338eb2..e9e3ce17f85 100644 --- a/plugins/transforms/dynamicsqlrow/src/main/java/org/apache/hop/pipeline/transforms/dynamicsqlrow/DynamicSqlRow.java +++ b/plugins/transforms/dynamicsqlrow/src/main/java/org/apache/hop/pipeline/transforms/dynamicsqlrow/DynamicSqlRow.java @@ -64,7 +64,7 @@ private synchronized void lookupValues(IRowMeta rowMeta, Object[] rowData) throw loadFromBuffer = false; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "DynamicSQLRow.Log.CheckingRow") + rowMeta.getString(rowData)); @@ -79,7 +79,7 @@ private synchronized void lookupValues(IRowMeta rowMeta, Object[] rowData) throw sql = sqlTemp; } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "DynamicSQLRow.Log.SQLStatement", sql)); } @@ -182,7 +182,7 @@ private synchronized void lookupValues(IRowMeta rowMeta, Object[] rowData) throw data.skipPreviousRow = false; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "DynamicSQLRow.Log.PutoutRow") + data.outputRowMeta.getString(newRow)); @@ -256,10 +256,8 @@ public boolean processRow() throws HopException { try { lookupValues(getInputRowMeta(), r); - if (checkFeedback(getLinesRead())) { - if (log.isDetailed()) { - logDetailed(BaseMessages.getString(PKG, "DynamicSQLRow.Log.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isDetailed()) { + logDetailed(BaseMessages.getString(PKG, "DynamicSQLRow.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { boolean sendToErrorRow = false; @@ -320,7 +318,7 @@ public boolean init() { data.db.setCommit(100); // we never get a commit, but it just turns off auto-commit. 
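A few hunks also merge the nested feedback and log-level guards into one condition (DynamicSqlRow above; FieldSplitter later in this diff). Reduced to a sketch with a placeholder message, the transformation is:

    // before: two nested guards around the log call
    if (checkFeedback(getLinesRead())) {
      if (log.isDetailed()) {
        logDetailed("Linenr " + getLinesRead());   // placeholder message
      }
    }

    // after: a single combined guard; behaviour is unchanged because the
    // log call only ran when both conditions held
    if (checkFeedback(getLinesRead()) && isDetailed()) {
      logDetailed("Linenr " + getLinesRead());     // placeholder message
    }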
- if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "DynamicSQLRow.Log.ConnectedToDB")); } diff --git a/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelinput/ExcelInput.java b/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelinput/ExcelInput.java index b7d57f1625c..f36f28a921b 100644 --- a/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelinput/ExcelInput.java +++ b/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelinput/ExcelInput.java @@ -101,7 +101,7 @@ private Object[] fillRow(int startColumn, ExcelInputRow excelInputRow) throws Ho ex = new HopCellValueException(ex, this.data.sheetnr, this.data.rownr, i, ""); throw ex; } - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, @@ -159,7 +159,7 @@ private Object[] fillRow(int startColumn, ExcelInputRow excelInputRow) throws Ho row[rowColumn] = cell.getValue(); sourceMeta = data.valueMetaNumber; } else { - if (log.isDetailed()) { + if (isDetailed()) { KCellType ct = cell.getType(); logDetailed( BaseMessages.getString( @@ -218,7 +218,7 @@ private Object[] fillRow(int startColumn, ExcelInputRow excelInputRow) throws Ho ex = new HopCellValueException(ex, this.data.sheetnr, cell.getRow(), i, field.getName()); throw ex; } - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, @@ -431,7 +431,7 @@ public boolean processRow() throws HopException { // See if we're not done processing... // We are done processing if the filenr >= number of files. if (data.filenr >= data.files.nrOfFiles()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ExcelInput.Log.NoMoreFiles", "" + data.filenr)); } @@ -441,7 +441,7 @@ public boolean processRow() throws HopException { if (meta.getRowLimit() > 0 && getLinesInput() >= meta.getRowLimit()) { // The close of the openFile is in dispose() - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ExcelInput.Log.RowLimitReached", "" + meta.getRowLimit())); } @@ -487,7 +487,7 @@ private void handleMissingFiles() throws HopException { if (!nonExistantFiles.isEmpty()) { String message = FileInputList.getRequiredFilesDescription(nonExistantFiles); - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "ExcelInput.Log.RequiredFilesTitle"), BaseMessages.getString(PKG, "ExcelInput.Warning.MissingFiles", message)); @@ -506,7 +506,7 @@ private void handleMissingFiles() throws HopException { List nonAccessibleFiles = data.files.getNonAccessibleFiles(); if (!nonAccessibleFiles.isEmpty()) { String message = FileInputList.getRequiredFilesDescription(nonAccessibleFiles); - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "ExcelInput.Log.RequiredFilesTitle"), BaseMessages.getString(PKG, "ExcelInput.Log.RequiredFilesMsgNotAccessible", message)); @@ -569,7 +569,7 @@ public Object[] getRowFromWorkbooks() { addResultFile(resultFile); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ExcelInput.Log.OpeningFile", "" + data.filenr + " : " + data.filename)); @@ -599,7 +599,7 @@ public Object[] getRowFromWorkbooks() { boolean nextsheet = false; // What sheet were we handling? - if (log.isDebug()) { + if (isDebug()) { logDetailed( BaseMessages.getString( PKG, "ExcelInput.Log.GetSheet", "" + data.filenr + "." 
+ data.sheetnr)); @@ -629,7 +629,7 @@ public Object[] getRowFromWorkbooks() { if (!data.filePlayList.isProcessingNeeded(data.file, lineNr, sheetName)) { retval = null; // placeholder, was already null } else { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -638,14 +638,14 @@ public Object[] getRowFromWorkbooks() { data.filenr + "." + data.sheetnr)); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "ExcelInput.Log.ReadLineWith", "" + line.length)); } ExcelInputRow excelInputRow = new ExcelInputRow(sheet.getName(), lineNr, line); Object[] r = fillRow(data.colnr, excelInputRow); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -669,7 +669,7 @@ public Object[] getRowFromWorkbooks() { } } } catch (ArrayIndexOutOfBoundsException e) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(BaseMessages.getString(PKG, "ExcelInput.Log.OutOfIndex")); } @@ -884,7 +884,7 @@ public void dispose() { try { data.errorHandler.close(); } catch (HopException e) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ExcelInput.Error.CouldNotCloseErrorHandler", e.toString())); diff --git a/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelwriter/ExcelWriterTransform.java b/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelwriter/ExcelWriterTransform.java index 0a2633c07fa..b69b7d0d477 100644 --- a/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelwriter/ExcelWriterTransform.java +++ b/plugins/transforms/excel/src/main/java/org/apache/hop/pipeline/transforms/excelwriter/ExcelWriterTransform.java @@ -232,7 +232,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, r); // Some basic logging - if (checkFeedback(getLinesOutput()) && log.isBasic()) { + if (checkFeedback(getLinesOutput()) && isBasic()) { logBasic("Linenr " + getLinesOutput()); } return true; @@ -694,7 +694,7 @@ void writeField( */ private void setDataFormat( ExcelWriterWorkbookDefinition workbookDefinition, String excelFieldFormat, Cell cell) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -728,12 +728,7 @@ public String buildFilename(IRowMeta rowMeta, Object[] row) { */ public String buildFilename(int splitNr) { return meta.buildFilename( - this, - getCopy(), - splitNr, - data.isBeamContext(), - log.getLogChannelId(), - data.getBeamBundleNr()); + this, getCopy(), splitNr, data.isBeamContext(), getLogChannelId(), data.getBeamBundleNr()); } /** @@ -799,7 +794,7 @@ public void prepareNextOutputFile(Object[] row) throws HopException { createParentFolder(file); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ExcelWriterTransform.Log.OpeningFile", file.getName().toString())); @@ -807,7 +802,7 @@ public void prepareNextOutputFile(Object[] row) throws HopException { // determine whether existing file must be deleted if (file.exists() && data.createNewFile && !file.delete()) { - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, @@ -854,7 +849,7 @@ public void prepareNextOutputFile(Object[] row) throws HopException { copyFile(HopVfs.getFileObject(data.realTemplateFileName, variables), file); } else { // template is missing, log it and get out - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, "ExcelWriterTransform.Log.TemplateMissing", 
data.realTemplateFileName)); @@ -1032,7 +1027,7 @@ public void prepareNextOutputFile(Object[] row) throws HopException { } } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "ExcelWriterTransform.Log.FileOpened", file.getName().toString())); diff --git a/plugins/transforms/execinfo/src/main/java/org/apache/hop/pipeline/transforms/execinfo/ExecInfo.java b/plugins/transforms/execinfo/src/main/java/org/apache/hop/pipeline/transforms/execinfo/ExecInfo.java index abe6eb38caa..2f4ad166d79 100644 --- a/plugins/transforms/execinfo/src/main/java/org/apache/hop/pipeline/transforms/execinfo/ExecInfo.java +++ b/plugins/transforms/execinfo/src/main/java/org/apache/hop/pipeline/transforms/execinfo/ExecInfo.java @@ -366,7 +366,7 @@ public boolean init() { boolean noRemarks = true; for (ICheckResult remark : remarks) { if (remark.getType() == ICheckResult.TYPE_RESULT_ERROR) { - log.logError(remark.getText()); + logError(remark.getText()); noRemarks = false; } } @@ -383,7 +383,7 @@ public boolean init() { .load(resolve(meta.getLocation())); data.location.getExecutionInfoLocation().initialize(this, metadataProvider); } catch (HopException e) { - log.logError("Error initializing execution information location " + meta.getLocation(), e); + logError("Error initializing execution information location " + meta.getLocation(), e); return false; } @@ -396,7 +396,7 @@ public void dispose() { try { data.location.getExecutionInfoLocation().close(); } catch (Exception e) { - log.logError("Error closing location " + data.location.getName(), e); + logError("Error closing location " + data.location.getName(), e); } } super.dispose(); diff --git a/plugins/transforms/execprocess/src/main/java/org/apache/hop/pipeline/transforms/execprocess/ExecProcess.java b/plugins/transforms/execprocess/src/main/java/org/apache/hop/pipeline/transforms/execprocess/ExecProcess.java index 98481893abb..085a8afd350 100644 --- a/plugins/transforms/execprocess/src/main/java/org/apache/hop/pipeline/transforms/execprocess/ExecProcess.java +++ b/plugins/transforms/execprocess/src/main/java/org/apache/hop/pipeline/transforms/execprocess/ExecProcess.java @@ -118,7 +118,7 @@ public boolean processRow() throws HopException { // add new values to the row. 
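Where only the channel id is needed as a value, as in the buildFilename calls (ExcelWriterTransform above, JsonOutput later in this diff), log.getLogChannelId() becomes getLogChannelId(). A one-line sketch of the assumed accessor, inferred from the call sites:

    // Hypothetical accessor: exposes just the id of the underlying ILogChannel.
    public String getLogChannelId() {
      return logChannel.getLogChannelId();   // assumed delegation to the channel
    }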
putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, diff --git a/plugins/transforms/execsqlrow/src/main/java/org/apache/hop/pipeline/transforms/execsqlrow/ExecSqlRow.java b/plugins/transforms/execsqlrow/src/main/java/org/apache/hop/pipeline/transforms/execsqlrow/ExecSqlRow.java index 9199ef7a22a..fa2e82ac4ff 100644 --- a/plugins/transforms/execsqlrow/src/main/java/org/apache/hop/pipeline/transforms/execsqlrow/ExecSqlRow.java +++ b/plugins/transforms/execsqlrow/src/main/java/org/apache/hop/pipeline/transforms/execsqlrow/ExecSqlRow.java @@ -127,12 +127,12 @@ public boolean processRow() throws HopException { // empty filename throw new HopException(BaseMessages.getString(PKG, "ExecSqlRow.Log.EmptySQLFromFile")); } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "ExecSqlRow.Log.ExecutingSQLFromFile", sql)); } data.result = data.db.execStatementsFromFile(sql, meta.isSendOneStatement()); } else { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "ExecSqlRow.Log.ExecutingSQLScript") + Const.CR + sql); } @@ -164,7 +164,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, row); // send it out! - if (checkFeedback(getLinesWritten()) && log.isBasic()) { + if (checkFeedback(getLinesWritten()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "ExecSqlRow.Log.LineNumber") + getLinesWritten()); } } catch (HopException e) { @@ -189,7 +189,7 @@ public boolean processRow() throws HopException { @Override public void dispose() { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ExecSqlRow.Log.FinishingReadingQuery")); } @@ -240,7 +240,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ExecSqlRow.Log.ConnectedToDB")); } diff --git a/plugins/transforms/fake/src/main/java/org/apache/hop/pipeline/transforms/fake/Fake.java b/plugins/transforms/fake/src/main/java/org/apache/hop/pipeline/transforms/fake/Fake.java index b86dd0fcfb3..61ceb70dd17 100644 --- a/plugins/transforms/fake/src/main/java/org/apache/hop/pipeline/transforms/fake/Fake.java +++ b/plugins/transforms/fake/src/main/java/org/apache/hop/pipeline/transforms/fake/Fake.java @@ -64,7 +64,7 @@ public boolean init() { Method topicMethod = fakerType.getClass().getMethod(field.getTopic()); data.fakerMethods.add(topicMethod); } catch (Exception e) { - log.logError( + logError( "Error getting faker object or method for type " + field.getType() + " and topic " diff --git a/plugins/transforms/fieldschangesequence/src/main/java/org/apache/hop/pipeline/transforms/fieldschangesequence/FieldsChangeSequence.java b/plugins/transforms/fieldschangesequence/src/main/java/org/apache/hop/pipeline/transforms/fieldschangesequence/FieldsChangeSequence.java index a02bb0c9881..8c27585aae0 100644 --- a/plugins/transforms/fieldschangesequence/src/main/java/org/apache/hop/pipeline/transforms/fieldschangesequence/FieldsChangeSequence.java +++ b/plugins/transforms/fieldschangesequence/src/main/java/org/apache/hop/pipeline/transforms/fieldschangesequence/FieldsChangeSequence.java @@ -111,7 +111,7 @@ public boolean processRow() throws HopException { data.seq = data.startAt; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "FieldsChangeSequence.Log.ReadRow") + getLinesRead() @@ -126,7 +126,7 @@ public boolean processRow() 
throws HopException { data.seq += data.incrementBy; - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "FieldsChangeSequence.Log.WriteRow") + getLinesWritten() @@ -134,7 +134,7 @@ public boolean processRow() throws HopException { + getInputRowMeta().getString(r)); } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic( BaseMessages.getString(PKG, "FieldsChangeSequence.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/fieldsplitter/src/main/java/org/apache/hop/pipeline/transforms/fieldsplitter/FieldSplitter.java b/plugins/transforms/fieldsplitter/src/main/java/org/apache/hop/pipeline/transforms/fieldsplitter/FieldSplitter.java index 55afc07bb3e..c72a17a3199 100644 --- a/plugins/transforms/fieldsplitter/src/main/java/org/apache/hop/pipeline/transforms/fieldsplitter/FieldSplitter.java +++ b/plugins/transforms/fieldsplitter/src/main/java/org/apache/hop/pipeline/transforms/fieldsplitter/FieldSplitter.java @@ -104,7 +104,7 @@ private Object[] splitField(Object[] r) throws HopException { // final boolean selectFieldById = StringUtils.isNotEmpty(meta.getFields().get(0).getId()); - if (log.isDebug()) { + if (isDebug()) { if (selectFieldById) { logDebug(BaseMessages.getString(PKG, "FieldSplitter.Log.UsingIds")); } else { @@ -138,7 +138,7 @@ private Object[] splitField(Object[] r) throws HopException { } } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "FieldSplitter.Log.SplitInfo") + rawValue); } } else { @@ -147,7 +147,7 @@ private Object[] splitField(Object[] r) throws HopException { rawValue = (valueParts == null || i >= valueParts.length) ? null : valueParts[i]; prev += (rawValue == null ? 0 : rawValue.length()) + data.delimiter.length(); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "FieldSplitter.Log.SplitFieldsInfo", rawValue, String.valueOf(prev))); @@ -198,10 +198,8 @@ public synchronized boolean processRow() throws HopException { Object[] outputRowData = splitField(r); putRow(data.outputMeta, outputRowData); - if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { - logBasic(BaseMessages.getString(PKG, "FieldSplitter.Log.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isBasic()) { + logBasic(BaseMessages.getString(PKG, "FieldSplitter.Log.LineNumber") + getLinesRead()); } return true; diff --git a/plugins/transforms/fileexists/src/main/java/org/apache/hop/pipeline/transforms/fileexists/FileExists.java b/plugins/transforms/fileexists/src/main/java/org/apache/hop/pipeline/transforms/fileexists/FileExists.java index 8ac9c20615c..8ed3299429a 100644 --- a/plugins/transforms/fileexists/src/main/java/org/apache/hop/pipeline/transforms/fileexists/FileExists.java +++ b/plugins/transforms/fileexists/src/main/java/org/apache/hop/pipeline/transforms/fileexists/FileExists.java @@ -122,7 +122,7 @@ public boolean processRow() throws HopException { resultFile.setComment(BaseMessages.getString(PKG, "FileExists.Log.FileAddedResult")); addResultFile(resultFile); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "FileExists.Log.FilenameAddResult", data.file.toString())); @@ -142,7 +142,7 @@ public boolean processRow() throws HopException { // add new values to the row. 
putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, diff --git a/plugins/transforms/filemetadata/src/main/java/org/apache/hop/pipeline/transforms/filemetadata/FileMetadata.java b/plugins/transforms/filemetadata/src/main/java/org/apache/hop/pipeline/transforms/filemetadata/FileMetadata.java index 51ecc5b5235..13a647410df 100644 --- a/plugins/transforms/filemetadata/src/main/java/org/apache/hop/pipeline/transforms/filemetadata/FileMetadata.java +++ b/plugins/transforms/filemetadata/src/main/java/org/apache/hop/pipeline/transforms/filemetadata/FileMetadata.java @@ -350,14 +350,14 @@ private void buildOutputRows() throws HopException { } } catch (IOException | HopFileException e) { - log.logError("IO Error while reading file: " + fileName + ". Invalid charset?"); + logError("IO Error while reading file: " + fileName + ". Invalid charset?"); throw new HopTransformException(e.getMessage(), e); } catch (ArrayIndexOutOfBoundsException e) { - log.logError("Error determining field types for: " + fileName + ". Inconsistent delimiters?"); + logError("Error determining field types for: " + fileName + ". Inconsistent delimiters?"); throw new HopTransformException(e.getMessage(), e); } catch (CsvValidationException e) { - log.logError("Error validating CSV file " + fileName, e); + logError("Error validating CSV file " + fileName, e); throw new HopTransformException(e.getMessage(), e); } } @@ -390,7 +390,7 @@ private DelimiterDetector.DetectionResult detectDelimiters( .withDelimiterCandidates(delimiterCandidates) .withEnclosureCandidates(enclosureCandidates) .withInput(f) - .withLogger(log) + .withLogger(getLogChannel()) .withRowLimit(limitRows) .build(); diff --git a/plugins/transforms/filterrows/src/main/java/org/apache/hop/pipeline/transforms/filterrows/FilterRows.java b/plugins/transforms/filterrows/src/main/java/org/apache/hop/pipeline/transforms/filterrows/FilterRows.java index b5a1cd8f458..a70ead3bef1 100644 --- a/plugins/transforms/filterrows/src/main/java/org/apache/hop/pipeline/transforms/filterrows/FilterRows.java +++ b/plugins/transforms/filterrows/src/main/java/org/apache/hop/pipeline/transforms/filterrows/FilterRows.java @@ -126,7 +126,7 @@ public boolean processRow() throws HopException { } else { if (keep) { if (data.trueRowSet != null) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( "Sending row to true :" + data.trueTransformName @@ -137,7 +137,7 @@ public boolean processRow() throws HopException { } } else { if (data.falseRowSet != null) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( "Sending row to false :" + data.falseTransformName @@ -149,7 +149,7 @@ public boolean processRow() throws HopException { } } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "FilterRows.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/formula/src/main/java/org/apache/hop/pipeline/transforms/formula/Formula.java b/plugins/transforms/formula/src/main/java/org/apache/hop/pipeline/transforms/formula/Formula.java index 76d223c1b8e..2398c30f725 100644 --- a/plugins/transforms/formula/src/main/java/org/apache/hop/pipeline/transforms/formula/Formula.java +++ b/plugins/transforms/formula/src/main/java/org/apache/hop/pipeline/transforms/formula/Formula.java @@ -116,7 +116,7 @@ public boolean processRow() throws HopException { int tempIndex = 
getInputRowMeta().size(); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row #" + getLinesRead() + " : " + Arrays.toString(r)); } @@ -200,7 +200,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRowData); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Wrote row #" + getLinesWritten() + " : " + Arrays.toString(r)); } if (checkFeedback(getLinesRead())) { diff --git a/plugins/transforms/fuzzymatch/src/main/java/org/apache/hop/pipeline/transforms/fuzzymatch/FuzzyMatch.java b/plugins/transforms/fuzzymatch/src/main/java/org/apache/hop/pipeline/transforms/fuzzymatch/FuzzyMatch.java index 9e2e8efe75d..4490eb0516c 100644 --- a/plugins/transforms/fuzzymatch/src/main/java/org/apache/hop/pipeline/transforms/fuzzymatch/FuzzyMatch.java +++ b/plugins/transforms/fuzzymatch/src/main/java/org/apache/hop/pipeline/transforms/fuzzymatch/FuzzyMatch.java @@ -120,7 +120,7 @@ private boolean readLookupValues() throws HopException { data.nrCachedFields += meta.getLookupValues().size(); } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "FuzzyMatch.Log.ReadLookupRow") + rowSet.getRowMeta().getString(rowData)); @@ -486,7 +486,7 @@ public boolean processRow() throws HopException { } putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "FuzzyMatch.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { diff --git a/plugins/transforms/getfilenames/src/main/java/org/apache/hop/pipeline/transforms/getfilenames/GetFileNames.java b/plugins/transforms/getfilenames/src/main/java/org/apache/hop/pipeline/transforms/getfilenames/GetFileNames.java index 46f1abdbfd6..25f56e539b5 100644 --- a/plugins/transforms/getfilenames/src/main/java/org/apache/hop/pipeline/transforms/getfilenames/GetFileNames.java +++ b/plugins/transforms/getfilenames/src/main/java/org/apache/hop/pipeline/transforms/getfilenames/GetFileNames.java @@ -285,7 +285,7 @@ public boolean processRow() throws HopException { data.filenr++; - if (checkFeedback(getLinesInput()) && log.isBasic()) { + if (checkFeedback(getLinesInput()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "GetFileNames.Log.NrLine", "" + getLinesInput())); } diff --git a/plugins/transforms/getfilesrowcount/src/main/java/org/apache/hop/pipeline/transforms/getfilesrowcount/GetFilesRowsCount.java b/plugins/transforms/getfilesrowcount/src/main/java/org/apache/hop/pipeline/transforms/getfilesrowcount/GetFilesRowsCount.java index 0aeba035025..6d4b40464e5 100644 --- a/plugins/transforms/getfilesrowcount/src/main/java/org/apache/hop/pipeline/transforms/getfilesrowcount/GetFilesRowsCount.java +++ b/plugins/transforms/getfilesrowcount/src/main/java/org/apache/hop/pipeline/transforms/getfilesrowcount/GetFilesRowsCount.java @@ -82,7 +82,7 @@ private boolean getOneRow() throws HopException { if (meta.isFileFromField() || data.lastFile) { putRow(data.outputRowMeta, outputRow); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "GetFilesRowsCount.Log.TotalRowsFiles"), data.rowNumber, @@ -182,7 +182,7 @@ private boolean openNextFile() { } else { data.inputRow = getRow(); if (data.inputRow == null) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetFilesRowsCount.Log.FinishedProcessing")); } return false; @@ -226,7 +226,7 @@ private boolean 
openNextFile() { } // End if first String filename = getInputRowMeta().getString(data.inputRow, data.indexOfFilenameField); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -258,12 +258,12 @@ private boolean openNextFile() { addResultFile(resultFile); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "GetFilesRowsCount.Log.OpeningFile", data.file.toString())); } getRowNumber(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "GetFilesRowsCount.Log.FileOpened", data.file.toString())); } @@ -287,7 +287,7 @@ private boolean getNextFilenameFromField() { if (data.fileNumber >= data.files.nrOfFiles()) { // finished processing! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetFilesRowsCount.Log.FinishedProcessing")); } return true; @@ -370,7 +370,7 @@ public void dispose() { data.file.close(); data.file = null; } catch (Exception e) { - log.logError("Error closing file", e); + logError("Error closing file", e); } } if (data.lineStringBuilder != null) { diff --git a/plugins/transforms/getsubfolders/src/main/java/org/apache/hop/pipeline/transforms/getsubfolders/GetSubFolders.java b/plugins/transforms/getsubfolders/src/main/java/org/apache/hop/pipeline/transforms/getsubfolders/GetSubFolders.java index 3c97021fddd..3bca9561e5f 100644 --- a/plugins/transforms/getsubfolders/src/main/java/org/apache/hop/pipeline/transforms/getsubfolders/GetSubFolders.java +++ b/plugins/transforms/getsubfolders/src/main/java/org/apache/hop/pipeline/transforms/getsubfolders/GetSubFolders.java @@ -80,7 +80,7 @@ public boolean processRow() throws HopException { data.fileIndex++; - if (checkFeedback(getLinesInput()) && log.isBasic()) { + if (checkFeedback(getLinesInput()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "GetSubFolders.Log.NrLine", "" + getLinesInput())); } diff --git a/plugins/transforms/gettablenames/src/main/java/org/apache/hop/pipeline/transforms/gettablenames/GetTableNames.java b/plugins/transforms/gettablenames/src/main/java/org/apache/hop/pipeline/transforms/gettablenames/GetTableNames.java index 4b719596e3e..7d98c9d123b 100644 --- a/plugins/transforms/gettablenames/src/main/java/org/apache/hop/pipeline/transforms/gettablenames/GetTableNames.java +++ b/plugins/transforms/gettablenames/src/main/java/org/apache/hop/pipeline/transforms/gettablenames/GetTableNames.java @@ -365,10 +365,10 @@ private void processIncludeCatalog(Object[] outputRow) } private void logInfo(Object[] outputRow) throws HopValueException { - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetTableNames.LineNumber", "" + getLinesRead())); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "GetTableNames.Log.PutoutRow", data.outputRowMeta.getString(outputRow))); @@ -426,7 +426,7 @@ public boolean init() { data.db = new Database(this, variables, databaseMeta); try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetTableNames.Log.ConnectedToDB")); } diff --git a/plugins/transforms/getvariable/src/main/java/org/apache/hop/pipeline/transforms/getvariable/GetVariable.java b/plugins/transforms/getvariable/src/main/java/org/apache/hop/pipeline/transforms/getvariable/GetVariable.java index 99ed897794c..efa8ba46b3e 100644 --- 
a/plugins/transforms/getvariable/src/main/java/org/apache/hop/pipeline/transforms/getvariable/GetVariable.java +++ b/plugins/transforms/getvariable/src/main/java/org/apache/hop/pipeline/transforms/getvariable/GetVariable.java @@ -83,7 +83,7 @@ public boolean processRow() throws HopException { for (int i = 0; i < meta.getFieldDefinitions().size(); i++) { GetVariableMeta.FieldDefinition fieldDefinition = meta.getFieldDefinitions().get(i); String newValue = resolve(fieldDefinition.getVariableString()); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "field [" + fieldDefinition.getFieldName() + "] has value [" + newValue + "]"); } diff --git a/plugins/transforms/groupby/src/main/java/org/apache/hop/pipeline/transforms/groupby/GroupBy.java b/plugins/transforms/groupby/src/main/java/org/apache/hop/pipeline/transforms/groupby/GroupBy.java index 4cff7b1af15..7be8cd31273 100644 --- a/plugins/transforms/groupby/src/main/java/org/apache/hop/pipeline/transforms/groupby/GroupBy.java +++ b/plugins/transforms/groupby/src/main/java/org/apache/hop/pipeline/transforms/groupby/GroupBy.java @@ -256,7 +256,7 @@ public boolean processRow() throws HopException { data.previous = data.inputRowMeta.cloneRow(r); - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "GroupBy.LineNumber") + getLinesRead()); } @@ -945,13 +945,13 @@ public void dispose() { closeInput(); closeOutput(); } catch (HopFileException e) { - log.logError(e.getLocalizedMessage()); + logError(e.getLocalizedMessage()); } boolean tempFileDeleted = data.tempFile.delete(); - if (!tempFileDeleted && log.isDetailed()) { - log.logDetailed( + if (!tempFileDeleted && isDetailed()) { + logDetailed( BaseMessages.getString( PKG, "GroupBy.Exception.UnableToDeleteTemporaryFile", data.tempFile.getPath())); } diff --git a/plugins/transforms/http/src/main/java/org/apache/hop/pipeline/transforms/http/Http.java b/plugins/transforms/http/src/main/java/org/apache/hop/pipeline/transforms/http/Http.java index 7f11324d6a9..95a702a0f7f 100644 --- a/plugins/transforms/http/src/main/java/org/apache/hop/pipeline/transforms/http/Http.java +++ b/plugins/transforms/http/src/main/java/org/apache/hop/pipeline/transforms/http/Http.java @@ -131,7 +131,7 @@ Object[] callHttpService(IRowMeta rowMeta, Object[] rowData) throws HopException data.headerParameters[i].getName(), data.inputRowMeta.getString(rowData, data.header_parameters_nrs[i])); if (isDebug()) { - log.logDebug( + logDebug( BaseMessages.getString( PKG, "HTTPDialog.Log.HeaderValue", @@ -171,8 +171,8 @@ Object[] callHttpService(IRowMeta rowMeta, Object[] rowData) throws HopException // calculate the responseTime long responseTime = System.currentTimeMillis() - startTime; - if (log.isDetailed()) { - log.logDetailed(BaseMessages.getString(PKG, "HTTP.Log.ResponseTime", responseTime, uri)); + if (isDetailed()) { + logDetailed(BaseMessages.getString(PKG, "HTTP.Log.ResponseTime", responseTime, uri)); } int statusCode = requestStatusCode(httpResponse); // The status code diff --git a/plugins/transforms/insertupdate/src/main/java/org/apache/hop/pipeline/transforms/insertupdate/InsertUpdate.java b/plugins/transforms/insertupdate/src/main/java/org/apache/hop/pipeline/transforms/insertupdate/InsertUpdate.java index ffeb9913bd5..62ade81c045 100644 --- a/plugins/transforms/insertupdate/src/main/java/org/apache/hop/pipeline/transforms/insertupdate/InsertUpdate.java +++ 
b/plugins/transforms/insertupdate/src/main/java/org/apache/hop/pipeline/transforms/insertupdate/InsertUpdate.java @@ -78,7 +78,7 @@ protected synchronized void lookupValues(IRowMeta rowMeta, Object[] row) throws data.db.setValues(data.lookupParameterRowMeta, lookupRow, data.prepStatementLookup); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "InsertUpdate.Log.ValuesSetForLookup") + data.lookupParameterRowMeta.getString(lookupRow)); @@ -92,7 +92,7 @@ protected synchronized void lookupValues(IRowMeta rowMeta, Object[] row) throws * * INSERT ROW */ - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(BaseMessages.getString(PKG, "InsertUpdate.InsertRow") + rowMeta.getString(row)); } @@ -113,7 +113,7 @@ protected synchronized void lookupValues(IRowMeta rowMeta, Object[] row) throws incrementLinesOutput(); } else { if (!meta.isUpdateBypassed()) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "InsertUpdate.Log.FoundRowForUpdate") + rowMeta.getString(row)); @@ -156,7 +156,7 @@ protected synchronized void lookupValues(IRowMeta rowMeta, Object[] row) throws updateRow[j + i] = lookupRow[i]; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "InsertUpdate.Log.UpdateRow") + data.lookupParameterRowMeta.getString(lookupRow)); @@ -168,7 +168,7 @@ protected synchronized void lookupValues(IRowMeta rowMeta, Object[] row) throws incrementLinesSkipped(); } } else { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "InsertUpdate.Log.UpdateBypassed") + rowMeta.getString(row)); @@ -204,7 +204,7 @@ public boolean processRow() throws HopException { this, meta.getSchemaName(), meta.getTableName()); // lookup the values! - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "InsertUpdate.Log.CheckingRow") + getInputRowMeta().getString(r)); @@ -248,7 +248,7 @@ public boolean processRow() throws HopException { } keynrs2.add(keynr2); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "InsertUpdate.Log.FieldHasDataNumbers", keyField.getKeyStream()) @@ -273,7 +273,7 @@ public boolean processRow() throws HopException { BaseMessages.getString( PKG, CONST_INSERT_UPDATE_EXCEPTION_FIELD_REQUIRED, valueField.getUpdateStream())); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "InsertUpdate.Log.FieldHasDataNumbers", valueField.getUpdateStream()) @@ -325,7 +325,7 @@ public boolean processRow() throws HopException { r); // Nothing changed to the input, return the same row, pass a "cloned" metadata // row. 
- if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "InsertUpdate.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -414,7 +414,7 @@ public void setLookup(IRowMeta rowMeta) throws HopDatabaseException { } try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Setting preparedStatement to [" + sql + "]"); } data.prepStatementLookup = @@ -494,7 +494,7 @@ public void prepareUpdate(IRowMeta rowMeta) throws HopDatabaseException { } try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Setting update preparedStatement to [" + sql + "]"); } data.prepStatementUpdate = diff --git a/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/janino/Janino.java b/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/janino/Janino.java index bf6df18d134..4ae9eb9ccdd 100644 --- a/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/janino/Janino.java +++ b/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/janino/Janino.java @@ -91,14 +91,14 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row #" + getLinesRead() + " : " + getInputRowMeta().getString(r)); } try { Object[] outputRowData = calcFields(getInputRowMeta(), r); putRow(data.outputRowMeta, outputRowData); // copy row to possible alternate rowset(s). - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( "Wrote row #" + getLinesWritten() diff --git a/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/javafilter/JavaFilter.java b/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/javafilter/JavaFilter.java index 94e6134c2f9..38be4349769 100644 --- a/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/javafilter/JavaFilter.java +++ b/plugins/transforms/janino/src/main/java/org/apache/hop/pipeline/transforms/javafilter/JavaFilter.java @@ -111,7 +111,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row #" + getLinesRead() + " : " + getInputRowMeta().getString(r)); } @@ -124,7 +124,7 @@ public boolean processRow() throws HopException { } else { if (keep) { if (data.trueRowSet != null) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( "Sending row to true :" + data.trueTransformName @@ -135,7 +135,7 @@ public boolean processRow() throws HopException { } } else { if (data.falseRowSet != null) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( "Sending row to false :" + data.falseTransformName @@ -147,7 +147,7 @@ public boolean processRow() throws HopException { } } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "JavaFilter.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/javascript/src/main/java/org/apache/hop/pipeline/transforms/javascript/ScriptValues.java b/plugins/transforms/javascript/src/main/java/org/apache/hop/pipeline/transforms/javascript/ScriptValues.java index abc53856729..7d7a7080a30 100644 --- a/plugins/transforms/javascript/src/main/java/org/apache/hop/pipeline/transforms/javascript/ScriptValues.java +++ b/plugins/transforms/javascript/src/main/java/org/apache/hop/pipeline/transforms/javascript/ScriptValues.java @@ -115,7 +115,7 @@ private void 
determineUsedFields(IRowMeta row) { // String valname = row.getValueMeta(i).getName(); if (strTransformScript.indexOf(valname) >= 0) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ScriptValuesMod.Log.UsedValueName", String.valueOf(i), valname)); @@ -125,7 +125,7 @@ private void determineUsedFields(IRowMeta row) { } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -296,11 +296,11 @@ private boolean addValues(IRowMeta rowMeta, Object[] row) throws HopException { if (strStartScript != null && strStartScript.length() > 0) { Script startScript = data.cx.compileString(strStartScript, "pipeline_Start", 1, null); startScript.exec(data.cx, data.scope); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("Start Script found!")); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("No starting Script found!")); } } @@ -359,12 +359,12 @@ private boolean addValues(IRowMeta rowMeta, Object[] row) throws HopException { Object pipelineStatus = data.scope.get("pipeline_Status", data.scope); if (pipelineStatus != Scriptable.NOT_FOUND) { bWithPipelineStat = true; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( ("pipeline_Status found. Checking pipeline status while script execution.")); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("No pipeline_Status found. Pipeline status checking not available.")); } bWithPipelineStat = false; @@ -462,11 +462,11 @@ public boolean processRow() throws HopException { if (strEndScript != null && strEndScript.length() > 0) { Script endScript = data.cx.compileString(strEndScript, "pipeline_End", 1, null); endScript.exec(data.cx, data.scope); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("End Script found!")); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("No end Script found!")); } } diff --git a/plugins/transforms/joinrows/src/main/java/org/apache/hop/pipeline/transforms/joinrows/JoinRows.java b/plugins/transforms/joinrows/src/main/java/org/apache/hop/pipeline/transforms/joinrows/JoinRows.java index b72e7196b97..62a73aec586 100644 --- a/plugins/transforms/joinrows/src/main/java/org/apache/hop/pipeline/transforms/joinrows/JoinRows.java +++ b/plugins/transforms/joinrows/src/main/java/org/apache/hop/pipeline/transforms/joinrows/JoinRows.java @@ -134,7 +134,7 @@ public Object[] getRowData(int filenr) throws HopException { data.fileRowMeta[0] = rowSet.getRowMeta(); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "JoinRows.Log.ReadRowFromStream") + (rowData == null ? 
"" : data.fileRowMeta[0].getString(rowData))); @@ -161,7 +161,7 @@ public Object[] getRowData(int filenr) throws HopException { // Read a row from the temporary file if (data.size[filenr] == 0) { - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "JoinRows.Log.NoRowsComingFromTransform") + data.rs[filenr].getOriginTransformName() @@ -193,7 +193,7 @@ public Object[] getRowData(int filenr) throws HopException { stopAll(); return null; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "JoinRows.Log.ReadRowFromFile") + filenr @@ -228,7 +228,7 @@ public Object[] getRowData(int filenr) throws HopException { } } else { if (data.size[filenr] == 0) { - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString(PKG, "JoinRows.Log.NoRowsComingFromTransform") + data.rs[filenr].getOriginTransformName() @@ -394,7 +394,7 @@ private boolean cacheInputRow() throws HopException { data.fileRowMeta[data.filenr].writeData(data.dataOutputStream[data.filenr], rowData); data.size[data.filenr]++; - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -415,7 +415,7 @@ private boolean cacheInputRow() throws HopException { data.cache[data.filenr].add(rowData); } else { // we can't cope with this many rows: reset the cache... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, diff --git a/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsoninput/JsonInput.java b/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsoninput/JsonInput.java index e69948740be..6f94c65cbae 100644 --- a/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsoninput/JsonInput.java +++ b/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsoninput/JsonInput.java @@ -105,7 +105,7 @@ public boolean processRow() throws HopException { return false; // end of data or error. 
} - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "JsonInput.Log.ReadRow", data.outputRowMeta.getString(outRow))); @@ -248,7 +248,7 @@ protected void fillFileAdditionalFields(JsonInputData data, FileObject file) super.fillFileAdditionalFields(data, file); data.filename = HopVfs.getFilename(file); data.filenr++; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JsonInput.Log.OpeningFile", file.toString())); } addFileToResultFilesname(file); @@ -454,7 +454,7 @@ private void createReader() throws HopException { inputFields[i] = field; } // Instead of putting in the meta.inputFields, we put in our json path resolved input fields - data.reader = new FastJsonReader(inputFields, meta.isDefaultPathLeafToNull(), log); + data.reader = new FastJsonReader(inputFields, meta.isDefaultPathLeafToNull(), getLogChannel()); data.reader.setIgnoreMissingPath(meta.isIgnoreMissingPath()); } diff --git a/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutput/JsonOutput.java b/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutput/JsonOutput.java index b5830e74da3..7289efbd1e9 100644 --- a/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutput/JsonOutput.java +++ b/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutput/JsonOutput.java @@ -302,7 +302,7 @@ public void dispose() { try { outputRow(null); } catch (Exception e) { - log.logError("Error writing final rows to disk", e); + logError("Error writing final rows to disk", e); } } data.ja = null; @@ -343,13 +343,13 @@ private void createParentFolder(String filename) throws HopTransformException { // Get parent folder parentfolder = HopVfs.getFileObject(filename).getParent(); if (!parentfolder.exists()) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "JsonOutput.Error.ParentFolderNotExist", parentfolder.getName())); } parentfolder.createFolder(); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "JsonOutput.Log.ParentFolderCreated")); } } @@ -402,7 +402,7 @@ public boolean openNewFile() { data.writer = new OutputStreamWriter(new BufferedOutputStream(outputStream, 5000)); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JsonOutput.FileOpened", filename)); } @@ -424,7 +424,7 @@ public String buildFilename() { null, data.splitnr + "", data.isBeamContext(), - log.getLogChannelId(), + getLogChannelId(), data.getBeamBundleNr(), false); } diff --git a/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutputenhanced/JsonOutput.java b/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutputenhanced/JsonOutput.java index e1bec3610e9..2e5d1646588 100644 --- a/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutputenhanced/JsonOutput.java +++ b/plugins/transforms/json/src/main/java/org/apache/hop/pipeline/transforms/jsonoutputenhanced/JsonOutput.java @@ -566,13 +566,13 @@ private void createParentFolder(String filename) throws HopTransformException { // Get parent folder parentfolder = HopVfs.getFileObject(filename).getParent(); if (!parentfolder.exists()) { - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "JsonOutput.Error.ParentFolderNotExist", parentfolder.getName())); } parentfolder.createFolder(); - if (log.isDebug()) { + if (isDebug()) { 
logDebug(BaseMessages.getString(PKG, "JsonOutput.Log.ParentFolderCreated")); } } @@ -623,7 +623,7 @@ public boolean openNewFile() { data.writer = new OutputStreamWriter(new BufferedOutputStream(outputStream, 5000)); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "JsonOutput.FileOpened", filename)); } diff --git a/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/consumer/KafkaConsumerInput.java b/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/consumer/KafkaConsumerInput.java index d29120a7654..69e0ff9358f 100644 --- a/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/consumer/KafkaConsumerInput.java +++ b/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/consumer/KafkaConsumerInput.java @@ -76,7 +76,7 @@ public boolean init() { try { data.outputRowMeta = meta.getRowMeta(getTransformName(), this); } catch (HopTransformException e) { - log.logError("Error determining output row metadata", e); + logError("Error determining output row metadata", e); } data.incomingRowsBuffer = new ArrayList<>(); diff --git a/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/producer/KafkaProducerOutput.java b/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/producer/KafkaProducerOutput.java index 3e5b4016038..f73f6c8024d 100644 --- a/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/producer/KafkaProducerOutput.java +++ b/plugins/transforms/kafka/src/main/java/org/apache/hop/pipeline/transforms/kafka/producer/KafkaProducerOutput.java @@ -107,7 +107,7 @@ public boolean processRow() throws HopException { putRow(getInputRowMeta(), r); // copy row to possible alternate rowset(s). - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "KafkaConsumerOutput.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapinput/LdapInput.java b/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapinput/LdapInput.java index 3f3861f3e71..745c3e6851b 100644 --- a/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapinput/LdapInput.java +++ b/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapinput/LdapInput.java @@ -81,13 +81,13 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRowData); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "LdapInput.log.ReadRow"), data.outputRowMeta.getString(outputRowData)); } - if (checkFeedback(getLinesInput()) && log.isDetailed()) { + if (checkFeedback(getLinesInput()) && isDetailed()) { logDetailed(BaseMessages.getString(PKG, "LdapInput.log.LineRow") + getLinesInput()); } @@ -121,7 +121,7 @@ private boolean dynamicSearch() throws HopException { data.readRow = getRow(); // Get row from input rowset & set row busy! 
if (data.readRow == null) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "LdapInput.Log.FinishedProcessing")); } setOutputDone(); @@ -200,7 +200,7 @@ private Object[] getOneRow() throws HopException { // we need to perform another search with incoming row if (!dynamicSearch()) { // we finished with incoming rows - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "LdapInput.Log.FinishedProcessing")); } return null; @@ -347,7 +347,7 @@ private void connectServerLdap() throws HopException { } // Define new LDAP connection - data.connection = new LdapConnection(log, this, meta, data.attributesBinary); + data.connection = new LdapConnection(getLogChannel(), this, meta, data.attributesBinary); for (int i = 0; i < data.attrReturned.length; i++) { LdapInputField field = meta.getInputFields()[i]; diff --git a/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapoutput/LdapOutput.java b/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapoutput/LdapOutput.java index a9daaf36084..0075faf57ee 100644 --- a/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapoutput/LdapOutput.java +++ b/plugins/transforms/ldap/src/main/java/org/apache/hop/pipeline/transforms/ldapoutput/LdapOutput.java @@ -266,13 +266,13 @@ public boolean processRow() throws HopException { putRow(getInputRowMeta(), outputRowData); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(classFromResourcesPackage, "LdapOutput.log.ReadRow"), getInputRowMeta().getString(outputRowData)); } - if (checkFeedback(getLinesInput()) && log.isDetailed()) { + if (checkFeedback(getLinesInput()) && isDetailed()) { logDetailed( BaseMessages.getString(classFromResourcesPackage, "LdapOutput.log.LineRow") + getLinesInput()); @@ -311,7 +311,7 @@ public boolean init() { if (super.init()) { try { // Define new LDAP connection - data.connection = new LdapConnection(log, this, meta, null); + data.connection = new LdapConnection(getLogChannel(), this, meta, null); // connect if (meta.isUseAuthentication()) { diff --git a/plugins/transforms/mail/src/main/java/org/apache/hop/pipeline/transforms/mail/Mail.java b/plugins/transforms/mail/src/main/java/org/apache/hop/pipeline/transforms/mail/Mail.java index 5c7baed7708..1132848f2f8 100644 --- a/plugins/transforms/mail/src/main/java/org/apache/hop/pipeline/transforms/mail/Mail.java +++ b/plugins/transforms/mail/src/main/java/org/apache/hop/pipeline/transforms/mail/Mail.java @@ -542,7 +542,7 @@ public boolean processRow() throws HopException { putRow(data.previousRowMeta, r); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -767,9 +767,9 @@ public String sendMail( } else { // attached files if (meta.isDynamicFilename()) { - setAttachedFilesList(r, log); + setAttachedFilesList(r, getLogChannel()); } else { - setAttachedFilesList(null, log); + setAttachedFilesList(null, getLogChannel()); } } diff --git a/plugins/transforms/mailinput/src/main/java/org/apache/hop/pipeline/transforms/mailinput/MailInput.java b/plugins/transforms/mailinput/src/main/java/org/apache/hop/pipeline/transforms/mailinput/MailInput.java index dcdf9f54ea3..59361e2ad4e 100644 --- a/plugins/transforms/mailinput/src/main/java/org/apache/hop/pipeline/transforms/mailinput/MailInput.java +++ b/plugins/transforms/mailinput/src/main/java/org/apache/hop/pipeline/transforms/mailinput/MailInput.java @@ -69,7 
+69,7 @@ public boolean processRow() throws HopException { } if (isRowLevel()) { - log.logRowlevel( + logRowlevel( toString(), BaseMessages.getString( PKG, "MailInput.Log.OutputRow", data.outputRowMeta.getString(outputRowData))); @@ -454,7 +454,7 @@ public boolean init() { // create a mail connection object data.mailConn = new MailConnection( - log, + getLogChannel(), MailConnectionMeta.getProtocolFromString( meta.getProtocol(), MailConnectionMeta.PROTOCOL_IMAP), realserver, @@ -519,7 +519,7 @@ private Integer parseIntWithSubstitute(String toParse) { try { return Integer.parseInt(toParse); } catch (NumberFormatException e) { - log.logError(e.getLocalizedMessage()); + logError(e.getLocalizedMessage()); } } return null; diff --git a/plugins/transforms/memgroupby/src/main/java/org/apache/hop/pipeline/transforms/memgroupby/MemoryGroupBy.java b/plugins/transforms/memgroupby/src/main/java/org/apache/hop/pipeline/transforms/memgroupby/MemoryGroupBy.java index fbc67490e0b..6f4cdd78dcb 100644 --- a/plugins/transforms/memgroupby/src/main/java/org/apache/hop/pipeline/transforms/memgroupby/MemoryGroupBy.java +++ b/plugins/transforms/memgroupby/src/main/java/org/apache/hop/pipeline/transforms/memgroupby/MemoryGroupBy.java @@ -178,10 +178,8 @@ public boolean processRow() throws HopException { addToAggregate(r); - if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { - logBasic(BaseMessages.getString(PKG, "MemoryGroupBy.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isBasic()) { + logBasic(BaseMessages.getString(PKG, "MemoryGroupBy.LineNumber") + getLinesRead()); } return true; diff --git a/plugins/transforms/mergejoin/src/main/java/org/apache/hop/pipeline/transforms/mergejoin/MergeJoin.java b/plugins/transforms/mergejoin/src/main/java/org/apache/hop/pipeline/transforms/mergejoin/MergeJoin.java index c40473cfea3..91514d163e8 100644 --- a/plugins/transforms/mergejoin/src/main/java/org/apache/hop/pipeline/transforms/mergejoin/MergeJoin.java +++ b/plugins/transforms/mergejoin/src/main/java/org/apache/hop/pipeline/transforms/mergejoin/MergeJoin.java @@ -154,7 +154,7 @@ public boolean processRow() throws HopException { data.two_dummy = new Object[data.twoMeta.size()]; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "MergeJoin.Log.DataInfo", data.oneMeta.getString(data.one) + "") diff --git a/plugins/transforms/mergerows/src/main/java/org/apache/hop/pipeline/transforms/mergerows/MergeRows.java b/plugins/transforms/mergerows/src/main/java/org/apache/hop/pipeline/transforms/mergerows/MergeRows.java index 1d0d3638d8a..54ccbae3167 100644 --- a/plugins/transforms/mergerows/src/main/java/org/apache/hop/pipeline/transforms/mergerows/MergeRows.java +++ b/plugins/transforms/mergerows/src/main/java/org/apache/hop/pipeline/transforms/mergerows/MergeRows.java @@ -113,7 +113,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "MergeRows.Log.DataInfo", Arrays.toString(data.one) + "") + Arrays.toString(data.two)); @@ -206,10 +206,8 @@ public boolean processRow() throws HopException { // send the row to the next transforms... 
putRow(data.outputRowMeta, RowDataUtil.addValueData(outputRow, outputIndex, flagField)); - if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { - logBasic(BaseMessages.getString(PKG, "MergeRows.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isBasic()) { + logBasic(BaseMessages.getString(PKG, "MergeRows.LineNumber") + getLinesRead()); } return true; diff --git a/plugins/transforms/metainject/src/main/java/org/apache/hop/pipeline/transforms/metainject/MetaInject.java b/plugins/transforms/metainject/src/main/java/org/apache/hop/pipeline/transforms/metainject/MetaInject.java index 006c7b5ad2c..95b55c18060 100644 --- a/plugins/transforms/metainject/src/main/java/org/apache/hop/pipeline/transforms/metainject/MetaInject.java +++ b/plugins/transforms/metainject/src/main/java/org/apache/hop/pipeline/transforms/metainject/MetaInject.java @@ -244,7 +244,7 @@ public void rowWrittenEvent(IRowMeta rowMeta, Object[] row) // let the transformation complete it's execution to allow for any customizations to MDI to // happen in the init methods of transforms - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("XML of transformation after injection: " + data.pipelineMeta.getXml(this)); } String targetFile = resolve(meta.getTargetFile()); @@ -378,7 +378,7 @@ private void createParentFolder(String filename) throws Exception { /** Inject values from transforms. */ private void newInjection(String targetTransform, ITransformMeta targetTransformMeta) throws HopException { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Handing transform '" + targetTransform + "' injection!"); } BeanInjectionInfo injectionInfo = new BeanInjectionInfo(targetTransformMeta.getClass()); @@ -451,7 +451,7 @@ private void newInjection(String targetTransform, ITransformMeta targetTransform private void newInjectionConstants( IVariables variables, String targetTransform, ITransformMeta targetTransformMeta) throws HopException { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Handing transform '" + targetTransform + "' constants injection!"); } BeanInjectionInfo injectionInfo = new BeanInjectionInfo(targetTransformMeta.getClass()); diff --git a/plugins/transforms/monetdbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/monetdbbulkloader/MonetDbBulkLoader.java b/plugins/transforms/monetdbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/monetdbbulkloader/MonetDbBulkLoader.java index 16d161b3282..2c1c3a30883 100644 --- a/plugins/transforms/monetdbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/monetdbbulkloader/MonetDbBulkLoader.java +++ b/plugins/transforms/monetdbbulkloader/src/main/java/org/apache/hop/pipeline/transforms/monetdbbulkloader/MonetDbBulkLoader.java @@ -78,13 +78,13 @@ protected String escapeOsPath(String path, boolean isWindows) { } public boolean execute(MonetDbBulkLoaderMeta meta) throws HopException { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Started execute"); } try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Auto String Length flag: " + meta.isAutoStringWidths()); } @@ -104,7 +104,7 @@ public boolean execute(MonetDbBulkLoaderMeta meta) throws HopException { throw new HopException("Error while connecting to MonetDB for bulk loading : " + error); } - data.outputLogger = new StreamLogger(log, mserver.getInputStream(), "OUTPUT"); + data.outputLogger = new StreamLogger(getLogChannel(), mserver.getInputStream(), "OUTPUT"); // If the truncate table checkbox is checked, we can do the truncate 
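A few hunks (MemoryGroupBy and MergeRows above, Normaliser and NumberRange further down) also fold the nested level check into the feedback check. Because && short-circuits, the level check is still evaluated only when checkFeedback(...) returns true, so the logging behaviour is identical; only a level of nesting goes away. A small sketch of the equivalence:

// Sketch only: the two forms in the hunk above are equivalent because && short-circuits.
public class FeedbackGuardSketch {
  static int feedbackCalls = 0;
  static int levelCalls = 0;

  static boolean checkFeedback(long lines) { feedbackCalls++; return lines % 5 == 0; }
  static boolean isBasic() { levelCalls++; return true; }

  public static void main(String[] args) {
    for (long lines = 1; lines <= 10; lines++) {
      // Old shape:
      // if (checkFeedback(lines)) { if (isBasic()) { logBasic(...); } }
      // New shape, same evaluation order and same number of calls to each check:
      if (checkFeedback(lines) && isBasic()) {
        System.out.println("line " + lines);
      }
    }
    System.out.println("feedback checks: " + feedbackCalls + ", level checks: " + levelCalls);
  }
}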
here. if (meta.isTruncate()) { @@ -189,7 +189,7 @@ public boolean processRow() throws HopException { protected void writeRowToMonetDB(IRowMeta rowMeta, Object[] r, DatabaseMeta dm) throws HopException { - if (data.bufferIndex == data.bufferSize || log.isDebug()) { + if (data.bufferIndex == data.bufferSize || isDebug()) { writeBufferToMonetDB(dm); } addRowToBuffer(rowMeta, r); @@ -398,24 +398,24 @@ public void drop() throws HopException { public void autoAdjustSchema(MonetDbBulkLoaderMeta meta) throws HopException { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Attempting to auto adjust table structure"); } drop(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("getTransMeta: " + getTransformMeta()); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("getTransformname: " + getTransformName()); } SqlStatement statement = meta.getTableDdl(variables, getPipelineMeta(), getTransformName(), true, data, true); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Statement: " + statement); } - if (log.isDetailed() && statement != null) { + if (isDetailed() && statement != null) { logDetailed("Statement has SQL: " + statement.hasSql()); } @@ -428,7 +428,7 @@ public void autoAdjustSchema(MonetDbBulkLoaderMeta meta) throws HopException { } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Successfull"); } } @@ -472,7 +472,7 @@ protected void writeBufferToMonetDB(DatabaseMeta dm) throws HopException { for (String statement : sqlStatements) { data.out.write('s'); data.out.write(resolve(statement)); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(resolve(statement)); } data.out.write(';'); @@ -482,7 +482,7 @@ protected void writeBufferToMonetDB(DatabaseMeta dm) throws HopException { data.out.write('s'); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(cmd); } data.out.write(cmdBuff.toString()); @@ -491,7 +491,7 @@ protected void writeBufferToMonetDB(DatabaseMeta dm) throws HopException { for (int i = 0; i < data.bufferIndex; i++) { String buffer = data.rowBuffer[i]; data.out.write(buffer); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(buffer); } } @@ -521,7 +521,7 @@ protected void writeBufferToMonetDB(DatabaseMeta dm) throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(Const.CR); } @@ -629,7 +629,8 @@ protected MapiSocket getMonetDBConnection() throws Exception { String password = Utils.resolvePassword(variables, Const.NVL(dm.getPassword(), "")); String db = resolve(Const.NVL(dm.getDatabaseName(), "")); - return getMonetDBConnection(hostname, Integer.parseInt(portnum), user, password, db, log); + return getMonetDBConnection( + hostname, Integer.parseInt(portnum), user, password, db, getLogChannel()); } protected static MapiSocket getMonetDBConnection( diff --git a/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbdelete/MongoDbDelete.java b/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbdelete/MongoDbDelete.java index b8fa22209e2..2c255ac6d57 100644 --- a/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbdelete/MongoDbDelete.java +++ b/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbdelete/MongoDbDelete.java @@ -129,7 +129,7 @@ public boolean processRow() throws HopException { DBObject query = MongoDbDeleteData.getQueryObject( data.mUserFields, getInputRowMeta(), row, MongoDbDelete.this); - if (log.isDebug()) { + if (isDebug()) { 
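One hunk above is different in kind: in MonetDbBulkLoader.writeRowToMonetDB the debug check helps decide when to flush the row buffer, so the level check drives control flow rather than just a log statement, and the accessor has to report exactly the level the old field reported. A sketch of that flush-early-in-debug behaviour, with invented names:

// Sketch only, hypothetical names: the level check participates in buffering logic,
// not just in logging, mirroring the writeRowToMonetDB hunk above.
public class DebugFlushSketch {

  static class BulkBufferSketch {
    private final java.util.List<String> buffer = new java.util.ArrayList<>();
    private final int bufferSize = 3;
    private final boolean debug;

    BulkBufferSketch(boolean debug) { this.debug = debug; }
    boolean isDebug() { return debug; }

    void writeRow(String row) {
      // Flush when the buffer is full, or on every row when debug logging is on.
      if (buffer.size() == bufferSize || isDebug()) {
        flush();
      }
      buffer.add(row);
    }

    void flush() {
      System.out.println("flushing " + buffer.size() + " row(s)");
      buffer.clear();
    }
  }

  public static void main(String[] args) {
    BulkBufferSketch normal = new BulkBufferSketch(false);
    BulkBufferSketch debug = new BulkBufferSketch(true);
    for (String row : new String[] {"a", "b", "c", "d"}) {
      normal.writeRow(row);
      debug.writeRow(row);
    }
  }
}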
logDebug( BaseMessages.getString(PKG, "MongoDbDelete.Message.Debug.QueryForDelete", query)); } @@ -190,7 +190,7 @@ public boolean init() { } // init connection constructs a MongoCredentials object if necessary - data.clientWrapper = data.connection.createWrapper(this, log); + data.clientWrapper = data.connection.createWrapper(this, getLogChannel()); data.collection = data.clientWrapper.getCollection(databaseName, collection); if (!StringUtil.isEmpty(meta.getWriteRetries())) { @@ -233,14 +233,14 @@ public void dispose() { try { data.cursor.close(); } catch (MongoDbException e) { - log.logError(e.getMessage()); + logError(e.getMessage()); } } if (data.clientWrapper != null) { try { data.clientWrapper.dispose(); } catch (MongoDbException e) { - log.logError(e.getMessage()); + logError(e.getMessage()); } } @@ -252,7 +252,7 @@ protected void disconnect() { try { data.getConnection().dispose(); } catch (MongoDbException e) { - log.logError(e.getMessage()); + logError(e.getMessage()); } } } diff --git a/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbinput/MongoDbInput.java b/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbinput/MongoDbInput.java index c3a7e434aae..fa609efc7da 100644 --- a/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbinput/MongoDbInput.java +++ b/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodbinput/MongoDbInput.java @@ -153,7 +153,7 @@ protected void initQuery() throws HopException, MongoDbException { // check logging level and only set to false if // logging level at least detailed - if (log.isDetailed()) { + if (isDetailed()) { serverDetermined = false; } @@ -262,7 +262,7 @@ public boolean init() { } // init connection constructs a MongoCredentials object if necessary - data.clientWrapper = data.connection.createWrapper(this, log); + data.clientWrapper = data.connection.createWrapper(this, getLogChannel()); data.collection = data.clientWrapper.getCollection(databaseName, collection); if (!meta.isOutputJson()) { @@ -293,14 +293,14 @@ public void dispose() { try { data.cursor.close(); } catch (MongoDbException e) { - log.logError(e.getMessage()); + logError(e.getMessage()); } } if (data.clientWrapper != null) { try { data.clientWrapper.dispose(); } catch (MongoDbException e) { - log.logError(e.getMessage()); + logError(e.getMessage()); } } diff --git a/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodboutput/MongoDbOutput.java b/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodboutput/MongoDbOutput.java index bdd97c94d16..fbaab5e41a6 100644 --- a/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodboutput/MongoDbOutput.java +++ b/plugins/transforms/mongodb/src/main/java/org/apache/hop/pipeline/transforms/mongodboutput/MongoDbOutput.java @@ -101,7 +101,7 @@ public boolean processRow() throws HopException { if (indexes != null && !indexes.isEmpty()) { logBasic(BaseMessages.getString(PKG, "MongoDbOutput.Messages.ApplyingIndexOpps")); try { - data.applyIndexes(indexes, log, meta.getTruncate()); + data.applyIndexes(indexes, getLogChannel(), meta.getTruncate()); } catch (MongoDbException e) { throw new HopException(e); } @@ -169,7 +169,7 @@ public boolean processRow() throws HopException { MongoDbOutput.this, mongoTopLevelStructure); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, 
"MongoDbOutput.Messages.Debug.QueryForUpsert", updateQuery)); @@ -190,7 +190,7 @@ public boolean processRow() throws HopException { row, mongoTopLevelStructure, data.hasTopLevelJsonDocInsert); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "MongoDbOutput.Messages.Debug.InsertUpsertObject", insertUpdate)); @@ -210,7 +210,7 @@ public boolean processRow() throws HopException { } catch (MongoDbException e) { throw new HopException(e); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "MongoDbOutput.Messages.Debug.ModifierUpdateObject", insertUpdate)); @@ -472,7 +472,7 @@ public boolean init() { } // init connection constructs a MongoCredentials object if necessary - data.clientWrapper = data.connection.createWrapper(this, log); + data.clientWrapper = data.connection.createWrapper(this, getLogChannel()); if (StringUtils.isEmpty(collection)) { throw new HopException( @@ -523,7 +523,7 @@ protected void disconnect() { try { data.getConnection().dispose(); } catch (MongoDbException e) { - log.logError(e.getMessage()); + logError(e.getMessage()); } } } diff --git a/plugins/transforms/multimerge/src/main/java/org/apache/hop/pipeline/transforms/multimerge/MultiMergeJoin.java b/plugins/transforms/multimerge/src/main/java/org/apache/hop/pipeline/transforms/multimerge/MultiMergeJoin.java index 99200e62c5e..6a5da4f98e6 100644 --- a/plugins/transforms/multimerge/src/main/java/org/apache/hop/pipeline/transforms/multimerge/MultiMergeJoin.java +++ b/plugins/transforms/multimerge/src/main/java/org/apache/hop/pipeline/transforms/multimerge/MultiMergeJoin.java @@ -206,7 +206,7 @@ public boolean processRow() throws HopException { first = false; } - if (log.isRowLevel()) { + if (isRowLevel()) { String metaString = BaseMessages.getString( PKG, "MultiMergeJoin.Log.DataInfo", data.metas[0].getString(data.rows[0]) + ""); diff --git a/plugins/transforms/normaliser/src/main/java/org/apache/hop/pipeline/transforms/normaliser/Normaliser.java b/plugins/transforms/normaliser/src/main/java/org/apache/hop/pipeline/transforms/normaliser/Normaliser.java index 16d41698990..2cb50e1fc95 100644 --- a/plugins/transforms/normaliser/src/main/java/org/apache/hop/pipeline/transforms/normaliser/Normaliser.java +++ b/plugins/transforms/normaliser/src/main/java/org/apache/hop/pipeline/transforms/normaliser/Normaliser.java @@ -171,10 +171,8 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRowData); } - if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { - logBasic(BaseMessages.getString(PKG, "Normaliser.Log.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isBasic()) { + logBasic(BaseMessages.getString(PKG, "Normaliser.Log.LineNumber") + getLinesRead()); } return true; diff --git a/plugins/transforms/nullif/src/main/java/org/apache/hop/pipeline/transforms/nullif/NullIf.java b/plugins/transforms/nullif/src/main/java/org/apache/hop/pipeline/transforms/nullif/NullIf.java index 2ea7bf78a69..38bdc4fc132 100644 --- a/plugins/transforms/nullif/src/main/java/org/apache/hop/pipeline/transforms/nullif/NullIf.java +++ b/plugins/transforms/nullif/src/main/java/org/apache/hop/pipeline/transforms/nullif/NullIf.java @@ -81,7 +81,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "NullIf.Log.ConvertFieldValuesToNullForRow") + data.outputRowMeta.getString(r)); diff --git 
a/plugins/transforms/numberrange/src/main/java/org/apache/hop/pipeline/transforms/numberrange/NumberRange.java b/plugins/transforms/numberrange/src/main/java/org/apache/hop/pipeline/transforms/numberrange/NumberRange.java index 8d9985fbb39..fb5787ec58b 100644 --- a/plugins/transforms/numberrange/src/main/java/org/apache/hop/pipeline/transforms/numberrange/NumberRange.java +++ b/plugins/transforms/numberrange/src/main/java/org/apache/hop/pipeline/transforms/numberrange/NumberRange.java @@ -81,10 +81,8 @@ public boolean processRow() throws HopException { // add value to output row = RowDataUtil.addRowData(row, getInputRowMeta().size(), new Object[] {ranges}); putRow(data.outputRowMeta, row); - if (checkFeedback(getLinesRead())) { - if (log.isDetailed()) { - logDetailed(BaseMessages.getString(PKG, "NumberRange.Log.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isDetailed()) { + logDetailed(BaseMessages.getString(PKG, "NumberRange.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { boolean sendToErrorRow = false; diff --git a/plugins/transforms/orabulkloader/src/main/java/org/apache/hop/pipeline/transforms/orabulkloader/OraBulkLoader.java b/plugins/transforms/orabulkloader/src/main/java/org/apache/hop/pipeline/transforms/orabulkloader/OraBulkLoader.java index 3d64834fdc2..7e8b54f8a60 100644 --- a/plugins/transforms/orabulkloader/src/main/java/org/apache/hop/pipeline/transforms/orabulkloader/OraBulkLoader.java +++ b/plugins/transforms/orabulkloader/src/main/java/org/apache/hop/pipeline/transforms/orabulkloader/OraBulkLoader.java @@ -98,7 +98,7 @@ public void run() { while ((line = br.readLine()) != null) { // Only perform the concatenation if at basic level. Otherwise, // this just reads from the stream. - if (log.isBasic()) { + if (isBasic()) { logBasic(type + line); } } diff --git a/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpdecryptstream/PGPDecryptStream.java b/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpdecryptstream/PGPDecryptStream.java index 4aedba02ee0..712d9720eb5 100644 --- a/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpdecryptstream/PGPDecryptStream.java +++ b/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpdecryptstream/PGPDecryptStream.java @@ -135,7 +135,7 @@ public boolean processRow() throws HopException { // add new values to the row. 
putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -181,7 +181,7 @@ public boolean init() { try { // initiate a new GPG encryptor - data.gpg = new GPG(resolve(meta.getGPGLocation()), log, variables); + data.gpg = new GPG(resolve(meta.getGPGLocation()), getLogChannel(), variables); } catch (Exception e) { logError(BaseMessages.getString(PKG, "PGPDecryptStream.Init.Error"), e); return false; diff --git a/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpencryptstream/PGPEncryptStream.java b/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpencryptstream/PGPEncryptStream.java index b17df6b5c6a..693d9ecc952 100644 --- a/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpencryptstream/PGPEncryptStream.java +++ b/plugins/transforms/pgp/src/main/java/org/apache/hop/pipeline/transforms/pgpencryptstream/PGPEncryptStream.java @@ -140,7 +140,7 @@ public boolean processRow() throws HopException { // add new values to the row. putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -186,7 +186,7 @@ public boolean init() { try { // initiate a new GPG encryptor - data.gpg = new GPG(resolve(meta.getGPGLocation()), log, variables); + data.gpg = new GPG(resolve(meta.getGPGLocation()), getLogChannel(), variables); } catch (Exception e) { logError(BaseMessages.getString(PKG, "PGPEncryptStream.Init.Error"), e); return false; diff --git a/plugins/transforms/pipelineexecutor/src/main/java/org/apache/hop/pipeline/transforms/pipelineexecutor/PipelineExecutor.java b/plugins/transforms/pipelineexecutor/src/main/java/org/apache/hop/pipeline/transforms/pipelineexecutor/PipelineExecutor.java index 2e74ab1a1c8..0436fc11c98 100644 --- a/plugins/transforms/pipelineexecutor/src/main/java/org/apache/hop/pipeline/transforms/pipelineexecutor/PipelineExecutor.java +++ b/plugins/transforms/pipelineexecutor/src/main/java/org/apache/hop/pipeline/transforms/pipelineexecutor/PipelineExecutor.java @@ -260,7 +260,7 @@ private void executePipeline(List incomingFieldValues) throws HopExcepti result = executorPipeline.getResult(); } catch (HopException e) { - log.logError("An error occurred executing the pipeline: ", e); + logError("An error occurred executing the pipeline: ", e); result.setResult(false); result.setNrErrors(1); } diff --git a/plugins/transforms/processfiles/src/main/java/org/apache/hop/pipeline/transforms/processfiles/ProcessFiles.java b/plugins/transforms/processfiles/src/main/java/org/apache/hop/pipeline/transforms/processfiles/ProcessFiles.java index 1e29535bde0..61129d98bd3 100644 --- a/plugins/transforms/processfiles/src/main/java/org/apache/hop/pipeline/transforms/processfiles/ProcessFiles.java +++ b/plugins/transforms/processfiles/src/main/java/org/apache/hop/pipeline/transforms/processfiles/ProcessFiles.java @@ -89,7 +89,7 @@ public boolean processRow() throws HopException { } } - if (meta.simulate && log.isBasic()) { + if (meta.simulate && isBasic()) { logBasic(BaseMessages.getString(PKG, "ProcessFiles.Log.SimulationModeON")); } @@ -127,7 +127,7 @@ public boolean processRow() throws HopException { } data.targetFile = HopVfs.getFileObject(targetFilename, variables); if (data.targetFile.exists()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ProcessFiles.Log.TargetFileExists", 
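The PGP init() hunks combine both moves: the GPG helper is constructed with getLogChannel(), and when construction fails the transform logs through logError(...) and lets init() return false. A minimal sketch of that lifecycle, with invented names standing in for the real GPG and channel types:

// Sketch only, hypothetical names: an init() that builds a logging-aware helper and
// signals failure by logging and returning false, mirroring the PGP hunks above.
public class InitFailureSketch {

  interface Channel { void logError(String message, Throwable cause); }

  // Stand-in for the GPG wrapper: constructed with a location and a channel, may fail.
  static class GpgSketch {
    GpgSketch(String location, Channel channel) {
      if (location == null || location.isEmpty()) {
        throw new IllegalArgumentException("GPG location is not set");
      }
    }
  }

  static class TransformSketch {
    private final Channel log =
        (message, cause) -> System.out.println("ERROR: " + message + " (" + cause + ")");
    protected Channel getLogChannel() { return log; }
    protected void logError(String message, Throwable cause) { log.logError(message, cause); }

    GpgSketch gpg;

    boolean init(String gpgLocation) {
      try {
        gpg = new GpgSketch(gpgLocation, getLogChannel());
        return true;
      } catch (Exception e) {
        logError("Unable to initialize the GPG helper", e);
        return false;
      }
    }
  }

  public static void main(String[] args) {
    System.out.println("init with location: " + new TransformSketch().init("/usr/bin/gpg"));
    System.out.println("init without location: " + new TransformSketch().init(""));
  }
}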
targetFilename)); } @@ -170,13 +170,13 @@ public boolean processRow() throws HopException { // Better to delete the file before because. sometime, it's not properly overwritten data.targetFile.delete(); data.targetFile.copyFrom(data.sourceFile, new TextOneToOneFileSelector()); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ProcessFiles.Log.SourceFileCopied", sourceFilename, targetFilename)); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -191,13 +191,13 @@ public boolean processRow() throws HopException { || !data.targetFile.exists()) && !meta.simulate) { data.sourceFile.moveTo(HopVfs.getFileObject(targetFilename, variables)); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ProcessFiles.Log.SourceFileMoved", sourceFilename, targetFilename)); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -213,7 +213,7 @@ public boolean processRow() throws HopException { BaseMessages.getString( PKG, "ProcessFiles.Error.CanNotDeleteFile", data.sourceFile.toString())); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "ProcessFiles.Log.SourceFileDeleted", sourceFilename)); } @@ -236,7 +236,7 @@ public boolean processRow() throws HopException { resultFile.setComment(BaseMessages.getString(PKG, "ProcessFiles.Log.FileAddedResult")); addResultFile(resultFile); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "ProcessFiles.Log.FilenameAddResult", data.targetFile.toString())); @@ -245,7 +245,7 @@ public boolean processRow() throws HopException { putRow(getInputRowMeta(), r); // copy row to possible alternate rowset(s). - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "ProcessFiles.LineNumber") + getLinesRead()); } } catch (Exception e) { diff --git a/plugins/transforms/propertyinput/src/main/java/org/apache/hop/pipeline/transforms/propertyinput/PropertyInput.java b/plugins/transforms/propertyinput/src/main/java/org/apache/hop/pipeline/transforms/propertyinput/PropertyInput.java index 3b73d0d2758..a5cc4c564cb 100644 --- a/plugins/transforms/propertyinput/src/main/java/org/apache/hop/pipeline/transforms/propertyinput/PropertyInput.java +++ b/plugins/transforms/propertyinput/src/main/java/org/apache/hop/pipeline/transforms/propertyinput/PropertyInput.java @@ -334,7 +334,7 @@ private boolean openNextFile() { if (!meta.isFileField()) { if (data.fileNumber >= data.files.nrOfFiles()) { // finished processing! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.FinishedProcessing")); } return false; @@ -349,7 +349,7 @@ private boolean openNextFile() { } else { data.inputRow = getRow(); // Get row from input rowset & set row busy! 
if (data.inputRow == null) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "PropertyInput.Log.FinishedProcessing")); } return false; @@ -396,7 +396,7 @@ private boolean openNextFile() { } // End if first String filename = getInputRowMeta().getString(data.inputRow, data.indexOfFilenameField); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -441,7 +441,7 @@ private boolean openNextFile() { data.rowNumber = 0; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "PropertyInput.Log.OpeningFile", data.file.toString())); } @@ -494,7 +494,7 @@ private boolean openNextFile() { data.iniNameIterator = data.iniSection.keySet().iterator(); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "PropertyInput.Log.FileOpened", data.file.toString())); logDetailed( diff --git a/plugins/transforms/propertyoutput/src/main/java/org/apache/hop/pipeline/transforms/propertyoutput/PropertyOutput.java b/plugins/transforms/propertyoutput/src/main/java/org/apache/hop/pipeline/transforms/propertyoutput/PropertyOutput.java index 93a03ac3cfb..058b706b1ed 100644 --- a/plugins/transforms/propertyoutput/src/main/java/org/apache/hop/pipeline/transforms/propertyoutput/PropertyOutput.java +++ b/plugins/transforms/propertyoutput/src/main/java/org/apache/hop/pipeline/transforms/propertyoutput/PropertyOutput.java @@ -139,7 +139,7 @@ public boolean processRow() throws HopException { } if (!data.KeySet.contains(propkey)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "PropertyOutput.Log.Key", propkey)); logDetailed(BaseMessages.getString(PKG, "PropertyOutput.Log.Value", propvalue)); } @@ -148,7 +148,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, r); // in case we want it to go further... 
incrementLinesOutput(); - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic("linenr " + getLinesRead()); } data.KeySet.add(propkey); @@ -207,7 +207,7 @@ private void createParentFolder() throws HopException { // Get parent folder parentfolder = data.file.getParent(); if (!parentfolder.exists()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -215,7 +215,7 @@ private void createParentFolder() throws HopException { parentfolder.getName().toString())); } parentfolder.createFolder(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, diff --git a/plugins/transforms/randomvalue/src/main/java/org/apache/hop/pipeline/transforms/randomvalue/RandomValue.java b/plugins/transforms/randomvalue/src/main/java/org/apache/hop/pipeline/transforms/randomvalue/RandomValue.java index 91422c2b7f2..111c59768e9 100644 --- a/plugins/transforms/randomvalue/src/main/java/org/apache/hop/pipeline/transforms/randomvalue/RandomValue.java +++ b/plugins/transforms/randomvalue/src/main/java/org/apache/hop/pipeline/transforms/randomvalue/RandomValue.java @@ -172,7 +172,7 @@ public boolean processRow() throws HopException { row = getRandomValue(imeta, row); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "RandomValue.Log.ValueReturned", data.outputRowMeta.getString(row))); diff --git a/plugins/transforms/regexeval/src/main/java/org/apache/hop/pipeline/transforms/regexeval/RegexEval.java b/plugins/transforms/regexeval/src/main/java/org/apache/hop/pipeline/transforms/regexeval/RegexEval.java index 2ec8c47c634..e067c74ca4e 100644 --- a/plugins/transforms/regexeval/src/main/java/org/apache/hop/pipeline/transforms/regexeval/RegexEval.java +++ b/plugins/transforms/regexeval/src/main/java/org/apache/hop/pipeline/transforms/regexeval/RegexEval.java @@ -201,7 +201,7 @@ public boolean processRow() throws HopException { outputRow[data.indexOfResultField] = isMatch; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "RegexEval.Log.ReadRow") + " " @@ -243,7 +243,7 @@ public boolean init() { if (meta.isUseVariableInterpolationFlagSet()) { regularexpression = resolve(meta.getScript()); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "RegexEval.Log.Regexp") + " " diff --git a/plugins/transforms/replacestring/src/main/java/org/apache/hop/pipeline/transforms/replacestring/ReplaceString.java b/plugins/transforms/replacestring/src/main/java/org/apache/hop/pipeline/transforms/replacestring/ReplaceString.java index 0a16ed4f33a..4cffa9e6763 100644 --- a/plugins/transforms/replacestring/src/main/java/org/apache/hop/pipeline/transforms/replacestring/ReplaceString.java +++ b/plugins/transforms/replacestring/src/main/java/org/apache/hop/pipeline/transforms/replacestring/ReplaceString.java @@ -226,7 +226,7 @@ public boolean processRow() throws HopException { Object[] output = handleOneRow(getInputRowMeta(), r); putRow(data.outputRowMeta, output); - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ReplaceString.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { diff --git a/plugins/transforms/reservoirsampling/src/main/java/org/apache/hop/pipeline/transforms/reservoirsampling/ReservoirSampling.java 
b/plugins/transforms/reservoirsampling/src/main/java/org/apache/hop/pipeline/transforms/reservoirsampling/ReservoirSampling.java index 133b0a2191b..bd84530588e 100644 --- a/plugins/transforms/reservoirsampling/src/main/java/org/apache/hop/pipeline/transforms/reservoirsampling/ReservoirSampling.java +++ b/plugins/transforms/reservoirsampling/src/main/java/org/apache/hop/pipeline/transforms/reservoirsampling/ReservoirSampling.java @@ -107,7 +107,7 @@ public boolean processRow() throws HopException { return false; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row #" + getLinesRead() + " : " + Arrays.toString(row)); } diff --git a/plugins/transforms/rowgenerator/src/main/java/org/apache/hop/pipeline/transforms/rowgenerator/RowGenerator.java b/plugins/transforms/rowgenerator/src/main/java/org/apache/hop/pipeline/transforms/rowgenerator/RowGenerator.java index 5886fe05dc4..400597879b8 100644 --- a/plugins/transforms/rowgenerator/src/main/java/org/apache/hop/pipeline/transforms/rowgenerator/RowGenerator.java +++ b/plugins/transforms/rowgenerator/src/main/java/org/apache/hop/pipeline/transforms/rowgenerator/RowGenerator.java @@ -240,7 +240,7 @@ public synchronized boolean processRow() throws HopException { putRow(data.outputRowMeta, r); data.rowsWritten++; - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -250,7 +250,7 @@ public synchronized boolean processRow() throws HopException { } if (checkFeedback(data.rowsWritten)) { - if (log.isBasic()) { + if (isBasic()) { logBasic( BaseMessages.getString( PKG, "RowGenerator.Log.LineNr", Long.toString(data.rowsWritten))); diff --git a/plugins/transforms/rowsfromresult/src/main/java/org/apache/hop/pipeline/transforms/rowsfromresult/RowsFromResult.java b/plugins/transforms/rowsfromresult/src/main/java/org/apache/hop/pipeline/transforms/rowsfromresult/RowsFromResult.java index 50e220d99a4..340c0519912 100644 --- a/plugins/transforms/rowsfromresult/src/main/java/org/apache/hop/pipeline/transforms/rowsfromresult/RowsFromResult.java +++ b/plugins/transforms/rowsfromresult/src/main/java/org/apache/hop/pipeline/transforms/rowsfromresult/RowsFromResult.java @@ -61,7 +61,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, row.getData()); if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "RowsFromResult.Log.LineNumber") + getLinesRead()); } } diff --git a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforce/SalesforceTransform.java b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforce/SalesforceTransform.java index 55774ae2701..94074141502 100644 --- a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforce/SalesforceTransform.java +++ b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforce/SalesforceTransform.java @@ -59,25 +59,26 @@ public boolean init() { String realModule = resolve(meta.getModule()); if (Utils.isEmpty(realUrl)) { - log.logError(BaseMessages.getString(PKG, "SalesforceConnection.TargetURLMissing.Error")); + logError(BaseMessages.getString(PKG, "SalesforceConnection.TargetURLMissing.Error")); return false; } if (Utils.isEmpty(realUsername)) { - log.logError(BaseMessages.getString(PKG, "SalesforceConnection.UsernameMissing.Error")); + logError(BaseMessages.getString(PKG, "SalesforceConnection.UsernameMissing.Error")); return false; } if 
(Utils.isEmpty(realPassword)) { - log.logError(BaseMessages.getString(PKG, "SalesforceConnection.PasswordMissing.Error")); + logError(BaseMessages.getString(PKG, "SalesforceConnection.PasswordMissing.Error")); return false; } if (Utils.isEmpty(realModule)) { - log.logError(BaseMessages.getString(PKG, "SalesForceTransform.ModuleMissing.DialogMessage")); + logError(BaseMessages.getString(PKG, "SalesForceTransform.ModuleMissing.DialogMessage")); return false; } try { // The final transform should call data.connection.connect(), as other settings may set // additional options - data.connection = new SalesforceConnection(log, realUrl, realUsername, realPassword); + data.connection = + new SalesforceConnection(getLogChannel(), realUrl, realUsername, realPassword); data.connection.setModule(realModule); data.connection.setTimeOut(Const.toInt(resolve(meta.getTimeout()), 0)); data.connection.setUsingCompression(meta.isCompression()); diff --git a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforcedelete/SalesforceDelete.java b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforcedelete/SalesforceDelete.java index 982359b7fa6..826aec5a9db 100644 --- a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforcedelete/SalesforceDelete.java +++ b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforcedelete/SalesforceDelete.java @@ -100,7 +100,7 @@ public boolean processRow() throws HopException { private void writeToSalesForce(Object[] rowData) throws HopException { try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -119,7 +119,7 @@ private void writeToSalesForce(Object[] rowData) throws HopException { } if (data.iBufferPos >= meta.getBatchSizeInt()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceDelete.Log.CallingFlush")); } flushBuffers(); @@ -148,7 +148,7 @@ private void flushBuffers() throws HopException { incrementLinesOutput(); if (checkFeedback(getLinesInput())) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "SalesforceDelete.log.LineRow", String.valueOf(getLinesInput()))); @@ -161,7 +161,7 @@ private void flushBuffers() throws HopException { // array and write them to the screen if (!getTransformMeta().isDoingErrorHandling()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceDelete.Found.Error")); } @@ -188,7 +188,7 @@ private void flushBuffers() throws HopException { err.getMessage()); } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SalesforceDelete.PassingRowToErrorTransform")); } putError( @@ -212,7 +212,7 @@ private void flushBuffers() throws HopException { BaseMessages.getString(PKG, "SalesforceDelete.FailedToDeleted", e.getMessage())); } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug("Passing row to error transform"); } diff --git a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinput/SalesforceInput.java b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinput/SalesforceInput.java index 1e638dc1402..9d8296f6f9d 100644 --- a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinput/SalesforceInput.java +++ 
b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinput/SalesforceInput.java @@ -76,7 +76,7 @@ public boolean processRow() throws HopException { data.limitReached = false; data.nrRecords = data.connection.getRecordsCount(); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SalesforceInput.Log.RecordCount") + " : " @@ -97,7 +97,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRowData); // copy row to output rowset(s) - if (checkFeedback(getLinesInput()) && log.isDetailed()) { + if (checkFeedback(getLinesInput()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SalesforceInput.log.LineRow", "" + getLinesInput())); } @@ -149,7 +149,7 @@ private Object[] getOneRow() throws HopException { if (meta.getRecordsFilter() != SalesforceConnectionUtils.RECORDS_FILTER_UPDATED) { // We retrieved all records available here // maybe we need to query more again ... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "SalesforceInput.Log.NeedQueryMore", "" + data.rownr)); @@ -159,7 +159,7 @@ private Object[] getOneRow() throws HopException { // We returned more result (query is not done yet) int nr = data.connection.getRecordsCount(); data.nrRecords += nr; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SalesforceInput.Log.QueryMoreRetrieved", "" + nr)); } @@ -345,8 +345,7 @@ public boolean init() { // Check if field list is filled if (data.nrFields == 0) { - log.logError( - BaseMessages.getString(PKG, "SalesforceInputDialog.FieldsMissing.DialogMessage")); + logError(BaseMessages.getString(PKG, "SalesforceInputDialog.FieldsMissing.DialogMessage")); return false; } @@ -356,7 +355,7 @@ public boolean init() { if (meta.isSpecifyQuery()) { // Check if user specified a query if (Utils.isEmpty(soSQL)) { - log.logError( + logError( BaseMessages.getString(PKG, "SalesforceInputDialog.QueryMissing.DialogMessage")); return false; } @@ -365,14 +364,14 @@ public boolean init() { if (meta.getRecordsFilter() != SalesforceConnectionUtils.RECORDS_FILTER_ALL) { String realFromDateString = resolve(meta.getReadFrom()); if (Utils.isEmpty(realFromDateString)) { - log.logError( + logError( BaseMessages.getString( PKG, "SalesforceInputDialog.FromDateMissing.DialogMessage")); return false; } String realToDateString = resolve(meta.getReadTo()); if (Utils.isEmpty(realToDateString)) { - log.logError( + logError( BaseMessages.getString(PKG, "SalesforceInputDialog.ToDateMissing.DialogMessage")); return false; } @@ -385,7 +384,7 @@ public boolean init() { data.endCal.setTime(dateFormat.parse(realToDateString)); dateFormat = null; } catch (Exception e) { - log.logError(BaseMessages.getString(PKG, "SalesforceInput.ErrorParsingDate"), e); + logError(BaseMessages.getString(PKG, "SalesforceInput.ErrorParsingDate"), e); return false; } } diff --git a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinsert/SalesforceInsert.java b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinsert/SalesforceInsert.java index af9661d91be..b51edf44779 100644 --- a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinsert/SalesforceInsert.java +++ b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceinsert/SalesforceInsert.java @@ -114,7 +114,7 @@ public boolean processRow() throws HopException { 
void writeToSalesForce(Object[] rowData) throws HopException { try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -139,7 +139,7 @@ void writeToSalesForce(Object[] rowData) throws HopException { // We need to keep track of this field fieldsToNull.add( SalesforceUtils.getFieldToNullName( - log, meta.getUpdateLookup()[i], meta.getUseExternalId()[i])); + getLogChannel(), meta.getUpdateLookup()[i], meta.getUseExternalId()[i])); } else { Object normalObject = normalizeValue(valueMeta, value); insertfields.add( @@ -169,7 +169,7 @@ void writeToSalesForce(Object[] rowData) throws HopException { } if (data.iBufferPos >= meta.getBatchSizeInt()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceInsert.CallingFlushBuffer")); } flushBuffers(); @@ -187,7 +187,7 @@ private void flushBuffers() throws HopException { if (data.saveResult[j].isSuccess()) { // Row was inserted String id = data.saveResult[j].getId(); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SalesforceInsert.RowInserted", id)); } @@ -199,14 +199,14 @@ private void flushBuffers() throws HopException { int newIndex = getInputRowMeta().size(); newRow[newIndex++] = id; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceInsert.NewRow", newRow[0])); } putRow(data.outputRowMeta, newRow); // copy row to output rowset(s) incrementLinesOutput(); - if (checkFeedback(getLinesInput()) && log.isDetailed()) { + if (checkFeedback(getLinesInput()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SalesforceInsert.log.LineRow", getLinesInput())); } @@ -218,7 +218,7 @@ private void flushBuffers() throws HopException { if (!getTransformMeta().isDoingErrorHandling()) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SalesforceInsert.ErrorFound")); } @@ -248,7 +248,7 @@ private void flushBuffers() throws HopException { } // Simply add this row to the error row - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceInsert.PassingRowToErrorTransform")); } putError( @@ -272,7 +272,7 @@ private void flushBuffers() throws HopException { BaseMessages.getString(PKG, "SalesforceInsert.FailedToInsertObject", e.getMessage())); } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug("Passing row to error transform"); } diff --git a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupdate/SalesforceUpdate.java b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupdate/SalesforceUpdate.java index 1bcb893ce3b..5b0d6c55f23 100644 --- a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupdate/SalesforceUpdate.java +++ b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupdate/SalesforceUpdate.java @@ -112,7 +112,7 @@ public boolean processRow() throws HopException { void writeToSalesForce(Object[] rowData) throws HopException { try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Called writeToSalesForce with " + data.iBufferPos @@ -134,7 +134,7 @@ void writeToSalesForce(Object[] rowData) throws HopException { // We need to keep track of this field fieldsToNull.add( SalesforceUtils.getFieldToNullName( - log, meta.getUpdateLookup()[i], meta.getUseExternalId()[i])); + getLogChannel(), meta.getUpdateLookup()[i], 
meta.getUseExternalId()[i])); } else { IValueMeta valueMeta = data.inputRowMeta.getValueMeta(data.fieldnrs[i]); Object value = rowData[data.fieldnrs[i]]; @@ -165,7 +165,7 @@ void writeToSalesForce(Object[] rowData) throws HopException { } if (data.iBufferPos >= meta.getBatchSizeInt()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Calling flush buffer from writeToSalesForce"); } flushBuffers(); @@ -190,7 +190,7 @@ private void flushBuffers() throws HopException { if (data.saveResult[j].isSuccess()) { // Row was updated String id = data.saveResult[j].getId(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Row updated with id: " + id); } @@ -198,14 +198,14 @@ private void flushBuffers() throws HopException { Object[] newRow = RowDataUtil.resizeArray(data.outputBuffer[j], data.outputRowMeta.size()); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("The new row has an id value of : " + newRow[0]); } putRow(data.outputRowMeta, newRow); // copy row to output rowset(s) incrementLinesUpdated(); - if (checkFeedback(getLinesInput()) && log.isDetailed()) { + if (checkFeedback(getLinesInput()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SalesforceUpdate.log.LineRow", "" + getLinesInput())); } @@ -216,7 +216,7 @@ private void flushBuffers() throws HopException { // array and write them to the screen if (!getTransformMeta().isDoingErrorHandling()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Found error from SalesForce and raising the exception"); } @@ -246,7 +246,7 @@ private void flushBuffers() throws HopException { } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug("Passing row to error transform"); } @@ -271,7 +271,7 @@ private void flushBuffers() throws HopException { } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug("Passing row to error transform"); } diff --git a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupsert/SalesforceUpsert.java b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupsert/SalesforceUpsert.java index ff9a4192ffb..ba560100851 100644 --- a/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupsert/SalesforceUpsert.java +++ b/plugins/transforms/salesforce/src/main/java/org/apache/hop/pipeline/transforms/salesforceupsert/SalesforceUpsert.java @@ -122,7 +122,7 @@ public boolean processRow() throws HopException { void writeToSalesForce(Object[] rowData) throws HopException { try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "SalesforceUpsert.CalledWrite", data.iBufferPos, meta.getBatchSizeInt())); @@ -151,7 +151,7 @@ void writeToSalesForce(Object[] rowData) throws HopException { // We need to keep track of this field fieldsToNull.add( SalesforceUtils.getFieldToNullName( - log, meta.getUpdateLookup()[i], meta.getUseExternalId()[i])); + getLogChannel(), meta.getUpdateLookup()[i], meta.getUseExternalId()[i])); } else { Object normalObject = normalizeValue(valueMeta, rowData[data.fieldnrs[i]]); if (data.mapData && data.dataTypeMap != null) { @@ -183,7 +183,7 @@ void writeToSalesForce(Object[] rowData) throws HopException { } if (data.iBufferPos >= meta.getBatchSizeInt()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Calling flush buffer from writeToSalesForce"); } flushBuffers(); @@ -227,12 +227,12 @@ private void flushBuffers() throws 
HopException { String id = data.upsertResult[j].getId(); if (data.upsertResult[j].isCreated()) { incrementLinesOutput(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceUpsert.ObjectCreated", id)); } } else { incrementLinesUpdated(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceUpsert.ObjectUpdated", id)); } } @@ -244,13 +244,13 @@ private void flushBuffers() throws HopException { int newIndex = data.inputRowMeta.size(); newRow[newIndex++] = id; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceUpsert.NewRow", newRow[0])); } putRow(data.outputRowMeta, newRow); // copy row to output rowset(s) - if (checkFeedback(getLinesInput()) && log.isDetailed()) { + if (checkFeedback(getLinesInput()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SalesforceUpsert.log.LineRow", "" + getLinesInput())); } @@ -261,7 +261,7 @@ private void flushBuffers() throws HopException { // array and write them to the screen if (!getTransformMeta().isDoingErrorHandling()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SalesforceUpsert.ErrorFound")); } @@ -291,7 +291,7 @@ private void flushBuffers() throws HopException { } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SalesforceUpsert.PassingRowToErrorTransform")); } putError( @@ -320,7 +320,7 @@ private void flushBuffers() throws HopException { } } // Simply add this row to the error row - if (log.isDebug()) { + if (isDebug()) { logDebug("Passing row to error transform"); } diff --git a/plugins/transforms/samplerows/src/main/java/org/apache/hop/pipeline/transforms/samplerows/SampleRows.java b/plugins/transforms/samplerows/src/main/java/org/apache/hop/pipeline/transforms/samplerows/SampleRows.java index 807f3aab601..6140fb22ad4 100644 --- a/plugins/transforms/samplerows/src/main/java/org/apache/hop/pipeline/transforms/samplerows/SampleRows.java +++ b/plugins/transforms/samplerows/src/main/java/org/apache/hop/pipeline/transforms/samplerows/SampleRows.java @@ -71,7 +71,7 @@ public boolean processRow() throws HopException { for (String part : rangePart) { if (part.matches("\\d+")) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SampleRows.Log.RangeValue", part)); } int vpart = Integer.valueOf(part); @@ -82,7 +82,7 @@ public boolean processRow() throws HopException { Integer start = Integer.valueOf(rangeMultiPart[0]); Integer end = Integer.valueOf(rangeMultiPart[1]); Range range = Range.closed(start, end); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SampleRows.Log.RangeValue", range)); } setBuilder.add(range); @@ -110,7 +110,7 @@ public boolean processRow() throws HopException { // putRow(data.outputRowMeta, data.outputRow); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, diff --git a/plugins/transforms/schemamapping/src/main/java/org/apache/hop/pipeline/transforms/schemamapping/SchemaMapping.java b/plugins/transforms/schemamapping/src/main/java/org/apache/hop/pipeline/transforms/schemamapping/SchemaMapping.java index 3bb2a2ef528..9a4fda7391e 100644 --- a/plugins/transforms/schemamapping/src/main/java/org/apache/hop/pipeline/transforms/schemamapping/SchemaMapping.java +++ 
b/plugins/transforms/schemamapping/src/main/java/org/apache/hop/pipeline/transforms/schemamapping/SchemaMapping.java @@ -84,7 +84,7 @@ public boolean processRow() throws HopException { applySchemaToIncomingStream(outputData, r); putRow(data.outputRowMeta, outputData); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "SchemaMapping.Log.WroteRowToNextTransform") + data.outputRowMeta.getString(outputData)); diff --git a/plugins/transforms/script/src/main/java/org/apache/hop/pipeline/transforms/script/Script.java b/plugins/transforms/script/src/main/java/org/apache/hop/pipeline/transforms/script/Script.java index bd8e15e59d1..210ff5bbec0 100644 --- a/plugins/transforms/script/src/main/java/org/apache/hop/pipeline/transforms/script/Script.java +++ b/plugins/transforms/script/src/main/java/org/apache/hop/pipeline/transforms/script/Script.java @@ -134,7 +134,7 @@ private void determineUsedFields(IRowMeta row) { // String valName = row.getValueMeta(i).getName(); if (strTransformScript.contains(valName)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "Script.Log.UsedValueName", String.valueOf(i), valName)); // $NON-NLS-3$ @@ -144,7 +144,7 @@ private void determineUsedFields(IRowMeta row) { } } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -226,7 +226,7 @@ private boolean addValues(IRowMeta rowMeta, Object[] row) throws HopException { try { // Checking for StartScript if (StringUtils.isNotEmpty(strStartScript)) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("Start compiledScript found!")); } if (data.engine instanceof Compilable) { @@ -238,7 +238,7 @@ private boolean addValues(IRowMeta rowMeta, Object[] row) throws HopException { } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("No starting compiledScript found!")); } } @@ -308,12 +308,12 @@ private boolean addValues(IRowMeta rowMeta, Object[] row) throws HopException { Object statusVariable = bindings.get("pipeline_status"); if (statusVariable != null) { bWithPipelineStat = true; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( ("Value pipeline_status found. Checking pipeline status while compiledScript execution.")); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( ("No pipeline_status value found. 
Pipeline status checking not available.")); } @@ -619,11 +619,11 @@ public boolean processRow() throws HopException { // Checking for EndScript if (strEndScript != null && strEndScript.length() > 0) { data.engine.eval(strEndScript, bindings); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("End Script found!")); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(("No end Script found!")); } } @@ -700,7 +700,7 @@ public boolean init() { try { data.engine = ScriptUtils.getInstance().getScriptEngineByName(meta.getLanguageName()); } catch (Exception e) { - log.logError("Error obtaining scripting engine for language " + meta.getLanguageName(), e); + logError("Error obtaining scripting engine for language " + meta.getLanguageName(), e); } rowNumber = 0; previousRow = null; diff --git a/plugins/transforms/selectvalues/src/main/java/org/apache/hop/pipeline/transforms/selectvalues/SelectValues.java b/plugins/transforms/selectvalues/src/main/java/org/apache/hop/pipeline/transforms/selectvalues/SelectValues.java index c9169b22ccf..7299c66bc98 100644 --- a/plugins/transforms/selectvalues/src/main/java/org/apache/hop/pipeline/transforms/selectvalues/SelectValues.java +++ b/plugins/transforms/selectvalues/src/main/java/org/apache/hop/pipeline/transforms/selectvalues/SelectValues.java @@ -170,7 +170,7 @@ private synchronized Object[] selectValues(IRowMeta rowMeta, Object[] rowData) // outputData[outputIndex++] = valueMeta.cloneValueData(rowData[idx]); } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SelectValues.Log.MixingStreamWithDifferentFields")); } @@ -372,7 +372,7 @@ public boolean processRow() throws HopException { rowCopy = getInputRowMeta().cloneRow(rowData); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "SelectValues.Log.GotRowFromPreviousTransform") + getInputRowMeta().getString(rowData)); @@ -410,7 +410,7 @@ public boolean processRow() throws HopException { // Send the row on its way // putRow(data.metadataRowMeta, outputData); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "SelectValues.Log.WroteRowToNextTransform") + data.metadataRowMeta.getString(outputData)); diff --git a/plugins/transforms/setvariable/src/main/java/org/apache/hop/pipeline/transforms/setvariable/SetVariable.java b/plugins/transforms/setvariable/src/main/java/org/apache/hop/pipeline/transforms/setvariable/SetVariable.java index e7eedd61d53..176cce570f0 100644 --- a/plugins/transforms/setvariable/src/main/java/org/apache/hop/pipeline/transforms/setvariable/SetVariable.java +++ b/plugins/transforms/setvariable/src/main/java/org/apache/hop/pipeline/transforms/setvariable/SetVariable.java @@ -179,7 +179,7 @@ private void setValue(Object[] rowData, int i, boolean usedefault) throws HopExc if (parentWorkflow != null) { parentWorkflow.setVariable(varname, value); } else { - log.logBasic( + logBasic( CONST_WARNING_CAN_T_SET_VARIABLE + varname + "] on parent workflow: the parent workflow is not available"); @@ -191,7 +191,7 @@ private void setValue(Object[] rowData, int i, boolean usedefault) throws HopExc if (gpJob != null) { gpJob.setVariable(varname, value); } else { - log.logBasic( + logBasic( CONST_WARNING_CAN_T_SET_VARIABLE + varname + "] on grand parent workflow: the grand parent workflow is not available"); @@ -205,7 +205,7 @@ private void setValue(Object[] rowData, int i, boolean usedefault) throws HopExc if (parentWorkflow != null) { 
parentWorkflow.setVariable(varname, value); } else { - log.logBasic( + logBasic( CONST_WARNING_CAN_T_SET_VARIABLE + varname + "] on parent workflow: the parent workflow is not available"); diff --git a/plugins/transforms/snowflake/src/main/java/org/apache/hop/pipeline/transforms/snowflake/bulkloader/SnowflakeBulkLoader.java b/plugins/transforms/snowflake/src/main/java/org/apache/hop/pipeline/transforms/snowflake/bulkloader/SnowflakeBulkLoader.java index 72eb0eb7004..c7951f59ea2 100644 --- a/plugins/transforms/snowflake/src/main/java/org/apache/hop/pipeline/transforms/snowflake/bulkloader/SnowflakeBulkLoader.java +++ b/plugins/transforms/snowflake/src/main/java/org/apache/hop/pipeline/transforms/snowflake/bulkloader/SnowflakeBulkLoader.java @@ -609,7 +609,7 @@ private void openNewFile(String baseFilename) throws HopException { throw new HopException("Compression provider GZip does not support output streams!"); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opening output stream using provider: " + compressionProvider.getName()); } @@ -629,7 +629,7 @@ private void openNewFile(String baseFilename) throws HopException { data.writer = new BufferedOutputStream(data.out, 5000); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Opened new file with name [" + HopVfs.getFriendlyURI(filename, variables) + "]"); } @@ -654,7 +654,7 @@ private boolean closeFile() { data.writer.flush(); } data.writer = null; - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing normal file ..."); } if (data.out != null) { @@ -703,7 +703,7 @@ public boolean init() { data.db = new Database(this, variables, data.databaseMeta); data.db.connect(); - if (log.isBasic()) { + if (isBasic()) { logBasic("Connected to database [" + meta.getConnection() + "]"); } diff --git a/plugins/transforms/sort/src/main/java/org/apache/hop/pipeline/transforms/sort/SortRows.java b/plugins/transforms/sort/src/main/java/org/apache/hop/pipeline/transforms/sort/SortRows.java index ab669963688..beac3e20228 100644 --- a/plugins/transforms/sort/src/main/java/org/apache/hop/pipeline/transforms/sort/SortRows.java +++ b/plugins/transforms/sort/src/main/java/org/apache/hop/pipeline/transforms/sort/SortRows.java @@ -81,10 +81,10 @@ void addBuffer(IRowMeta rowMeta, Object[] r) throws HopException { data.freeMemoryPct = Const.getPercentageFreeMemory(); data.freeCounter = 0; - if (log.isDetailed()) { + if (isDetailed()) { data.memoryReporting++; if (data.memoryReporting >= 10) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "SortRows.Detailed.AvailableMemory", data.freeMemoryPct)); @@ -100,8 +100,8 @@ void addBuffer(IRowMeta rowMeta, Object[] r) throws HopException { data.freeMemoryPctLimit > 0 && data.freeMemoryPct < data.freeMemoryPctLimit && data.buffer.size() >= data.minSortSize; - if (log.isDebug()) { - this.logDebug( + if (isDebug()) { + logDebug( BaseMessages.getString( PKG, "SortRows.Debug.StartDumpToDisk", data.freeMemoryPct, data.buffer.size())); } @@ -152,7 +152,7 @@ void sortExternalRows() throws HopException { int result = data.outputRowMeta.compare(row, previousRow, data.fieldnrs); if (result == 0) { duplicates.add(index); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -206,7 +206,7 @@ void sortExternalRows() throws HopException { // data.freeMemoryPct = Const.getPercentageFreeMemory(); data.freeCounter = 0; - if (data.sortSize <= 0 && log.isDetailed()) { + if (data.sortSize <= 0 && isDetailed()) { logDetailed( 
BaseMessages.getString(PKG, "SortRows.Detailed.AvailableMemory", data.freeMemoryPct)); } @@ -232,7 +232,7 @@ Object[] getBuffer() throws HopValueException { // Open all files at once and read one row from each file... if (CollectionUtils.isNotEmpty(data.files) && (data.dis.isEmpty() || data.fis.isEmpty())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "SortRows.Basic.OpeningTempFiles", data.files.size())); } @@ -240,7 +240,7 @@ Object[] getBuffer() throws HopValueException { for (int f = 0; f < data.files.size() && !isStopped(); f++) { FileObject fileObject = data.files.get(f); String filename = HopVfs.getFilename(fileObject); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SortRows.Detailed.OpeningTempFile", filename)); } InputStream fi = HopVfs.getInputStream(fileObject); @@ -256,7 +256,7 @@ Object[] getBuffer() throws HopValueException { // How long is the buffer? int buffersize = data.bufferSizes.get(f); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "SortRows.Detailed.FromFileExpectingRows", filename, buffersize)); @@ -293,7 +293,7 @@ Object[] getBuffer() throws HopValueException { } else { // We now have "filenr" rows waiting: which one is the smallest? // - if (log.isRowLevel()) { + if (isRowLevel()) { for (int i = 0; i < data.rowbuffer.size() && !isStopped(); i++) { Object[] b = data.rowbuffer.get(i); logRowlevel( @@ -485,7 +485,7 @@ public boolean processRow() throws HopException { } if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic("Linenr " + getLinesRead()); } } @@ -504,12 +504,12 @@ void passBuffer() throws HopException { Object[] previousRow = null; // log time spent for external merge (expected time consuming operation) - if (log.isDebug() && !data.files.isEmpty()) { + if (isDebug() && !data.files.isEmpty()) { this.logDebug(BaseMessages.getString(PKG, "SortRows.Debug.ExternalMergeStarted")); } while (r != null && !isStopped()) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "SortRows.RowLevel.ReadRow", data.outputRowMeta.getString(r))); @@ -539,7 +539,7 @@ void passBuffer() throws HopException { r = getBuffer(); } - if (log.isDebug() && !data.files.isEmpty()) { + if (isDebug() && !data.files.isEmpty()) { this.logDebug(BaseMessages.getString(PKG, "SortRows.Debug.ExternalMergeFinished")); } @@ -629,7 +629,7 @@ void quickSort(List elements) { nrConversions += valueMeta.getNumberOfBinaryStringConversions(); valueMeta.setNumberOfBinaryStringConversions(0L); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "SortRows.Detailed.ReportNumberOfBinaryStringConv", nrConversions)); diff --git a/plugins/transforms/splitfieldtorows/src/main/java/org/apache/hop/pipeline/transforms/splitfieldtorows/SplitFieldToRows.java b/plugins/transforms/splitfieldtorows/src/main/java/org/apache/hop/pipeline/transforms/splitfieldtorows/SplitFieldToRows.java index 926ee350704..f7d5993f8cc 100644 --- a/plugins/transforms/splitfieldtorows/src/main/java/org/apache/hop/pipeline/transforms/splitfieldtorows/SplitFieldToRows.java +++ b/plugins/transforms/splitfieldtorows/src/main/java/org/apache/hop/pipeline/transforms/splitfieldtorows/SplitFieldToRows.java @@ -130,7 +130,7 @@ public synchronized boolean processRow() throws HopException { return false; } - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { 
logBasic(BaseMessages.getString(PKG, "SplitFieldToRows.Log.LineNumber") + getLinesRead()); } @@ -151,7 +151,7 @@ public boolean init() { data.delimiterPattern = Pattern.compile(Pattern.quote(resolve(delimiter))); } } catch (PatternSyntaxException pse) { - log.logError(pse.getMessage()); + logError(pse.getMessage()); throw pse; } diff --git a/plugins/transforms/splunk/src/main/java/org/apache/hop/pipeline/transforms/splunkinput/SplunkInput.java b/plugins/transforms/splunk/src/main/java/org/apache/hop/pipeline/transforms/splunkinput/SplunkInput.java index ef0ac0545f1..049ac0d077e 100644 --- a/plugins/transforms/splunk/src/main/java/org/apache/hop/pipeline/transforms/splunkinput/SplunkInput.java +++ b/plugins/transforms/splunk/src/main/java/org/apache/hop/pipeline/transforms/splunkinput/SplunkInput.java @@ -57,7 +57,7 @@ public boolean init() { // Connect to Neo4j // if (StringUtils.isEmpty(meta.getConnectionName())) { - log.logError("You need to specify a Splunk connection to use in this transform"); + logError("You need to specify a Splunk connection to use in this transform"); return false; } @@ -76,7 +76,7 @@ public boolean init() { } data.splunkConnection = serializer.load(connectionName); } catch (HopException e) { - log.logError( + logError( "Could not load Splunk connection '" + meta.getConnectionName() + "' from the metastore", e); return false; @@ -89,7 +89,7 @@ public boolean init() { data.service = Service.connect(data.serviceArgs); } catch (Exception e) { - log.logError( + logError( "Unable to get or create a connection to Splunk connection named '" + data.splunkConnection.getName() + "'", diff --git a/plugins/transforms/sql/src/main/java/org/apache/hop/pipeline/transforms/sql/ExecSql.java b/plugins/transforms/sql/src/main/java/org/apache/hop/pipeline/transforms/sql/ExecSql.java index b4eee2aaa57..5752335dcfa 100644 --- a/plugins/transforms/sql/src/main/java/org/apache/hop/pipeline/transforms/sql/ExecSql.java +++ b/plugins/transforms/sql/src/main/java/org/apache/hop/pipeline/transforms/sql/ExecSql.java @@ -187,7 +187,7 @@ public boolean processRow() throws HopException { sql = data.sql; } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(BaseMessages.getString(PKG, "ExecSql.Log.ExecutingSQLScript") + Const.CR + sql); } @@ -217,7 +217,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, row); // send it out! 
- if (checkFeedback(getLinesWritten()) && log.isBasic()) { + if (checkFeedback(getLinesWritten()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "ExecSql.Log.LineNumber") + getLinesWritten()); } } catch (HopException e) { @@ -240,7 +240,7 @@ public boolean processRow() throws HopException { @Override public void dispose() { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "ExecSql.Log.FinishingReadingQuery")); } @@ -279,7 +279,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ExecSql.Log.ConnectedToDB")); } diff --git a/plugins/transforms/sqlfileoutput/src/main/java/org/apache/hop/pipeline/transforms/sqlfileoutput/SQLFileOutput.java b/plugins/transforms/sqlfileoutput/src/main/java/org/apache/hop/pipeline/transforms/sqlfileoutput/SQLFileOutput.java index 4f42bfd819b..33c2ade367b 100644 --- a/plugins/transforms/sqlfileoutput/src/main/java/org/apache/hop/pipeline/transforms/sqlfileoutput/SQLFileOutput.java +++ b/plugins/transforms/sqlfileoutput/src/main/java/org/apache/hop/pipeline/transforms/sqlfileoutput/SQLFileOutput.java @@ -102,7 +102,7 @@ && getLinesOutput() > 0 if (meta.createTable()) { String crTable = data.db.getDDLCreationTable(schemaTable, data.insertRowMeta); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(BaseMessages.getString(PKG, "SQLFileOutputLog.OutputSQL", crTable)); } // Write to file @@ -132,7 +132,7 @@ && getLinesOutput() > 0 sql = sql + Const.CR; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(BaseMessages.getString(PKG, "SQLFileOutputLog.OutputSQL", sql)); } @@ -147,7 +147,7 @@ && getLinesOutput() > 0 incrementLinesOutput(); if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic("linenr " + getLinesRead()); } } @@ -199,32 +199,32 @@ public boolean openNewFile() { } OutputStream outputStream; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opening output stream in nocompress mode"); } OutputStream fos = HopVfs.getOutputStream(filename, meta.isFileAppended(), variables); outputStream = fos; - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opening output stream in default encoding"); } data.writer = new OutputStreamWriter(new BufferedOutputStream(outputStream, 5000)); if (!Utils.isEmpty(meta.getEncoding())) { - if (log.isBasic()) { + if (isBasic()) { logDetailed("Opening output stream in encoding: " + meta.getEncoding()); } data.writer = new OutputStreamWriter( new BufferedOutputStream(outputStream, 5000), resolve(meta.getEncoding())); } else { - if (log.isBasic()) { + if (isBasic()) { logDetailed("Opening output stream in default encoding"); } data.writer = new OutputStreamWriter(new BufferedOutputStream(outputStream, 5000)); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opened new file with name [" + filename + "]"); } @@ -244,18 +244,18 @@ private boolean closeFile() { try { if (data.writer != null) { - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing output stream"); } data.writer.close(); - if (log.isDebug()) { + if (isDebug()) { logDebug("Closed output stream"); } data.writer = null; } if (data.fos != null) { - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing normal file .."); } data.fos.close(); @@ -299,10 +299,10 @@ public boolean init() { String filename = resolve(meta.getFileName()); parentfolder = HopVfs.getFileObject(filename, variables).getParent(); if (!parentfolder.exists()) { - log.logBasic( + logBasic( "Folder 
parent", "Folder parent " + parentfolder.getName() + " does not exist !"); parentfolder.createFolder(); - log.logBasic("Folder parent", "Folder parent was created."); + logBasic("Folder parent", "Folder parent was created."); } } catch (Exception e) { logError("Couldn't created parent folder " + parentfolder.getName()); diff --git a/plugins/transforms/ssh/src/main/java/org/apache/hop/pipeline/transforms/ssh/Ssh.java b/plugins/transforms/ssh/src/main/java/org/apache/hop/pipeline/transforms/ssh/Ssh.java index aa9d4ec5189..c72a58fb08d 100644 --- a/plugins/transforms/ssh/src/main/java/org/apache/hop/pipeline/transforms/ssh/Ssh.java +++ b/plugins/transforms/ssh/src/main/java/org/apache/hop/pipeline/transforms/ssh/Ssh.java @@ -118,19 +118,19 @@ public boolean processRow() throws HopException { // Open a session session = data.conn.openSession(); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SSH.Log.SessionOpened")); } // execute commands - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SSH.Log.RunningCommand", data.commands)); } session.execCommand(data.commands); // Read Stdout, Sterr and exitStatus SessionResult sessionresult = new SessionResult(session); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -148,7 +148,7 @@ public boolean processRow() throws HopException { rowData[index++] = sessionresult.isStdTypeErr(); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "SSH.Log.OutputLine", data.outputRowMeta.getString(rowData))); @@ -157,7 +157,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, rowData); if (checkFeedback(getLinesRead())) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "SSH.LineNumber", "" + getLinesRead())); } } @@ -183,7 +183,7 @@ public boolean processRow() throws HopException { } finally { if (session != null) { session.close(); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SSH.Log.SessionClosed")); } } @@ -220,7 +220,7 @@ public boolean init() { // Open connection data.conn = SshData.openConnection(this, meta); - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "SSH.Log.ConnectionOpened")); } diff --git a/plugins/transforms/standardizephonenumber/src/main/java/org/apache/hop/pipeline/transforms/standardizephonenumber/StandardizePhoneNumber.java b/plugins/transforms/standardizephonenumber/src/main/java/org/apache/hop/pipeline/transforms/standardizephonenumber/StandardizePhoneNumber.java index ae45d6a5c6d..f33713dc7dd 100644 --- a/plugins/transforms/standardizephonenumber/src/main/java/org/apache/hop/pipeline/transforms/standardizephonenumber/StandardizePhoneNumber.java +++ b/plugins/transforms/standardizephonenumber/src/main/java/org/apache/hop/pipeline/transforms/standardizephonenumber/StandardizePhoneNumber.java @@ -68,7 +68,7 @@ public boolean processRow() throws HopException { // indexes // in the row structure that only need to be done once if (first) { - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "StandardizePhoneNumber.Log.StartedProcessing")); } @@ -148,7 +148,7 @@ public boolean processRow() throws HopException { } outputRow[index] = phoneNumberService.format(phoneNumber, format); } catch (NumberParseException e) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -180,7 +180,7 @@ public boolean processRow() 
throws HopException { // put the row to the output row stream putRow(data.outputRowMeta, outputRow); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, "StandardizePhoneNumber.Log.WroteRowToNextTransform", outputRow)); diff --git a/plugins/transforms/streamlookup/src/main/java/org/apache/hop/pipeline/transforms/streamlookup/StreamLookup.java b/plugins/transforms/streamlookup/src/main/java/org/apache/hop/pipeline/transforms/streamlookup/StreamLookup.java index fb68cc53e3e..7ba47650026 100644 --- a/plugins/transforms/streamlookup/src/main/java/org/apache/hop/pipeline/transforms/streamlookup/StreamLookup.java +++ b/plugins/transforms/streamlookup/src/main/java/org/apache/hop/pipeline/transforms/streamlookup/StreamLookup.java @@ -132,7 +132,7 @@ private boolean readLookupValues() throws HopException { logError(BaseMessages.getString(PKG, "StreamLookup.Log.NoLookupTransformSpecified")); return false; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "StreamLookup.Log.ReadingFromStream") + data.infoStream.getTransformName() @@ -148,7 +148,7 @@ private boolean readLookupValues() throws HopException { IRowSet rowSet = findInputRowSet(data.infoStream.getTransformName()); Object[] rowData = getRowFrom(rowSet); // rows are originating from "lookup_from" while (rowData != null) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "StreamLookup.Log.ReadLookupRow") + rowSet.getRowMeta().getString(rowData)); @@ -398,7 +398,7 @@ public boolean processRow() throws HopException { if (r == null) { // no more input to be expected... - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "StreamLookup.Log.StoppedProcessingWithEmpty", getLinesRead() + "")); @@ -426,7 +426,7 @@ public boolean processRow() throws HopException { meta.getKeystream()[i], "" + getInputRowMeta().getString(r))); } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -470,7 +470,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, outputRow); // copy row to output rowset(s) if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "StreamLookup.Log.LineNumber") + getLinesRead()); } } diff --git a/plugins/transforms/stringcut/src/main/java/org/apache/hop/pipeline/transforms/stringcut/StringCut.java b/plugins/transforms/stringcut/src/main/java/org/apache/hop/pipeline/transforms/stringcut/StringCut.java index 796ced19a2b..14374c97953 100644 --- a/plugins/transforms/stringcut/src/main/java/org/apache/hop/pipeline/transforms/stringcut/StringCut.java +++ b/plugins/transforms/stringcut/src/main/java/org/apache/hop/pipeline/transforms/stringcut/StringCut.java @@ -171,7 +171,7 @@ public boolean processRow() throws HopException { Object[] output = getOneRow(getInputRowMeta(), r); putRow(data.outputRowMeta, output); - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed(BaseMessages.getString(PKG, "StringCut.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/stringoperations/src/main/java/org/apache/hop/pipeline/transforms/stringoperations/StringOperations.java b/plugins/transforms/stringoperations/src/main/java/org/apache/hop/pipeline/transforms/stringoperations/StringOperations.java index 26b02f1f951..e69d7d1d922 100644 --- 
a/plugins/transforms/stringoperations/src/main/java/org/apache/hop/pipeline/transforms/stringoperations/StringOperations.java +++ b/plugins/transforms/stringoperations/src/main/java/org/apache/hop/pipeline/transforms/stringoperations/StringOperations.java @@ -321,7 +321,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, output); - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "StringOperations.Log.LineNumber") + getLinesRead()); } diff --git a/plugins/transforms/switchcase/src/main/java/org/apache/hop/pipeline/transforms/switchcase/SwitchCase.java b/plugins/transforms/switchcase/src/main/java/org/apache/hop/pipeline/transforms/switchcase/SwitchCase.java index 064b4221e5b..2d8db709111 100644 --- a/plugins/transforms/switchcase/src/main/java/org/apache/hop/pipeline/transforms/switchcase/SwitchCase.java +++ b/plugins/transforms/switchcase/src/main/java/org/apache/hop/pipeline/transforms/switchcase/SwitchCase.java @@ -88,7 +88,7 @@ public boolean processRow() throws HopException { } if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "SwitchCase.Log.LineNumber") + getLinesRead()); } } diff --git a/plugins/transforms/synchronizeaftermerge/src/main/java/org/apache/hop/pipeline/transforms/synchronizeaftermerge/SynchronizeAfterMerge.java b/plugins/transforms/synchronizeaftermerge/src/main/java/org/apache/hop/pipeline/transforms/synchronizeaftermerge/SynchronizeAfterMerge.java index 4e472566a9e..d7fe44e626c 100644 --- a/plugins/transforms/synchronizeaftermerge/src/main/java/org/apache/hop/pipeline/transforms/synchronizeaftermerge/SynchronizeAfterMerge.java +++ b/plugins/transforms/synchronizeaftermerge/src/main/java/org/apache/hop/pipeline/transforms/synchronizeaftermerge/SynchronizeAfterMerge.java @@ -110,7 +110,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { * INSERT ROW */ - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "SynchronizeAfterMerge.InsertRow", Arrays.toString(row))); } @@ -129,7 +129,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { data.db.getInsertStatement( data.realSchemaName, data.realTableName, data.insertRowMeta); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the insert SQL statement: " + sql); } @@ -152,7 +152,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { if (!data.batchMode) { incrementLinesOutput(); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Written row: " + data.insertRowMeta.getString(insertRowData)); } @@ -181,7 +181,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { if (data.lookupStatement == null) { String sql = getLookupStatement(data.inputRowMeta); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparating SQL for insert: " + sql); } @@ -192,7 +192,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { } data.db.setValues(data.lookupParameterRowMeta, lookupRow, data.lookupStatement); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -224,7 +224,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { "SynchronizeAfterMerge.Exception.KeyCouldNotFound", data.lookupParameterRowMeta.getString(lookupRow))); } else { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( 
BaseMessages.getString( PKG, @@ -261,7 +261,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { data.updateStatement = data.db.prepareSql(sql); data.preparedStatements.put( data.realSchemaTable + CONST_UPDATE, data.updateStatement); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the Update SQL statement : " + sql); } } @@ -290,7 +290,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { data.savepoint = data.db.setSavepoint(); } data.db.setValues(data.updateParameterRowMeta, updateRow, data.updateStatement); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -318,7 +318,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { data.deleteStatement = data.db.prepareSql(sql); data.preparedStatements.put( data.realSchemaTable + CONST_DELETE, data.deleteStatement); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the Delete SQL statement : " + sql); } } @@ -345,7 +345,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { data.savepoint = data.db.setSavepoint(); } data.db.setValues(data.deleteParameterRowMeta, deleteRow, data.deleteStatement); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -466,7 +466,7 @@ private synchronized void lookupValues(Object[] row) throws HopException { } } catch (HopDatabaseException dbe) { if (getTransformMeta().isDoingErrorHandling()) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Written row to error handling : " + getInputRowMeta().getString(row)); } @@ -756,7 +756,7 @@ public boolean processRow() throws HopException { data.insertRowMeta = new RowMeta(); // lookup the values! - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.CheckingRow") + Arrays.toString(nextRow)); @@ -790,7 +790,7 @@ public boolean processRow() throws HopException { meta.getKeyStream2()[i])); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "SynchronizeAfterMerge.Log.FieldHasDataNumbers", meta.getKeyStream()[i]) @@ -827,7 +827,7 @@ public boolean processRow() throws HopException { CONST_SYNCHRONIZE_AFTER_MERGE_EXCEPTION_FIELD_REQUIRED, meta.getUpdateStream()[i])); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -843,7 +843,7 @@ public boolean processRow() throws HopException { data.lookupStatement = data.preparedStatements.get(data.realSchemaTable + CONST_LOOKUP); if (data.lookupStatement == null) { String sql = getLookupStatement(data.inputRowMeta); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the lookup SQL statement : " + sql); } @@ -859,7 +859,7 @@ public boolean processRow() throws HopException { data.db.getInsertStatement( data.realSchemaName, data.realTableName, data.insertRowMeta); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the Insert SQL statement : " + sql); } @@ -875,7 +875,7 @@ public boolean processRow() throws HopException { data.updateStatement = data.db.prepareSql(sql); data.preparedStatements.put(data.realSchemaTable + CONST_UPDATE, data.updateStatement); - if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the Update SQL statement : " + sql); } } @@ -887,7 +887,7 @@ public boolean processRow() throws HopException { data.deleteStatement = data.db.prepareSql(sql); data.preparedStatements.put(data.realSchemaTable + CONST_DELETE, data.deleteStatement); - 
if (log.isDebug()) { + if (isDebug()) { logDebug("Preparation of the Delete SQL statement : " + sql); } } @@ -900,7 +900,7 @@ public boolean processRow() throws HopException { putRow(data.outputRowMeta, nextRow); // copy row to output rowset(s) } - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed( BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.LineNumber") + getLinesRead()); } @@ -955,7 +955,7 @@ public boolean init() { if (data.batchMode && data.specialErrorHandling) { data.batchMode = false; - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "SynchronizeAfterMerge.Log.BatchModeDisabled")); } } diff --git a/plugins/transforms/systemdata/src/main/java/org/apache/hop/pipeline/transforms/systemdata/SystemData.java b/plugins/transforms/systemdata/src/main/java/org/apache/hop/pipeline/transforms/systemdata/SystemData.java index 790fecadf85..99e07a18d12 100644 --- a/plugins/transforms/systemdata/src/main/java/org/apache/hop/pipeline/transforms/systemdata/SystemData.java +++ b/plugins/transforms/systemdata/src/main/java/org/apache/hop/pipeline/transforms/systemdata/SystemData.java @@ -739,7 +739,7 @@ public boolean processRow() throws HopException { row = getSystemData(imeta, row); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("System info returned: " + data.outputRowMeta.getString(row)); } diff --git a/plugins/transforms/tableexists/src/main/java/org/apache/hop/pipeline/transforms/tableexists/TableExists.java b/plugins/transforms/tableexists/src/main/java/org/apache/hop/pipeline/transforms/tableexists/TableExists.java index 8209d0bcadb..24483951075 100644 --- a/plugins/transforms/tableexists/src/main/java/org/apache/hop/pipeline/transforms/tableexists/TableExists.java +++ b/plugins/transforms/tableexists/src/main/java/org/apache/hop/pipeline/transforms/tableexists/TableExists.java @@ -98,7 +98,7 @@ public boolean processRow() throws HopException { // add new values to the row. putRow(data.outputRowMeta, outputRowData); // copy row to output rowset(s) - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString( PKG, @@ -150,7 +150,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "TableExists.Log.ConnectedToDB")); } diff --git a/plugins/transforms/tableinput/src/main/java/org/apache/hop/pipeline/transforms/tableinput/TableInput.java b/plugins/transforms/tableinput/src/main/java/org/apache/hop/pipeline/transforms/tableinput/TableInput.java index 2feb030f804..90e2cf02642 100644 --- a/plugins/transforms/tableinput/src/main/java/org/apache/hop/pipeline/transforms/tableinput/TableInput.java +++ b/plugins/transforms/tableinput/src/main/java/org/apache/hop/pipeline/transforms/tableinput/TableInput.java @@ -53,7 +53,7 @@ public TableInput( } private RowMetaAndData readStartDate() throws HopException { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Reading from transform [" + data.infoStream.getTransformName() + "]"); } @@ -97,7 +97,7 @@ public boolean processRow() throws HopException { // Make sure we read data from source transforms... 
if (data.infoStream.getTransformMeta() != null) { if (meta.isExecuteEachInputRow()) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Reading single row from stream [" + data.infoStream.getTransformName() + "]"); } @@ -111,7 +111,7 @@ public boolean processRow() throws HopException { parameters = getRowFrom(data.rowSet); parametersMeta = data.rowSet.getRowMeta(); } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "Reading query parameters from stream [" + data.infoStream.getTransformName() @@ -121,7 +121,7 @@ public boolean processRow() throws HopException { parameters = rmad.getData(); parametersMeta = rmad.getRowMeta(); } - if (parameters != null && log.isDetailed()) { + if (parameters != null && isDetailed()) { logDetailed("Query parameters found = " + parametersMeta.getString(parameters)); } } else { @@ -181,7 +181,7 @@ public boolean processRow() throws HopException { putRow(data.rowMeta, data.thisrow); // fill the rowset(s). (wait for empty) data.thisrow = data.nextrow; - if (checkFeedback(getLinesInput()) && log.isBasic()) { + if (checkFeedback(getLinesInput()) && isBasic()) { logBasic("linenr " + getLinesInput()); } } @@ -198,7 +198,7 @@ public boolean processRow() throws HopException { putRow(data.rowMeta, data.thisrow); // fill the rowset(s). (wait for empty) data.thisrow = data.nextrow; - if (checkFeedback(getLinesInput()) && log.isBasic()) { + if (checkFeedback(getLinesInput()) && isBasic()) { logBasic("linenr " + getLinesInput()); } } @@ -224,7 +224,7 @@ private boolean doQuery(IRowMeta parametersMeta, Object[] parameters) sql = meta.getSql(); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("SQL query : " + sql); } if (parametersMeta.isEmpty()) { @@ -263,7 +263,7 @@ private boolean doQuery(IRowMeta parametersMeta, Object[] parameters) @Override public void dispose() { - if (log.isBasic()) { + if (isBasic()) { logBasic("Finished reading query, closing connection."); } try { @@ -333,7 +333,7 @@ public boolean init() { if (databaseMeta.isRequiringTransactionsOnQueries()) { data.db.setCommit(100); // needed for PGSQL it seems... 
} - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Connected to database..."); } diff --git a/plugins/transforms/tableoutput/src/main/java/org/apache/hop/pipeline/transforms/tableoutput/TableOutput.java b/plugins/transforms/tableoutput/src/main/java/org/apache/hop/pipeline/transforms/tableoutput/TableOutput.java index f230a21ebfb..cfc13b4aba5 100644 --- a/plugins/transforms/tableoutput/src/main/java/org/apache/hop/pipeline/transforms/tableoutput/TableOutput.java +++ b/plugins/transforms/tableoutput/src/main/java/org/apache/hop/pipeline/transforms/tableoutput/TableOutput.java @@ -119,7 +119,7 @@ public boolean processRow() throws HopException { incrementLinesOutput(); } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic("linenr " + getLinesRead()); } } catch (HopException e) { @@ -136,7 +136,7 @@ public boolean processRow() throws HopException { protected Object[] writeToTable(IRowMeta rowMeta, Object[] r) throws HopException { if (r == null) { // Stop: last line or error encountered - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Last line inserted: stop"); } return null; @@ -232,7 +232,7 @@ protected Object[] writeToTable(IRowMeta rowMeta, Object[] r) throws HopExceptio if (insertStatement == null) { String sql = data.db.getInsertStatement(resolve(meta.getSchemaName()), tableName, data.insertRowMeta); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Prepared statement : " + sql); } insertStatement = data.db.prepareSql(sql, meta.isReturningGeneratedKeys()); @@ -357,14 +357,14 @@ protected Object[] writeToTable(IRowMeta rowMeta, Object[] r) throws HopExceptio } else { if (meta.isIgnoreErrors()) { if (data.warnings < 20) { - if (log.isBasic()) { + if (isBasic()) { logBasic( "WARNING: Couldn't insert row into table: " + rowMeta.getString(r) + Const.CR + dbe.getMessage()); } - } else if (data.warnings == 20 && log.isBasic()) { + } else if (data.warnings == 20 && isBasic()) { logBasic( "FINAL WARNING (no more then 20 displayed): Couldn't insert row into table: " + rowMeta.getString(r) @@ -428,7 +428,7 @@ protected Object[] writeToTable(IRowMeta rowMeta, Object[] r) throws HopExceptio @Override public boolean isRowLevel() { - return log.isRowLevel(); + return super.isRowLevel(); } private void processBatchException( @@ -512,7 +512,7 @@ public boolean init() { // if (getTransformMeta().isDoingErrorHandling() && !dbInterface.IsSupportsErrorHandlingOnBatchUpdates()) { - log.logBasic( + logBasic( BaseMessages.getString( PKG, "TableOutput.Warning.ErrorHandlingIsNotFullySupportedWithBatchProcessing")); } @@ -524,7 +524,7 @@ public boolean init() { data.db = new Database(this, this, data.databaseMeta); data.db.connect(); - if (log.isBasic()) { + if (isBasic()) { logBasic( "Connected to database [" + variables.resolve(meta.getConnection()) diff --git a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/csvinput/CsvInput.java b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/csvinput/CsvInput.java index e25512c7286..305fe98f939 100644 --- a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/csvinput/CsvInput.java +++ b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/csvinput/CsvInput.java @@ -144,7 +144,7 @@ public boolean processRow() throws HopException { } } else { putRow(data.outputRowMeta, outputRowData); // copy row to possible alternate rowset(s).
- if (checkFeedback(getLinesInput()) && log.isBasic()) { + if (checkFeedback(getLinesInput()) && isBasic()) { logBasic( BaseMessages.getString( PKG, "CsvInput.Log.LineNumber", Long.toString(getLinesInput()))); @@ -477,14 +477,14 @@ String[] readFieldNamesFromFile(String fileName, CsvInputMeta csvInputMeta) thro EncodingType encodingType = EncodingType.guessEncodingType(reader.getEncoding()); String line = TextFileInput.getLine( - log, + getLogChannel(), reader, encodingType, TextFileInputMeta.FILE_FORMAT_UNIX, new StringBuilder(1000)); String[] fieldNames = TextFileLineUtil.guessStringsFromLine( - log, line, delimiter, enclosure, csvInputMeta.getEscapeCharacter()); + getLogChannel(), line, delimiter, enclosure, csvInputMeta.getEscapeCharacter()); if (!Utils.isEmpty(csvInputMeta.getEnclosure())) { removeEnclosure(fieldNames, csvInputMeta.getEnclosure()); } @@ -741,7 +741,7 @@ private Object[] readOneRow(boolean skipRow, boolean ignoreEnclosures) throws Ho // Did we have any escaped characters in there? // if (escapedEnclosureFound > 0) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Escaped enclosures found in " + new String(field)); } field = data.removeEscapedEnclosures(field, escapedEnclosureFound); diff --git a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/TextFileInput.java b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/TextFileInput.java index 6328c2b5639..560f0d90122 100644 --- a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/TextFileInput.java +++ b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/TextFileInput.java @@ -1097,7 +1097,7 @@ public boolean processRow() throws HopException { } if (data.getFiles().nrOfFiles() == 0) { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "TextFileInput.Log.Error.NoFilesSpecified")); } setOutputDone(); @@ -1180,7 +1180,7 @@ public boolean processRow() throws HopException { */ if (!data.doneWithHeader && data.pageLinesRead == 0) { // We are reading header lines - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("P-HEADER (" + data.headerLinesRead + ") : " + textLine.line); } data.headerLinesRead++; @@ -1203,7 +1203,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("P-DATA: " + textLine.line); } // Read a normal line on a page of data. @@ -1212,7 +1212,7 @@ public boolean processRow() throws HopException { long useNumber = meta.isRowNumberByFile() ? 
data.lineInFile : getLinesWritten() + 1; r = convertLineToRow( - log, + getLogChannel(), textLine, meta, data.currentPassThruFieldsRow, @@ -1257,7 +1257,7 @@ public boolean processRow() throws HopException { data.headerLinesRead = 0; data.pageLinesRead = 0; data.footerLinesRead = 0; - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("RESTART PAGE"); } } @@ -1265,7 +1265,7 @@ public boolean processRow() throws HopException { // done reading the data lines, skip the footer lines if (meta.hasFooter() && data.footerLinesRead < meta.getNrFooterLines()) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("P-FOOTER: " + textLine.line); } data.footerLinesRead++; @@ -1279,7 +1279,7 @@ public boolean processRow() throws HopException { data.headerLinesRead = 0; data.pageLinesRead = 0; data.footerLinesRead = 0; - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("RESTART PAGE"); } } @@ -1328,7 +1328,7 @@ public boolean processRow() throws HopException { long useNumber = meta.isRowNumberByFile() ? data.lineInFile : getLinesWritten() + 1; r = convertLineToRow( - log, + getLogChannel(), textLine, meta, data.currentPassThruFieldsRow, @@ -1358,7 +1358,7 @@ public boolean processRow() throws HopException { data.extension, data.size); if (r != null) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Found data row: " + data.outputRowMeta.getString(r)); } putrow = true; @@ -1393,7 +1393,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Putting row: " + data.outputRowMeta.getString(r)); } putRow(data.outputRowMeta, r); @@ -1406,7 +1406,7 @@ public boolean processRow() throws HopException { } if (checkFeedback(getLinesInput())) { - if (log.isBasic()) { + if (isBasic()) { logBasic("linenr " + getLinesInput()); } } @@ -1525,8 +1525,8 @@ private void handleMissingFiles() throws HopException { if (!nonExistantFiles.isEmpty()) { String message = FileInputList.getRequiredFilesDescription(nonExistantFiles); - if (log.isBasic()) { - log.logBasic("Required files", "WARNING: Missing " + message); + if (isBasic()) { + logBasic("Required files", "WARNING: Missing " + message); } if (meta.isErrorIgnored()) { for (FileObject fileObject : nonExistantFiles) { @@ -1540,8 +1540,8 @@ private void handleMissingFiles() throws HopException { List nonAccessibleFiles = data.getFiles().getNonAccessibleFiles(); if (!nonAccessibleFiles.isEmpty()) { String message = FileInputList.getRequiredFilesDescription(nonAccessibleFiles); - if (log.isBasic()) { - log.logBasic("Required files", "WARNING: Not accessible " + message); + if (isBasic()) { + logBasic("Required files", "WARNING: Not accessible " + message); } if (meta.isErrorIgnored()) { for (FileObject fileObject : nonAccessibleFiles) { @@ -1642,7 +1642,7 @@ private boolean openNextFile() { resultFile.setComment("File was read by an Text File input transform"); addResultFile(resultFile); } - if (log.isBasic()) { + if (isBasic()) { logBasic("Opening file: " + data.file.getName().getFriendlyURI()); } @@ -1654,7 +1654,7 @@ private boolean openNextFile() { data.dataErrorLineHandler.handleFile(data.file); data.in.nextEntry(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( "This is a compressed file being handled by the " + provider.getName() + " provider"); } @@ -1696,7 +1696,7 @@ private boolean openNextFile() { for (int i = 0; i < meta.getNrLinesDocHeader(); i++) { // Just skip these... 
getLine( - log, + getLogChannel(), data.isr, data.encodingType, data.fileFormatType, @@ -1742,7 +1742,13 @@ private boolean openNextFile() { private boolean tryToReadLine(boolean applyFilter) throws HopFileException { String line; - line = getLine(log, data.isr, data.encodingType, data.fileFormatType, data.lineStringBuilder); + line = + getLine( + getLogChannel(), + data.isr, + data.encodingType, + data.fileFormatType, + data.lineStringBuilder); if (line != null) { // when there is no header, check the filter for the first line if (applyFilter) { @@ -1871,7 +1877,7 @@ public void dispose() { data.file.close(); data.file = null; } catch (Exception e) { - log.logError("Error closing file", e); + logError("Error closing file", e); } } if (data.in != null) { diff --git a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/text/TextFileInput.java b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/text/TextFileInput.java index 7a02e0dca9e..251e7effc86 100644 --- a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/text/TextFileInput.java +++ b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/fileinput/text/TextFileInput.java @@ -47,7 +47,7 @@ public TextFileInput( @Override protected IBaseFileInputReader createReader( TextFileInputMeta meta, TextFileInputData data, FileObject file) throws Exception { - return new TextFileInputReader(this, meta, data, file, log); + return new TextFileInputReader(this, meta, data, file, getLogChannel()); } @Override diff --git a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/textfileoutput/TextFileOutput.java b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/textfileoutput/TextFileOutput.java index aeb097d93e6..9c37485e2b2 100644 --- a/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/textfileoutput/TextFileOutput.java +++ b/plugins/transforms/textfile/src/main/java/org/apache/hop/pipeline/transforms/textfileoutput/TextFileOutput.java @@ -171,7 +171,7 @@ public void initFileStreamWriter(String filename) throws HopException { && ((data.getFileStreamsCollection().size() == 0) || meta.isFileNameInField())) { createParentFolder(filename); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opening output stream using provider: " + compressionProvider.getName()); } @@ -185,7 +185,7 @@ public void initFileStreamWriter(String filename) throws HopException { // that do not archive entries, they should use the default no-op implementation. 
compressionOutputStream.addEntry(filename, resolve(meta.getExtension())); - if (log.isDetailed()) { + if (isDetailed()) { if (!Utils.isEmpty(meta.getEncoding())) { logDetailed("Opening output stream in encoding: " + meta.getEncoding()); } else { @@ -201,7 +201,7 @@ public void initFileStreamWriter(String filename) throws HopException { data.getFileStreamsCollection().add(filename, fileStreams); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Opened new file with name [" + HopVfs.getFriendlyURI(filename) + "]"); } } else if (fileStreams.getBufferedOutputStream() @@ -775,7 +775,7 @@ public String buildFilename(String filename, boolean ziparchive) { getPartitionId(), data.splitnr, data.isBeamContext(), - log.getLogChannelId(), + getLogChannelId(), data.getBeamBundleNr(), ziparchive, meta); @@ -802,7 +802,7 @@ protected boolean closeFile() { data.writer = null; data.out = null; data.fos = null; - if (log.isDebug()) { + if (isDebug()) { logDebug("Closing normal file ..."); } retval = true; diff --git a/plugins/transforms/tika/src/main/java/org/apache/hop/pipeline/transforms/tika/Tika.java b/plugins/transforms/tika/src/main/java/org/apache/hop/pipeline/transforms/tika/Tika.java index 050a4add17f..c10db7be8a3 100755 --- a/plugins/transforms/tika/src/main/java/org/apache/hop/pipeline/transforms/tika/Tika.java +++ b/plugins/transforms/tika/src/main/java/org/apache/hop/pipeline/transforms/tika/Tika.java @@ -304,7 +304,7 @@ public String getTextFileContent(String vfsFilename, String encoding) throws Hop try { inputStream.close(); } catch (Exception e) { - log.logError("Error closing reader", e); + logError("Error closing reader", e); } } return retval; @@ -452,7 +452,7 @@ public boolean init() { } try { ClassLoader classLoader = meta.getClass().getClassLoader(); - data.tikaOutput = new TikaOutput(classLoader, log, this); + data.tikaOutput = new TikaOutput(classLoader, getLogChannel(), this); } catch (Exception e) { logError("Tika Error", e); } diff --git a/plugins/transforms/uniquerows/src/main/java/org/apache/hop/pipeline/transforms/uniquerows/UniqueRows.java b/plugins/transforms/uniquerows/src/main/java/org/apache/hop/pipeline/transforms/uniquerows/UniqueRows.java index 8b5bd06f4f8..5388853b44d 100644 --- a/plugins/transforms/uniquerows/src/main/java/org/apache/hop/pipeline/transforms/uniquerows/UniqueRows.java +++ b/plugins/transforms/uniquerows/src/main/java/org/apache/hop/pipeline/transforms/uniquerows/UniqueRows.java @@ -135,7 +135,7 @@ public boolean processRow() throws HopException { } } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "UniqueRows.Log.LineNumber") + getLinesRead()); } first = false; diff --git a/plugins/transforms/uniquerowsbyhashset/src/main/java/org/apache/hop/pipeline/transforms/uniquerowsbyhashset/UniqueRowsByHashSet.java b/plugins/transforms/uniquerowsbyhashset/src/main/java/org/apache/hop/pipeline/transforms/uniquerowsbyhashset/UniqueRowsByHashSet.java index 678bbaca8fd..6b6492784ba 100644 --- a/plugins/transforms/uniquerowsbyhashset/src/main/java/org/apache/hop/pipeline/transforms/uniquerowsbyhashset/UniqueRowsByHashSet.java +++ b/plugins/transforms/uniquerowsbyhashset/src/main/java/org/apache/hop/pipeline/transforms/uniquerowsbyhashset/UniqueRowsByHashSet.java @@ -106,11 +106,8 @@ public boolean processRow() throws HopException { } } - if (checkFeedback(getLinesRead())) { - if (log.isBasic()) { - logBasic( - BaseMessages.getString(PKG, 
"UniqueRowsByHashSet.Log.LineNumber") + getLinesRead()); - } + if (checkFeedback(getLinesRead()) && isBasic()) { + logBasic(BaseMessages.getString(PKG, "UniqueRowsByHashSet.Log.LineNumber") + getLinesRead()); } return true; diff --git a/plugins/transforms/update/src/main/java/org/apache/hop/pipeline/transforms/update/Update.java b/plugins/transforms/update/src/main/java/org/apache/hop/pipeline/transforms/update/Update.java index ff1b7503dbc..2b88c79c6dd 100644 --- a/plugins/transforms/update/src/main/java/org/apache/hop/pipeline/transforms/update/Update.java +++ b/plugins/transforms/update/src/main/java/org/apache/hop/pipeline/transforms/update/Update.java @@ -87,7 +87,7 @@ private synchronized Object[] lookupValues(IRowMeta rowMeta, Object[] row) throw IRowMeta returnRowMeta = null; if (!meta.isSkipLookup()) { data.db.setValues(data.lookupParameterRowMeta, lookupRow, data.prepStatementLookup); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, @@ -129,7 +129,7 @@ private synchronized Object[] lookupValues(IRowMeta rowMeta, Object[] row) throw + data.lookupParameterRowMeta.getString(lookupRow)); } } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "Update.Log.KeyCouldNotFound") + data.lookupParameterRowMeta.getString(lookupRow)); @@ -140,7 +140,7 @@ private synchronized Object[] lookupValues(IRowMeta rowMeta, Object[] row) throw } } } else { - if (!meta.isSkipLookup() && log.isRowLevel()) { + if (!meta.isSkipLookup() && isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "Update.Log.FoundRow") + data.lookupReturnRowMeta.getString(add)); @@ -180,7 +180,7 @@ private synchronized Object[] lookupValues(IRowMeta rowMeta, Object[] row) throw updateRow[data.valuenrs.length + i] = lookupRow[i]; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "Update.Log.UpdateRow") + data.lookupParameterRowMeta.getString(lookupRow)); @@ -226,7 +226,7 @@ public boolean processRow() throws HopException { this, meta.getLookupField().getSchemaName(), meta.getLookupField().getTableName()); // lookup the values! 
- if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "Update.Log.CheckingRow") + getInputRowMeta().getString(r)); } @@ -267,7 +267,7 @@ public boolean processRow() throws HopException { } keynrs2.add(keynr2); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "Update.Log.FieldHasDataNumbers", keyItem.getKeyStream()) + "" @@ -290,7 +290,7 @@ public boolean processRow() throws HopException { BaseMessages.getString( PKG, CONST_UPDATE_EXCEPTION_FIELD_REQUIRED, fieldItem.getUpdateStream())); } - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString( PKG, "Update.Log.FieldHasDataNumbers", fieldItem.getUpdateStream()) @@ -334,7 +334,7 @@ public boolean processRow() throws HopException { if (outputRow != null) { putRow(data.outputRowMeta, outputRow); // copy non-ignored rows to output rowset(s) } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic(BaseMessages.getString(PKG, "Update.Log.LineNumber") + getLinesRead()); } } catch (HopException e) { @@ -421,7 +421,7 @@ public void setLookup(IRowMeta rowMeta) throws HopDatabaseException { } try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Setting preparedStatement to [" + sql + "]"); } data.prepStatementLookup = @@ -495,7 +495,7 @@ public void prepareUpdate(IRowMeta rowMeta) throws HopDatabaseException { sql += " ) ) "; } try { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed("Setting update preparedStatement to [" + sql + "]"); } data.prepStatementUpdate = @@ -522,7 +522,7 @@ public boolean init() { try { data.db.connect(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "Update.Log.ConnectedToDB")); } diff --git a/plugins/transforms/validator/src/main/java/org/apache/hop/pipeline/transforms/validator/Validator.java b/plugins/transforms/validator/src/main/java/org/apache/hop/pipeline/transforms/validator/Validator.java index 6e135bbd36b..cb84ba6fd4b 100644 --- a/plugins/transforms/validator/src/main/java/org/apache/hop/pipeline/transforms/validator/Validator.java +++ b/plugins/transforms/validator/src/main/java/org/apache/hop/pipeline/transforms/validator/Validator.java @@ -88,7 +88,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row #" + getLinesRead() + " : " + getInputRowMeta().getString(r)); } @@ -140,7 +140,7 @@ public boolean processRow() throws HopException { } } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Wrote row #" + getLinesWritten() + " : " + getInputRowMeta().getString(r)); } if (checkFeedback(getLinesRead())) { diff --git a/plugins/transforms/verticabulkloader/src/main/java/org/apache/hop/pipeline/transforms/vertica/bulkloader/VerticaBulkLoader.java b/plugins/transforms/verticabulkloader/src/main/java/org/apache/hop/pipeline/transforms/vertica/bulkloader/VerticaBulkLoader.java index b35aaf7c2dc..3a2248686de 100644 --- a/plugins/transforms/verticabulkloader/src/main/java/org/apache/hop/pipeline/transforms/vertica/bulkloader/VerticaBulkLoader.java +++ b/plugins/transforms/verticabulkloader/src/main/java/org/apache/hop/pipeline/transforms/vertica/bulkloader/VerticaBulkLoader.java @@ -190,7 +190,7 @@ public boolean processRow() throws HopException { incrementLinesOutput(); } - if (checkFeedback(getLinesRead()) && log.isBasic()) { + if (checkFeedback(getLinesRead()) && isBasic()) { logBasic("linenr " + getLinesRead()); } } catch 
(HopException e) { @@ -576,7 +576,7 @@ public boolean init() { data.db = new Database(this, this, data.databaseMeta); data.db.connect(); - if (log.isBasic()) { + if (isBasic()) { logBasic("Connected to database [" + meta.getDatabaseMeta() + "]"); } diff --git a/plugins/transforms/webservices/src/main/java/org/apache/hop/pipeline/transforms/webservices/WebService.java b/plugins/transforms/webservices/src/main/java/org/apache/hop/pipeline/transforms/webservices/WebService.java index 874e998128c..4d4394a70e0 100644 --- a/plugins/transforms/webservices/src/main/java/org/apache/hop/pipeline/transforms/webservices/WebService.java +++ b/plugins/transforms/webservices/src/main/java/org/apache/hop/pipeline/transforms/webservices/WebService.java @@ -379,7 +379,7 @@ private synchronized void requestSOAP(Object[] rowData, IRowMeta rowMeta) throws cachedOperation, cachedWsdl.getWsdlTypes().isElementFormQualified(cachedWsdl.getTargetNamespace())); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "WebServices.Log.SOAPEnvelope")); logDetailed(xml); } @@ -632,7 +632,7 @@ private void processRows( transformer.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes"); transformer.setOutputProperty(OutputKeys.INDENT, "yes"); - if (log.isDetailed()) { + if (isDetailed()) { StringWriter bodyXML = new StringWriter(); transformer.transform(new DOMSource(bodyNode), new StreamResult(bodyXML)); @@ -768,7 +768,7 @@ private void processRows( // TODO: remove next 2 lines, added for debug reasons. // - if (log.isDetailed()) { + if (isDetailed()) { StringWriter nodeXML = new StringWriter(); transformer.transform(new DOMSource(node), new StreamResult(nodeXML)); logDetailed( @@ -870,7 +870,7 @@ private void compatibleProcessRows( // Start new code // START_ELEMENT= 1 // - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( "START_ELEMENT / " + vReader.getAttributeCount() @@ -881,7 +881,7 @@ private void compatibleProcessRows( // If we start the xml element named like the return type, // we start a new row // - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(CONST_V_READER_GET_LOCAL_NAME + vReader.getLocalName()); } if (Utils.isEmpty(meta.getOutFieldArgumentName())) { @@ -896,7 +896,7 @@ private void compatibleProcessRows( } else { if (meta.getOutFieldContainerName().equals(vReader.getLocalName())) { // meta.getOutFieldContainerName() = vReader.getLocalName() - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("OutFieldContainerName = " + meta.getOutFieldContainerName()); } oneValueRowProcessing = true; @@ -905,14 +905,14 @@ private void compatibleProcessRows( } } else { // getOutFieldArgumentName() != null - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(CONST_OUT_FIELD_ARGUMENT_NAME + meta.getOutFieldArgumentName()); } if (meta.getOutFieldArgumentName().equals(vReader.getLocalName())) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(CONST_V_READER_GET_LOCAL_NAME + vReader.getLocalName()); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(CONST_OUT_FIELD_ARGUMENT_NAME); } if (processing) { @@ -959,10 +959,8 @@ private void compatibleProcessRows( } } else { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(CONST_V_READER_GET_LOCAL_NAME + vReader.getLocalName()); - } - if (log.isRowLevel()) { logRowlevel(CONST_OUT_FIELD_ARGUMENT_NAME + meta.getOutFieldArgumentName()); } } @@ -971,7 +969,7 @@ private void compatibleProcessRows( case XMLStreamConstants.END_ELEMENT: // END_ELEMENT= 2 - if (log.isRowLevel()) { + if 
(isRowLevel()) { logRowlevel("END_ELEMENT"); } // If we end the xml element named as the return type, we @@ -987,82 +985,82 @@ private void compatibleProcessRows( break; case XMLStreamConstants.PROCESSING_INSTRUCTION: // PROCESSING_INSTRUCTION= 3 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("PROCESSING_INSTRUCTION"); } break; case XMLStreamConstants.CHARACTERS: // CHARACTERS= 4 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("CHARACTERS"); } break; case XMLStreamConstants.COMMENT: // COMMENT= 5 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("COMMENT"); } break; case XMLStreamConstants.SPACE: // PROCESSING_INSTRUCTION= 6 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("PROCESSING_INSTRUCTION"); } break; case XMLStreamConstants.START_DOCUMENT: // START_DOCUMENT= 7 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("START_DOCUMENT"); } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel(vReader.getText()); } break; case XMLStreamConstants.END_DOCUMENT: // END_DOCUMENT= 8 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("END_DOCUMENT"); } break; case XMLStreamConstants.ENTITY_REFERENCE: // ENTITY_REFERENCE= 9 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("ENTITY_REFERENCE"); } break; case XMLStreamConstants.ATTRIBUTE: // ATTRIBUTE= 10 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("ATTRIBUTE"); } break; case XMLStreamConstants.DTD: // DTD= 11 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("DTD"); } break; case XMLStreamConstants.CDATA: // CDATA= 12 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("CDATA"); } break; case XMLStreamConstants.NAMESPACE: // NAMESPACE= 13 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("NAMESPACE"); } break; case XMLStreamConstants.NOTATION_DECLARATION: // NOTATION_DECLARATION= 14 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("NOTATION_DECLARATION"); } break; case XMLStreamConstants.ENTITY_DECLARATION: // ENTITY_DECLARATION= 15 - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("ENTITY_DECLARATION"); } break; diff --git a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/getxmldata/GetXmlData.java b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/getxmldata/GetXmlData.java index e9745de6508..f7f1f934473 100644 --- a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/getxmldata/GetXmlData.java +++ b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/getxmldata/GetXmlData.java @@ -107,7 +107,7 @@ protected boolean setDocument( if (data.prunePath != null) { // when pruning is on: reader.read() below will wait until all is processed in the handler - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.Activated")); } if (data.PathValue.equals(data.prunePath)) { @@ -134,7 +134,7 @@ public void onEnd(ElementPath path) { // NPE or other errors depending on the parsing location - this will be treated in // the catch part below // any better idea is welcome - if (log.isBasic()) { + if (isBasic()) { logBasic(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.Stopped")); } data.stopPruning = true; @@ -143,7 +143,7 @@ public void onEnd(ElementPath path) { } // process a ROW element - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.StartProcessing")); } @@ -160,7 +160,7 @@ public void 
onEnd(ElementPath path) { } // prune the tree row.detach(); - if (log.isDebug()) { + if (isDebug()) { logDebug( BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.EndProcessing")); } @@ -231,7 +231,7 @@ private void processStreaming(Element row) throws HopException { if (meta.isNamespaceAware()) { prepareNSMap(data.document.getRootElement()); } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.ApplyXPath")); } // If the prune path and the path are the same, then @@ -240,7 +240,7 @@ private void processStreaming(Element row) throws HopException { data.an.set(0, (AbstractNode) row); data.nodesize = 1; // it's always just one row. data.nodenr = 0; - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.ProcessingRows")); } Object[] r = getXmlRowPutRowWithErrorhandling(); @@ -261,7 +261,7 @@ private void processStreaming(Element row) throws HopException { } // main loop through the data until limit is reached or transformation is stopped // similar functionality like in BaseTransform.runTransformThread - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.ProcessingRows")); } boolean cont = true; @@ -276,7 +276,7 @@ private void processStreaming(Element row) throws HopException { // the hole // file (slow but works) } - if (log.isDebug()) { + if (isDebug()) { logDebug(BaseMessages.getString(PKG, "GetXMLData.Log.StreamingMode.FreeMemory")); } // free allocated memory @@ -359,7 +359,7 @@ private boolean ReadNextString() { if (data.readrow == null) { // finished processing! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.FinishedProcessing")); } return false; @@ -412,7 +412,7 @@ private boolean ReadNextString() { // get XML field value String fieldvalue = getInputRowMeta().getString(data.readrow, data.indexOfXmlField); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "GetXMLData.Log.XMLStream", meta.getXMLField(), fieldvalue)); @@ -444,7 +444,7 @@ private boolean ReadNextString() { addFileToResultFilesname(file); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, @@ -483,7 +483,7 @@ private boolean ReadNextString() { throw new HopException( BaseMessages.getString(PKG, CONST_GET_XMLDATA_LOG_UNABLE_APPLY_XPATH)); } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, CONST_GET_XMLDATA_LOG_LOOP_FILE_OCCURENCES, "" + data.nodesize)); @@ -586,7 +586,7 @@ private boolean openNextFile() { if (data.filenr >= data.files.nrOfFiles()) { // finished processing! 
- if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.FinishedProcessing")); } return false; @@ -638,7 +638,7 @@ private boolean openNextFile() { openNextFile(); } else { - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "GetXMLData.Log.OpeningFile", data.file.toString())); } @@ -661,7 +661,7 @@ private boolean openNextFile() { addFileToResultFilesname(data.file); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString(PKG, "GetXMLData.Log.FileOpened", data.file.toString())); logDetailed( @@ -724,7 +724,7 @@ public boolean processRow() throws HopException { } private boolean putRowOut(Object[] r) throws HopException { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "GetXMLData.Log.ReadRow", data.outputRowMeta.getString(r))); } @@ -1017,7 +1017,7 @@ public boolean init() { if (!data.PathValue.substring(0, 1).equals(GetXmlDataMeta.N0DE_SEPARATOR)) { data.PathValue = GetXmlDataMeta.N0DE_SEPARATOR + data.PathValue; } - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "GetXMLData.Log.LoopXPath", data.PathValue)); } diff --git a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmlinputstream/XmlInputStream.java b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmlinputstream/XmlInputStream.java index 586e1c11b2c..87ba7392cc0 100644 --- a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmlinputstream/XmlInputStream.java +++ b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmlinputstream/XmlInputStream.java @@ -216,8 +216,8 @@ private void closeFile() { try { data.xmlEventReader.close(); } catch (XMLStreamException e) { - if (log.isBasic()) { - log.logBasic( + if (isBasic()) { + logBasic( BaseMessages.getString( PKG, CONST_XMLINPUT_STREAM_LOG_UNABLE_TO_CLOSE_FILE, @@ -230,8 +230,8 @@ private void closeFile() { try { data.inputStream.close(); } catch (IOException e) { - if (log.isBasic()) { - log.logBasic( + if (isBasic()) { + logBasic( BaseMessages.getString( PKG, CONST_XMLINPUT_STREAM_LOG_UNABLE_TO_CLOSE_FILE, @@ -244,8 +244,8 @@ private void closeFile() { try { data.fileObject.close(); } catch (FileSystemException e) { - if (log.isBasic()) { - log.logBasic( + if (isBasic()) { + logBasic( BaseMessages.getString( PKG, CONST_XMLINPUT_STREAM_LOG_UNABLE_TO_CLOSE_FILE, @@ -330,7 +330,7 @@ private void putRowOut(Object[] r) throws HopTransformException, HopValueExcepti // Skip rows? 
(not exact science since some attributes could be mixed within the last row) if (data.nrRowsToSkip == 0 || data.rowNumber > data.nrRowsToSkip) { - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel("Read row: " + data.outputRowMeta.getString(r)); } if (data.currentInputRow != null) { diff --git a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmljoin/XmlJoin.java b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmljoin/XmlJoin.java index 8e004d674b6..b266a014c4b 100644 --- a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmljoin/XmlJoin.java +++ b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xmljoin/XmlJoin.java @@ -280,7 +280,7 @@ public boolean init() { // swapFirstInputRowSetIfExists(meta.getTargetXmlTransform()); } catch (Exception e) { - log.logError(BaseMessages.getString(PKG, "XmlJoin.Error.Init"), e); + logError(BaseMessages.getString(PKG, "XmlJoin.Error.Init"), e); return false; } diff --git a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xsdvalidator/XsdValidator.java b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xsdvalidator/XsdValidator.java index 3b15d54f805..c2f61ab8913 100644 --- a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xsdvalidator/XsdValidator.java +++ b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xsdvalidator/XsdValidator.java @@ -207,7 +207,7 @@ public boolean processRow() throws HopException { outputRowData2 = outputRowData; } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "XsdValidator.Log.ReadRow") + " " diff --git a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xslt/Xslt.java b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xslt/Xslt.java index cbc7c0bde2e..aecef32be6d 100644 --- a/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xslt/Xslt.java +++ b/plugins/transforms/xml/src/main/java/org/apache/hop/pipeline/transforms/xml/xslt/Xslt.java @@ -212,7 +212,7 @@ public boolean processRow() throws HopException { if (meta.useXSLField()) { // Get the value data.xslfilename = getInputRowMeta().getString(row, data.fielxslfiledposition); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "Xslt.Log.XslfileNameFromFied", data.xslfilename, meta.getXSLFileField())); @@ -221,7 +221,7 @@ public boolean processRow() throws HopException { try { - if (log.isDetailed()) { + if (isDetailed()) { if (meta.isXSLFieldIsAFile()) { logDetailed(BaseMessages.getString(PKG, "Xslt.Log.Filexsl") + data.xslfilename); } else { @@ -251,13 +251,13 @@ public boolean processRow() throws HopException { transformer.transform(source, result); String xmlString = result.getWriter().toString(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "Xslt.Log.FileResult")); logDetailed(xmlString); } Object[] outputRowData = RowDataUtil.addValueData(row, getInputRowMeta().size(), xmlString); - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "Xslt.Log.ReadRow") + " " diff --git a/plugins/transforms/yamlinput/src/main/java/org/apache/hop/pipeline/transforms/yamlinput/YamlInput.java b/plugins/transforms/yamlinput/src/main/java/org/apache/hop/pipeline/transforms/yamlinput/YamlInput.java index f7d08845a61..2eb2596a5e1 100644 --- 
a/plugins/transforms/yamlinput/src/main/java/org/apache/hop/pipeline/transforms/yamlinput/YamlInput.java +++ b/plugins/transforms/yamlinput/src/main/java/org/apache/hop/pipeline/transforms/yamlinput/YamlInput.java @@ -83,7 +83,7 @@ private boolean readNextString() { if (data.readrow == null) { // finished processing! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "YamlInput.Log.FinishedProcessing")); } return false; @@ -121,7 +121,7 @@ private boolean readNextString() { getLinesInput(); - if (log.isDetailed()) { + if (isDetailed()) { logDetailed( BaseMessages.getString( PKG, "YamlInput.Log.YAMLStream", meta.getYamlField(), fieldvalue)); @@ -165,7 +165,7 @@ private boolean openNextFile() { try { if (data.filenr >= data.files.nrOfFiles()) { // finished processing! - if (log.isDetailed()) { + if (isDetailed()) { logDetailed(BaseMessages.getString(PKG, "YamlInput.Log.FinishedProcessing")); } return false; @@ -246,7 +246,7 @@ public boolean processRow() throws HopException { return false; // end of data or error. } - if (log.isRowLevel()) { + if (isRowLevel()) { logRowlevel( BaseMessages.getString(PKG, "YamlInput.Log.ReadRow", data.outputRowMeta.getString(r))); } @@ -401,7 +401,7 @@ public boolean init() { valueMeta.setTrimType(field.getTrimType()); data.rowMeta.addValueMeta(valueMeta); } catch (Exception e) { - log.logError("Unable to create value meta", e); + logError("Unable to create value meta", e); return false; } } diff --git a/plugins/transforms/zipfile/src/main/java/org/apache/hop/pipeline/transforms/zipfile/ZipFile.java b/plugins/transforms/zipfile/src/main/java/org/apache/hop/pipeline/transforms/zipfile/ZipFile.java index d41bbec4771..d83dcaa6adf 100644 --- a/plugins/transforms/zipfile/src/main/java/org/apache/hop/pipeline/transforms/zipfile/ZipFile.java +++ b/plugins/transforms/zipfile/src/main/java/org/apache/hop/pipeline/transforms/zipfile/ZipFile.java @@ -140,7 +140,7 @@ public boolean processRow() throws HopException { String sourceFilename = getInputRowMeta().getString(r, data.indexOfSourceFilename); if (Utils.isEmpty(sourceFilename)) { - log.logError(toString(), BaseMessages.getString(PKG, "ZipFile.Error.SourceFileEmpty")); + logError(toString(), BaseMessages.getString(PKG, "ZipFile.Error.SourceFileEmpty")); throw new HopException(BaseMessages.getString(PKG, "ZipFile.Error.SourceFileEmpty")); } data.sourceFile = HopVfs.getFileObject(sourceFilename, variables); @@ -148,14 +148,14 @@ public boolean processRow() throws HopException { // Check sourcefile boolean skip = false; if (!data.sourceFile.exists()) { - log.logError( + logError( toString(), BaseMessages.getString(PKG, "ZipFile.Error.SourceFileNotExist", sourceFilename)); throw new HopException( BaseMessages.getString(PKG, "ZipFile.Error.SourceFileNotExist", sourceFilename)); } else { if (data.sourceFile.getType() != FileType.FILE) { - log.logError( + logError( toString(), BaseMessages.getString(PKG, "ZipFile.Error.SourceFileNotFile", sourceFilename)); throw new HopException( @@ -182,13 +182,13 @@ public boolean processRow() throws HopException { String targetFilename = getInputRowMeta().getString(r, data.indexOfZipFilename); if (Utils.isEmpty(targetFilename)) { - log.logError(toString(), BaseMessages.getString(PKG, "ZipFile.Error.TargetFileEmpty")); + logError(toString(), BaseMessages.getString(PKG, "ZipFile.Error.TargetFileEmpty")); throw new HopException(BaseMessages.getString(PKG, "ZipFile.Error.TargetFileEmpty")); } data.zipFile = HopVfs.getFileObject(targetFilename, variables); 
if (data.zipFile.exists()) { - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( toString(), BaseMessages.getString(PKG, "ZipFile.Log.TargetFileExists", targetFilename)); } @@ -225,7 +225,7 @@ public boolean processRow() throws HopException { getLinesInput(); putRow(data.outputRowMeta, r); // copy row to output rowset(s) - if (checkFeedback(getLinesRead()) && log.isDetailed()) { + if (checkFeedback(getLinesRead()) && isDetailed()) { logDetailed(BaseMessages.getString(PKG, "ZipFile.LineNumber", "" + getLinesRead())); } } catch (Exception e) { @@ -325,8 +325,8 @@ private void addFilenameToResult() { resultFile.setComment(BaseMessages.getString(PKG, "ZipFile.Log.FileAddedResult")); addResultFile(resultFile); - if (log.isDetailed()) { - log.logDetailed( + if (isDetailed()) { + logDetailed( toString(), BaseMessages.getString( PKG, "ZipFile.Log.FilenameAddResult", data.sourceFile.toString()));