-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy path: loadScript.json
More file actions
3 lines (3 loc) · 90.2 KB
/
loadScript.json
File metadata and controls
3 lines (3 loc) · 90.2 KB
1
2
3
{
"loadScript": "///$tab Initialize\r\n//\tOperations Monitor \r\nLET yr\t\t\t= year(ReloadTime());\r\nSET copyright = 'Copyright 1993-$(yr) Qliktech International AB';\r\n\r\nREM To manually override where this app loads log data, please update the variable db_v_file_override as follows:\r\n0 = auto = script will check for recent data in logging database\r\n1 = file logs only (no database logs loaded)\r\n2 = database logs only (no file data loaded except where file log data already stored in governanceLogContent QVDs;;\r\n\r\nSET db_v_file_override\t=\t0;\r\n\r\nREM END set manual override for log data source (Only update the script beyond here at your own risk!);\r\n\r\nLet ReloadStartTime \t\t= now(1);\r\nSet ahora = ; SET msg =; SET skipped=0; SET loaded =0; SET textFile =;\t// Reset these variables\r\nSET app_name\t\t\t\t= 'Operations Monitor';\r\nSET app_version\t\t\t\t= '7.10.0';\r\nLet comp \t\t\t\t\t= ComputerName(); \r\nLET EngineVer = PurgeChar(EngineVersion(),chr(39)); \r\nLET startMsg_1 = 'Reloading $(app_name) $(app_version) from $(comp) running QIX Engine version $(EngineVer)';\r\nLET startMsg\t\t\t\t= '$(startMsg_1). ' & If(db_v_file_override=2,'Database logs chosen',if(db_v_file_override=1,'File logs chosen','Default log source selected (will check database first, then files)'));\r\nTRACE $(startMsg);\r\n\r\nSET monthsOfHistory \t\t= 3;\t\t// How many months of history should be available in the app. 
More history = more processing, bigger app, etc.\r\nLET cutoffDate \t\t\t\t= AddMonths(today(1),-$(monthsOfHistory),1);\t\t// Filter individual .log files and baseTable; note: the 1 \r\nLet LastReloadTime \t\t\t= timestamp(alt(LastSuccessfulReloadStartTime,cutoffDate));\r\nLet lastReloadCompare \t\t= num(LastReloadTime)-1;\t// (Re-)load any logs updated within 24 hours of the last reload\r\n\r\nLET serverLogFolder\t\t\t= 'lib://ServerLogFolder/';\t\r\nLET archivedLogsFolder\t\t= 'lib://ArchivedLogsFolder/';\r\n\r\nLET baseFileName\t \t\t= 'governanceLogContent_$(app_version)';\r\nLET baseTableName \t\t\t= '$(serverLogFolder)$(baseFileName)';\r\nLET serviceFileName\t \t\t= 'governanceServiceLog_$(app_version)';\r\nLET serviceTableName \t\t= '$(serverLogFolder)$(serviceFileName)';\r\nLET time_range_FileName\t\t= 'governance_time_range_ops_$(app_version)';\r\nLET time_range_TableName \t= '$(serverLogFolder)$(time_range_FileName)';\r\nLET date_time_FileName\t \t= 'governance_date_time_ops_$(app_version)';\r\nLET date_time_TableName \t= '$(serverLogFolder)$(date_time_FileName)';\r\nLET monitorAppStatsFile\t\t= '$(serverLogFolder)Operations_Monitor_Reload_Stats_$(app_version).txt';\r\n\r\nSET hideprefix \t\t\t\t= 'log';\t// Hiding logList from view, though preserving it for now (not dropping it)\r\nSET firstReload \t\t\t= 0;\t\t// RESET this each time and let script verify if it is the first reload.\r\n\r\n// Set date and time formats\r\nSET TimeFormat\t\t= 'hh:mm:ss';\r\nSET DateFormat\t\t= 'YYYY-MM-DD';\r\nSET TimestampFormat\t= 'YYYY-MM-DD hh:mm:ss';\r\n// Calendar Variables\r\nLet vLast4Hours =\tNum(timestamp(Now(1)-1/6));\t\t/// 4 hours = 1 day / 24 hours (per day) * 4 hours = 1/6 Days\r\nLet vLast24Hours =\tNum(timestamp(Now(1)-1));\r\nLet vLast72Hours =\tNum(timestamp(Now(1)-3));\r\n///$tab verify_database\r\nSUB verify_database\r\n \r\n TRACE Verifying logging database.;\r\n \r\n REM Verify existence of database log data except where db_v_file_override set to 
1;\r\n \r\n IF db_v_file_override = 1 THEN \r\n TRACE File as log source has been manually chosen. Script will not check for presence of logging database. Carry on.;\r\n SET db_v_file = 1;\r\n LET baseTableName = '$(baseTableName)_file';\t\t// Store log history QVD with suffix _file so it only gets used with file logging\r\n LET lastReloadCompare = If(LastReloadSource=1,lastReloadCompare,cutoffDate);\t// If last reload loaded from db and now from file\r\n \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t//\twe want to start over and pull data from cutoffdate\r\n TRACE Last Reload Compare time = $(lastReloadCompare). CutoffDate = $(cutoffDate).;\r\n EXIT SUB;\r\n \r\n ELSE\t\t// All other cases (including default) - will verify database existence.\r\n\r\n LET db_check_time = timestamp(now(1)-0.01);\t // equivalent to ~ 15 minutes\r\n Set errormode = 0;\t\t\t\t// suppress reload failure in case of no database\r\n\r\n LIB CONNECT TO 'QLogs';\r\n\r\n // If there is an error connecting to logging database...\r\n LET tempErrorDetails = ScriptErrorDetails;\r\n IF Len(tempErrorDetails) > 0 THEN\r\n trace ERROR: $(tempErrorDetails);\r\n CALL monitor_app_reload_stats('WARN','$(textFile)', tempErrorDetails, 'Status Message')\r\n tempErrorDetails =;\t// Reset this variable\r\n TRACE Could not validate active database logging. Sourcing from file logs instead.;\r\n SET db_v_file = 1;\r\n LET baseTableName = '$(baseTableName)_file';\t\t// Store log history QVD with suffix _file so it only gets used with file logging\r\n LET lastReloadCompare = If(LastReloadSource=1,lastReloadCompare,cutoffDate);\t// If last reload loaded from db and now from file\r\n \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t//\twe want to start over and pull data from cutoffdate\r\n TRACE Last Reload Compare time = $(lastReloadCompare). 
CutoffDate = $(cutoffDate).;\r\n SET errormode=1;\r\n EXIT SUB;\r\n END IF\r\n\r\n db_check:\r\n SELECT \"id\" \r\n FROM \"public\".\"log_entries\" \r\n WHERE \"entry_timestamp\" >= '$(db_check_time)'\r\n ;\r\n\r\n // If there is an error fetching data from database...\r\n LET tempErrorDetails = ScriptErrorDetails;\r\n IF Len(tempErrorDetails) > 0 THEN\r\n trace ERROR: $(tempErrorDetails);\r\n CALL monitor_app_reload_stats('WARN','$(textFile)', tempErrorDetails, 'Status Message')\r\n tempErrorDetails =;\t// Reset this variable\r\n TRACE Could not validate active database logging. Sourcing from file logs instead.;\r\n SET db_v_file = 1;\r\n LET baseTableName = '$(baseTableName)_file';\t\t// Store log history QVD with suffix _file so it only gets used with file logging\r\n LET lastReloadCompare = If(LastReloadSource=1,lastReloadCompare,cutoffDate);\t// If last reload loaded from db and now from file\r\n \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t//\twe want to start over and pull data from cutoffdate\r\n TRACE Last Reload Compare time = $(lastReloadCompare). CutoffDate = $(cutoffDate).;\r\n DisConnect;\r\n SET errormode=1;\r\n EXIT SUB;\r\n END IF\r\n\r\n Let NoOfRows_db_check = NoOfRows('db_check');\r\n\r\n IF $(NoOfRows_db_check)>1 THEN\r\n TRACE Database logging exists. Sourcing from log database.;\r\n SET db_v_file = 2;\r\n LET baseTableName = '$(baseTableName)_db';\t\t// Store log history QVD with suffix _db so it only gets used with db logging\r\n LET lastReloadCompare = If(LastReloadSource=2,lastReloadCompare,cutoffDate);\t// If last reload loaded from file and now from db\r\n \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t//\twe want to start over and pull data from cutoffdate\r\n TRACE Last Reload Compare time = $(lastReloadCompare). CutoffDate = $(cutoffDate).;\r\n Drop Table db_check;\r\n ELSE\r\n TRACE Could not validate active database logging. 
Sourcing from file logs instead.;\r\n SET db_v_file = 1;\r\n LET baseTableName = '$(baseTableName)_file';\t\t// Store log history QVD with suffix _file so it only gets used with file logging\r\n LET lastReloadCompare = If(LastReloadSource=1,lastReloadCompare,cutoffDate);\t// If last reload loaded from db and now from file\r\n \t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t//\twe want to start over and pull data from cutoffdate\r\n TRACE Last Reload Compare time = $(lastReloadCompare). CutoffDate = $(cutoffDate).;\r\n ENDIF\r\n\tDisConnect;\r\n SET errormode = 1;\r\n \r\n ENDIF\t// For db_v_file_override---> verifying existence of active logging database\r\nENDSUB\r\n///$tab logList\r\nSUB logList\r\n\t// List of all log files to load -- and which ones to load fully (logRead = 1) or Errors and Warnings only (logRead = 0)\r\n\tlogList:\r\n LOAD * INLINE [\r\n \tlogService, logArea, logType, logStart, logAddlFields\r\n Engine, Audit, Activity,Timestamp,fieldsAuditActivityEngine\r\n Engine, Audit, Security,Timestamp,fieldsAuditSecurity\r\n\t\tRepository, Audit, Activity,Timestamp,fieldsAuditActivityRepository\r\n Repository, Audit, Security,Timestamp,fieldsAuditSecurityRepository\r\n Proxy, Audit, Activity,Timestamp,fieldsAuditActivityProxy\r\n Proxy, Audit, Security,Timestamp,fieldsAuditSecurity\r\n Scheduler, Audit, Activity,Timestamp,fieldsAuditActivityScheduler\r\n Printing, Audit, Activity,Timestamp,fieldsAuditActivityPrinting\r\n \r\n Scheduler, System, Service,Timestamp,fieldsSystemService\r\n Proxy, System, Service,Timestamp,fieldsSystemService\r\n Repository, System, Service,Timestamp,fieldsSystemService\r\n Engine, System, Service,Timestamp,fieldsSystemService\r\n Printing, System, Service, Timestamp, fieldsSystemService\r\n \r\n Engine, Trace, Performance, Timestamp, fieldsEnginePerformance\r\n Engine, Trace, Session, \"Session Start\", fieldsEngineSession\r\n Repository, Trace, Synchronization, Timestamp, fieldsRepositorySynchronization\r\n\r\n 
];\r\n\r\nENDSUB\r\n\r\n///$tab defineFields\r\nSUB defineFields\r\n\r\n LET commonFields\t\t\t\t\t=\t'UserDirectory & chr(92) & UserId as UserId,\r\n \t\t\t\t\t\t\t\t\t\tObjectId,ObjectName,\r\n lower(Hostname) as Hostname,\r\n Service,\r\n ProxySessionId,ProxyPackageId,RequestSequenceId,\r\n \t\t\t\t\t\t\t\t\t\tContext,Command,Result,Description,\r\n if(ObjectName ='&chr(39)&'Not available'&chr(39)&',null(),\r\n \tIF(Service='&chr(39)&'Scheduler'&chr(39)&',subfield(ObjectName,'&chr(39)&chr(124)&chr(39)&',2),\r\n \tIF(Service='&chr(39)&'Printing'&chr(39)&' and Index(Command,'&chr(39)&'Export'&chr(39)&'),subfield(ObjectName,'&chr(39)&chr(124)&chr(39)&',-1),\r\n \t\t\tIF(right(Command,3)='&chr(39)&'app'&chr(39)&' OR Context like '&chr(39)&chr(42)&'/app/'&chr(42)& chr(39)&',ObjectName))\r\n \t)) AS [App Name],\r\n \t\t\t\t\t\t\t\t\t\tProductVersion';\r\n\r\n LET qmcChangeFields\t\t\t\t=\t'applymap('&chr(39)&'qmc_change_map'&chr(39)&',subfield(Command,'&chr(39)&' '&chr(39)&',1),null()) as qmc_change,'\r\n \t\t\t\t\t\t\t\t\t\t& 'IF(applymap('&chr(39)&'qmc_change_map'&chr(39)&',subfield(Command,'&chr(39)&' '&chr(39)&',1),null())=1\r\n \tand substringcount(Command,'&chr(39)&':'&chr(39)&')=0,\r\n \t\t\t\t\t\t\t\t\tIF(left(UserId,3)<>'&chr(39)&'sa_'&chr(39)\r\n \t&' and UserId <> '&chr(39)&'Not available'&chr(39)\r\n &' and UserId <> '&chr(39)&'System'&chr(39)&',\r\n upper(left(mid(Command,Index(Command,'&chr(39)&' '&chr(39)&')+1),1))&mid(mid(Command,Index(Command,'&chr(39)&' '&chr(39)&')+1),2) \r\n \t)) as [QMC Resource Type],';\r\n \r\n LET fieldsAuditActivityEngine\t\t= \t'ProxySessionId&ProxyPackageId as _proxySessionPackage,\r\n \t\t\t\t\t\t\t\t\t\t\tIf(Context='&chr(39)&'Doc::DoReload'&chr(39)&',1) as [Reload from Hub],\r\n \tIf(Context='&chr(39)&'Doc::DoReload'&chr(39)&',1) as TaskExecution,\r\n If(Context='&chr(39)&'Doc::DoReload'&chr(39)&',ProxySessionId&ProxyPackageId&RequestSequenceId) as TaskId,'\r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n \r\n // For scheduler 
reloads, identify Start (0) and Finish (1); Some failed starts = Finish (Result =20,25,35,40,45) but not 30 > this \r\n \t\t// reload execution entry occurs in the middle of an active task execution which will have its own Finish entry\r\n LET fieldsAuditActivityScheduler = \t'ProxySessionId&ProxyPackageId as _proxySessionPackage,\r\n \t\t\t\t\t\t\t\t\t\tIf(Message='&chr(39)&'Changing task state to Triggered'&chr(39)&',0,\r\n \t\t\t\t\t\t\t\t\t\t\t\tIF(Message like '&chr(39)&'Task finished*'&chr(39)&',1,\r\n \tIF(Result <> 30 AND Result>=20 AND Result <45,1))) as TaskExecution,\r\n subfield(ObjectId,'&chr(39)&chr(124)&chr(39)&',1) as TaskId,' \r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n //IF(Right(Command,7)='&chr(39)&':Reload'&chr(39)&',subfield(ObjectId,'&chr(39)&chr(124)&chr(39)&',1)) as TaskId,' \r\n \r\n LET fieldsAuditActivityRepository = \t'ProxySessionId&ProxyPackageId as _proxySessionPackage,'\r\n & chr(39)&'0.0000'&chr(39)& '& rowno() as Sequence#,'\r\n & qmcChangeFields\r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n \r\n LET fieldsAuditActivityProxy \t\t= \t'ProxySessionId&ProxyPackageId as _proxySessionPackage,' \r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n \r\n LET fieldsAuditActivityPrinting = \t//'ProxySessionId&ProxyPackageId as _proxySessionPackage, // Use this to link to session; remove RequestSequenceId\r\n \t\t\t\t\t\t\t\t\t\t'RequestSequenceId as _proxySessionPackage,\r\n \t\t\t\t\t\t\t\t\t\tIndex(Command,'&chr(39)&'Export'&chr(39)&') as [Export Count],\r\n Index(Command,'&chr(39)&'Store'&chr(39)&') as [Export Store Count],'\r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n\r\n// Repository has special load statements related to qmc changes - unique to Repository. 
Put it here so only Repo audit security logs have this logic.\r\n LET fieldsAuditSecurityRepository\t= \t'ProxySessionId&ProxyPackageId as _proxySessionPackage,SecurityClass, ClientHostAddress,'\r\n \t\t\t\t\t\t\t\t\t\t& chr(39)&'0.0000'&chr(39)& '& rowno() as Sequence#,'\r\n \t\t\t\t\t\t\t\t\t\t& qmcChangeFields\r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n\r\n LET fieldsAuditSecurity \t\t\t= \t'ProxySessionId&ProxyPackageId as _proxySessionPackage,SecurityClass, ClientHostAddress,' \r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n \r\n LET fieldsSystemService \t\t\t= \t'Severity,If(ProxyPackageId=0 AND ProxySessionId=0, Id, ProxySessionId&ProxyPackageId) as _proxySessionPackage,' \r\n \t\t\t\t\t\t\t\t\t\t& commonFields;\r\n\r\n// TRACE Logs are missing certain Common Fields (hence the need for commmonFields variable\r\n\r\n LET fieldsEnginePerformance\t=\t'ActiveDocSessions,ActiveDocs,ActiveUsers,CPULoad/100 as CPULoad,LoadedDocs,Selections,\r\n\t\t\t\t\t\t\t\t\t\tRound(\"VMCommitted(MB)\"/1024,0.01) as VMCommitted,\r\n Round(\"VMAllocated(MB)\"/1024,0.01) as VMAllocated,\r\n \t\t\t\t\t\t\t\t\tRound(\"VMFree(MB)\"/1024,0.01) as VMFree,\r\n round(\"VMCommitted(MB)\"/(\"VMFree(MB)\"+\"VMCommitted(MB)\"),0.01) as VMPctCommitted,\r\n Id as _proxySessionPackage,\r\n '& chr(39) & 'Engine' & chr(39) & ' as Service,\r\n Level as Severity,\r\n CacheHits as [Cache Hits],CacheLookups as [Cache Lookups],CacheBytesAdded as [Cache Bytes Added],\r\n lower(Hostname) as Hostname\r\n '; \r\n\r\n LET fieldsRepositorySynchronization = chr(39) & 'Synchronization' & chr(39) & ' as Command,\r\n \t\t\t\t\t\t\t\t\t\tsubfield(Logger,'& chr(39) & '.' 
& chr(39) & ',-1) as Description,\r\n '& chr(39) & '0' & chr(39) & ' as ProxySessionId,\r\n \t\t\t\t\t\t\t\t\t\tId as _proxySessionPackage,\r\n '& chr(39) & 'Repository' & chr(39) & ' as Service,\r\n Level as Severity,\r\n lower(Hostname) as Hostname\r\n \t\t\t\t\t\t\t\t\t\t';\r\n\r\n LET fieldsEngineSession\t\t=\t'\tProxySessionId&Sequence#\t \t\t\t\t\t\t\tas _proxySessionPackage,\r\n \t\t\t\t\t\t\t\t\t\tlower(Hostname) \t\t\t\t\t\t\t\t\t\tas Hostname,\r\n ActiveUserDirectory & chr(92) & ActiveUserId \t\t\tas UserId,\r\n AppId \t\t\t\t\t\t\t\t\t\t\t\t\tas ObjectId,\r\n '& chr(39) & 'Engine' & chr(39) & ' \t\t\t\t\tas Service,\r\n Level as Severity,\r\n ProxySessionId,\r\n [App Title] \t\t\t\t\t\t\t\t\t\t\tas [App Name],\r\n [App Title] \t\t\t\t\t\t\t\t\t\t\tas ObjectName,\r\n round(([Session Duration])*1440,0.02) \t\t\t\t\tas [Session Duration],\r\n Selections \t\t\t\t\t\t\t\t\t\t\tas [Session Selections],\r\n If(ProxySessionId='& chr(39) & '0' & chr(39) &',0,1) \tas [Session Count],\r\n If(ProxySessionId='& chr(39) & '0' & chr(39) &',null(), round([CPU Spent (s)]*1000,0.01)) \t\tas [Session CPU Spent (ms)],\r\n If(ProxySessionId='& chr(39) & '0' & chr(39) &',round([CPU Spent (s)]*1000,0.01)) \t\t\t\tas [Reload CPU Spent (ms)],\r\n If(ProxySessionId='& chr(39) & '0' & chr(39) &',null(),ceil((\"Bytes Received\"+\"Bytes Sent\")/1024)) as [Session KBytes Sent+Received],\r\n If(ProxySessionId='& chr(39) & '0' & chr(39) &',ceil((\"Bytes Received\"+\"Bytes Sent\")/1024)) \tas [Reload KBytes Sent+Received] \r\n \t\t\t\t\t\t\t\t\t\t' ;\t\r\nENDSUB\r\n\r\n///$tab mappingLoads\r\nSUB mappingLoads\r\n\r\n// For QMC Changes using new logs\r\n qmc_change_map:\r\n Mapping Load * Inline [\r\n action, is_qmc_change\r\n Add, 1\r\n Create,1\r\n Delete,1\r\n Duplicate,1\r\n Export,1\r\n Import,1\r\n Publish,1\r\n Replace,1\r\n Republish,1\r\n Update,1\r\n Upload,1\r\n Unpublish,1\r\n ];\r\n \r\n \r\n\r\nENDSUB\r\n\r\n///$tab load_base_table\r\nSUB load_base_table (nombre, archivo, 
timestamp_field)\r\n\r\n TRACE Checking for base qvd;\r\n Let ts_field = '\"$(timestamp_field)\"';\r\n\r\n\t// Check to see if governanceLogContent qvd exists\r\n\tLet baseFileSize = FileSize('$(archivo).qvd');\r\n\r\n IF baseFileSize > 0 THEN \t // Yes - QVD exists = not first load\r\n\r\n\t\ttrace Incremental reload of $(nombre) table (i.e. not first reload);\r\n \tLet firstReload = 0;\r\n \r\n $(nombre):\r\n NoConcatenate\r\n \tLoad * FROM [$(archivo).qvd] (qvd)\r\n WHERE $(timestamp_field) >= '$(cutoffDate)'\r\n ;\r\n \r\n LET tempErrorDetails = ScriptErrorDetails;\r\n IF Len(tempErrorDetails) > 0 THEN\r\n CALL monitor_app_reload_stats('WARN','$(archivo)', tempErrorDetails, 'Status Message')\r\n\t\t tempErrorDetails = ;\r\n END IF\r\n \r\n ELSE\t\t// No - no QVD exists = First (initial) load\r\n \r\n trace Initial Load of $(nombre) table ($(archivo) was either not found or was empty).;\r\n Let firstReload = 1;\r\n LET lastReloadCompare\t\t= num(cutoffDate);\t//num('2014-01-01');\t// If First reload, do not filter logs by LastReload\r\n Let LastReloadTime \t\t\t= timestamp(cutoffDate);\r\n IF nombre = 'LogContent' THEN\t// Primary log files (Audit Activity and Security\r\n $(nombre):\r\n NoConcatenate\r\n Load * Inline [Id, LogEntryPeriodStart, LogTimeStamp,Service,Hostname,Message,Export Count,Export Store Count];\r\n \t//Note: adding [Export Count] and [Export Store Count] fields to initial load to prevent reload\r\n \t\t// failure in the absence of Printing logs (which is possible initially)\r\n \r\n ELSE\t// For future separate tables...\r\n $(nombre):\r\n NoConcatenate\r\n Load * Inline [$(timestamp_field)];\r\n \r\n END IF\r\n \r\n END IF\r\n \r\n LET NoOfRows$(nombre)BASE = NoOfRows('$(nombre)');\r\n \r\nENDSUB\r\n\r\n///$tab multiNodeConfig\r\nSUB multiNodeConfig\t\t\r\n\r\n TRACE Checking the configuration - multi-node or single-node;\r\n\r\n// Check for multi-node environment by verifying files in Repository\\ArchivedLogs folder\r\n\t\r\n FOR each 
folder in DirList(archivedLogsFolder & '*')\r\n node_list:\r\n Load\r\n '$(folder)'&'\\' as folder,\r\n mid('$(folder)',26) as [Node Name],\r\n FileTime( '$(folder)' ) as folder_Time\r\n AutoGenerate 1;\r\n \r\n\tNEXT folder\r\n \r\n LET count_of_nodes\t= NoOfRows('node_list');\r\n// LET loop_count\t\t= count_of_nodes-1;\t// the \"For\" loop in \"RUN logic\" starts from 0\r\n \r\n IF count_of_nodes > 1 then\r\n \tLET multiNode = 'Multi-Node';\r\n TRACE Multi-Node environment detected;\r\n ELSE\r\n LET multiNode = 'Single-Node';\r\n TRACE Single-Node environment detected;\r\n let count_of_nodes = If(isnull(count_of_nodes),0,1);\r\n ENDIF\r\n\r\nEndSub\r\n\r\n///$tab logFolderList\r\nSUB logFolderList\r\n \r\n // Create a list of folders to search for log files, including all folders in the ..\\Sense\\Repository\\ArchivedLogs folder\r\n // For Multi-node configuration, please refer to the instructions below\r\n FOR each node in 'ServerLogFolder'\r\n \r\n LET svr = 'lib://$(node)/'; \r\n \r\n logFolderList:\r\n LOAD\r\n '$(svr)' as mainLogFolder,\r\n 'txt' as file_extension\r\n AutoGenerate(1);\r\n \r\n NEXT node \r\n\r\n FOR each fldr in DirList('$(archivedLogsFolder)'&'*')\r\n Concatenate (logFolderList)\r\n Load\r\n '$(fldr)/' as mainLogFolder,\r\n 'log' as file_extension\r\n AutoGenerate(1); \r\n \r\n NEXT fldr\r\n \r\n /* =========== Instructions for Multi-node configuration\t==================================================================================\\\\\r\n \r\n\t1.\tAdd new data connection for each rim node. If you have 5 RIM nodes, you will need to create 5 data connections. 
\r\n\t\tFor example, data connection for RIM1 points to folder \\\\rim_node_1\\c$\\programdata\\qlik\\sense\\log and is called RIM1\r\n\r\n\t2.\tRename new data connections in QMC to remove the (username) which is appended to the data connection name --- Example RIM1 (user_183)\r\n\r\n\t3.\tUpdate load script in section SUBT logFolderList on line 5 by adding the names of all new data connections created in step 1 and 2. \r\n \tEach new data connection name should be enclosed in single quotes ' and separated by a comman. For example:\r\n \tFOR each node in 'ServerLogFolder','RIM1','RIM2'\r\n\r\n\t4.\tPerform Step 3 in the other Monitor App\r\n \r\n /* ===========================================================================================================================================*/ \r\n\r\nENDSUB\r\n\r\n///$tab loadFiles\r\nSUB loadFiles (fdr,iter)\r\n // Use the iteration number (on Run Logic section) to load all log files listed in the logList SUB\r\n Let carpeta\t\t\t= peek('mainLogFolder',$(fdr),'logFolderList');\r\n Let extension\t\t\t= peek('file_extension',$(fdr),'logFolderList');\r\n Let logService \t\t= peek('logService',$(iter),'logList');\r\n Let logArea\t \t\t= peek('logArea',$(iter),'logList');\r\n Let logType\t \t\t= peek('logType',$(iter),'logList');\r\n Let logAddlFields\t\t= peek('logAddlFields',$(iter),'logList');\r\n LET logType \t\t\t= if(logType='Performance','_performance',logType);\r\n \r\n LET logName \t\t\t= '$(carpeta)$(logService)\\$(logArea)\\*$(logType)';\t\t\t// For Common Logging - TRACE folder + new logs\r\n\r\n // Log-specific fields spelled out in the SUB defineFields\r\n LET fields2Load \t\t= $(logAddlFields);\r\n \r\n // Session and TaskExecution log files have a start and stop timestamp; all other logs use Timestamp as start and stop, per the logList table\r\n LET logStartTS\t\t= 'TimeStamp(ConvertToLocalTime(Round(\"' & peek('logStart',$(iter),'logList') & '\",1/86400)))'; // qlik-80854 updated from 1/1440\r\n\r\n for 
each textFile in FileList(logName & '*.' & extension)\r\n \r\n IF filetime( '$(textFile)' ) >= $(lastReloadCompare) then\t\t// Only load the files updated since the last reload\r\n\r\n //working:\r\n CONCATENATE (working)\r\n Load\r\n Round($(logStartTS),1/1440) &'|' \r\n & Round(ConvertToLocalTime(Timestamp),1/1440) \t\tAS _date_time_link, \r\n $(logStartTS) AS LogEntryPeriodStart,\r\n timestamp(ConvertToLocalTime(Round(Timestamp,1/86400))) AS LogTimeStamp,\r\n\r\n Message,\r\n $(fields2Load), \r\n Id as Id_temp\t\t// Unique Identifier for Log entry to be used in the WHERE NOT EXISTS () clause to avoid loading duplicate log entries\r\n \r\n FROM '$(textFile)'\r\n (txt, utf8, embedded labels, delimiter is '\\t', msq)\r\n WHERE isnum(Sequence#);\r\n \r\n // If there is an error in the loading of the log, send a trace message about it\r\n LET tempErrorDetails = ScriptErrorDetails;\r\n IF Len(tempErrorDetails) > 0 THEN\r\n trace ERROR: $(tempErrorDetails);\r\n CALL monitor_app_reload_stats('WARN','$(textFile)', tempErrorDetails, 'Status Message')\r\n tempErrorDetails =;\t// Reset this variable\r\n END IF\r\n \r\n ENDIF\r\n next textFile\r\n\r\nENDSUB\r\n\r\n///$tab load_database_logs\r\nSUB load_database_logs\r\n\r\n LIB CONNECT TO 'QLogs';\r\n\r\n REM AuditActivity_AuditSecurity log entries first;\r\n CONCATENATE (working)\r\n LOAD \r\n \tRound((entry_timestamp),1/1440) &'|' \r\n \t& Round((entry_timestamp),1/1440) \t\tAS _date_time_link,\r\n Timestamp((Round(entry_timestamp,1/86400))) \tAS LogEntryPeriodStart,\r\n Timestamp((Round(entry_timestamp,1/86400))) \tAS LogTimeStamp,\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS Id_temp, // For incremental reload\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS Sequence#,\r\n\tlower(process_host) \t\t\t\t\t\t\tAS Hostname, \r\n\tprocess_name, \r\n\tlogger \t\t\t\t\t\t\t\t\t\t\tAS Logger, \r\n\tentry_level \t\t\t\t\t\t\t\t\tAS Severity,\r\n\tmessage \t\t\t\t\t\t\t\t\t\tAS Message, \r\n\tdescription \t\t\t\t\t\t\t\t\tAS Description,\r\n 
proxy_session_id & proxy_package_id \t\t\tAS _proxySessionPackage,\r\n\tproxy_session_id \t\t\t\t\t\t\t\tAS ProxySessionId, \r\n\tproxy_package_id \t\t\t\t\t\t\t\tAS ProxyPackageId, \r\n\trequest_sequence_id \t\t\t\t\t\t\tAS RequestSequenceId,\r\n\tservice \t\t\t\t\t\t\t\t\t\tAS Service, \r\n\tcontext \t\t\t\t\t\t\t\t\t\tAS Context, \r\n\tcommand \t\t\t\t\t\t\t\t\t\tAS Command, \r\n\tresult \t\t\t\t\t\t\t\t\t\t\tAS Result, \r\n\tobject_id \t\t\t\t\t\t\t\t\t\tAS ObjectId,\r\n\tobject_name \t\t\t\t\t\t\t\t\tAS ObjectName, \r\n\tuser_directory \t\t\t\t\t\t\t\t\tAS UserDirectory, \r\n\tuser_directory & chr(92) & user_id \t\t\t\tAS UserId,\r\n\tsecurity_class \t\t\t\t\t\t\t\t\tAS SecurityClass, \r\n\tclient_host_address \t\t\t\t\t\t\tAS ClientHostAddress,\r\n IF(process_name='scheduler',Subfield(object_name,chr(124),2),\r\n \tIF(process_name='printing' AND Index(command,'Export'),Subfield(object_name,chr(124),-1),\r\n \tIF(right(command,3)='app' OR context like '*/app/*',object_name))) \t\t\t\tAS [App Name],\r\n IF(service='Repository',ApplyMap('qmc_change_map',Subfield(command,' ',1),null()))\t\tAS qmc_change,\r\n IF(service='Repository',\r\n \tIF(ApplyMap('qmc_change_map',Subfield(command,' ',1),null())=1 AND SubStringCount(command,':')=0,\r\n \tIF(Left(user_id,3)<>'sa_' AND user_id <> 'Not available' AND user_id <> 'System', \r\n \t//Upper(Left(Mid(command,index(command,' ')+1),1))&Mid(Mid(command,Index(command,' ')+1),2) // THIS just capitalizes the first letter -- lots of processing!\r\n Capitalize(Mid(command,Index(command,' ')+1))\t\t// Simplified using Capitalize to capitalize first letter in each word\r\n )\r\n ) \r\n ) \t\t\t\t\t\t\t\t\t\t\t\tAS [QMC Resource Type],\r\n IF(context='Doc::DoReload',1) \t\t\t\t\tAS [Reload from Hub],\r\n IF(context='Doc::DoReload',1,\r\n \tIF(process_name='scheduler',\r\n \tIF(message='Changing task state to Triggered',0,\r\n \tIF(message like 'Task finished*',1,\r\n \tIF(result<>30 AND result >=20 AND result<45,1)\r\n )\r\n 
)\r\n )\r\n ) \t\t\t\t\t\t\t\t\t\t\t\tAS TaskExecution,\r\n IF(context='Doc::DoReload',proxy_session_id & proxy_package_id & request_sequence_id,\r\n \tIF(process_name='scheduler',SubField(object_id,chr(124),1))) \tAS TaskId\r\n ;\r\n SELECT * FROM \"public\".\"view_audit_activity_audit_security\"\r\n WHERE entry_timestamp >= '$(LastReloadTime)';\r\n \r\n REM system_errors_warnings for errors and warnings from all System logs;\r\n CONCATENATE (working)\r\n LOAD \r\n \tRound((entry_timestamp),1/1440) &'|' \r\n \t& Round((entry_timestamp),1/1440) \t\tAS _date_time_link,\r\n Timestamp((Round(entry_timestamp,1/86400))) \tAS LogEntryPeriodStart,\r\n Timestamp((Round(entry_timestamp,1/86400))) \tAS LogTimeStamp,\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS Id_temp, // For incremental reload\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS _proxySessionPackage,\r\n\tlower(process_host) \t\t\t\t\t\t\tAS Hostname, \r\n\tprocess_name, \r\n\tlogger \t\t\t\t\t\t\t\t\t\t\tAS Logger, \r\n\tentry_level \t\t\t\t\t\t\t\t\tAS Severity,\r\n\tmessage \t\t\t\t\t\t\t\t\t\tAS Message,\r\n\texception\t\t\t\t\t\t\t\t\t\tAS Exception, \r\n\tstack_trace\t\t\t\t\t\t\t\t\t\tAS [Stack Trace], \r\n\tthread\t\t\t\t\t\t\t\t\t\t\tAS Thread,\r\n\tproxy_session_id \t\t\t\t\t\t\t\tAS [ProxySessionId],\t\t// This is sparsely populated! 
Not reliable at all\r\n subfield(logger,'.',2)\t\t\t\t\t\t\tAS Service,\r\n\t// Engine specific entries\r\n\tengine_thread\t\t\t\t\t\t\t\t\tAS [Engine Thread], \r\n\tactive_user_directory\t\t\t\t\t\t\tAS [Active User Directory], \r\n\tactive_user_id\t\t\t\t\t\t\t\t\tAS [Acitve User Id], \r\n\tprocess_id\t\t\t\t\t\t\t\t\t\tAS [Process Id], \r\n\tTimestamp((engine_timestamp))\tAS [Engine Timestamp], \r\n\t// Scheduler specific entries\r\n\ttask_id\t\t\t\t\t\t\t\t\t\t\tAS [TaskId], \r\n task_id\t\t\t\t\t\t\t\t\t\t\tAS ObjectId,\r\n\texecution_id\t\t\t\t\t\t\t\t\tAS [Execution Id], \r\n\ttask_name\t\t\t\t\t\t\t\t\t\tAS [Scheduler Task Name],\r\n task_name\t\t\t\t\t\t\t\t\t\tAS ObjectName,\r\n\tapp_id\t\t\t\t\t\t\t\t\t\t\tAS [Scheduler AppId], \r\n\tapp_name\t\t\t\t\t\t\t\t\t\tAS [App Name], \r\n\tuser\t\t\t\t\t\t\t\t\t\t\tAS [Scheduler User]\r\n ;\r\n SELECT * FROM \"public\".\"view_system_errors_warnings\"\r\n WHERE entry_timestamp >= '$(LastReloadTime)';\r\n \r\n REM Performance_Engine logs next;\r\n CONCATENATE (working)\r\n LOAD \r\n \tRound((entry_timestamp),1/1440) &'|' \r\n \t& Round((entry_timestamp),1/1440) \t\tAS _date_time_link,\r\n\tTimestamp((Round(entry_timestamp,1/86400))) \tAS LogEntryPeriodStart,\r\n Timestamp((Round(entry_timestamp,1/86400))) \tAS LogTimeStamp,\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS Id_temp, // For incremental reload\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS _proxySessionPackage,\r\n lower(process_host) \t\t\t\t\t\t\tAS Hostname,\r\n logger\t\t\t\t\t\t\t\t\t\t\tAS Logger, \r\n entry_level \t\t\t\t\t\t\t\t\tAS Severity,\r\n 'Engine' \t\t\t\t\t\t\t\t\t\tAS Service,\r\n\tnum(active_doc_sessions)\t\t\t\t\t\tAS ActiveDocSessions, \r\n\tnum(active_docs)\t\t\t\t\t\t\t\tAS ActiveDocs, \r\n\tnum(selections)\t\t\t\t\t\t\t\t\tAS Selections, \r\n\tnum(active_users)\t\t\t\t\t\t\t\tAS ActiveUsers, \r\n\tcpu_load/100 \t\t\t\t\t\t\t\t\tAS CPULoad, \r\n\tnum(loaded_docs)\t\t\t\t\t\t\t\tAS LoadedDocs,\r\n Round(vm_committed_mb/1024,0.01) \t\t\t\tAS VMCommitted,\r\n 
Round(vm_allocated_mb/1024,0.01) \t\t\t\tAS VMAllocated,\r\n Round(vm_free_mb/1024,0.01) \t\t\t\t\tAS VMFree,\r\n round(vm_committed_mb/(vm_free_mb+vm_committed_mb),0.01) AS VMPctCommitted,\r\n\tnum(cache_hits) \t\t\t\t\t\t\t\tAS [Cache Hits], \r\n\tnum(cache_lookups) \t\t\t\t\t\t\t\tAS [Cache Lookups], \r\n\tnum(cache_bytes_added) \t\t\t\t\t\t\tAS [Cache Bytes Added]\r\n ;\r\n SELECT * FROM \"public\".\"view_performance_engine\"\r\n WHERE entry_timestamp >= '$(LastReloadTime)'; \r\n\r\n REM Session_Engine logs next;\r\n CONCATENATE (working)\r\n LOAD \r\n \tRound(ConvertToLocalTime(session_start),1/1440) &'|' \r\n \t& Round((entry_timestamp),1/1440) \t\t\tAS _date_time_link,\r\n\tTimestamp(ConvertToLocalTime(Round(session_start,1/86400))) \t\tAS LogEntryPeriodStart,\t\r\n \t// Use ConvertToLocalTime for session_start and not entry_timestamp b/c session_start stored differently in [payload] field\r\n Timestamp((Round(entry_timestamp,1/86400))) \tAS LogTimeStamp,\r\n id \t\t\t\t\t\t\t\t\t\t\t\tAS Id_temp, // For incremental reload\r\n lower(process_host) \t\t\t\t\t\t\tAS Hostname,\r\n logger\t\t\t\t\t\t\t\t\t\t\tAS Logger,\r\n proxy_session_id & id \t\t\t\t\t\t\tAS _proxySessionPackage,\t// using id instead of proxy_package_id which is 0\r\n active_user_directory & chr(92) & active_user_id \tas UserId,\r\n 'Engine' \t\t\t\t\t\t\t\t\t\tas Service,\r\n entry_level \t\t\t\t\t\t\t\t\tAS Severity,\r\n proxy_session_id \tAS ProxySessionId,\r\n app_id \t\t\t\t\t\t\t\t\t\t\tAS ObjectId,\r\n app_title \t\t\t\t\t\t\t\t\t\tas [App Name],\r\n app_title\t\t\t\t\t\t\t\t\t\tas ObjectName,\r\n If(proxy_session_id=0,0,1)\t\t\t\t\t\tAS [Session Count],\r\n round(session_duration*1440,0.02) \t\t\t\tAS [Session Duration],\r\n If(proxy_session_id=0,null(),round(cpu_spent_s*1000,0.01)) \t\t\t\tAS [Session CPU Spent (ms)],\r\n If(proxy_session_id=0,null(),ceil((bytes_received+bytes_sent)/1024))\tAS [Session KBytes Sent+Received],\r\n If(len(proxy_session_id)>0,selections,0) 
\t\t\t\t\t\t\t\tAS [Session Selections],\r\n\tIf(proxy_session_id=0,round(cpu_spent_s*1000,0.01)) \t\t\t\t\tAS [Reload CPU Spent (ms)],\r\n If(proxy_session_id=0,ceil((bytes_received+bytes_sent)/1024))\t\t\tAS [Reload KBytes Sent+Received]\r\n ;\r\n SELECT * FROM \"public\".\"view_session_engine\"\r\n WHERE entry_timestamp >= '$(LastReloadTime)'\r\n AND NOT active_user_directory like 'internal';\r\n\r\n TRACE Finished loading data incrementally from database. Nice job!; \r\n DisConnect;\r\n \r\nENDSUB\r\n///$tab calendarization\r\nSUB calendarization\r\n\r\n TRACE Working on master Calendar;\r\n \r\n // 1- Check for & Load existing calendar QVDs (can we work incrementally?)\r\n CALL load_base_table ('time_range', '$(time_range_TableName)','DateTime')\r\n CALL load_base_table ('date_time', '$(date_time_TableName)','_date_time_link_incr')\r\n \r\n // 2- Find first and last date from my data\r\n Range:\r\n LOAD \r\n DayStart(min) as startdate,\r\n DayStart(max) as enddate,\r\n timestamp(max) as maxLogTimeStamp;\r\n LOAD \r\n min(LogEntryPeriodStart) as min,\r\n max(LogTimeStamp) as max\r\n resident working;\r\n\r\n let startdate\t\t\t\t= floor(peek('startdate',-1,'Range'));\r\n let enddate\t\t\t\t= ceil(peek('enddate',-1,'Range')) +1;\r\n\r\n let maxLogTimeStamp \t\t= peek('maxLogTimeStamp',-1,'Range');\r\n Let maxLogTimeStamp_Hour \t= hour(maxLogTimeStamp);\r\n Let hour_now \t\t\t\t= maxLogTimeStamp_Hour;\r\n Drop Table Range;\r\n \r\n// SORT ORDERING of Time fields\r\n// To sort backward from now(reload) -- for 24-Hour summary charts\r\n hour_temp:\r\n mapping Load \r\n recno()-1 & ':00' as Hour,\r\n if($(hour_now)-(recno()-1)>=0, $(hour_now)-(recno()-1),23+($(hour_now)-(recno()-1))+1) as hour_sort\r\n autogenerate (24);\r\n \r\n// Establish order of weekdays\r\n Weekdays:\r\n Load \r\n \tweekday(weekstart(today())+recno()-1) as Weekday,\r\n recno() as weekday_sort\r\n autogenerate 7;\r\n\r\n// For all non-24-hour Summary charts, we want \"normal\" numeric sorting 
of Hour from 0 to 23 hours\r\n Hour_Table:\r\n NoConcatenate\r\n Load\r\n rowno()-1 & ':00' as Hour,\r\n rowno()-1 & ':00' as [Hour of Day]\r\n AutoGenerate (24);\r\n\r\n// Build a time-date table of every minute between my start and end date\r\n DO WHILE startdate < enddate\r\n time_range_working:\r\n LOAD\r\n timestamp($(startdate) + (1/(1440))*(recno()-1),'YYYY-MM-DD h:mm') as DateTime_temp\t// \"_temp\" for incremental\r\n autogenerate (1440);\r\n\r\n //let startdate = num($(startdate) + 1,'###0.#####','.') ;\r\n let startdate = $(startdate) + 1;\r\n LOOP \r\n\r\n Inner Join (time_range_working) \r\n IntervalMatch (DateTime_temp) \r\n Load\r\n LogEntryPeriodStart-(1/(1440)) AS start_minus_one, \r\n LogTimeStamp\r\n Resident working;\r\n\r\n date_time_working:\r\n Load\r\n *,\r\n (Round(Num(start_minus_one+1/(1440)),1/1440)&'|'\r\n &Round(Num(LogTimeStamp),1/1440)) as _date_time_link_incr_temp\t// LINK w/ LogContent; \"_temp\" for incremental loading\r\n RESIDENT time_range_working; \r\n\r\n// Concatenate base (historical) calendar tables for dates within cutoffdate\r\n CALL concat_tables ('time_range', 'time_range_working','DateTime')\r\n CALL concat_tables ('date_time', 'date_time_working','_date_time_link_incr')\r\n\r\n// Store then drop time_range; date_time table stored after summaries loaded (below)\r\n CALL store_files ('time_range', '$(time_range_TableName)')\r\n DROP TABLE time_range;\r\n\r\nENDSUB\r\n\r\nSUB calendarization_add\r\n TRACE Looking for additional date time links to include in DateTime;\r\n \r\n //Create table with date_time_link fields that do not already exist in date_time (generated in the sessionSummary, reloadSummary, and/or exportingSummary\r\n new_date_time_links:\r\n Load\r\n \tDistinct _date_time_link,\r\n SubField([_date_time_link],'|',1)-1/1440\tAS start_minus_one,\r\n SubField([_date_time_link],'|',-1)\t\t\tAS end\r\n Resident LogContent\r\n Where Not Exists([_date_time_link_incr],[_date_time_link]);\r\n \r\n Let 
new_date_time_links = NoOfRows('new_date_time_links');\r\n IF new_date_time_links > 0 Then\r\n \tTRACE $(new_date_time_links) new _date_time_links found;\r\n \r\n addl_date_time:\r\n Load Distinct DateTime_temp AS tmp,\r\n *\r\n Resident date_time;\r\n Drop Fields tmp, [_date_time_link_incr],start_minus_one from addl_date_time;\r\n \r\n Inner Join (addl_date_time)\r\n IntervalMatch(DateTime_temp)\r\n Load \r\n start_minus_one,\r\n end\r\n Resident new_date_time_links;\r\n \r\n Concatenate (date_time)\r\n Load *, \r\n (Round(start_minus_one+1/1440,1/1440)&'|'\r\n &Round(end,1/1440)) \t\t\t\t\tAS _date_time_link_incr\r\n Resident addl_date_time;\r\n \r\n Drop Tables addl_date_time, new_date_time_links;\r\n Drop field end From date_time;\r\n \r\n ELSE\r\n \tTRACE No additional _date_time_links found. Carry on!;\r\n ENDIF\r\n \r\n Drop field start_minus_one;\r\n \r\n CALL store_files ('date_time', '$(date_time_TableName)')\t// Store this table without all the extra columns to limit the QVD size on disk.\r\n \r\n TRACE Adding additional fields to date_time table.;\r\n REM This must be done after the \"Summary\" loads because those alter the _date_time_link field;\r\n date_time_link:\r\n NoConcatenate Load \r\n Distinct _date_time_link_incr \t\t\t\t\t\t\t\tAS _date_time_link,\r\n DateTime_temp\t\t\t\t\t\t\t\t\t\t\t\tAS DateTime,\r\n MonthName(DateTime_temp) \t\t\t\t\t\t\t\t\tAS Month,\r\n WeekStart(DateTime_temp) \t\t\t\t\t\t\t\t\tas [Week Beginning],\r\n WeekDay(DateTime_temp) \t\t\t\t\t\t\t\t\t\tas Weekday,\r\n makedate(year(DateTime_temp),month(DateTime_temp),day(DateTime_temp)) as Date, \r\n Hour(DateTime_temp)&':00' \t\t\t\t\t\t\t\t\tas Hour,\r\n Time(DateTime_temp) \t\t\t\t\t\t\t\t\t\tas Time,\r\n ApplyMap('hour_temp',Hour(DateTime_temp)&':00' ) \t\t\tas hour_sort,\r\n Minute(DateTime_temp) \t\t\t\t\t\t\t\t\t\tas [Minute of Hour], \r\n timestamp(floor(DateTime_temp,1/(24)),'MMM-DD hh:00') \t\tas [Hour Timeline],\r\n timestamp(floor(DateTime_temp,10/(1440)),'MMM-DD 
hh:mm') \tAs [Ten-Minute Timeline],\r\n Hour(Frac(DateTime_temp))&':'&right(0&Minute(floor(Frac(DateTime_temp),1/144)),2) As [Ten-Minute Generic Timeline],\r\n timestamp(floor(DateTime_temp,1/(1440)),'MMM-DD hh:mm') \tas [One-Minute Timeline],\r\n If(DateTime_temp>=$(vLast4Hours),1) \t\t\t\t\t\tAS last4hours,\r\n If(DateTime_temp>=$(vLast24Hours),1) \t\t\t\t\t\tAS last24hours,\r\n If(DateTime_temp>=$(vLast72Hours),1) \t\t\t\t\t\tAS last72hours\r\n Resident date_time\r\n ORDER BY DateTime_temp DESC;\r\n \r\n DROP TABLE date_time;\r\n \r\n// Create Timeframe field for quick selection of common historical timeframes\r\n Last:\r\n Load Distinct [Hour Timeline], 'Last 4 Hours' as [Timeframe] Resident date_time_link Where last4hours=1;\r\n Concatenate Load Distinct [Hour Timeline], 'Last 24 Hours' as [Timeframe] Resident date_time_link Where last24hours=1;\r\n Concatenate Load Distinct [Hour Timeline], 'Last 72 Hours' as [Timeframe] Resident date_time_link Where last72hours=1;\r\n \r\n Drop fields last4hours,last24hours,last72hours;\r\n\r\nENDSUB\r\n\r\n///$tab concat_tables\r\nSUB concat_tables (concatToTable, incrementalTable, concatField)\r\n\r\n TRACE Concatenating $(concatToTable)...;\r\n\r\n Let rows$(incrementalTable)Final = num(NoOfRows('$(incrementalTable)'),'#,##0');\r\n trace $(rows$(incrementalTable)Final) rows loaded.;\r\n\r\n IF NoOfRows('$(incrementalTable)')>0 then\r\n\r\n CONCATENATE ($(concatToTable))\r\n LOAD \r\n *, \r\n $(concatField)_temp as $(concatField)\r\n RESIDENT $(incrementalTable)\r\n WHERE NOT Exists ($(concatField),$(concatField)_temp);\r\n\r\n drop field $(concatField)_temp from $(concatToTable);\r\n\r\n ELSE\r\n TRACE No incremental rows for $(incrementalTable); // Should only ever occur if all Qlik Services are stopped \r\n\r\n ENDIF\r\n\r\n drop table $(incrementalTable);\r\n\r\nENDSUB\r\n///$tab store_files\r\nSUB store_files (nombre, archivo)\r\n\r\n TRACE Storing the QVD;\r\n\r\n \tStore '$(nombre)' into [$(archivo).qvd];\r\n \r\n 
LET tempErrorDetails = ScriptErrorDetails;\r\n \tIF Len(tempErrorDetails) > 0 THEN\r\n \tSET storeBaseTableFail = 1;\r\n CALL monitor_app_reload_stats('WARN','$(archivo)', tempErrorDetails, 'Status Message')\r\n\t\ttempErrorDetails = ; // Reset This\r\n \tELSE\r\n \tSET storeBaseTableFail = 0;\r\n END IF\r\n \r\n LET NoOfRowsLogContent = num(NoOfRows('$(nombre)'),'#,##0');\r\n LET NoOfRowsIncremental = NoOfRowsLogContent - NoOfRowsLogContentBASE;\r\n Let storeTime = now(1);\r\n TRACE $(nombre) table stored at $(storeTime) with $(NoOfRowsLogContent) rows;\r\n\r\nENDSUB\r\n\r\n///$tab serviceLog\r\nSUB serviceLog\r\n\r\n TRACE Working on Service Logs;\r\n\r\n justErrorsWarnings:\r\n LOAD errorWarn INLINE [\r\n errorWarn\r\n ERROR\r\n WARN\r\n WARNING\r\n FAIL\r\n FATAL\r\n ];\r\n\r\n\r\n serviceLog:\r\n NoConcatenate\r\n Load\r\n _proxySessionPackage,\r\n IF(Severity='WARN' or Severity = 'WARNING',1,0) as [Service Warning],\r\n IF(Severity='ERROR' or Severity='FAIL' or Severity='FATAL',1,0) as [Service Error],\r\n Message as [Service Message],\r\n Service as [Service Log],\r\n Severity\r\n resident LogContent\r\n WHERE exists (errorWarn,Severity); \t// just load errors and warnings\r\n \r\n DROP FIELD Severity from LogContent;\r\n DROP TABLE justErrorsWarnings;\r\n\r\n// About the fields\r\n//\t[_proxySessionPackage] is the unique identifier for \"sessions\" in Qlik land - not user-app sessions but \r\n//\t\t\tproxy-authenticated sessions when a user accesses the QMC and/or Hub and apps. For Service Log, this is\r\n//\t\t\tset to Id (unique per log entry) when ProxySessionID = 0 and ProxyPackageId = 0 (internal issues) so that\r\n//\t\t\tthis record in the serviceLog will have a link to LogContent to its own record rather than to all other\r\n//\t\t\taudit log entries with _proxySessionPackage = 00. 
This is necessary to make sure each \"Severity\" has a fixed\r\n//\t\t\tLogTimeStamp (or set) that it links to.\r\n//\t[Severity] provides the link among all the logs for a \"session\" that had an error\r\n//\t\t\tassociated with it. I load it here in a separate table and drop it from LogContent so you can select ERROR or WARN\r\n//\t\t\tand see associated log entries. If left in LogContent, only the System Service log entries would be \"associated\" to \r\n//\t\t\tERROR and WARN, which defeats the goal of reconstructing activity across a Qlik site.\r\n\r\nENDSUB\r\n\r\n///$tab reloadSummary\r\nSUB reloadSummary\r\n\r\n TRACE Working on Reload Summary;\r\n\r\n Reload_1:\r\n Load\r\n If([Reload from Hub]=1,TaskId,ProxySessionId) as ProxySessionId,\t// LINK w/ Reload Finishes > Hub Reloads need special care - unique identifier is TaskId for these Engine Audit Activity entries\r\n LogTimeStamp as [Reload Start],\r\n If([Reload from Hub]=1,'Reload from Hub of ' & ObjectName,SubField(ObjectName,'|',1)) as [Task Name]\r\n RESIDENT LogContent\r\n WHERE TaskExecution = 0 OR [Reload from Hub] = 1;\t// Reload Starts\r\n \r\n Let countOfReloads = NoOfRows('Reload_1');\r\n\r\n IF $(countOfReloads) = 0 then\t\t// Don't run the rest of this if there are no reload entries; This should only ever happen on first reload\r\n \ttrace No Reload entries yet. 
If you see this message more than once, contact Qlik Support because something is not right.;\r\n Drop table Reload_1;\r\n\r\n ELSE\r\n \r\n LEFT JOIN (Reload_1)\r\n LOAD\r\n If([Reload from Hub]=1,TaskId,ProxySessionId) as ProxySessionId,\t\t// To Join w/ Reload Start > Note the special case for Hub Reloads using TaskId which is composed of proxy session,pkg,requestsequence\r\n _proxySessionPackage, \t\t\t// To Join w/ Finish in LogContent\r\n TaskExecution, \t\t\t\t\t// To Join w/ Finish in LogContent\r\n [Reload from Hub],\t\t\t\t//Temp field for Reload Duration\r\n TaskId as _reloadSummaryTaskId,\t// For task chain duration analysis\r\n LogTimeStamp as [Reload Finish],\r\n Result as [Reload Result Code],\r\n If([Reload from Hub]=1,'Unknown: Reload from Hub',If(Result=0,'Success',IF(Right(Message,7)='Aborted','Aborted','Failed'))) as [Reload Status], \t\t// COMBINE Result and Message to get consolidated list\r\n If(Result>0,1,0) as [Reload Failure]\r\n RESIDENT LogContent\r\n WHERE TaskExecution = 1 AND _proxySessionPackage <> '00';\t// Reload Finishes; exclude INTERNAL records or ones\r\n // in which no ProxySessionId is properly assigned.\r\n \r\n drop field ProxySessionId from Reload_1;\r\n \r\n ReloadSummary:\r\n NoConcatenate Load\r\n *,\r\n If([Reload from Hub]=1,0,round(([Reload Finish]-[Reload Start])*1440,0.02)) as [Reload Duration]\r\n RESIDENT Reload_1;\r\n \r\n Drop table Reload_1;\r\n Drop field [Reload from Hub] from ReloadSummary;\t\t// No longer need this field in the ReloadSummary table; it will be referenced from the LogContent table if needed.\r\n \r\n LEFT JOIN (LogContent)\r\n LOAD\r\n _proxySessionPackage,\t\t// Field to Join LogContent on; Note that for Reload From Hub cases, we are using TaskId\r\n _reloadSummaryTaskId as TaskId,\t\t\t\t\t// Adding TaskId for Join to LogContent for the Reload from Hub case, which requires TaskId to avoid a many:many join situation\r\n TaskExecution, \t\t\t// Field to Join on > 1 for \"Task finished*\"; 
Also used to count Reloads \r\n [Reload Start]\t\t\t// To create LogEntryPeriodStart and _date_time_link2 for Finished task: in LogContent\r\n RESIDENT ReloadSummary;\r\n \r\n // To update LogEntryPeriodStart and _date_time_link in LogContent for \"Finished task\" entries\r\n LogContentReloaded:\r\n NoConcatenate Load\r\n *,\r\n alt([Reload Start],LogEntryPeriodStart) as LogEntryPeriodStart2,\r\n if(isnull([Reload Start]),_date_time_link,round([Reload Start],1/1440)&'|'&round(LogTimeStamp,1/1440)) as _date_time_link2\r\n Resident LogContent;\r\n \r\n Drop table LogContent;\r\n Drop fields LogEntryPeriodStart,_date_time_link;\r\n Rename field LogEntryPeriodStart2 to LogEntryPeriodStart;\r\n Rename field _date_time_link2 to _date_time_link;\r\n Rename table LogContentReloaded to LogContent;\r\n Drop fields [Reload Start] from LogContent;\r\n\t\r\n////// Reload Task Supporting data\t//////\r\n\t// Task Duration is given in minutes!\r\n\tReload_Duration_Bucket:\r\n Load \r\n DISTINCT [Reload Duration],\r\n IF([Reload Duration]<1,dual('< 1',1),\r\n IF([Reload Duration]<6,dual('1 - 5',5),\r\n IF([Reload Duration]<11,dual('6 - 10',10),\r\n IF([Reload Duration]<31,dual('11 - 30',30), dual('> 30',31)\r\n )))) as [Reload Duration Bucket]\r\n\tRESIDENT ReloadSummary\r\n WHERE [Reload Duration] > 0;\r\n \r\n ENDIF\r\n \r\nEndSub\r\n///$tab sessionSummary\r\nSUB sessionSummary\r\n\r\n session_count:\r\n Load count(Id) as count resident LogContent\r\n Where [Session Count]=1;\r\n \r\n LET session_count = Peek('count');\r\n Drop table session_count;\r\n \r\n IF session_count > 0 Then\r\n\r\n TRACE Working on Session Summary for $(session_count) session entries.;\r\n\r\n SessionSummary:\r\n NoConcatenate Load\r\n _proxySessionPackage,\t// LINK\r\n LogEntryPeriodStart\t\tas [Session Start],\r\n LogTimeStamp\t\t\tas [Session Finish],\r\n Hostname \t\t\t\tas [Session Hostname],\r\n [App Name]\t\t\t\tas [Session App Name],\r\n [Session Duration],\r\n [Session Count]\r\n RESIDENT 
LogContent\r\n WHERE [Session Count] = 1;\r\n\r\n // Add Session_Engine fields for calculation of \"Cost of a Session\"\r\n Session_Cost:\r\n NoConcatenate Load\r\n _proxySessionPackage,\t// Key to link the session\r\n [Session Selections],\r\n [Session CPU Spent (ms)],\r\n [Session KBytes Sent+Received]\r\n Resident LogContent\r\n Where [Session CPU Spent (ms)] >=0;\r\n\r\n App_Cost_Summary:\t\t// This links with App tables from QRS\r\n Load *,\r\n If(app_cost_session_count<50,dual('< 50',1),\r\n If(app_cost_session_count<250,dual('50-249',2),\r\n If(app_cost_session_count<500,dual('250-499',3),\r\n If(app_cost_session_count>=500,dual('500+',4)\r\n ))))\t\t\tAS [App Cost Session Count]\r\n ;\r\n NoConcatenate Load\r\n ObjectId as AppId,\r\n Num(Count(Distinct [_proxySessionPackage]),'#,##0')\t\tAS app_cost_session_count,\t// QLIK-74213 \r\n date(Floor(Max(LogTimeStamp))) \t\t\t\t\t\t\tAS [App Last Accessed],\t\t// QLIK-74213\r\n Num(Max([Session CPU Spent (ms)]),'#,##0')\t\t\t\tAS [Max CPU Spent (ms)],\r\n Num(Round(Avg([Session CPU Spent (ms)]),0.01),'#,##0.00')\tAS [Avg CPU Spent (ms)],\r\n Num(Ceil(Max([Session KBytes Sent+Received])),'#,##0')\tAS [Max KBytes Sent+Received],\r\n Num(Ceil(Avg([Session KBytes Sent+Received])),'#,##0')\tAS [Avg KBytes Sent+Received]\r\n Resident LogContent\r\n WHERE [Session KBytes Sent+Received]>0\r\n Group By ObjectId;\r\n\r\n DROP FIELD app_cost_session_count; \r\n Drop Fields [Session Selections],[Session CPU Spent (ms)],[Session KBytes Sent+Received] from LogContent;\r\n Drop Fields [Session Duration], [Session Count] from LogContent;\r\n\r\n // Session Duration is given in Minutes!\r\n Session_Duration_Bucket:\r\n Load \r\n DISTINCT [Session Duration],\r\n IF([Session Duration]<11,dual('< 10',10),\r\n IF([Session Duration]<31,dual('11 - 30',30),\r\n IF([Session Duration]<61,dual('31 - 60',60),\r\n IF([Session Duration]<121,dual('61 - 120',120), dual('> 120',121)\r\n )))) as [Session Duration Bucket]\r\n RESIDENT 
SessionSummary\r\n WHERE [Session Duration] > 0;\r\n \r\n ELSE \r\n \tTRACE No session entries found. Skipping sessionSummary logic.;\r\n \r\n Endif\r\n\r\nEndSub\r\n///$tab exportingSummary\r\nSUB exportingSummary\r\n\r\n TRACE Working on Exporting Summary;\r\n\r\n Exporting_1:\r\n Load *,\r\n \treplace(exportObjectTypeTemp,left(exportObjectTypeTemp,1),upper(left(exportObjectTypeTemp,1))) as [Exported Object Type]\t// for Uppercase Type\r\n \t;\r\n Load\r\n _proxySessionPackage,\t// Link to Sessions & Session Summary\r\n //RequestSequenceId, // Join field for Export logs\r\n LogTimeStamp as [Export Start],\r\n subfield(ObjectName,'|',1) as [Exported Object],\r\n subfield(Command,' ',-1) as [Export to],\r\n if(textbetween(Command,' ',' ')='chart',purgechar(TextBetween(Message,'of type ',' '),chr(39)),textbetween(Command,' ',' ')) as exportObjectTypeTemp,\r\n [App Name] as [Export App Name],\r\n Message as [Export Message],\r\n UserId as [Export UserId]\r\n RESIDENT LogContent\r\n WHERE [Export Count] = 1;\t// Exporting Starts\r\n \r\n drop field exportObjectTypeTemp from Exporting_1;\r\n\r\n Let countOfExports = NoOfRows('Exporting_1');\r\n\r\n IF $(countOfExports) = 0 then\t\t// Don't run the rest of this if there are no Export entries;\r\n trace No Exporting entries yet.;\r\n Drop table Exporting_1;\r\n\r\n ELSE\r\n\r\n LEFT JOIN (Exporting_1)\r\n LOAD\r\n _proxySessionPackage,\t\t// Remove this when using RequestSequenceId as join field\r\n //RequestSequenceId, \t\t// Join field for Export logs (remove _proxySessionPackage)\r\n 1 as [Export Count], \t\t// To Join w/ Finish in LogContent\r\n LogTimeStamp as [Export Finish]\r\n RESIDENT LogContent\r\n WHERE [Export Store Count] = 1;\t// Storing the Exported object (Export Finishes)\r\n\r\n ExportingSummary:\r\n NoConcatenate Load\r\n *,\r\n Round(([Export Finish]-[Export Start])*1440,0.02) as [Export Duration]\r\n RESIDENT Exporting_1;\r\n\r\n Drop table Exporting_1;\r\n\r\n LEFT JOIN (LogContent)\r\n LOAD\r\n 
_proxySessionPackage,\t\t// Field to Join on until we use RequestSequenceId (and are linking sessions)\r\n //RequestSequenceId, \t\t// Join field for Export logs (remove _proxySessionPackage)\r\n [Export Count], \t\t// Field to Join on > 1 for \"Export finished*\"; Also used to count Export exports\r\n\r\n [Export Start]\t\t// To create LogEntryPeriodStart and _date_time_link2 for Finished Export: in LogContent\r\n RESIDENT ExportingSummary;\r\n\r\n // Adding Session Finish to \"Start\" records so that the start and finish entries can each independently span the timeline in the calendar\r\n LEFT JOIN (LogContent)\r\n LOAD\r\n _proxySessionPackage,\t\t// Field to Join on until we use RequestSequenceId (and are linking sessions)\r\n //RequestSequenceId, \t\t// Join field for Export logs (remove _proxySessionPackage)\r\n 1 as [Export Store Count], \t\t// Field to Join on > 1 for \"Export finished*\"; Also used to count Export / Exports\r\n\r\n [Export Finish]\t\t// To create LogEntryPeriodStart and _date_time_link2 for Finished Export: in LogContent\r\n RESIDENT ExportingSummary;\r\n\r\n // To update fields in LogContent for \"Exporting\" entries\r\n LogContentExported:\r\n NoConcatenate Load\r\n *,\r\n ALT(if([Export Count]=1,[Export Start]),LogEntryPeriodStart) \t\t\t\t\tAS LogEntryPeriodStart2,\r\n ALT(if([Export Store Count]=1,[Export Finish]),LogTimeStamp) \t\t\t\t\t\t\tAS LogTimeStamp2,\r\n Round(ALT(if([Export Count]=1,[Export Start]),LogEntryPeriodStart),1/1440)&'|'&\r\n \tRound(ALT(if([Export Store Count]=1,[Export Finish]),LogTimeStamp),1/1440)\t\t\tAS _date_time_link2\r\n Resident LogContent;\r\n\r\n Drop table LogContent;\r\n Drop fields LogEntryPeriodStart,LogTimeStamp,_date_time_link;\r\n Rename field LogEntryPeriodStart2 to LogEntryPeriodStart;\r\n Rename field LogTimeStamp2 to LogTimeStamp;\r\n Rename field _date_time_link2 to _date_time_link;\r\n Rename table LogContentExported to LogContent;\r\n Drop fields [Export Start],[Export Finish],[Export 
Count] from LogContent;\r\n\r\n ENDIF\r\n\r\nEndSub\r\n///$tab monitorAppReloadStats\r\nSUB monitor_app_stats_incremental\t// Use this to append new 'status' entry to table \r\n Concatenate (monitor_app_reload_stats)\r\n Load\r\n RowNo() as [Log Entry],\r\n timestamp(now(1)) as [Log Timestamp],\r\n '$(sev)' as [Log Severity],\r\n '$(comp)' as Host,\r\n '$(description)' as Description,\r\n '$(message)' as [Log Message],\r\n '$(obj)' as Object\r\n AutoGenerate (1);\r\n \r\nENDSUB\r\n \r\nSUB monitor_app_reload_stats (sev, obj, message, description)\r\n\r\n TRACE Working on Monitor App Reload Stats;\r\n\r\n IF description = 'Reload Start' THEN\r\n \t// Check for existing base status file\r\n\tIF FileSize('$(monitorAppStatsFile)') > 0 THEN\r\n monitor_app_reload_stats:\r\n Load * From '$(monitorAppStatsFile)' (txt, utf8, embedded labels, delimiter is '\\t', msq);\r\n ELSE\r\n Trace Did not find $(monitorAppStatsFile) - will create a new file.;\r\n monitor_app_reload_stats:\r\n Load * Inline [Log Entry, Log Timestamp, Log Severity,Host,Description,Log Message,Object];\r\n ENDIF\r\n\r\n Let appMonitorStatsRowsInit = NoOfRows('monitor_app_reload_stats');\r\n CALL monitor_app_stats_incremental\t\t// Add start message\r\n \r\n ELSEIF description = 'Status Message' THEN \r\n CALL monitor_app_stats_incremental\t\t// Add status message\r\n \r\n ELSEIF description = 'Reload Finish' THEN\r\n \tCALL monitor_app_stats_incremental\t\t// Add Finish message\r\n STORE monitor_app_reload_stats into '$(monitorAppStatsFile)' (txt, delimiter is '\\t');\r\n DROP TABLE monitor_app_reload_stats; \r\n TRACE $(message);\r\n \r\n ELSE\r\n \ttrace Something went wrong with the monitor app reload status messaging.;\r\n \r\n ENDIF\r\n\r\nENDSUB\r\n\r\n///$tab QRS\r\nSUB QRS\r\n\tTRACE Fetching data from Qlik Sense Repository (QRS) database;\r\n // If the connection fails (missing REST connector, can't connect to QRS) - the load script will fail :( \r\n //\tAlso, if no data is returned from the 
QRS, the load script will terminate as well because there is something wrong to be investigated :(\r\n LET NumRowsQRS = 0;\r\n SET QRS_RowCounts = 'QRS Row Counts: ';\r\n \r\n For each endpoint in 'monitor_apps_REST_user','monitor_apps_REST_app','monitor_apps_REST_appobject','monitor_apps_REST_task'\r\n \tCALL $(endpoint)\r\n DisConnect;\r\n\t\tLET rose\t\t\t= evaluate(NumRows_$(endpoint));\r\n LET rose\t\t\t= if(isnull(rose),0,rose);\r\n LET NumRowsQRS\t\t= $(NumRowsQRS) + $(rose);\r\n LET QRS_RowCounts \t= '$(QRS_RowCounts) $(endpoint) = $(rose) lines,';\r\n Next endpoint\r\n\r\n\tIf NumRowsQRS > 0 Then\r\n \tCALL monitor_app_reload_stats('INFO','Operations Monitor', '$(QRS_RowCounts)','Status Message')\r\n TRACE Reload Status: $(QRS_RowCounts);\r\n ELSE\t// No data fetched from QRS! This throws an error message, but will not fail the reload\r\n \t\tLET msg_qrs = 'There was a problem fetching data from QRS via the REST connector. We could connect, but failed to fetch data. $(QRS_RowCounts)';\r\n \t\tCALL monitor_app_reload_stats('ERROR','Operations Monitor', msg_qrs,'Status Message')\r\n // This msg_qrs message will be reported on the Log Details page\r\n ENDIF\r\n\r\nENDSUB\r\n///$tab qrs_user\r\nSUB monitor_apps_REST_user\r\n\r\n LIB CONNECT TO 'monitor_apps_REST_user';\r\n\r\n RestConnectorMasterTable:\r\n SQL SELECT \r\n \"id\" AS \"id_u2\",\r\n \"createdDate\" AS \"createdDate_u1\",\r\n \"modifiedDate\" AS \"modifiedDate_u1\",\r\n \"modifiedByUserName\" AS \"modifiedByUserName_u1\",\r\n \"userId\",\r\n \"userDirectory\",\r\n \"name\" AS \"name_u0\",\r\n \"inactive\",\r\n \"removedExternally\",\r\n \"blacklisted\",\r\n \"__KEY_root\",\r\n (SELECT \r\n \"@Value\" AS \"@Value_u0\",\r\n \"__FK_roles\"\r\n FROM \"roles\" FK \"__FK_roles\" ArrayValueAlias \"@Value_u0\"),\r\n (SELECT \r\n \"id\" AS \"id_u1\",\r\n \"createdDate\" AS \"createdDate_u0\",\r\n \"modifiedDate\" AS \"modifiedDate_u0\",\r\n \"modifiedByUserName\" AS \"modifiedByUserName_u0\",\r\n 
\"attributeType\",\r\n \"attributeValue\",\r\n \"externalId\",\r\n \"__FK_attributes\"\r\n FROM \"attributes\" FK \"__FK_attributes\")\r\n FROM JSON (wrap on) \"root\" PK \"__KEY_root\";\r\n \r\n LET NumRows_monitor_apps_REST_user = NoOfRows('RestConnectorMasterTable'); \r\n \r\n User:\r\n LOAD\r\n [createdDate_u1] AS [User Created],\r\n [modifiedDate_u1] AS [User Modified],\r\n [modifiedByUserName_u1] AS [User Modified By],\r\n userDirectory & '\\' & userId AS UserId,\r\n userDirectory as [User Directory],\r\n [name_u0] AS [User Name],\t\r\n [inactive] AS [User Inactive],\r\n [removedExternally] AS [User Removed Externally],\r\n [blacklisted] AS [User Blacklisted],\r\n [__KEY_root] AS _userKey\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_root]);\r\n\r\n userRoles:\r\n LOAD\t[@Value_u0] AS [User Role],\r\n [__FK_roles] AS _userKey\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_roles])\r\n and exists(_userKey,__FK_roles);\r\n \r\n userAttributes:\r\n LOAD\r\n [__FK_attributes] AS _userKey,\r\n [attributeType] AS [User Attribute Type],\r\n [attributeValue] AS [User Attribute Value] \r\n \r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_attributes])\r\n and exists(_userKey,__FK_attributes);\r\n \r\n // Expand Custom Properties and User Attributes -- TBD if kept long term\r\n If NoOfRows('userAttributes')>1 Then\r\n \tQualify *;\r\n Unqualify _userKey; \r\n \tUserAttributes:\r\n \tGeneric Load * resident userAttributes;\r\n Unqualify *;\r\n Drop Table userAttributes;\r\n Endif\r\n \r\n DROP TABLE RestConnectorMasterTable;\r\n\r\nENDSUB\r\n\r\n///$tab qrs_app\r\nSUB monitor_apps_REST_app\r\n \r\n LIB CONNECT TO 'monitor_apps_REST_app';\r\n \r\n RestConnectorMasterTable:\r\n SQL SELECT \r\n \"id\" AS \"id_u4\",\r\n \"createdDate\" AS \"createdDate_u0\",\r\n \"modifiedDate\" AS \"modifiedDate_u0\",\r\n \"modifiedByUserName\" AS \"modifiedByUserName_u0\",\r\n \"name\" AS \"name_u3\",\r\n \"publishTime\",\r\n 
\"published\",\r\n \"description\",\r\n \"fileSize\",\r\n \"lastReloadTime\",\r\n \"availabilityStatus\",\r\n \"__KEY_root\",\r\n (SELECT \r\n \"userId\",\r\n \"userDirectory\",\r\n \"__FK_owner\"\r\n FROM \"owner\" FK \"__FK_owner\"),\r\n (SELECT \r\n \"name\" AS \"name_u2\",\r\n \"__FK_stream\"\r\n FROM \"stream\" FK \"__FK_stream\")\r\n FROM JSON (wrap on) \"root\" PK \"__KEY_root\";\r\n \r\n LET NumRows_monitor_apps_REST_app = NoOfRows('RestConnectorMasterTable');\r\n \r\n map_stream:\r\n Mapping LOAD\t\r\n [__FK_stream] AS [__KEY_root],\r\n [name_u2] AS Stream\t\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_stream]);\r\n \r\n App_Stream:\r\n LOAD\r\n [id_u4] AS ObjectId,\r\n ApplyMap('map_stream',__KEY_root,'Unpublished') as [App Stream]\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_root]);\r\n \r\n map_app_owner:\r\n Mapping LOAD\r\n [__FK_owner] AS [__KEY_root],\r\n [userDirectory] & '\\' & [userId] as AppOwner\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_owner]);\r\n \r\n App:\r\n LOAD\t\r\n [id_u4] \t\t\t\t\tAS ObjectId,\r\n [id_u4]\t\t\t\t\tAS AppId,\r\n date(alt(\r\n date#(left(createdDate_u0,10),'YYYY-MM-DD'),\r\n date#(left(createdDate_u0,10),'YYYY/MM/DD'),\r\n date#(left(createdDate_u0,10),'MM-DD-YYYY'),\r\n date#(left(createdDate_u0,10),'MM/DD/YYYY'),\r\n date#(left(createdDate_u0,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n \t) as [App Created Date],\r\n date(alt(\r\n date#(left(modifiedDate_u0,10),'YYYY-MM-DD'),\r\n date#(left(modifiedDate_u0,10),'YYYY/MM/DD'),\r\n date#(left(modifiedDate_u0,10),'MM-DD-YYYY'),\r\n date#(left(modifiedDate_u0,10),'MM/DD/YYYY'),\r\n date#(left(modifiedDate_u0,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n \t) as [App Modified Date],\r\n [modifiedByUserName_u0] \tAS [App Modified By],\r\n [name_u3] \t\t\t\tAS [App Name QRS],\r\n if(left(publishTime,4)='1753','Never',timestamp(publishTime)) AS [App Publish Time],\r\n [published] \t\t\t\tAS [App Published],\r\n 
[description] \t\t\tAS [App Description],\r\n floor([fileSize]/1024)\tAS [App File Size],\t\t// In Kb\r\n timestamp([lastReloadTime]) AS [App Last Reload Time],\r\n [availabilityStatus] \t\tAS [App Availability Status],\r\n ApplyMap('map_app_owner',__KEY_root,'Unknown Owner') AS [App Owner]\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_root]);\r\n \r\n DROP TABLE RestConnectorMasterTable;\r\n \r\n // Unify the App Name from the Logs and from QRS (To present the most up-to-date App Name while preserving App Name history)\r\n tempAppName:\r\n NoConcatenate \r\n Load\r\n \tDistinct ObjectId,\r\n ObjectId as AppIdQRS,\r\n [App Name QRS]\r\n Resident App;\r\n \r\n Outer Join (tempAppName)\r\n Load\r\n \tDistinct ObjectId,\r\n ObjectId as AppIdHistorical,\r\n [App Name] as [App Name Historical]\r\n Resident LogContent\r\n Where len([App Name])>0;\r\n \r\n AppName:\r\n NoConcatenate\r\n Load\r\n \tObjectId,\r\n if(isnull([App Name QRS]),[App Name Historical],[App Name QRS]) as [App Name],\r\n if(isnull(AppIdQRS) and index(ObjectId,'|')=0,AppIdHistorical,AppIdQRS) as AppId,\r\n if(index(ObjectId,'|')>0,null(),isnull([App Name QRS])*-1) as [AppId Removed from QRS],\r\n [App Name Historical]\r\n Resident tempAppName;\r\n \r\n Drop Table tempAppName;\r\n Drop Field [App Name] from LogContent;\r\n Drop Field [App Name QRS] from App;\r\n Drop Field AppId from App;\r\n // End unify App Name\r\n\r\n // Add \"App Stream\" to removed apps so session entries appear in Session Details, which hides null App Streams (Qlik-74956)\r\n Concatenate (App_Stream)\r\n Load\r\n \tObjectId,\r\n 'Deleted App' as [App Stream]\r\n Resident AppName\r\n Where [AppId Removed from QRS]=1;\r\n \r\nENDSUB\r\n///$tab qrs_appobject\r\nSUB monitor_apps_REST_appobject\r\n\r\n LIB CONNECT TO 'monitor_apps_REST_appobject';\r\n \r\n RestConnectorMasterTable:\r\n SQL SELECT \r\n \"id\" AS \"id_u2\",\r\n \"createdDate\",\r\n \"modifiedDate\",\r\n \"modifiedByUserName\",\r\n 
\"description\",\r\n \"objectType\",\r\n \"publishTime\" AS \"publishTime_u0\",\r\n \"published\" AS \"published_u0\",\r\n \"approved\",\r\n \"name\" AS \"name_u2\",\r\n \"__KEY_root\",\r\n (SELECT \r\n \"userId\",\r\n \"userDirectory\",\r\n \"__FK_owner\"\r\n FROM \"owner\" FK \"__FK_owner\"),\r\n // \t(SELECT \r\n // \t\t\"@Value\",\r\n // \t\t\"__FK_tags\"\r\n // \tFROM \"tags\" FK \"__FK_tags\" ArrayValueAlias \"@Value\"),\r\n (SELECT \r\n \"id\" AS \"id_u1\",\r\n \"__KEY_app\",\r\n \"__FK_app\"\r\n FROM \"app\" PK \"__KEY_app\" FK \"__FK_app\")\r\n FROM JSON (wrap on) \"root\" PK \"__KEY_root\";\r\n \r\n LET NumRows_monitor_apps_REST_appobject = NoOfRows('RestConnectorMasterTable');\r\n \r\n owner_map:\r\n Mapping LOAD\r\n \t[__FK_owner] AS [__KEY_root],\r\n [userDirectory] & '\\' & userId AS uid \r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_owner]);\r\n \r\n // [tags]:\r\n // LOAD\t[@Value] AS [@Value],\r\n // \t[__FK_tags] AS [__KEY_root]\r\n // RESIDENT RestConnectorMasterTable\r\n // WHERE NOT IsNull([__FK_tags]);\r\n \r\n app_map:\r\n mapping LOAD \r\n \t[__FK_app],\r\n \t[id_u1] AS OjbectId\r\n// [__KEY_app] AS [__KEY_app],\r\n// [__FK_app] AS [__KEY_root]\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_app]);\r\n \r\n AppObject:\r\n LOAD\t\r\n [id_u2] \t\tAS AppObjectId,\r\n date(alt(\r\n date#(left(createdDate,10),'YYYY-MM-DD'),\r\n date#(left(createdDate,10),'YYYY/MM/DD'),\r\n date#(left(createdDate,10),'MM-DD-YYYY'),\r\n date#(left(createdDate,10),'MM/DD/YYYY'),\r\n date#(left(createdDate,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n \t) as [App Object Created Date], \r\n \tdate(alt(\r\n date#(left(modifiedDate,10),'YYYY-MM-DD'),\r\n date#(left(modifiedDate,10),'YYYY/MM/DD'),\r\n date#(left(modifiedDate,10),'MM-DD-YYYY'),\r\n date#(left(modifiedDate,10),'MM/DD/YYYY'),\r\n date#(left(modifiedDate,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n ) as [App Object Modified Date],\r\n [modifiedByUserName] AS [App Object 
Modified By],\r\n [description] \tAS [App Object Description],\r\n [objectType] \t\tAS [App Object Type],\r\n if(left(publishTime_u0,4)='1753','Never',timestamp([publishTime_u0])) \tAS [App Object Publish Time],\r\n If(lower([published_u0])='true',dual('Published',1),dual('Unpublished',0)) \tAS [App Object Published],\r\n If(lower([approved])='true',dual('Approved',1),dual('Not Approved',0)) \t\tAS [App Object Approved],\r\n [name_u2] \t\tAS [App Object Name],\r\n// [__KEY_root] \t\tAS [__KEY_root],\t\t// Will only need __KEY_root with tags or custom properties\r\n ApplyMap('owner_map',__KEY_root,'Missing App Object Owner') \tAS [App Object Owner],\r\n ApplyMap('app_map',__KEY_root,'Missing App') \t\t\t\t\tAS ObjectId\t// This is AppId to link to the App\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_root]);\r\n \r\n \r\n DROP TABLE RestConnectorMasterTable;\r\n\r\nENDSUB\r\n///$tab qrs_task\r\nSUB monitor_apps_REST_task\r\n \r\n LIB CONNECT TO 'monitor_apps_REST_task';\r\n \r\n RestConnectorMasterTable:\r\n SQL SELECT \r\n \"id\" as \"TaskId\",\r\n \"createdDate\",\r\n \"modifiedDate\",\r\n \"modifiedByUserName\",\r\n \"isManuallyTriggered\",\r\n \"name\" AS \"name_u2\",\r\n \"taskType\",\r\n \"enabled\",\r\n \"maxRetries\",\r\n \"__KEY_root\",\r\n (SELECT \r\n \"name\" AS \"name_u0\",\r\n \"__FK_app\"\r\n FROM \"app\" PK \"__KEY_app\" FK \"__FK_app\"),\r\n (SELECT \r\n \"nextExecution\",\r\n \"__FK_operational\"\r\n FROM \"operational\" PK \"__KEY_operational\" FK \"__FK_operational\"),\r\n (SELECT \r\n \"name\" AS \"name_u1\",\r\n \"type\",\r\n \"__FK_userDirectory\"\r\n FROM \"userDirectory\" FK \"__FK_userDirectory\")\r\n FROM JSON (wrap on) \"root\" PK \"__KEY_root\";\r\n \r\n LET NumRows_monitor_apps_REST_task = NoOfRows('RestConnectorMasterTable');\r\n \r\n map_task_app:\r\n MAPPING LOAD\r\n [__FK_app],\r\n [name_u0]\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_app]);\r\n \r\n \r\n map_operational:\r\n MAPPING LOAD\r\n 
__FK_operational,\r\n If(year(nextExecutionClean)>2013,nextExecutionClean,'') as nextExecutionFinal\r\n ;\r\n LOAD\r\n [__FK_operational],\r\n timestamp(alt(\r\n timestamp(nextExecution),\r\n timestamp#((nextExecution),'YYYY-MM-DD hh:mm:ss'),\r\n timestamp#((nextExecution),'YYYY/MM/DD hh:mm:ss'),\r\n timestamp#((nextExecution),'MM-DD-YYYY hh:mm:ss'),\r\n timestamp#((nextExecution),'MM/DD/YYYY hh:mm:ss'),\r\n timestamp#((nextExecution),'YYYYMMDDhhmmss'),\r\n 'No valid timestamp')\r\n ) AS nextExecutionClean\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_operational]);\r\n \r\n UDC:\r\n LOAD\t\r\n [name_u1] AS [User Directory Connector Name],\r\n subfield(type,'.',4) as [User Directory Connectory Type],\r\n [__FK_userDirectory] AS _task\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_userDirectory]);\r\n \r\n Task:\r\n LOAD\t\r\n TaskId,\t\t// Links with TaskId in LogContent, from Scheduler Audit Activity log\r\n date(alt(\r\n date#(left(createdDate,10),'YYYY-MM-DD'),\r\n date#(left(createdDate,10),'YYYY/MM/DD'),\r\n date#(left(createdDate,10),'MM-DD-YYYY'),\r\n date#(left(createdDate,10),'MM/DD/YYYY'),\r\n date#(left(createdDate,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n ) AS [Task Created],\r\n date(alt(\r\n date#(left(modifiedDate,10),'YYYY-MM-DD'),\r\n date#(left(modifiedDate,10),'YYYY/MM/DD'),\r\n date#(left(modifiedDate,10),'MM-DD-YYYY'),\r\n date#(left(modifiedDate,10),'MM/DD/YYYY'),\r\n date#(left(modifiedDate,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n ) AS [Task Modified],\r\n [modifiedByUserName] AS [Task Modified By],\r\n [isManuallyTriggered] AS [Task Manually Triggered],\r\n [name_u2] AS [Task Name QRS],\r\n If([taskType]=2,'User syncronization','Reload') AS [Task Type],\r\n if([enabled]='True','Enabled','Disabled') AS [Task Enabled],\r\n [maxRetries] AS [Task Max Retries],\r\n ApplyMap('map_operational',__KEY_root) as [Task Next Execution],\r\n If([taskType]=2,null(),ApplyMap('map_task_app',__KEY_root)) as [Task App 
Name],\r\n [__KEY_root] AS _task\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_root]);\r\n \r\n DROP TABLE RestConnectorMasterTable;\r\n\r\n //// Unify the Task Name from the Logs and from QRS (To present the most up-to-date Task Name while preserving App Name history)\r\n IF $(countOfReloads) = 0 then\t// No task reload\r\n \tTRACE No task reloads found.;\r\n Rename Field [Task Name QRS] to [Task Name];\r\n\r\n\t// Create mapping load for TaskId and Task Name\r\n map_taskName2:\r\n Mapping Load\r\n TaskId,\r\n [Task Name]\r\n Resident Task;\r\n \r\n ELSE\t\t// We have task reload history\r\n tempTaskName:\r\n NoConcatenate \r\n Load\r\n Distinct TaskId,\r\n [Task Name QRS]\r\n Resident Task;\r\n \r\n Outer Join (tempTaskName)\r\n Load\r\n Distinct TaskId,\r\n If([Reload from Hub]=1,'Reload from Hub of ' & ObjectName,Subfield(ObjectName,'|',1)) as [Task Name Historical]\r\n \tRESIDENT LogContent\r\n \tWHERE TaskExecution = 1;\r\n \r\n TaskName:\r\n NoConcatenate\r\n Load\r\n TaskId,\r\n if(isnull([Task Name QRS]),[Task Name Historical],[Task Name QRS]) as [Task Name],\r\n [Task Name Historical]\r\n Resident tempTaskName;\r\n \r\n Drop Table tempTaskName;\r\n Drop Field [Task Name] from ReloadSummary;\t\t// to avoid circular reference\r\n Drop Field [Task Name QRS] from Task;\r\n Drop field TaskExecution from LogContent;\t\t// to avoid synthetic key\r\n \r\n // Create Mapping load with TaskId and TaskName from Tasks endpoint\r\n map_taskName2:\r\n Mapping Load\r\n TaskId,\r\n [Task Name]\r\n Resident TaskName;\r\n \r\n ENDIF\t// check countOfReloads\r\n //// End unify App Name\r\n DisConnect;\r\n // Add Task Trigger & Dependency details\r\n LIB CONNECT TO 'monitor_apps_REST_event';\r\n \r\n RestConnectorMasterTable:\r\n SQL SELECT \r\n \"createdDate\" AS \"createdDate_u1\",\r\n \"modifiedDate\" AS \"modifiedDate_u1\",\r\n \"modifiedByUserName\" AS \"modifiedByUserName_u1\",\r\n \"name\" AS \"name_u2\",\r\n \"enabled\" AS \"enabled_u2\",\r\n 
\"eventType\",\r\n \"startDate\",\r\n \"expirationDate\",\r\n \"incrementDescription\",\r\n \"incrementOption\",\r\n \"__KEY_root\",\r\n \r\n (SELECT \r\n\t\t\t\"id\" AS \"id_u5\",\t// For compound triggers\r\n \"__KEY_compositeRules\",\r\n \"__FK_compositeRules\",\r\n (SELECT \r\n \"id\" AS \"id_u3\",\r\n \"__FK_reloadTask\",\r\n \"__KEY_reloadTask\"\r\n FROM \"reloadTask\" PK \"__KEY_reloadTask\" FK \"__FK_reloadTask\")\r\n FROM \"compositeRules\" PK \"__KEY_compositeRules\" FK \"__FK_compositeRules\"),\r\n\r\n (SELECT \r\n \"id\" AS \"id_u10\",\r\n \"__FK_reloadTask_u0\",\r\n \"__KEY_reloadTask_u0\"\r\n FROM \"reloadTask\" PK \"__KEY_reloadTask_u0\" FK \"__FK_reloadTask_u0\"),\r\n (SELECT \r\n \"id\" AS \"id_u14\",\r\n \"__FK_userSyncTask\",\r\n \"__KEY_userSyncTask\"\r\n FROM \"userSyncTask\" PK \"__KEY_userSyncTask\" FK \"__FK_userSyncTask\")\r\n FROM JSON (wrap on) \"root\" PK \"__KEY_root\";\r\n \r\n // To get Task Name (of task which has an UPSTREAM dependency in a trigger), link __FK_compositeRule with __KEY_reloadTask_u0 to return id_u10 as TaskId\r\n map_reloadTask:\r\n Mapping LOAD\r\n __KEY_reloadTask_u0 AS _reloadTask,\r\n id_u10 AS taskIdReloadTask\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_reloadTask_u0]);\r\n\r\n // To get Task Name of the actual upstream dependency (aka \"preceding task\"), link __KEY_compositeRule with __FK_reloadTask to return id_u3 as TaskId\r\n map_precedingTask:\r\n Mapping LOAD\r\n __FK_reloadTask AS _precedingTask,\r\n id_u3 AS taskIdPrecedingTask\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_reloadTask]);\r\n \r\n // Create composite task dependency for Task Triggers involving more than one task (e.g. 
Task2 reloads after Task0 and Task1 - which are independent from each other)\r\n composite1:\r\n LOAD\r\n \t__KEY_root,\r\n taskIdPrecedingTask,\r\n ApplyMap('map_taskName2',taskIdPrecedingTask,null()) as compRulePrecedingTaskName\r\n ;\r\n LOAD\r\n \t[__FK_compositeRules] AS [__KEY_root],\r\n ApplyMap('map_precedingTask',__KEY_compositeRules,null()) AS taskIdPrecedingTask\r\n RESIDENT RestConnectorMasterTable \r\n WHERE NOT IsNull([__FK_compositeRules]);\r\n \r\n taskId1:\r\n LOAD [__FK_reloadTask_u0] AS [__KEY_root],\r\n [id_u10] AS taskId\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_reloadTask_u0]);\r\n \r\n Concatenate (taskId1)\r\n LOAD\r\n [__FK_userSyncTask] AS [__KEY_root],\r\n [id_u14] AS taskId\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__FK_userSyncTask]);\r\n \r\n map_taskId:\r\n mapping Load \r\n\t__KEY_root,\r\n taskId\r\n resident taskId1;\r\n Drop Table taskId1;\r\n\r\n\r\n TaskTrigger:\r\n LOAD \r\n *,\r\n TaskId as tempId,\t// For tasks w/ no trigger, as explained below\r\n ApplyMap('map_taskName2',TaskId,null()) as [TaskTriggerTaskName]\r\n ;\r\n LOAD\r\n \t__KEY_root,\r\n date(alt(\r\n date#(left(modifiedDate_u1,10),'YYYY-MM-DD'),\r\n date#(left(modifiedDate_u1,10),'YYYY/MM/DD'),\r\n date#(left(modifiedDate_u1,10),'MM-DD-YYYY'),\r\n date#(left(modifiedDate_u1,10),'MM/DD/YYYY'),\r\n date#(left(modifiedDate_u1,10),'YYYY.MM.DD'),\r\n 'No valid date')\r\n ) AS [Task Trigger Modified],\r\n [modifiedByUserName_u1] AS [Task Trigger Modified By],\r\n IF(left(expirationDate,4)=9999,'No expiration',\r\n date(alt(\r\n date#(left(expirationDate,10),'YYYY-MM-DD'),\r\n date#(left(expirationDate,10),'YYYY/MM/DD'),\r\n date#(left(expirationDate,10),'MM-DD-YYYY'),\r\n date#(left(expirationDate,10),'MM/DD/YYYY'),\r\n date#(left(expirationDate,10),'YYYY.MM.DD'),\r\n 'No expiration')\r\n )) AS [Task Trigger Expiration Date],\r\n [incrementDescription] AS [Task Trigger Increment Description],\r\n [incrementOption] AS [Task 
Trigger Increment Option],\r\n [name_u2] AS [Task Trigger Name],\r\n [eventType] AS [Task Trigger Type],\r\n [enabled_u2] AS [Task Trigger Enabled],\r\n ApplyMap('map_taskId',__KEY_root,null()) as TaskId\r\n RESIDENT RestConnectorMasterTable\r\n WHERE NOT IsNull([__KEY_root]);\r\n \r\n DROP TABLE RestConnectorMasterTable;\r\n\r\n JOIN (TaskTrigger)\r\n LOAD \r\n __KEY_root,\r\n \tcompRulePrecedingTaskName as TaskTriggerDependencyTaskName ,\r\n \ttaskIdPrecedingTask as TaskTriggerTaskId\r\n RESIDENT composite1;\r\n DROP TABLE composite1;\r\n\r\n // Concatenate these tasks for those tasks which have no triggers (like Manual Reload of ...)\r\n //\tWithout this logic, any task which has downstream dependencies but which has no trigger will not appear in the Task Dependencies page\r\n Concatenate (TaskTrigger)\t\t\r\n Load\r\n// 0 as TaskTriggerCompositeCount,\r\n \tTaskTriggerTaskId as TaskId,\r\n TaskTriggerDependencyTaskName as TaskTriggerTaskName\r\n RESIDENT TaskTrigger\r\n WHERE not isnull (TaskTriggerTaskId)\r\n \tAND Not Exists (tempId,TaskTriggerTaskId);\r\n\r\n// A last ditch effort to make sure we show all Tasks in the Task Dependency charts\r\n IF $(countOfReloads) = 0 then\r\n Concatenate (TaskTrigger)\t\t\r\n Load\r\n 0 as TaskTriggerCompositeCount,\r\n TaskId,\r\n [Task Name] as TaskTriggerTaskName\r\n RESIDENT Task\r\n WHERE Not Exists (tempId,TaskId); \r\n ELSE\r\n Concatenate (TaskTrigger)\t\t\r\n Load\r\n 0 as TaskTriggerCompositeCount,\r\n TaskId,\r\n [Task Name] as TaskTriggerTaskName\r\n RESIDENT TaskName\r\n WHERE Not Exists (tempId,TaskId); \r\n ENDIF\r\n \r\n DROP FIELD tempId;\r\n\r\n // Build Task Hierarchy\r\n Staging:\r\n Load\r\n DISTINCT TaskId,\r\n TaskTriggerTaskName AS TaskName,\r\n TaskTriggerTaskId AS ParentTaskId,\r\n TaskTriggerDependencyTaskName AS ParentTaskName\r\n Resident TaskTrigger;\r\n \r\n TaskHierarchy:\r\n Hierarchy (TaskId, ParentTaskId, \"Task Hierarchy\", ParentTaskName, \"Task Hierarchy\", TaskPath, ' >> ', 
TaskDepth)\r\n Load \r\n TaskId,\r\n ParentTaskId,\r\n TaskName as [Task Hierarchy]\r\n RESIDENT Staging;\r\n\r\n TaskTree:\r\n HierarchyBelongsTo (TaskId, ParentTaskId, \"Task Downstream\", TaskTreeID, TaskTreeName)\r\n Load \r\n TaskId,\r\n ParentTaskId,\r\n TaskName as [Task Downstream]\r\n RESIDENT Staging;\r\n \r\n drop table Staging;\r\n \r\n maxTaskDepth:\r\n Load max(TaskDepth) as maxTaskDepth Resident TaskHierarchy;\r\n LET maxTaskDepth = peek('maxTaskDepth');\r\n \r\n // Calculate Median, Avg, Max reload time for task chains\r\n // 1- Reduce TaskTree to distinct values (no duplicates)\r\n TaskTreeDistinct:\r\n NoConcatenate Load\r\n \tTaskId,\r\n TaskTreeID,\r\n MaxString(TaskTreeName) as TaskTreeName,\r\n MaxString([Task Downstream]) as [Task Downstream]\r\n RESIDENT TaskTree\r\n Group By TaskId, TaskTreeID;\r\n Drop Table TaskTree;\r\n Rename Table TaskTreeDistinct to TaskTree;\r\n \r\n // 2- Get reload stats for tasks in task chains for last 28 days (to keep this 'group by' manageable\r\n IF $(countOfReloads)>0 THEN\r\n \r\n Left Join (TaskTree)\t\t// We want this data on the TaskTree table for easy TaskTree analysis\r\n Load\r\n _reloadSummaryTaskId as TaskId,\t// Join on this field\r\n Round(Median([Reload Duration]),0.02) as TaskTreeMedianDuration,\r\n Round(Avg([Reload Duration]),0.02) as TaskTreeAverageDuration,\r\n Round(Max([Reload Duration]),0.02) as TaskTreeMaxDuration\r\n RESIDENT ReloadSummary\r\n WHERE [Reload Finish] >= today(1)-28\r\n Group By _reloadSummaryTaskId;\r\n \r\n Drop Field _reloadSummaryTaskId;\t// Field no longer needed on Reload Summary table\r\n \r\n ENDIF\r\n \r\nENDSUB\r\n\r\n///$tab supportingLogic\r\nSUB supportingLogic\r\n \r\n TRACE Working on supporting logic;\r\n\r\n ////// Colors\r\n set c_red\t\t\t\t\t= 'RGB(204,102,119)';\r\n set c_yellow\t\t\t\t= 'RGB(221,204,119)';\r\n set c_blue\t\t\t\t= 'RGB(68,119,170)';\r\n set c_green\t\t\t\t= 'RGB(17,119,51)';\r\n set c_gray \t\t\t\t= 'RGB(150,150,150)';\r\n set 
c_lightred \t\t\t= 'RGB(240,209,214)';\r\n set c_lightblue \t\t\t= 'RGB(188,181,201)'; \r\n \r\n Tasks_temp:\r\n Load * Inline \r\n \t[\r\n ReloadStatusTEMP, taskReloadStatusColor, taskReloadStatusSort\r\n Failed,\"$(c_red)\",1\r\n Aborted,\"$(c_yellow)\",2\r\n Success,\"$(c_blue)\",3\r\n *, \"$(c_gray)\",4\r\n \t];\r\n\r\n \r\n [Reload Color Sort]:\r\n Load\r\n \tReloadStatusTEMP as [Reload Status],\r\n taskReloadStatusColor,\r\n taskReloadStatusSort\r\n RESIDENT Tasks_temp\r\n WHERE exists([Reload Status],ReloadStatusTEMP);\r\n \r\n drop table Tasks_temp;\r\n \r\n//// Additional Items\r\n // Dimension table to support Dashboard \"Excel\" chart\r\n dim_dash:\r\n LOAD * INLINE\r\n [\r\n dimNum, dimName\r\n 1, Max Sense Engine CPU\r\n 2, Max Sense Engine RAM\r\n 3, Max Concurrent Users\r\n 4, Max Concurrent Apps\r\n 5, User Sessions\r\n 6, Reloads\r\n 7, Reload Failures\r\n 8, Avg Reload Duration\r\n 9, Errors & Warnings\r\n ];\r\n \r\n // Limit Smart search to relevant fields\r\n Search Include *;\r\n Search Exclude [*Id],[dim*],[_*],[folder*],[*ort];\r\n Search Include UserId;\r\n\r\nENDSUB\r\n///$tab finalize\r\nSUB finalize\r\n\r\n TRACE Finalizing things...;\r\n\r\n If firstReload = 1 THEN\r\n SET PriorReloadDuration = 0;\t\t// Initialize ReloadDuration for first reload\r\n ELSE\r\n \tLet PriorReloadDuration = ReloadDuration; \r\n END IF\r\n \r\n //// Set Reload Stats Variables\t//// \r\n Let ReloadDuration = interval(now(1)-ReloadStartTime,'hh:mm:ss');\r\n \r\n IF storeBaseTableFail = 0 then\r\n Let LastSuccessfulReloadStartTime = ReloadStartTime;\r\n ELSE\r\n Let LastSuccessfulReloadStartTime = LastReloadTime;\t// reset this to prior reload time\r\n END IF\r\n\r\n// Monitor reload statistics\r\n Let ttlRows \t\t= num(NoOfRows('LogContent'),'#,##0');\r\n let hst\t\t\t= lower(ComputerName());\r\n let ahora\t\t\t= now(1);\r\n \r\n // Check to see if there were any reload errors associated with this app; report them on the Log Details page\r\n let reloadWarn\t= 
NoOfRows('monitor_app_reload_stats')-$(appMonitorStatsRowsInit)-1;\t// There will already be an 'reload start' entry in this table\r\n let reloadWarnMsg\t= if(reloadWarn>1,' Reloaded with ' & reloadWarn & ' warning(s). Consult the Operations_Monitor_Reload_Stats.txt log for details.','');\r\n LET reloadWarnMsg\t= reloadWarnMsg & if(NumRowsQRS>0,'',msg_qrs);\t// Add error message if failure to fetch data from qrs\r\n Let msg\t\t\t= 'Reloaded at $(ahora) on $(hst) for $(ReloadDuration) with $(ttlRows) log entries from $(logSource).$(reloadWarnMsg)';\r\n \r\n // Write final reload message and store App Reload Stats\r\n CALL monitor_app_reload_stats('INFO','Operations Monitor',msg,'Reload Finish')\r\n\r\n \r\nENDSUB\r\n\r\n///$tab run_logic\r\n//// Reload Logic ////\r\n\r\nCALL monitor_app_reload_stats('INFO','Operations Monitor', startMsg,'Reload Start')\r\n\r\nCALL verify_database\r\n\r\nREM Load the historical (incremental) QVD if it exists;\r\nCALL load_base_table ('LogContent', '$(baseTableName)','LogTimeStamp')\r\n\r\nREM initialize working tables;\r\nworking:\r\nLoad * inline [ProxyPackageId,RequestSequenceId]; \r\n\r\nCALL mappingLoads\r\n\r\nREM The log source (file or database) determines how the log data are loaded, which is defined next;\r\nIF db_v_file = 1 THEN // File logs as source\r\n CALL logList\r\n CALL defineFields\r\n CALL multiNodeConfig\r\n CALL logFolderList\r\n // This loops through the Sense\\Log folder on the central node + each [hostname] folder in the Sense\\Repository\\Archived Logs folder\r\n for i = 0 to noofrows('logFolderList')-1\r\n // Loop through each logfile enumerated in the logList SUB\r\n FOR j = 0 to noofrows('logList')-1 \r\n CALL loadFiles (i,j)\r\n next j\r\n next i\r\n SET logSource = 'Log Files';\r\n SET LastReloadSource = 1;\r\n \r\nELSEIF db_v_file = 2 THEN // Database log as source\r\n CALL load_database_logs\r\n SET logSource = 'Log Database';\r\n SET LastReloadSource = 2;\r\n\r\nELSE\r\n TRACE There was a problem 
determining which source to use (file or database). Contact Qlik support.; // This should not happen, but just in case.\r\n\r\nENDIF\r\n\r\nLet rowsWorkingFinal = num(NoOfRows('working'),'#,##0');\r\ntrace $(rowsWorkingFinal) incremental rows loaded;\r\n\r\nCALL calendarization\t// Create calendar incrementally; perform before concat & store of LogContent\r\nCALL concat_tables ('LogContent', 'working','Id')\r\nCALL store_files ('LogContent', '$(baseTableName)')\r\n\r\nCALL serviceLog\t\t\t\t// \"Service\" for database derived from Logger -- subfield(Logger,'.',2) - the second part; System.Engine.Engine\r\nREM Summary tables created for reloads, sessions, and exporting;\r\nCALL reloadSummary\r\nCALL sessionSummary\r\nCALL exportingSummary\t\t// TODO - No printing logs in database\r\n\r\nCALL calendarization_add\t// To account for any \"new\" date_time_links generated in the Summary loads above; In calendarization section\r\n\r\nCALL QRS\t\t// Call QRS data AFTER LogContent table is stored\r\n\r\nIF db_v_file = 1 THEN\t// These tables only exist with file log mode\r\n DROP TABLES logList, logFolderList;\r\nENDIF\r\n\r\nCALL supportingLogic\r\nCALL finalize\r\n\r\n///$tab Reference\r\n/* REFERENCE information\r\n\r\n/// Notes about specific fields:\r\n\r\nTask Duration\t \tGiven in minutes, rounded to 0.02 minutes (~1 second). For Average duration using the \"interval\" format (given in seconds),\r\n\t\t\t\t\t\tuse [Task Duration]/24/60 = [Task Duration]/1440\r\nVM (Server ram) \tGiven in GB (i.e. 
takes recorded value of MB / 1024).\r\nCPU Load\t\t\tGiven in fraction of 1 -- just format to % (no need to divide by 100)\r\nSession Duration\tRounded to the minute; If you want a duration, divide by 1440 (24*60)\r\n\r\nASCII http://www.ascii-code.com/\r\nchr(39)\t'\r\nchr(32) [space]\r\nchr(44) ,\r\nchr(34) \"\r\nchr(45) -\r\nchr(46) .\r\nchr(47) /\r\nchr(42) *\r\nchr(124) |\r\nchr(92) \\\r\n\r\nReload Result Codes\r\n0 - Ok\r\n5 - Task not found\r\n10 - License not found\r\n15 - Scheduler is not master\r\n20 - Already active session found\r\n25 - Task disabled\r\n30 - TaskExecutionSession already exists for App [App.Name]\r\n35 - App is not Enabled (ie. not migrated properly)\r\n40 - No slave-nodes found\r\n45 - Failed to create task execution session\r\n50 - Unexpected exception (message will be written in the Message column)\r\n55 - Unauthorized state change\r\n60 - Task failed when wrapping up fail-scenario\r\n65 - Task finished unsuccessful\r\n70 - Failed to delete task execution session\r\n75 - App not found\r\n80 - No retry (as in not allowed or not possible)\r\n85 - Suppressed state change\r\n90 - No task execution session found\r\n95 - Failed to update task execution session\r\n\r\n\r\n*/"
}