diff --git a/metrics/pvstats.py b/metrics/pvstats.py
index 0088fc51..2a92e078 100644
--- a/metrics/pvstats.py
+++ b/metrics/pvstats.py
@@ -18,6 +18,7 @@ def __init__(self, id):
         self.lastSuccessfulJob = ""
         self.lastFailedJob = ""
         self.lastAlarmedJob = ""
+        self.preReceivedAuditEventCount = 0
         self.receivedAuditEventCount = 0
         self.employedWorkers = 0
         self.assignedJobs = 0
@@ -98,7 +99,7 @@ def report(self):
 lastSuccessfulJob: \033[0;32;40m$lastSuccessfulJob\033[0;0m
 lastFailedJob: \033[0;33;40m$lastFailedJob\033[0;0m
 lastAlarmedJob: \033[0;31;40m$lastAlarmedJob\033[0;0m
-rcvd_events: \033[0;96;40m$rcvd_events\033[0;0m throughput: \033[0;95;40m$throughput\033[0;0m up_time: $upTime
+rcvd_events: \033[0;96;40m$rcvd_events\033[0;0m in_rate: \033[0;96;40m$in_rate\033[0;0m throughput: \033[0;95;40m$throughput\033[0;0m up_time: $upTime
 build_name: $buildName version: $buildVersion
 employedWorkers: $employedWorkers assignedJobs: $assignedJobs unassignedJobs: $unassignedJobs
 """
@@ -126,6 +127,7 @@ def report(self):
         lastFailedJob=report.lastFailedJob,
         lastSuccessfulJob=report.lastSuccessfulJob,
         rcvd_events=report.receivedAuditEventCount,
+        in_rate=int(report.preReceivedAuditEventCount / upseconds),
         buildName=report.buildName,
         buildVersion=report.buildVersion,
         upTime=upstring,
@@ -182,7 +184,7 @@ def parse_isoduration(isostring, as_dict=False):
 args = parser.parse_args()
 
 consumer = kafka3.KafkaConsumer(bootstrap_servers=args.msgbroker, auto_offset_reset='earliest')
-consumer.subscribe( ['Protocol_Verifier_Statistics','Protocol_Verifier_InfoWarn'] )
+consumer.subscribe( ['Protocol_Verifier_Statistics','Protocol_Verifier_InfoWarn','Protocol_Verifier_Reception'] )
 
 # initialise a report
 report = Report(1)
@@ -199,6 +201,8 @@ def parse_isoduration(isostring, as_dict=False):
             report.consume_statistics(s)
         elif tp.topic == 'Protocol_Verifier_InfoWarn':
             report.consume_infowarn(s)
+        elif tp.topic == 'Protocol_Verifier_Reception':
+            report.preReceivedAuditEventCount += 1
         # log periodically
         t1 = time.monotonic()
         if ( t1 - t0 ) > 1: