Ajout nouvelle version de parseStats
This commit is contained in:
parent
2719c12ebf
commit
ef2ae4acdb
222
statistiques/parseStats_ng.py
Executable file
222
statistiques/parseStats_ng.py
Executable file
@ -0,0 +1,222 @@
|
||||
#! /usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# Imports
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import datetime
|
||||
from calendar import monthrange
|
||||
import sqlite3
|
||||
|
||||
# Constants
# Directory holding the raw per-run Jitsi stat dump files.
STAT_DIR = '/srv/visio.chapril.org/statistiques/'

# Jitsi stat keys consolidated as monthly totals (summed across samples).
STATS_TOT_FIELDS = ['total_conferences_created','total_failed_conferences','total_conferences_completed','total_conference_seconds','total_bytes_received','total_bytes_sent','total_participants','conferences','videochannels','endpoints_sending_audio',]
# French display labels, index-aligned with STATS_TOT_FIELDS.
STATS_FR_TOT_FIELDS = ['conferences creees total','conferences totalement echouees','conferences terminees total','duree totale conferences','total octets reçus','total octets envoyés','total participants','nombre de conferences','canaux video','clients en audio',]
# Stat keys where the monthly MAXIMUM is kept instead of a sum.
STATS_AVG_FIELDS = ['largest_conference',]
# French display labels, index-aligned with STATS_AVG_FIELDS.
STATS_FR_AVG_FIELDS = ['plus grande conference',]
# SQLite database file and table name used by the SQLite wrapper class.
dbPath = '/srv/visio.chapril.org/statistiques/stats_sqlite.db'
dbName = 'jitsi_stats'
|
||||
|
||||
# Classes
|
||||
class Stats:
    """Consolidate Jitsi statistics for one month.

    Two data paths exist: parse2() reads samples from the SQLite table,
    parse() (legacy) reads the flat files in STAT_DIR.
    """

    def __init__(self, year, mois):
        """Build the consolidator for the given month.

        year: 4-digit year (int); mois: month number 1-12 (int).
        Side effects: opens/initializes the stats DB and lists STAT_DIR.
        """
        self.db = SQLite()
        self.year = year
        self.mois = mois
        self.files = os.listdir(STAT_DIR)
        # Both bounds start unset; __initBounds() fills them right below.
        # Bug fix: the original wrote 'self.EndDate' (capital E), leaving
        # a stray attribute instead of initializing self.endDate.
        self.startDate = self.endDate = None
        self.consolided = {}
        self.consolided_datas = {}
        self.__initBounds()

    def __initBounds(self):
        """Set startDate/endDate to the first and last second of the month."""
        self.__setStartDate(f'{self.year}-{self.mois}-01 00:00:00')
        # monthrange()[1] is the number of days in the month.
        maxDays = monthrange(self.year, self.mois)[1]
        self.__setEndDate(f'{self.year}-{self.mois}-{maxDays} 23:59:59')

    def __setStartDate(self, thisDate):
        self.startDate = self.__date2timestamp(thisDate)

    def __setEndDate(self, thisDate):
        self.endDate = self.__date2timestamp(thisDate)

    def getStartDate(self):
        """Return the month-start Unix timestamp, or 'undefined' if unset."""
        if self.startDate is not None:
            return self.startDate
        else:
            # Typo fix: was 'undefiined'.
            return 'undefined'

    def getEndDate(self):
        """Return the month-end Unix timestamp, or 'undefined' if unset."""
        if self.endDate is not None:
            return self.endDate
        else:
            # Typo fix: was 'undefiined'.
            return 'undefined'

    def __date2timestamp(self, thisDate):
        """Convert 'YYYY-MM-DD HH:MM:SS' (local time) to an int Unix timestamp."""
        timestamp = time.mktime(time.strptime(thisDate, '%Y-%m-%d %H:%M:%S'))
        return int(timestamp)

    def __conv(self, octets, dataType='b'):
        """Scale a raw value to a human-readable unit.

        dataType 'b': bytes -> octets/ko/Mo/Go (steps of 1024).
        dataType 't': seconds -> s/min/h (steps of 60).
        Returns (value truncated to one decimal, unit label).
        Raises ValueError on an unknown dataType (used to die with a
        NameError because 'unit' was never bound).
        """
        if dataType == 'b':
            unit = 'octets'
            if int(octets) > 1024:
                octets = int(octets) / 1024
                unit = 'ko'
            if int(octets) > 1024:
                octets = int(octets) / 1024
                unit = 'Mo'
            if int(octets) > 1024:
                octets = int(octets) / 1024
                unit = 'Go'
        elif dataType == 't':
            unit = 's'
            if int(octets) > 60:
                unit = 'min'
                octets = octets / 60
            if int(octets) > 60:
                unit = 'h'
                octets = octets / 60
        else:
            raise ValueError(f"unknown dataType: {dataType!r}")
        # Keep one decimal place (truncated, not rounded).
        octets = int(octets * 10) / 10
        return octets, unit

    def parse2(self):
        """Consolidate stats from the SQLite table for the month window.

        Rows are (id, timestamp, key_field, value_field). Keys listed in
        STATS_TOT_FIELDS are summed; keys in STATS_AVG_FIELDS keep their
        maximum. Returns a dict key -> consolidated int.
        """
        # startDate/endDate are ints computed locally, so the f-string
        # interpolation cannot inject SQL here.
        res = self.db.dbQuery(f"""SELECT * FROM {dbName} WHERE timestamp > {self.startDate} AND timestamp < {self.endDate}""")
        consolided = {}
        for line in res:
            field = line[2]
            if field in STATS_TOT_FIELDS:
                consolided[field] = consolided.get(field, 0) + int(line[3])
            if field in STATS_AVG_FIELDS:
                if field in consolided:
                    if consolided[field] < int(line[3]):
                        consolided[field] = int(line[3])
                else:
                    # Bug fix: the first sample was never stored, so
                    # max-type fields (largest_conference) could never
                    # appear in the result at all.
                    consolided[field] = int(line[3])
        return consolided

    def parse(self):
        """Legacy consolidation from the flat stat files in STAT_DIR.

        Files are 'name_x_y_<timestamp>.ext'; each line is 'key;value'.
        For total fields the first and highest counter values of the
        month are kept and the difference reported; byte/second values
        are rendered in human units. Returns the consolidated dict, or
        None when STAT_DIR is empty.
        """
        if len(self.files) <= 0:
            return None
        for fname in self.files:
            # Skip the SQLite database file itself.
            if '.db' in fname:
                continue
            # The 4th '_'-separated part before the extension is assumed
            # to be a Unix timestamp — TODO confirm naming convention.
            ts = int(fname.split('.')[0].split('_')[3])
            if ts >= self.startDate and ts <= self.endDate:
                with open(f"{STAT_DIR}/{fname}") as fh:
                    file_lines = fh.readlines()
                for line in file_lines:
                    key = line.split(';')[0].lower()
                    if key in STATS_TOT_FIELDS:
                        value = int(line.split(';')[1])
                        if key in self.consolided_datas:
                            # bounds[0] = first counter value seen,
                            # bounds[1] = highest counter value seen.
                            bounds = self.consolided_datas[key]
                            if bounds[0] <= 0:
                                bounds[0] = value
                            else:
                                if value > bounds[1]:
                                    bounds[1] = value
                        else:
                            self.consolided_datas[key] = [value, 0]

                    if key in STATS_AVG_FIELDS:
                        value = int(line.split(';')[1])
                        if key in self.consolided:
                            if self.consolided[key] < value:
                                self.consolided[key] = value
                        else:
                            self.consolided[key] = 0

        # Monthly delta = highest counter minus first counter; format
        # byte and duration values with human-readable units.
        for (k, v) in self.consolided_datas.items():
            self.consolided[k] = v[1] - v[0]
            if 'byte' in k:
                amount, unit = self.__conv(self.consolided[k])
                self.consolided[k] = f"{amount} {unit}"
            elif 'seconds' in k:
                amount, unit = self.__conv(self.consolided[k], 't')
                self.consolided[k] = f"{amount} {unit}"
        return self.consolided
|
||||
|
||||
class SQLite:
    """Thin wrapper around the sqlite3 stats database.

    Creates the jitsi_stats table on first use; opens and closes the
    connection around every query so no handle is kept long-lived.
    """

    def __init__(self):
        # Create the schema only when the database file does not exist yet.
        if not os.path.isfile(dbPath):
            self.__initDb()

    def __initDb(self):
        """Create the jitsi_stats table (id, timestamp, key, value)."""
        self.__openDb()
        self.cursor.execute(''' create table jitsi_stats(
            id integer primary key autoincrement,
            timestamp text,
            key_field text,
            value_field text
            )
            ''')
        self.conn.commit()
        self.__closeDb()

    def __openDb(self):
        self.conn = sqlite3.connect(dbPath)
        self.cursor = self.conn.cursor()

    def __closeDb(self):
        self.cursor.close()
        self.conn.close()

    def dbQuery(self, query='SELECT'):
        """Run a read query and return all rows as a list of tuples."""
        self.__openDb()
        try:
            self.cursor.execute(query)
            rows = self.cursor.fetchall()
        finally:
            # Always release the connection, even when the query fails.
            self.__closeDb()
        return rows

    def dbInsert(self, ts, k, v):
        """Insert one (timestamp, key, value) sample.

        Bug fix: the table name was misspelled 'jististats' (the schema
        creates 'jitsi_stats'), so every insert raised OperationalError.
        Also switched to a parameterized query: the old f-string broke on
        values containing quotes and was injectable.
        """
        self.__openDb()
        try:
            self.cursor.execute(
                "INSERT INTO jitsi_stats (timestamp,key_field,value_field) VALUES (?,?,?)",
                (ts, k, v))
            self.conn.commit()
        finally:
            self.__closeDb()
|
||||
|
||||
# Fonctions
|
||||
|
||||
# Principal
|
||||
def runMain():
    """CLI entry point: parse args (month, optional year) and print stats.

    Usage: parseStats_ng.py <mois 1-12> [year]
    Exits with status 1 on missing or invalid arguments; otherwise prints
    one 'key=value' line per consolidated statistic for that month.
    """
    if len(sys.argv) <= 1:
        print('Argument manquant: mois')
        sys.exit(1)

    try:
        mois = int(sys.argv[1])
    except ValueError:
        print('Le mois doit etre un nombre compris entre 1 et 12 !')
        sys.exit(1)

    if mois < 1 or mois > 12:
        print('Le mois doit etre un nombre compris entre 1 et 12 !')
        sys.exit(1)

    currentDate = datetime.date.today()
    if len(sys.argv) >= 3:
        try:
            year = int(sys.argv[2])
        except ValueError:
            # Robustness fix: a non-numeric year used to crash with an
            # uncaught ValueError traceback, unlike the month argument.
            print("L'annee doit etre un nombre !")
            sys.exit(1)
    else:
        # Default to the current year when no year argument is given.
        year = currentDate.year

    stats = Stats(year, mois)
    res = stats.parse2()
    for (k, v) in res.items():
        print(f"{k}={v}")
    # The legacy flat-file path remains available via stats.parse();
    # the commented-out French-label display code was removed.
|
||||
|
||||
# Entry point: run only when executed as a script, not when imported.
if __name__ == '__main__':
    runMain()
# End of program
|
Loading…
Reference in New Issue
Block a user