Enable new-generation statistics
commit 9f34f102af (parent 048fb189b3)

This commit switches the visio.chapril.org statistics scripts from per-run CSV files to the SQLite store: the collector now only inserts into the database, and the reporting script's DB-based parse2() becomes the official parse().
@@ -61,11 +61,9 @@ def runMain():
     response = requests.get(api_url,timeout=1)
     element = datetime.datetime.strptime(response.json()['current_timestamp'],'%Y-%m-%d %H:%M:%S.%f')
     timestamp = int(time.mktime(element.timetuple()))
-    with open(f'/{statsPath}/jisti_meet_stats_{timestamp}.csv','w') as fh:
-        for (k,v) in response.json().items():
-            db.dbInsert(timestamp,k,v)
-            fh.write(f"{k};{v};{timestamp}\n")
-    db.dbQuery()
+    for (k,v) in response.json().items():
+        db.dbInsert(timestamp,k,v)
+    #db.dbQuery()
 
 # Principal
 if __name__ == '__main__':
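For context: after this hunk the collector only records into SQLite; the per-run CSV dump (with its `jisti_meet_stats` filename typo) is gone. Below is a minimal, self-contained sketch of the resulting flow. It assumes the Jitsi Videobridge colibri stats endpoint (api_url here is a placeholder) and the jitsi_stats table created by the second file of this diff; the real script defines api_url, statsPath and db elsewhere.

import datetime
import sqlite3
import time

import requests

api_url = 'http://localhost:8080/colibri/stats'  # placeholder endpoint
dbPath = '/srv/visio.chapril.org/statistiques/stats_sqlite.db'

def collect_once():
    # Fetch the bridge statistics and derive a Unix timestamp from the
    # bridge's own clock, as runMain() does.
    response = requests.get(api_url, timeout=1)
    stats = response.json()
    element = datetime.datetime.strptime(stats['current_timestamp'],
                                         '%Y-%m-%d %H:%M:%S.%f')
    timestamp = int(time.mktime(element.timetuple()))
    # One row per (timestamp, key, value); assumes the jitsi_stats table
    # from __initDb() already exists.
    conn = sqlite3.connect(dbPath)
    with conn:
        conn.executemany(
            'INSERT INTO jitsi_stats(timestamp,key_field,value_field) '
            'VALUES (?,?,?)',
            [(timestamp, k, str(v)) for (k, v) in stats.items()])
    conn.close()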
@@ -14,8 +14,8 @@ STAT_DIR = '/srv/visio.chapril.org/statistiques/'
 
 STATS_TOT_FIELDS = ['total_conferences_created','total_failed_conferences','total_conferences_completed','total_conference_seconds','total_bytes_received','total_bytes_sent','total_participants','conferences','videochannels','endpoints_sending_audio',]
 STATS_FR_TOT_FIELDS = ['conferences creees total','conferences totalement echouees','conferences terminees total','duree totale conferences','total octets reçus','total octets envoyés','total participants','nombre de conferences','canaux video','clients en audio',]
-STATS_AVG_FIELDS = ['largest_conference',]
-STATS_FR_AVG_FIELDS = ['plus grande conference',]
+STATS_MAX_FIELDS = ['largest_conference',]
+STATS_FR_MAX_FIELDS = ['plus grande conference',]
 dbPath = '/srv/visio.chapril.org/statistiques/stats_sqlite.db'
 dbName = 'jitsi_stats'
 
@@ -36,7 +36,6 @@ class Stats:
         maxDays = monthrange(self.year,self.mois)[1]
         self.__setEndDate(f'{self.year}-{self.mois}-{maxDays} 23:59:59')
 
-
     def __setStartDate(self,thisDate):
         self.startDate = self.__date2timestamp(thisDate)
 
@@ -60,7 +59,6 @@ class Stats:
         return int(timestamp)
-
     def __conv(self,octets,dataType='b'):
 
         if dataType == 'b':
             unit = 'octets'
             if int(octets) > 1024:
@@ -83,64 +81,43 @@ class Stats:
             octets = int(octets * 10) / 10
         return octets,unit
 
-    def parse2(self):
-        res = self.db.dbQuery(f"""SELECT * FROM {dbName} WHERE timestamp > {self.startDate} AND timestamp < {self.endDate}""")
+    def parse(self):
+        res = self.db.dbQuery(f"""SELECT * FROM {dbName} WHERE timestamp > {self.startDate} AND timestamp < {self.endDate} ORDER by id""")
         consolided = {}
+        moy_conf_by_day = 0
         for line in res:
             field = line[2]
             if field in STATS_TOT_FIELDS:
-                if field in consolided:
+                if field in consolided and 'total' not in field:
                     consolided[field] = consolided[field] + int(line[3])
                 else:
                     consolided[field] = int(line[3])
-            if field in STATS_AVG_FIELDS:
+            if field == 'conferences':
+                moy_conf_by_day += 1
+
+            if field in STATS_MAX_FIELDS:
                 if field in consolided:
                     if consolided[field] < int(line[3]):
                         consolided[field] = int(line[3])
+        for (k,v) in consolided.items():
+            if 'bytes' in k:
+                (v,u) = self.__conv(consolided[k])
+                consolided[k] = f"{v} {u}"
+            if 'seconds' in k:
+                (v,u) = self.__conv(consolided[k],dataType='t')
+                consolided.pop(k)
+                n_k = k.replace('_seconds','')
+                consolided[n_k] = f"{v} {u}"
+
+        if moy_conf_by_day > 1:
+            tot = consolided['conferences']
+            moy_conf_by_day = int(moy_conf_by_day / 12 + 0.5)
+            moy = int(tot/moy_conf_by_day + 0.5)
+            consolided.pop('conferences')
+            consolided['average conferences by day'] = moy
+
         return consolided
 
-    def parse(self):
-        if len(self.files) <= 0:
-            return None
-        for f in self.files:
-            if '.db' in f:
-                continue
-            ts = int(f.split('.')[0].split('_')[3])
-            if ts >= self.startDate and ts <= self.endDate:
-                with open(f"{STAT_DIR}/{f}") as fh:
-                    datas = fh.readlines()
-                for line in datas:
-                    if line.split(';')[0].lower() in STATS_TOT_FIELDS:
-                        key = line.split(';')[0].lower()
-                        value = int(line.split(';')[1])
-                        if key in self.consolided_datas:
-                            datas = self.consolided_datas[key]
-                            if datas[0] <= 0:
-                                self.consolided_datas[key][0] = value
-                            else:
-                                if value > datas[1]:
-                                    self.consolided_datas[key][1] = value
-                        else:
-                            self.consolided_datas[key] = [value,0]
-
-                    if line.split(';')[0].lower() in STATS_AVG_FIELDS:
-                        key = line.split(';')[0].lower()
-                        value = int(line.split(';')[1])
-                        if key in self.consolided:
-                            if self.consolided[key] < int(value):
-                                self.consolided[key] = int(value)
-                        else:
-                            self.consolided[key] = 0
-
-        for (k,v) in self.consolided_datas.items():
-            self.consolided[k] = v[1] - v[0]
-            if 'byte' in k:
-                octets,unit = self.__conv(self.consolided[k])
-                self.consolided[k] = f"{octets} {unit}"
-            elif 'seconds' in k:
-                octets,unit = self.__conv(self.consolided[k],'t')
-                self.consolided[k] = f"{octets} {unit}"
-        return self.consolided
 
 class SQLite:
     def __init__(self):
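The new parse() encodes consolidation rules that are easy to miss in the diff: total_* values are bridge-lifetime gauges, so only the most recent sample is kept, while non-total fields (conferences, videochannels, ...) are summed across samples; largest_conference keeps the running maximum; and the count of 'conferences' samples divided by 12 estimates the days covered, apparently assuming a two-hour collection cadence. One caveat: the unit-conversion loop pops and reinserts keys in the dict it is iterating, which Python 3 treats as mutation during iteration and can punish with a RuntimeError. The sketch below isolates the two core rules (it skips unit conversion and the per-day average); the field lists are abridged and the rows mimic jitsi_stats tuples.

STATS_TOT_FIELDS = ['total_conferences_created', 'conferences']  # abridged
STATS_MAX_FIELDS = ['largest_conference']

def consolidate(rows):
    # rows are (id, timestamp, key_field, value_field) tuples as stored
    # in jitsi_stats by dbInsert().
    consolided = {}
    for (_rowid, _ts, field, value) in rows:
        value = int(value)
        if field in STATS_TOT_FIELDS:
            if field in consolided and 'total' not in field:
                consolided[field] += value   # per-sample counters: sum
            else:
                consolided[field] = value    # lifetime gauges: keep last
        if field in STATS_MAX_FIELDS:
            consolided[field] = max(consolided.get(field, 0), value)
    return consolided

rows = [(1, 1700000000, 'conferences', '3'),
        (2, 1700000000, 'largest_conference', '5'),
        (3, 1700007200, 'conferences', '4'),
        (4, 1700007200, 'largest_conference', '2')]
print(consolidate(rows))   # {'conferences': 7, 'largest_conference': 5}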
@@ -149,13 +126,12 @@ class SQLite:
 
     def __initDb(self):
         self.__openDb()
-        self.cursor.execute(''' create table jitsi_stats(
+        self.cursor.execute('''create table jitsi_stats(
             id integer primary key autoincrement,
             timestamp text,
             key_field text,
             value_field text
-            )
-            ''')
+            )''')
         self.conn.commit()
         self.__closeDb()
 
@@ -168,10 +144,13 @@ class SQLite:
         self.conn.close()
 
     def dbQuery(self,query='SELECT'):
-        self.__openDb()
-        self.cursor.execute(query)
-        rows = self.cursor.fetchall()
-        self.__closeDb()
+        try:
+            self.__openDb()
+            self.cursor.execute(query)
+            rows = self.cursor.fetchall()
+            self.__closeDb()
+        except sqlite3.OperationalError:
+            rows = None
         return rows
 
     def dbInsert(self,ts,k,v):
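With the try/except in place, dbQuery() returns None instead of raising when SQLite reports an OperationalError (missing table, locked database), so callers have to guard the result; note that parse() above still iterates the result unguarded. A minimal usage sketch, reusing the module's SQLite class and dbName:

db = SQLite()
rows = db.dbQuery(f"SELECT * FROM {dbName} ORDER BY id")
if rows is None:
    # e.g. first run before __initDb() has created the table
    print('query failed')
else:
    for (_rowid, ts, key, value) in rows:
        print(f"{ts};{key};{value}")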
@@ -205,17 +184,9 @@ def runMain():
     year = currentDate.year
 
     stats = Stats(year,mois)
-    #res = stats.parse2()
-    #for (k,v) in res.items():
-    #    print(f"{k}={v}")
 
     res = stats.parse()
     for (k,v) in res.items():
-        if k in STATS_TOT_FIELDS:
-            chaine = STATS_FR_TOT_FIELDS[STATS_TOT_FIELDS.index(k)]
-        elif k in STATS_AVG_FIELDS:
-            chaine = STATS_FR_AVG_FIELDS[STATS_AVG_FIELDS.index(k)]
-        print(f"{chaine} : {v}")
+        print(f"{k}={v}")
 
 if __name__ == '__main__':
     runMain()
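The removed labelling block indexed the French lists by position and still referenced STATS_AVG_FIELDS, which this same commit renames to STATS_MAX_FIELDS, so it would have raised a NameError (and an unmatched key would leave chaine unbound); printing raw key=value pairs sidesteps both problems. If the French labels are wanted back, a dict lookup is the sturdier pattern; a hypothetical sketch using the renamed constants:

# Hypothetical replacement for the removed labelling block: build the
# key -> French label map once instead of indexing parallel lists.
FR_LABELS = dict(zip(STATS_TOT_FIELDS, STATS_FR_TOT_FIELDS))
FR_LABELS.update(zip(STATS_MAX_FIELDS, STATS_FR_MAX_FIELDS))

for (k, v) in res.items():
    # Fall back to the raw key for synthesized entries such as
    # 'average conferences by day'.
    print(f"{FR_LABELS.get(k, k)} : {v}")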