'''
This file handles everything related to statistics queries.
'''

import pymongo
from pymongo import MongoClient, ReturnDocument
import json
from bson import ObjectId
import re
from datetime import datetime, timedelta
import prj_common as mycommon
import secrets
import inspect
import sys, os
import csv
import pandas as pd
import GlobalVariable as MYSY_GV
from math import isnan


class JSONEncoder(json.JSONEncoder):
    """JSON encoder that serialises BSON ObjectId values as strings."""
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)
        return json.JSONEncoder.default(self, o)

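# A minimal usage sketch (illustrative, with a made-up ObjectId): the stock
# json module cannot serialise ObjectId, so the encoder above converts it to
# its hex string form.
#
#   JSONEncoder().encode({'_id': ObjectId('64b64c2f9d1e8a5f3c2b1a00'), 'nb': 1})
#   # -> '{"_id": "64b64c2f9d1e8a5f3c2b1a00", "nb": 1}'
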
def GetStat_class_view_old():
    try:
        # collection
        collection = MYSY_GV.dbname["user_recherche_result"]

        """
        To get a query that groups by day, or by day-hour,
        we play with
        "date" : { $substr: [ "$date_update", 0, 25 ] } ==> where 25 is the width of the cut.
        """

        QUERY_BY = "jour"
        SUBSTR = 0

        if (QUERY_BY == 'heure'):
            SUBSTR = 13
        elif (QUERY_BY == 'jour'):
            SUBSTR = 10
        elif (QUERY_BY == 'mois'):
            SUBSTR = 7

        pipe2 = [{
            '$group': {
                '_id': {
                    "INTERNAL URL": "$internal_url",
                    "OWNER": "$owner",
                    "Date_view": {"$substr": ["$date_update", 0, SUBSTR]},
                },
                'count': {'$count': {}}
            }
        },
            {
                "$sort": {"count": -1}
            }
        ]

        insertObject = []
        for result in collection.aggregate(pipe2):
            tmp_val = {}

            if ("Date_view" in result['_id'].keys()):
                if result['_id']['Date_view']:
                    tmp_val['Date_view'] = str(result['_id']['Date_view'])

            if ("OWNER" in result['_id'].keys()):
                if result['_id']['OWNER']:
                    tmp_val['owner'] = str(result['_id']['OWNER'])

            if ("INTERNAL URL" in result['_id'].keys()):
                if result['_id']['INTERNAL URL']:
                    tmp_val['internal_url'] = str(result['_id']['INTERNAL URL'])

            tmp_val['nb_view'] = str(result['count'])
            insertObject.append(JSONEncoder().encode(tmp_val))

        return True, insertObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERROR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de récupérer les stat"

def GetStat_class_view(diction):
    try:
        '''
        Check that the fields received in the API call are in the list of
        authorised fields. This prevents a third party from adding unknown
        values to the API. Every possible field (mandatory or not) of the
        collection must be listed in field_list.
        '''
        field_list = ['external_code', 'internal_url', 'zone_diffusion', 'metier', 'date_lieu',
                      'published', 'token', 'date_start', 'date_end']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(inspect.stack()[0][3]) + " - Le champ '" + val + "' n'est pas autorisé, Creation formation annulée")
                return False, " Verifier votre API"

        '''
        Once all the keys supplied to the API have been validated (previous step),
        check that the mandatory fields are present.
        '''
        field_list_obligatoire = ['token']

        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Verifier votre API"

        '''
        Check that the token and the email are valid.
        '''

        # Retrieve the parameters
        mydata = {}

        if ("token" in diction.keys()):
            if diction['token']:
                mydata['token'] = diction['token']

        date_start = ""
        if ("date_start" in diction.keys()):
            if diction['date_start']:
                date_start = diction['date_start']

        date_end = ""
        if ("date_end" in diction.keys()):
            if diction['date_end']:
                date_end = diction['date_end']

        # Check the validity of the token
        '''
        Important: creating a training course requires a token.
        NO COURSE CREATION IN DISCONNECTED MODE.

        FOR COURSES CREATED BY OUR AUTOMATED SYSTEMS, A VALUE MUST STILL BE PROVIDED.
        '''
        retval = mycommon.check_partner_token_validity("", str(mydata['token']))

        if retval is False:
            mycommon.myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
            return False, "L'email ou le token ne sont pas valident"

        # Retrieve the partner's recid
        user_recid = mycommon.get_parnter_recid_from_token(str(mydata['token']))
        if user_recid is False:
            mycommon.myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
            return False, " Les informations d'identification sont incorrectes"

        # collection
        collection = MYSY_GV.dbname["user_recherche_result"]

        """
        To get a query that groups by day, or by day-hour,
        we play with
        "date" : { $substr: [ "$date_update", 0, 25 ] } ==> where 25 is the width of the cut.
        """

        thisweek = datetime.today() - timedelta(days=0)
        thisweek_format = thisweek.strftime('%Y-%m-%d')

        QUERY_BY = "jour"
        SUBSTR = 0

        if (QUERY_BY == 'heure'):
            SUBSTR = 13
        elif (QUERY_BY == 'jour'):
            SUBSTR = 10
        elif (QUERY_BY == 'mois'):
            SUBSTR = 7

        pipe2 = [
            {'$match': {'date_update': {'$gte': str(date_start), '$lte': str(date_end)},
                        'partner_owner_recid': str(user_recid),
                        }},
            {
                '$group': {
                    '_id': {
                        "Date_view": {"$substr": ["$date_update", 0, SUBSTR]},
                        "OWNER": "$partner_owner_recid",
                    },
                    'count': {'$count': {}}
                }
            },
            {
                "$sort": {"_id": 1}
            }
        ]
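
        # Each aggregation result should look roughly like this (an assumption
        # based on the pipeline above, not captured output):
        #   {'_id': {'Date_view': '2022-09-10', 'OWNER': '<partner_recid>'}, 'count': 12}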

        print(" PIP 21 =" + str(pipe2))

        insertObject = []
        for result in collection.aggregate(pipe2):
            tmp_val = {}

            if ("Date_view" in result['_id'].keys()):
                if result['_id']['Date_view']:
                    tmp_val['Date_view'] = str(result['_id']['Date_view'])

            if ("OWNER" in result['_id'].keys()):
                if result['_id']['OWNER']:
                    tmp_val['owner'] = str(result['_id']['OWNER'])

            if ("INTERNAL URL" in result['_id'].keys()):
                if result['_id']['INTERNAL URL']:
                    tmp_val['internal_url'] = str(result['_id']['INTERNAL URL'])

            tmp_val['nb_view'] = str(result['count'])
            insertObject.append(JSONEncoder().encode(tmp_val))

        return True, insertObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERROR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de récupérer les stat"

"""
|
|
Cette fonction retourne le top 5 sur une periode donnée pour un partenaire.
|
|
Dans ses arguments, elle prendra le token, puis va allercherche le recid du partenaire
|
|
"""
|
|
def GetStat_class_view_topX(diction):
|
|
try:
|
|
'''
|
|
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
|
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
|
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
|
# field_list.
|
|
'''
|
|
field_list = ['external_code', 'internal_url', 'zone_diffusion', 'metier', 'date_lieu',
|
|
'published', 'token', 'date_start', 'date_end', 'topX']
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(inspect.stack()[0][
|
|
3]) + " - Le champ '" + val + "' n'est pas autorisé, Creation formation annulée")
|
|
return False, " Verifier votre API"
|
|
|
|
'''
|
|
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
|
On controle que les champs obligatoires sont presents dans la liste
|
|
'''
|
|
field_list_obligatoire = [ 'token', 'topX']
|
|
|
|
for val in field_list_obligatoire:
|
|
if val not in diction:
|
|
mycommon.myprint(str(inspect.stack()[0][3])+" - La valeur '" + val + "' n'est pas presente dans liste ")
|
|
return False, " Verifier votre API"
|
|
|
|
'''
|
|
Verification si le token et l'email sont valident
|
|
'''
|
|
|
|
# recuperation des paramettre
|
|
mydata = {}
|
|
my_token = ""
|
|
|
|
|
|
if ("token" in diction.keys()):
|
|
if diction['token']:
|
|
my_token = diction['token']
|
|
|
|
date_start = ""
|
|
if ("date_start" in diction.keys()):
|
|
if diction['date_start']:
|
|
date_start = diction['date_start']
|
|
mydata['date_start'] = diction['date_start']
|
|
|
|
date_end = ""
|
|
if ("date_end" in diction.keys()):
|
|
if diction['date_end']:
|
|
date_end = diction['date_end']
|
|
mydata['date_end'] = diction['date_end']
|
|
|
|
topX = 0
|
|
if ("topX" in diction.keys()):
|
|
if diction['topX']:
|
|
topX = mycommon.tryInt(diction['topX'])
|
|
mydata['topX'] = diction['topX']
|
|
|
|
|
|
local_status, try_end_date = mycommon.TryToDateYYYMMDD(str(diction['date_end']))
|
|
new_end_date = try_end_date.date()
|
|
|
|
local_status, try_from_date = mycommon.TryToDateYYYMMDD(str(diction['date_start']))
|
|
new_from_date = try_from_date.date()
|
|
|
|
day_before = new_end_date - timedelta(days=0)
|
|
tmp = 0
|
|
|
|
categories = {}
|
|
categories_val = ""
|
|
while ( tmp < 10 and str(day_before) != str(new_from_date) ) :
|
|
day_after = new_from_date + timedelta(days=tmp)
|
|
categories_val = categories_val+str(day_after)+","
|
|
tmp = tmp + 1
|
|
#print(" day_before DATE = " + str(day_before)+ " VS "+str(new_from_date))
|
|
|
|
if(categories_val.endswith(',') ):
|
|
categories_val = categories_val[:-1]
|
|
|
|
print(" categories_val = "+categories_val)
|
|
|
|
categories['categories'] = str(categories_val)
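
        # For example (illustrative): with date_start = "2022-09-01" and
        # date_end = "2022-09-04", categories_val ends up as
        # "2022-09-01,2022-09-02,2022-09-03,2022-09-04".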

        # Check the validity of the token
        '''
        Important: creating a training course requires a token.
        NO COURSE CREATION IN DISCONNECTED MODE.

        FOR COURSES CREATED BY OUR AUTOMATED SYSTEMS, A VALUE MUST STILL BE PROVIDED.
        '''
        retval = mycommon.check_partner_token_validity("", str(my_token))

        if retval is False:
            mycommon.myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
            return False, "L'email ou le token ne sont pas valident"

        # Retrieve the partner's recid
        user_recid = mycommon.get_parnter_recid_from_token(str(my_token))
        if user_recid is False:
            mycommon.myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
            return False, " Les informations d'identification sont incorrectes"

        mydata['partner_owner_recid'] = str(user_recid)

        """
        Retrieve the ids (internal_url) of the training courses.
        """
        print(" ICI diction = " + str(mydata))
        status_tmp, diction_topx = Get_X_Best_Class_On_Given_period(mydata)
        if (status_tmp is False):
            return False, "Impossible de récupérer les stats- erreur interne"

        print(" pour cette stat, voici le top " + str(topX) + " des ref concernées ")
        print(diction_topx)

        liste_formation = []
        for val_tmp in diction_topx:
            json_object = json.loads(val_tmp)
            if ("internal_url" in json_object.keys()):
                if json_object['internal_url']:
                    liste_formation.append(str(json_object['internal_url']))

        # collection
        collection = MYSY_GV.dbname["user_recherche_result"]

        """
        To get a query that groups by day, or by day-hour,
        we play with
        "date" : { $substr: [ "$date_update", 0, 25 ] } ==> where 25 is the width of the cut.
        """

        thisweek = datetime.today() - timedelta(days=0)
        thisweek_format = thisweek.strftime('%Y-%m-%d')

        QUERY_BY = "jour"
        SUBSTR = 0

        if (QUERY_BY == 'heure'):
            SUBSTR = 13
        elif (QUERY_BY == 'jour'):
            SUBSTR = 10
        elif (QUERY_BY == 'mois'):
            SUBSTR = 7

        pipe2 = [
            {'$match': {'date_update': {'$gte': str(date_start), '$lte': str(date_end)},
                        'partner_owner_recid': str(user_recid),
                        'internal_url': {'$in': liste_formation},
                        }},
            {
                '$group': {
                    '_id': {
                        "Date_view": {"$substr": ["$date_update", 0, SUBSTR]},
                        "OWNER": "$owner",
                        "INTERNAL URL": "$internal_url",
                    },
                    'count': {'$count': {}}
                }
            },
            {
                "$sort": {"_id": 1}
            },
        ]

        print(" PIP 22 =" + str(pipe2))
        # Run the aggregation once and reuse the results for every course,
        # instead of re-running the pipeline inside the loop below.
        aggregation_results = list(collection.aggregate(pipe2))
        for result in aggregation_results:
            print(str(result['_id']) + " ===> " + str(result['count']))

        insertObject = []
        i = 0
        for tmp in liste_formation:
            i = i + 1
            print(" Traitement de " + str(tmp))
            ret_val = {}
            ret_val['name' + str(i)] = tmp
            name = tmp
            tab_valeur = []
            for result in aggregation_results:
                print(" ### NAME = ", name)
                if (name in result['_id']['INTERNAL URL']):
                    print(" #### RESULT = ", result)
                    tab_valeur.append(str(result['count']))

            ret_val['data' + str(i)] = tab_valeur
            insertObject.append(JSONEncoder().encode(ret_val))

        insertObject.append(JSONEncoder().encode(categories))

        return True, insertObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERROR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de récupérer les stat"

"""
|
|
Cette fonction recuprer la liste des X formations d'un client donné,
|
|
les plus vue sur une periode donnée
|
|
"""
|
|
def Get_X_Best_Class_On_Given_period(diction):
|
|
try:
|
|
|
|
field_list = ['partner_owner_recid', 'date_start', 'date_end', 'topX']
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(inspect.stack()[0][
|
|
3]) + " - Le champ '" + val + "' n'est pas autorisé, action annulée")
|
|
return False, " Verifier votre API"
|
|
|
|
# collection
|
|
collection = MYSY_GV.dbname["user_recherche_result"]
|
|
|
|
partner_owner_recid = ""
|
|
if ("partner_owner_recid" in diction.keys()):
|
|
if diction['partner_owner_recid']:
|
|
partner_owner_recid = diction['partner_owner_recid']
|
|
|
|
date_start = ""
|
|
if ("date_start" in diction.keys()):
|
|
if diction['date_start']:
|
|
date_start = diction['date_start']
|
|
|
|
date_end = ""
|
|
if ("date_end" in diction.keys()):
|
|
if diction['date_end']:
|
|
date_end = diction['date_end']
|
|
|
|
topX = 0
|
|
if ("topX" in diction.keys()):
|
|
if diction['topX']:
|
|
topX = mycommon.tryInt(diction['topX'])
|
|
|
|
pipe2 = [
|
|
{'$match': {'date_update': {'$gte': str(date_start), '$lte': str(date_end)},
|
|
'partner_owner_recid': str(partner_owner_recid)
|
|
}},
|
|
{
|
|
'$group': {
|
|
'_id': {
|
|
"INTERNAL URL": "$internal_url",
|
|
"OWNER": "$owner",
|
|
|
|
},
|
|
'count': {'$count': {}
|
|
}
|
|
}
|
|
},
|
|
{
|
|
"$sort": {"count": -1}
|
|
},
|
|
{
|
|
"$limit": topX
|
|
}
|
|
]
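
        # The pipeline sorts by view count (descending) and keeps the first
        # topX groups, so each result should look roughly like this (an
        # assumption with hypothetical values, not captured output):
        #   {'_id': {'INTERNAL URL': 'ma-formation-python', 'OWNER': '...'}, 'count': 42}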

        print(" PIP 222 =" + str(pipe2))

        insertObject = []
        for result in collection.aggregate(pipe2):
            tmp_val = {}

            if ("OWNER" in result['_id'].keys()):
                if result['_id']['OWNER']:
                    print("Owner = " + str(result['_id']['OWNER']))
                    tmp_val['owner'] = str(result['_id']['OWNER'])

            if ("INTERNAL URL" in result['_id'].keys()):
                if result['_id']['INTERNAL URL']:
                    print("INTERNAL URL = " + str(result['_id']['INTERNAL URL']))
                    tmp_val['internal_url'] = str(result['_id']['INTERNAL URL'])

            tmp_val['nb_view'] = str(result['count'])
            insertObject.append(JSONEncoder().encode(tmp_val))

        return True, insertObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERROR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de récupérer les X formations les plus vues"

def test_return():
    try:
        insertObject = []
        nb_val = {'nb_formation': '2'}
        tmp_val0 = {}

        tmp_val0['categories'] = "01/02/22, 02/02/22, 03/02/22, 04/02/22, 05/02/22, 06/02/22"
        tmp_val1 = {'name': 'formation 1', 'Date_view': '2022-09-11', 'count': '2'}
        tmp_val2 = {'name': 'formation 1', 'Date_view': '2022-09-10', 'count': '23'}
        tmp_val3 = {'name': 'formation 1', 'Date_view': '2022-09-05', 'count': '9'}

        tmp_val4 = {'name': 'formation 2', 'Date_view': '2022-09-11', 'count': '3'}
        tmp_val5 = {'name': 'formation 2', 'Date_view': '2022-09-10', 'count': '4'}
        tmp_val6 = {'name': 'formation 3', 'Date_view': '2022-09-05', 'count': '6'}

        #insertObject.append(JSONEncoder().encode(nb_val))

        insertObject.append(JSONEncoder().encode(tmp_val0))
        insertObject.append(JSONEncoder().encode(tmp_val1))
        insertObject.append(JSONEncoder().encode(tmp_val2))
        insertObject.append(JSONEncoder().encode(tmp_val3))
        insertObject.append(JSONEncoder().encode(tmp_val4))
        insertObject.append(JSONEncoder().encode(tmp_val5))
        insertObject.append(JSONEncoder().encode(tmp_val6))

        return True, insertObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERROR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de test_return"

"""
|
|
Cette fonction retourne les statistiques d'une formation données
|
|
"""
|
|
|
|
def GetStat_class_by_internal_url(diction):
|
|
try:
|
|
'''
|
|
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
|
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
|
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
|
# field_list.
|
|
'''
|
|
field_list = ['external_code', 'internal_url', 'zone_diffusion', 'metier', 'date_lieu',
|
|
'published', 'token', 'date_start', 'date_end']
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(inspect.stack()[0][
|
|
3]) + " - Le champ '" + val + "' n'est pas autorisé, Creation formation annulée")
|
|
return False, " Verifier votre API"
|
|
|
|
'''
|
|
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
|
On controle que les champs obligatoires sont presents dans la liste
|
|
'''
|
|
field_list_obligatoire = [ 'token', 'internal_url']
|
|
|
|
for val in field_list_obligatoire:
|
|
if val not in diction:
|
|
mycommon.myprint(str(inspect.stack()[0][3])+" - La valeur '" + val + "' n'est pas presente dans liste ")
|
|
return False, " Verifier votre API"
|
|
|
|
'''
|
|
Verification si le token et l'email sont valident
|
|
'''
|
|
|
|
# recuperation des paramettre
|
|
mydata = {}
|
|
|
|
|
|
if ("token" in diction.keys()):
|
|
if diction['token']:
|
|
mydata['token'] = diction['token']
|
|
|
|
date_start = ""
|
|
if ("date_start" in diction.keys()):
|
|
if diction['date_start']:
|
|
date_start = diction['date_start']
|
|
|
|
date_end = ""
|
|
if ("date_end" in diction.keys()):
|
|
if diction['date_end']:
|
|
date_end = diction['date_end']
|
|
|
|
|
|
|
|
local_status, try_end_date = mycommon.TryToDateYYYMMDD(str(diction['date_end']))
|
|
new_end_date = try_end_date.date()
|
|
|
|
|
|
local_status, try_from_date = mycommon.TryToDateYYYMMDD(str(diction['date_start']))
|
|
new_from_date = try_from_date.date()
|
|
|
|
day_before = new_end_date - timedelta(days=0)
|
|
tmp = 0
|
|
|
|
categories = {}
|
|
categories_val = ""
|
|
while ( tmp > -10 and str(day_before) != str(new_from_date) ) :
|
|
tmp = tmp - 1
|
|
day_before = new_end_date + timedelta(days=tmp)
|
|
categories_val = categories_val+str(day_before)+","
|
|
#print(" day_before DATE = " + str(day_before)+ " VS "+str(new_from_date))
|
|
|
|
if(categories_val.endswith(',') ):
|
|
categories_val = categories_val[:-1]
|
|
|
|
#print(" categories_val = "+categories_val)
|
|
|
|
categories['categories'] = str(categories_val)
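
        # For example (illustrative): with date_start = "2022-09-01" and
        # date_end = "2022-09-04" this yields "2022-09-03,2022-09-02,2022-09-01":
        # the labels run backwards and the end date itself is not included.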

        # Check the validity of the token
        '''
        Important: creating a training course requires a token.
        NO COURSE CREATION IN DISCONNECTED MODE.

        FOR COURSES CREATED BY OUR AUTOMATED SYSTEMS, A VALUE MUST STILL BE PROVIDED.
        '''
        retval = mycommon.check_partner_token_validity("", str(mydata['token']))

        if retval is False:
            mycommon.myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
            return False, "L'email ou le token ne sont pas valident"

        # Retrieve the partner's recid
        user_recid = mycommon.get_parnter_recid_from_token(str(mydata['token']))
        if user_recid is False:
            mycommon.myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
            return False, " Les informations d'identification sont incorrectes"

        """
        Retrieve the ids (internal_url) of the training courses.
        """
        liste_formation = []
        myinternal_url = ""
        if ("internal_url" in diction.keys()):
            if diction['internal_url']:
                myinternal_url = diction['internal_url']
                liste_formation.append(str(myinternal_url))

        # collection
        collection = MYSY_GV.dbname["user_recherche_result"]

        """
        To get a query that groups by day, or by day-hour,
        we play with
        "date" : { $substr: [ "$date_update", 0, 25 ] } ==> where 25 is the width of the cut.
        """

        thisweek = datetime.today() - timedelta(days=0)
        thisweek_format = thisweek.strftime('%Y-%m-%d')

        QUERY_BY = "jour"
        SUBSTR = 0

        if (QUERY_BY == 'heure'):
            SUBSTR = 13
        elif (QUERY_BY == 'jour'):
            SUBSTR = 10
        elif (QUERY_BY == 'mois'):
            SUBSTR = 7

        pipe2 = [
            {'$match': {'date_update': {'$gte': str(date_start), '$lte': str(date_end)},
                        'token': str(mydata['token']),
                        'internal_url': {'$in': liste_formation},
                        }},
            {
                '$group': {
                    '_id': {
                        "Date_view": {"$substr": ["$date_update", 0, SUBSTR]},
                        "INTERNAL URL": "$internal_url",
                        "OWNER": "$owner",
                    },
                    'count': {'$count': {}}
                }
            },
            {
                "$sort": {"_id": 1}
            },
        ]

        #print(" PIP 23 =" + str(pipe2))

        insertObject = []
        i = 0
        for tmp in liste_formation:
            i = i + 1
            ret_val = {}
            ret_val['name' + str(i)] = tmp
            name = tmp
            tab_valeur = []
            for result in collection.aggregate(pipe2):
                if (name in result['_id']['INTERNAL URL']):
                    tab_valeur.append(str(result['count']))

            ret_val['data' + str(i)] = tab_valeur
            insertObject.append(JSONEncoder().encode(ret_val))

        insertObject.append(JSONEncoder().encode(categories))

        return True, insertObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERROR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de récupérer les stat"