wwssdssddd

master
cherif 2024-07-10 18:53:17 +02:00
parent 2509c3e9f6
commit e818512ffe
7 changed files with 4297 additions and 26 deletions

.idea/workspace.xml
View File

@@ -3,7 +3,11 @@
<component name="ChangeListManager">
<list default="true" id="c6d0259a-16e1-410d-91a1-830590ee2a08" name="Changes" comment="wwssdssd">
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Inscription_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/Inscription_mgt.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Log/log_file.log" beforeDir="false" afterPath="$PROJECT_DIR$/Log/log_file.log" afterDir="false" />
<change beforePath="$PROJECT_DIR$/Session_Formation.py" beforeDir="false" afterPath="$PROJECT_DIR$/Session_Formation.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/class_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/class_mgt.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/main.py" beforeDir="false" afterPath="$PROJECT_DIR$/main.py" afterDir="false" />
<change beforePath="$PROJECT_DIR$/survey_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/survey_mgt.py" afterDir="false" />
</list>
<option name="SHOW_DIALOG" value="false" />
@@ -74,13 +78,6 @@
<option name="presentableId" value="Default" />
<updated>1680804787304</updated>
</task>
<task id="LOCAL-00297" summary="13/05/2024 - 18h30">
<created>1715618340942</created>
<option name="number" value="00297" />
<option name="presentableId" value="LOCAL-00297" />
<option name="project" value="LOCAL" />
<updated>1715618340942</updated>
</task>
<task id="LOCAL-00298" summary="14/05/2024 - 21h30">
<created>1715714868339</created>
<option name="number" value="00298" />
@@ -417,7 +414,14 @@
<option name="project" value="LOCAL" />
<updated>1720531095888</updated>
</task>
<option name="localTasksCounter" value="346" />
<task id="LOCAL-00346" summary="wwssdssd">
<created>1720553186262</created>
<option name="number" value="00346" />
<option name="presentableId" value="LOCAL-00346" />
<option name="project" value="LOCAL" />
<updated>1720553186264</updated>
</task>
<option name="localTasksCounter" value="347" />
<servers />
</component>
<component name="Vcs.Log.Tabs.Properties">

Inscription_mgt.py
View File

@@ -1465,6 +1465,12 @@ def GetAllClassStagiaire(diction):
invoiced_date = str(retval['invoiced_date'])[0:10]
user['invoiced_date'] = invoiced_date
if ("civilite" in retval.keys()):
user['civilite'] = str(retval['civilite']).strip().lower()
else:
user['civilite'] = "neutre"
RetObject.append(mycommon.JSONEncoder().encode(user))
return True, RetObject

Log/log_file.log
File diff suppressed because it is too large

Session_Formation.py
View File

@@ -2095,6 +2095,8 @@ par exemple avec l'import d'un fichier csv
"""
def Add_Update_SessionFormation_mass(file=None, Folder=None, diction=None):
try:
diction = mycommon.strip_dictionary(diction)
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
@@ -2572,6 +2574,7 @@ c'est soit le fichier est TOUT bon ou pas.
"""
def Controle_Add_Update_SessionFormation_mass(saved_file=None, Folder=None, diction=None):
try:
diction = mycommon.strip_dictionary(diction)
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
@@ -3064,6 +3067,7 @@ Cela veut dire qu'on fourni 'external_code' pour chaque ligne du fichier excel,
def Add_Update_SessionFormation_mass_for_many_class(file=None, Folder=None, diction=None):
try:
diction = mycommon.strip_dictionary(diction)
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
@@ -3590,6 +3594,9 @@ Controle fichier avant import
"""
def Controle_Add_Update_SessionFormation_mass_for_many_class(saved_file=None, Folder=None, diction=None):
try:
diction = mycommon.strip_dictionary(diction)
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API

class_mgt.py
View File

@@ -3141,6 +3141,8 @@ Cette fontion import un fichier excel de formation
def add_class_mass(file=None, Folder=None, diction=None):
try:
diction = mycommon.strip_dictionary(diction)
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
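Editor's note: the one-line change repeated across the hunks above (Session_Formation.py and class_mgt.py) calls the project helper mycommon.strip_dictionary on the incoming payload. Its implementation is not part of this diff; the sketch below is only an assumed, minimal equivalent inferred from the name and from how the cleaned dict is used afterwards, not the project's actual code.

def strip_dictionary(diction):
    # Assumed behaviour (not the real mycommon implementation): return a copy of
    # the payload with surrounding whitespace stripped from every string value.
    cleaned = {}
    for key, value in diction.items():
        cleaned[key] = value.strip() if isinstance(value, str) else value
    return cleaned

print(strip_dictionary({"token": "  abc  ", "class_id": 12}))
# -> {'token': 'abc', 'class_id': 12}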

27
main.py
View File

@@ -10203,6 +10203,33 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id():
return jsonify(status=status, message=retval)
"""
API / STATISTIC / EXPORT EXCEL : Export excel des resultats des stat : Get_Entity_Question_List_With_Stat_For_Given_related_collection_id
"""
@app.route('/myclass/api/Export_Excel_Survey_Stat_Data_For_Given_related_collection_id/<token>/<related_collection>/<related_collection_id>/<date_from>/<date_to>/<question_id>/<class_id>/', methods=['POST','GET'])
@crossdomain(origin='*')
def Export_Excel_Survey_Stat_Data_For_Given_related_collection_id(token, related_collection, related_collection_id, date_from, date_to, question_id, class_id):
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
payload = {}
payload['token'] = str(token)
payload['related_collection'] = str(related_collection).replace("-", "")
payload['periode_start_date'] = str(date_from).replace("-", "/")
payload['periode_end_date'] = str(date_to).replace("-", "/")
payload['related_collection_id'] = str(related_collection_id).replace("-", "")
payload['question_id'] = str(question_id).replace("-", "")
payload['class_id'] = str(class_id).replace("-", "")
print(" ### Export_Excel_Survey_Stat_Data_For_Given_related_collection_id payload = ",payload)
status, retval = survey_mgt.Export_Excel_Survey_Stat_Data_For_Given_related_collection_id(payload)
if(status ):
return retval
else:
return False
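Editor's note: for illustration only, a hypothetical client call to this new export route could look like the sketch below. The host, token and identifiers are placeholders, not values from this commit; the date segments use dd-mm-yyyy in the URL and are rewritten to dd/mm/yyyy by the route before validation.

import requests  # hypothetical client-side usage sketch

BASE_URL = "http://localhost:5000"  # placeholder host
url = (BASE_URL
       + "/myclass/api/Export_Excel_Survey_Stat_Data_For_Given_related_collection_id/"
       + "MY_TOKEN/RELATED_COLLECTION/RELATED_COLLECTION_ID/"
       + "01-01-2024/30-06-2024/QUESTION_ID/CLASS_ID/")

resp = requests.post(url)
if resp.ok:
    # On success the route streams back the generated .xlsx attachment
    with open("survey_stat_export.xlsx", "wb") as f:
        f.write(resp.content)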
if __name__ == '__main__':

survey_mgt.py
View File

@@ -12,7 +12,7 @@ from pymongo import MongoClient
import json
from bson import ObjectId
import re
from datetime import datetime
from datetime import datetime, date
import module_editique
import prj_common as mycommon
@@ -2214,7 +2214,8 @@ def Export_To_Excel_Survey_with_filter(diction):
"""
Cette fonction permet de faire des statistiques ou de projections graphiques sur
sur des reponses, par exemple la moyenne de satisfaction par rapport
a un formateur ou par rapport à une formation
a un formateur ou a une formation (en effet, dans la collection "survey",
on a le champ "class_id")
"""
def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
try:
@@ -2224,7 +2225,7 @@ def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
Verification des input acceptés
"""
field_list = ['token', 'periode_start_date', 'periode_end_date', 'related_collection', 'related_collection_id',
'question_id']
'question_id', 'class_id']
incom_keys = diction.keys()
for val in incom_keys:
@@ -2300,6 +2301,314 @@ def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
return False, " Le paramétrage de la question ne permet pas de déterminer la nature statistiques des données "
"""
Si on a des class_id alors on cree les filtre
"""
filt_class_id = {}
if( "class_id" in diction.keys() and diction['class_id'] ):
filt_class_id = { 'class_id':str(diction['class_id'])}
"""
Si on a des related_collection, 'related_collection_id' on cree les filtre
"""
filt_related_collection_and_related_collection_id = {}
if ("related_collection" in diction.keys() and diction[
'related_collection'] and "related_collection_id" in diction.keys() and diction['related_collection_id']):
filt_related_collection_and_related_collection_id = {
'related_collection': str(diction['related_collection']),
'related_collection_id': str(diction['related_collection_id'])}
filt_periode_start_date = ""
if ("periode_start_date" in diction.keys() and diction['periode_start_date']):
filt_periode_start_date = str(diction['periode_start_date'])[0:10]
local_status = mycommon.CheckisDate(filt_periode_start_date)
if (local_status is False):
mycommon.myprint(str(
inspect.stack()[0][3]) + " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa.")
return False, " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa."
"""
Si on a des dates, 'date_from', 'date_to', verifier qu'elles sont valides
"""
filt_periode_start_date = ""
if ("periode_start_date" in diction.keys() and diction['periode_start_date']):
filt_periode_start_date = str(diction['periode_start_date'])[0:10]
local_status = mycommon.CheckisDate(filt_periode_start_date)
if (local_status is False):
mycommon.myprint(str(
inspect.stack()[0][3]) + " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa.")
return False, " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa."
filt_periode_end_date = ""
if ("periode_end_date" in diction.keys() and diction['periode_end_date']):
filt_periode_end_date = str(diction['periode_end_date'])[0:10]
local_status = mycommon.CheckisDate(filt_periode_end_date)
if (local_status is False):
mycommon.myprint(str(
inspect.stack()[0][3]) + " Le filtre : 'date de fin' n'est pas au format jj/mm/aaaa.")
return False, " Le filtre : 'date de fin' n'est pas au format jj/mm/aaaa."
if( filt_periode_start_date and filt_periode_end_date ):
filt_periode_start_date_ISODATE = datetime.strptime(str(filt_periode_start_date), '%d/%m/%Y')
filt_periode_end_date_ISODATE = datetime.strptime(str(filt_periode_end_date), '%d/%m/%Y')
else:
mycommon.myprint(str(
inspect.stack()[0][3]) + " Les filtres : les dates de debut et fin ne sont pas valides.")
return False, " Les filtres : les dates de debut et fin ne sont pas valides."
"""
Creation de la range des mois entre filt_periode_start_date_ISODATE et
filt_periode_end_date_ISODATE
"""
range_date_month = []
start = filt_periode_start_date_ISODATE
end = filt_periode_end_date_ISODATE
while start <= end:
node = {}
node['month_year'] = '{:02d}'.format(start.month) + "_" + str(start.year)
node['label'] = '{:02d}'.format(start.month) + "_" + str(start.year)
node['TotalAmount'] = 0
node['value'] = 0
node['count'] = 0
range_date_month.append(node)
start += relativedelta(months=1)
#print(" ### range_date_month = ", range_date_month)
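Editor's note: a standalone illustration of the month-range construction above, with invented sample dates; it assumes python-dateutil, which the original code already relies on for relativedelta.

from datetime import datetime
from dateutil.relativedelta import relativedelta

start = datetime.strptime("01/03/2024", "%d/%m/%Y")
end = datetime.strptime("01/06/2024", "%d/%m/%Y")

months = []
while start <= end:
    months.append("{:02d}_{}".format(start.month, start.year))
    start += relativedelta(months=1)

print(months)  # ['03_2024', '04_2024', '05_2024', '06_2024']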
qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])},
{"valide": '1', 'locked':'0', 'statut':'2',
"date_reponse": {"$exists": True},
'user_response.question_id': str(diction['question_id']),
},
filt_related_collection_and_related_collection_id,
filt_class_id,
{
'mysy_survey_date_reponse': {'$gte': filt_periode_start_date_ISODATE,
'$lte': filt_periode_end_date_ISODATE}}, ]}
pipe_qry = ([
{"$addFields": {
"mysy_survey_date_reponse": {
'$dateFromString': {
'dateString': { '$substr': [ "$date_reponse", 0, 10 ] },
'format': "%Y-%m-%d"
}
}
}
},
{'$match': qery_match},
])
print(" ### Get_Survey_Stat_Data ici pipe_qry = ", pipe_qry)
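Editor's note: the pipeline above works around the fact that date_reponse is stored as a string; an $addFields/$dateFromString stage converts its first 10 characters into a real date so the $match range test can apply. A reduced, self-contained sketch of that idea follows; the connection string, database and collection names are placeholders, not values from this commit.

from datetime import datetime
from pymongo import MongoClient

coll = MongoClient("mongodb://localhost:27017")["demo_db"]["survey"]  # placeholder

pipeline = [
    {"$addFields": {"mysy_survey_date_reponse": {
        "$dateFromString": {
            "dateString": {"$substr": ["$date_reponse", 0, 10]},
            "format": "%Y-%m-%d"}}}},
    {"$match": {"mysy_survey_date_reponse": {
        "$gte": datetime(2024, 1, 1), "$lte": datetime(2024, 6, 30)}}},
]
for doc in coll.aggregate(pipeline):
    print(doc["_id"])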
RetObject = []
val_tmp = 0
raw_reponse_data = []
for retval in MYSY_GV.dbname['survey'].aggregate(pipe_qry):
for tmp in retval['user_response']:
if (tmp['question_id'] == str(diction['question_id'])):
node = {}
node['related_collection'] = str(diction['related_collection'])
node['related_collection_id'] = str(diction['related_collection_id'])
node['question_id'] = str(diction['question_id'])
node['question_question'] = str(is_question_data['question'])
node['question_type'] = str(is_question_data['type'])
node['question_response'] = str(tmp['response'])
node['question_date_reponse'] = str(retval['date_reponse'])
node['question_date_reponse_mois_annee'] = str(retval['date_reponse'])[5:7]+"_"+str(retval['date_reponse'])[0:4]
raw_reponse_data.append(node)
print(" ### les reponses brutes sont : ", raw_reponse_data)
global_nb_question_response = 0
global_somme_question_response = 0
global_moyenne_question_response = 0
for tmp in range_date_month:
nb_question_response = 0
somme_question_response = 0
for local_data in raw_reponse_data:
if (str(local_data['question_date_reponse_mois_annee']) == str(tmp['month_year'])):
nb_question_response = nb_question_response + 1
if( question_analyse_type == "somme" and str(local_data['question_response']).lower().strip() == "oui"):
somme_question_response = somme_question_response + 1
elif(question_analyse_type == "moyenne" ):
somme_question_response = somme_question_response + mycommon.tryFloat(str(local_data['question_response']).lower().strip())
if( nb_question_response > 0 ):
"""print(" ## calcul moyenne : ")
print(" ## calcul somme_question_response : ", somme_question_response)
print(" ## calcul nb_question_response : ", nb_question_response)
"""
moyenne_question_response = round(somme_question_response / nb_question_response, 2)
tmp['nb_question_response'] = mycommon.tryFloat(str(nb_question_response))
tmp['somme_question_response'] = mycommon.tryFloat(str(somme_question_response))
tmp['moyenne_question_response'] = mycommon.tryFloat(str(moyenne_question_response))
tmp['count'] = mycommon.tryFloat(str(nb_question_response))
if( str(question_analyse_type) == "moyenne"):
tmp['value'] = mycommon.tryFloat(str(moyenne_question_response))
tmp['type_question'] = "moyenne"
elif( str(question_analyse_type) == "somme"):
tmp['value'] = mycommon.tryFloat(str(somme_question_response))
tmp['type_question'] = "somme"
else:
tmp['nb_question_response'] = 0
tmp['somme_question_response'] = 0
tmp['moyenne_question_response'] = 0
tmp['count'] = 0
tmp['value'] = 0
global_nb_question_response = global_nb_question_response + mycommon.tryFloat(str( tmp['nb_question_response']))
global_somme_question_response = global_somme_question_response + mycommon.tryFloat(str( tmp['somme_question_response']))
if( global_nb_question_response > 0 ):
global_moyenne_question_response = round(global_somme_question_response / global_nb_question_response, 2)
node = {}
node['global_nb_question_response'] = global_nb_question_response
node['global_somme_question_response'] = global_somme_question_response
node['global_moyenne_question_response'] = global_moyenne_question_response
final_data = {}
final_data['data'] = range_date_month
final_data['global'] = node
print(" ### les reponses final_data: ", final_data)
RetObject = []
RetObject.append(mycommon.JSONEncoder().encode(final_data))
return True, RetObject
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
return False, " Impossible de récupérer les données statistiques "
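Editor's note: to make the two analysis types concrete, here is a self-contained recomputation of a single month's figures on invented sample responses (not data from this commit): "somme" counts the "oui" answers of a 'bool' question, while "moyenne" averages numeric 'note'/'entier' answers.

def monthly_stat(responses, question_analyse_type):
    # Invented example data; mirrors the per-month computation above.
    nb = len(responses)
    if question_analyse_type == "somme":
        somme = sum(1 for r in responses if str(r).strip().lower() == "oui")
    else:  # "moyenne"
        somme = sum(float(r) for r in responses)
    moyenne = round(somme / nb, 2) if nb else 0
    return {"count": nb, "somme": somme, "moyenne": moyenne}

print(monthly_stat(["oui", "non", "oui"], "somme"))   # {'count': 3, 'somme': 2, 'moyenne': 0.67}
print(monthly_stat(["4", "5", "3"], "moyenne"))       # {'count': 3, 'somme': 12.0, 'moyenne': 4.0}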
"""
Export Excel des resultat du Get_Survey_Stat_Data_For_Given_related_collection_id
"""
def Export_Excel_Survey_Stat_Data_For_Given_related_collection_id(diction):
try:
diction = mycommon.strip_dictionary(diction)
"""
Verification des input acceptés
"""
field_list = ['token', 'periode_start_date', 'periode_end_date', 'related_collection', 'related_collection_id',
'question_id', 'class_id']
incom_keys = diction.keys()
for val in incom_keys:
if val not in field_list and val.startswith('my_') is False:
mycommon.myprint(str(
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
return False, " Les informations fournies sont incorrectes"
"""
Verification des champs obligatoires
"""
field_list_obligatoire = ['token', 'related_collection', 'related_collection_id', 'question_id',
'periode_start_date', 'periode_end_date', 'class_id']
for val in field_list_obligatoire:
if val not in diction:
mycommon.myprint(
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
return False, " Les informations fournies sont incorrectes"
"""
Verification de l'identité et autorisation de l'entité qui
appelle cette API
"""
token = ""
if ("token" in diction.keys()):
if diction['token']:
token = diction['token']
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
if (local_status is not True):
return local_status, my_partner
"""
Recuperer les données de la question
"""
is_question_formulaire_exist = MYSY_GV.dbname['formulaire'].count_documents(
{'partner_owner_recid': str(my_partner['recid']),
'valide': '1', 'locked': '0',
'list_questions._id': str(diction['question_id'])})
if (is_question_formulaire_exist != 1):
mycommon.myprint(" L'identifiant de la question est invalide ")
return False, " L'identifiant de la question est invalide "
is_question_formulaire = MYSY_GV.dbname['formulaire'].find_one(
{'partner_owner_recid': str(my_partner['recid']),
'valide': '1', 'locked': '0',
'list_questions._id': str(diction['question_id'])})
is_question_data = None
for tmp in is_question_formulaire['list_questions']:
if( tmp['_id'] == str(diction['question_id'])):
is_question_data = tmp
if( is_question_data is None ):
mycommon.myprint(" L'identifiant de la question est invalide (2) ")
return False, " L'identifiant de la question est invalide (2) "
if( "is_statistic" not in is_question_data.keys() or str(is_question_data['is_statistic']) != "1"):
mycommon.myprint(" La question n'est pas soumise à une analyse statistique ")
return False, " La question n'est pas soumise à une analyse statistique "
question_analyse_type = ""
if( is_question_data['type'] in ['note', 'entier']):
question_analyse_type = "moyenne"
elif (is_question_data['type'] in ['bool']):
question_analyse_type = "somme"
else :
mycommon.myprint(" Le paramétrage de la question ne permet pas de déterminer la nature statistiques des données ")
return False, " Le paramétrage de la question ne permet pas de déterminer la nature statistiques des données "
"""
Si on a des class_id alors on cree les filtre
"""
filt_class_id = {}
if ("class_id" in diction.keys() and diction['class_id']):
filt_class_id = {'class_id': str(diction['class_id'])}
"""
Si on a des related_collection, 'related_collection_id' on cree les filtre
"""
filt_related_collection_and_related_collection_id = {}
if ("related_collection" in diction.keys() and diction[
'related_collection'] and "related_collection_id" in diction.keys() and diction['related_collection_id']):
filt_related_collection_and_related_collection_id = {
'related_collection': str(diction['related_collection']),
'related_collection_id': str(diction['related_collection_id'])}
"""
Si on a des dates, 'date_from', 'date_to', verifier qu'elles sont valides
"""
@@ -2353,9 +2662,10 @@ def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
{"valide": '1', 'locked':'0', 'statut':'2',
"date_reponse": {"$exists": True},
'user_response.question_id': str(diction['question_id']),
'related_collection':str(diction['related_collection']),
'related_collection_id': str(diction['related_collection_id']),
},
filt_class_id,
filt_related_collection_and_related_collection_id,
{
'mysy_survey_date_reponse': {'$gte': filt_periode_start_date_ISODATE,
@@ -2397,7 +2707,11 @@ def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
raw_reponse_data.append(node)
#print(" ### les reponses brutes sont : ", raw_reponse_data)
print(" ### les reponses brutes sont : ", raw_reponse_data)
global_nb_question_response = 0
global_somme_question_response = 0
global_moyenne_question_response = 0
for tmp in range_date_month:
nb_question_response = 0
@@ -2406,10 +2720,19 @@ def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
for local_data in raw_reponse_data:
if (str(local_data['question_date_reponse_mois_annee']) == str(tmp['month_year'])):
nb_question_response = nb_question_response + 1
if( str(local_data['question_response']).lower().strip() == "oui"):
if( question_analyse_type == "somme" and str(local_data['question_response']).lower().strip() == "oui"):
somme_question_response = somme_question_response + 1
elif(question_analyse_type == "moyenne" ):
somme_question_response = somme_question_response + mycommon.tryFloat(str(local_data['question_response']).lower().strip())
if( nb_question_response > 0 ):
"""print(" ## calcul moyenne : ")
print(" ## calcul somme_question_response : ", somme_question_response)
print(" ## calcul nb_question_response : ", nb_question_response)
"""
moyenne_question_response = round(somme_question_response / nb_question_response, 2)
tmp['nb_question_response'] = mycommon.tryFloat(str(nb_question_response))
@@ -2432,14 +2755,115 @@ def Get_Survey_Stat_Data_For_Given_related_collection_id(diction):
tmp['count'] = 0
tmp['value'] = 0
#print(" ### les reponses range_date_month: ", range_date_month)
RetObject = []
RetObject.append(mycommon.JSONEncoder().encode(range_date_month))
return True, RetObject
global_nb_question_response = global_nb_question_response + mycommon.tryFloat(str( tmp['nb_question_response']))
global_somme_question_response = global_somme_question_response + mycommon.tryFloat(str( tmp['somme_question_response']))
if( global_nb_question_response > 0 ):
global_moyenne_question_response = round(global_somme_question_response / global_nb_question_response, 2)
node = {}
node['global_nb_question_response'] = global_nb_question_response
node['global_somme_question_response'] = global_somme_question_response
node['global_moyenne_question_response'] = global_moyenne_question_response
final_data = {}
final_data['data'] = range_date_month
final_data['global'] = node
print(" ### les reponses final_data: ", final_data)
"""
Preparation des exports Excels à faire
"""
todays_date = str(date.today().strftime("%d/%m/%Y"))
ts = datetime.now().timestamp()
ts = str(ts).replace(".", "").replace(",", "")[-5:]
orig_file_name = "Export_Excel_Report_" + str(my_partner['recid']) + "_" + str(ts) + ".xlsx"
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
tab_exported_fields = [ 'date_export', 'filtre_date_debut', 'filtre_date_fin', 'question', 'type_question', 'periode', 'nb_question_response',
'somme_question_response', 'moyenne_question_response']
# Create a workbook and add a worksheet.
workbook = xlsxwriter.Workbook(outputFilename)
worksheet = workbook.add_worksheet()
row = 0
column = 0
# Ecrire l'entete
for header_item in tab_exported_fields:
worksheet.write(row, column, header_item)
column += 1
# Ecrire les lignes du fichier excel
for val_tmp in range_date_month:
column = 0
row = row + 1
worksheet.write(row, column, str(todays_date))
column += 1
worksheet.write(row, column, str(diction['periode_start_date']))
column += 1
worksheet.write(row, column, str(diction['periode_end_date']))
column += 1
worksheet.write(row, column, str(is_question_data['question']))
column += 1
worksheet.write(row, column, str(is_question_data['type']))
column += 1
if( "month_year" in val_tmp.keys() ):
worksheet.write(row, column, str(val_tmp['month_year']))
column += 1
else:
worksheet.write(row, column, "")
column += 1
if ("nb_question_response" in val_tmp.keys()):
worksheet.write(row, column, str(val_tmp['nb_question_response']))
column += 1
else:
worksheet.write(row, column, "")
column += 1
if ("somme_question_response" in val_tmp.keys()):
worksheet.write(row, column, str(val_tmp['somme_question_response']))
column += 1
else:
worksheet.write(row, column, "")
column += 1
if ("moyenne_question_response" in val_tmp.keys()):
worksheet.write(row, column, str(val_tmp['moyenne_question_response']))
column += 1
else:
worksheet.write(row, column, "")
column += 1
workbook.close()
if os.path.exists(outputFilename):
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
return True, send_file(outputFilename, as_attachment=True)
return False, " Impossible de générer l'export csv (2) "
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
return False, " Impossible de récupérer les données statistiques "
return False, " Impossible de générer l'export csv "
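Editor's note: the export above follows the usual xlsxwriter pattern, one header row followed by one row per entry of range_date_month. A minimal, self-contained sketch of that pattern is shown below; the file name and figures are invented for the example.

import xlsxwriter

headers = ["periode", "nb_question_response", "moyenne_question_response"]
rows = [["03_2024", 3, 4.0], ["04_2024", 2, 4.5]]  # invented figures

workbook = xlsxwriter.Workbook("demo_export.xlsx")
worksheet = workbook.add_worksheet()

for col, name in enumerate(headers):                 # header line
    worksheet.write(0, col, name)
for row_idx, row_values in enumerate(rows, start=1):  # one line per month
    for col, value in enumerate(row_values):
        worksheet.write(row_idx, col, value)

workbook.close()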
"""
@@ -2454,7 +2878,7 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id(diction):
"""
Verification des input acceptés
"""
field_list = ['token', 'related_collection', 'related_collection_id', ]
field_list = ['token', 'related_collection', 'related_collection_id', 'class_id']
incom_keys = diction.keys()
for val in incom_keys:
@@ -2466,7 +2890,7 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id(diction):
"""
Verification des champs obligatoires
"""
field_list_obligatoire = ['token', 'related_collection', 'related_collection_id', ]
field_list_obligatoire = ['token', 'related_collection', 'related_collection_id', 'class_id']
for val in field_list_obligatoire:
if val not in diction:
@@ -2487,15 +2911,33 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id(diction):
if (local_status is not True):
return local_status, my_partner
"""
Si on a des class_id alors on cree les filtre
/!\ : quand on utilise le filtre "class_id", on verrouille "related_collection" sur la valeur : "inscription"
"""
filt_class_id = {}
if ("class_id" in diction.keys() and diction['class_id']):
filt_class_id = {'class_id': str(diction['class_id']),
'related_collection':'inscription'}
"""
Si on a des related_collection, 'related_collection_id' on cree les filtre
"""
filt_related_collection_and_related_collection_id = {}
if ("related_collection" in diction.keys() and diction[
'related_collection'] and "related_collection_id" in diction.keys() and diction['related_collection_id']):
filt_related_collection_and_related_collection_id = {
'related_collection': str(diction['related_collection']),
'related_collection_id': str(diction['related_collection_id'])}
#print(" ### range_date_month = ", range_date_month)
qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])},
{"valide": '1', 'locked':'0', 'statut':'2',
"date_reponse": {"$exists": True},
'user_response.is_statistic': "1",
'related_collection':str(diction['related_collection']),
'related_collection_id': str(diction['related_collection_id']),
},
filt_class_id,
filt_related_collection_and_related_collection_id,
]}
pipe_qry = ([
@@ -2503,7 +2945,7 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id(diction):
])
#print(" ### Get_Survey_Stat_Data ici pipe_qry = ", pipe_qry)
print(" ### Get_Survey_Stat_Data ici pipe_qry = ", pipe_qry)
RetObject = []
val_tmp = 0
@@ -2515,7 +2957,7 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id(diction):
for tmp in retval['user_response']:
if (tmp['is_statistic'] == "1" and str(tmp['question_id']) not in raw_question_id):
raw_question_id.append(str(tmp['question_id']))
print(" ## tmp = ", tmp)
node = {}
node['id'] = str(cpt)
cpt = cpt + 1
@@ -2524,6 +2966,7 @@ def Get_Entity_Question_List_With_Stat_For_Given_related_collection_id(diction):
node['related_collection_id'] = str(diction['related_collection_id'])
node['question_id'] = str(tmp['question_id'])
node['question_question'] = str(tmp['question'])
node['class_id'] = str(retval['class_id'])
if (str(tmp['type']) in ['note', 'entier']):
node['question_type'] = "Moyenne"