parent aa7b1f1f85
commit d18826d40a

@@ -4,24 +4,27 @@
    <option name="autoReloadType" value="SELECTIVE" />
  </component>
  <component name="ChangeListManager">
    <list default="true" id="c6d0259a-16e1-410d-91a1-830590ee2a08" name="Changes" comment="12/07/2025 - 12h">
    <list default="true" id="c6d0259a-16e1-410d-91a1-830590ee2a08" name="Changes" comment="qsd">
      <change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Dashbord_queries/factures_tbd_qries.py" beforeDir="false" afterPath="$PROJECT_DIR$/Dashbord_queries/factures_tbd_qries.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Dashbord_queries/formation_tbd_qries.py" beforeDir="false" afterPath="$PROJECT_DIR$/Dashbord_queries/formation_tbd_qries.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Dashbord_queries/inscription_tdb_qries.py" beforeDir="false" afterPath="$PROJECT_DIR$/Dashbord_queries/inscription_tdb_qries.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/GlobalVariable.py" beforeDir="false" afterPath="$PROJECT_DIR$/GlobalVariable.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Inscription_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/Inscription_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Job_Cron.py" beforeDir="false" afterPath="$PROJECT_DIR$/Job_Cron.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Job_Cron_Common.py" beforeDir="false" afterPath="$PROJECT_DIR$/Job_Cron_Common.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Log/log_file.log" beforeDir="false" afterPath="$PROJECT_DIR$/Log/log_file.log" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Session_Formation.py" beforeDir="false" afterPath="$PROJECT_DIR$/Session_Formation.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/Session_Formation_Sequence.py" beforeDir="false" afterPath="$PROJECT_DIR$/Session_Formation_Sequence.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/apprenant_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/apprenant_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/class_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/class_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/data_indexees.csv" beforeDir="false" />
      <change beforePath="$PROJECT_DIR$/ela_output_test_file_pandas_2.txt" beforeDir="false" />
      <change beforePath="$PROJECT_DIR$/email_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/email_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/email_queu/email_queu_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/email_queu/email_queu_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/emargement.py" beforeDir="false" afterPath="$PROJECT_DIR$/emargement.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/groupe_inscrit_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/groupe_inscrit_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/main.py" beforeDir="false" afterPath="$PROJECT_DIR$/main.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/partner_client.py" beforeDir="false" afterPath="$PROJECT_DIR$/partner_client.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/module_editique.py" beforeDir="false" afterPath="$PROJECT_DIR$/module_editique.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/partner_invoice.py" beforeDir="false" afterPath="$PROJECT_DIR$/partner_invoice.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/prj_common.py" beforeDir="false" afterPath="$PROJECT_DIR$/prj_common.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/ressources_humaines.py" beforeDir="false" afterPath="$PROJECT_DIR$/ressources_humaines.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/survey_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/survey_mgt.py" afterDir="false" />
      <change beforePath="$PROJECT_DIR$/tools_cherif/tools_cherif.py" beforeDir="false" afterPath="$PROJECT_DIR$/tools_cherif/tools_cherif.py" afterDir="false" />
    </list>
    <option name="SHOW_DIALOG" value="false" />
    <option name="HIGHLIGHT_CONFLICTS" value="true" />

@@ -464,7 +467,7 @@
      <option name="project" value="LOCAL" />
      <updated>1747251650255</updated>
    </task>
    <option name="localTasksCounter" value="490" />
    <option name="localTasksCounter" value="491" />
    <servers />
  </component>
  <component name="Vcs.Log.Tabs.Properties">

@@ -524,13 +527,13 @@
    <MESSAGE value="20/06/2025 -18h" />
    <MESSAGE value="ssqsdqsd" />
    <MESSAGE value="25/06/2025 - 18h" />
    <MESSAGE value="qsd" />
    <MESSAGE value="qsdssdd" />
    <MESSAGE value="ss" />
    <MESSAGE value="ssss" />
    <MESSAGE value="03/07/2025 - 19h30" />
    <MESSAGE value="06/07/2025 - 12h" />
    <MESSAGE value="12/07/2025 - 12h" />
    <option name="LAST_COMMIT_MESSAGE" value="12/07/2025 - 12h" />
    <MESSAGE value="qsd" />
    <option name="LAST_COMMIT_MESSAGE" value="qsd" />
  </component>
</project>
@@ -2415,6 +2415,258 @@ def Get_Qery_List_Facture_Previsionnelle_Data_By_Periode(diction):
        return False, " Impossible de récupérer les données "


"""
Retrieve the revenue generated by a list of training courses over a period
(analysis of the invoice lines)
"""
def Get_Qery_List_Factures_For_List_Class(diction):
    try:
        diction = mycommon.strip_dictionary(diction)

        """
        Check the accepted input fields
        """
        field_list = ['token', 'periode_start_date', 'periode_end_date', 'filter_value',
                      'tab_formation_ids', 'from_beginning']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the mandatory fields
        """
        field_list_obligatoire = ['token', ]
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and the authorisation of the entity calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        tab_formation_ids_split = []
        tab_formation_ids = []
        tab_formation_internal_url = []
        if ("tab_formation_ids" in diction.keys()):
            tab_formation_ids_split = str(diction['tab_formation_ids']).split(",")

            for tmp in tab_formation_ids_split:
                if (tmp):
                    tab_formation_ids.append((str(tmp)))
                    local_class_data = MYSY_GV.dbname['myclass'].find_one({'_id': ObjectId(str(tmp)),
                                                                           'partner_owner_recid': str(my_partner['recid'])}, {'internal_url': 1})
                    if (local_class_data and "internal_url" in local_class_data.keys() and local_class_data['internal_url']):
                        tab_formation_internal_url.append(str(local_class_data['internal_url']))

        filt_periode_start_date = ""
        if ("periode_start_date" in diction.keys() and diction['periode_start_date']):
            filt_periode_start_date = str(diction['periode_start_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_periode_start_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa."

        filt_periode_end_date = ""
        if ("periode_end_date" in diction.keys() and diction['periode_end_date']):
            filt_periode_end_date = str(diction['periode_end_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_periode_end_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de fin' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de fin' n'est pas au format jj/mm/aaaa."

        """
        If 'filter_value' is m0 or m1, fetch the dates of the corresponding month
        and overwrite filt_periode_start_date and filt_periode_end_date.
        """
        if ('filter_value' in diction.keys()):
            # print(" filter_value = ", diction['filter_value'])
            if (str(diction['filter_value']) == "m0"):
                # Get the current month's dates
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_periode_start_date = start_current_month_date
                filt_periode_end_date = end_current_month_date

                # print(" ### filt_session_start_date = ", filt_session_start_date, " ### filt_session_end_date = ", filt_session_end_date)

            elif (str(diction['filter_value']) == "m1"):
                # Get the previous month's dates
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_periode_start_date = start_current_month_date
                filt_periode_end_date = end_current_month_date

        filt_periode_start_date_ISODATE = datetime.strptime(str(filt_periode_start_date), '%d/%m/%Y')
        filt_periode_end_date_ISODATE = datetime.strptime(str(filt_periode_end_date), '%d/%m/%Y')

        if ("from_beginning" in diction.keys() and diction['from_beginning'] == "1"):
            filt_mysy_invoice_date = {}
        else:
            filt_mysy_invoice_date = {
                'mysy_invoice_date': {'$gte': filt_periode_start_date_ISODATE,
                                      '$lte': filt_periode_end_date_ISODATE}}

        filt_list_class_internal_url = {}
        filt_list_class_id = {}
        if (len(tab_formation_ids) > 0):
            filt_list_class_id = {'$in': ["$class_id", tab_formation_ids]}
            filt_list_class_internal_url = {'$in': ["$order_line_formation", tab_formation_internal_url]}

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])},
                               {"valide": '1'},
                               filt_mysy_invoice_date, ]}

        pipe_qry = ([
            {"$addFields": {
                "mysy_invoice_date": {
                    '$dateFromString': {
                        'dateString': '$invoice_date',
                        'format': "%d/%m/%Y"
                    }
                }
            }
            },

            {'$match': qery_match},
            {'$lookup': {
                'from': 'partner_invoice_line',
                "let": {"invoice_header_id": {'$toString': "$_id"},
                        'partner_invoice_line_partner_owner_recid': '$partner_owner_recid',
                        'partner_invoice_line_invoice_header_ref_interne': '$invoice_header_ref_interne'},
                'pipeline': [
                    {'$match':
                        {'$expr':
                            {'$and':
                                [
                                    {'$eq': ["$valide", "1"]},
                                    filt_list_class_internal_url,
                                    {'$eq': ["$invoice_header_ref_interne", '$$partner_invoice_line_invoice_header_ref_interne']},
                                    {'$eq': ["$partner_owner_recid", '$$partner_invoice_line_partner_owner_recid']},
                                ]
                            }
                        }
                    },
                ],
                'as': 'collection_partner_invoice_line'
            }
            },
            {
                '$unwind': '$collection_partner_invoice_line'
            },
            {'$group': {
                '_id': {
                    "class_internal_url": "$collection_partner_invoice_line.order_line_formation",
                },
                "TotalAmount_HT": {
                    "$sum": {'$toDouble': '$collection_partner_invoice_line.order_line_montant_hors_taxes'}},
                "count": {"$sum": 1}
            }
            },
            {
                '$sort': {'count': -1}
            },

        ])

        print(" ### Get_Qery_List_Factures_Data_By_Class ici pipe_qry = ", pipe_qry)

        RetObject = []
        val_tmp = 0

        axis_data = []
        series_TotalAmount_data = []

        tab_data = []
        tab_axis_data_class_code = []
        tab_axis_data_class_title = []
        """
        Retrieve the data, format it and return it
        """
        for retval in MYSY_GV.dbname['partner_invoice_header'].aggregate(pipe_qry):
            val_tmp = val_tmp + 1

            node = {}
            node['class_internal_url'] = retval['_id']['class_internal_url']
            node['TotalAmount_HT'] = retval['TotalAmount_HT']
            node['label'] = retval['TotalAmount_HT']
            node['nb_line'] = retval['count']

            # Retrieve the course data
            class_data = MYSY_GV.dbname['myclass'].find_one({'internal_url': str(retval['_id']['class_internal_url']),
                                                             'partner_owner_recid': str(my_partner['recid']),
                                                             'valide': '1'})

            if (class_data and "title" in class_data.keys()):
                node['class_title'] = class_data['title']
                tab_axis_data_class_title.append(class_data['title'])
            else:
                node['class_title'] = ""
                tab_axis_data_class_title.append("")

            if (class_data and "external_code" in class_data.keys()):
                node['class_external_code'] = class_data['external_code']
                tab_axis_data_class_code.append(class_data['external_code'])
            else:
                node['class_external_code'] = ""
                tab_axis_data_class_code.append("")

            tab_data.append(node)

        json_retval = {}
        json_retval['data'] = tab_data
        json_retval['axis_class_code'] = tab_axis_data_class_code
        json_retval['axis_class_title'] = tab_axis_data_class_title

        print(" ### Get_Qery_List_Factures_Data_By_Class json_retval = ", json_retval)
        RetObject.append(mycommon.JSONEncoder().encode(json_retval))

        return True, RetObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de récupérer les données "

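A minimal usage sketch for `Get_Qery_List_Factures_For_List_Class` (not part of the commit). The payload keys come from the function's `field_list`; the token, the ObjectId strings and the import path are placeholders/assumptions.

```python
# Editor's sketch: calling the revenue-by-course query added above.
import json
from Dashbord_queries import factures_tbd_qries   # assumed import path

diction = {
    'token': 'MY_PARTNER_TOKEN',                 # placeholder partner token
    'periode_start_date': '01/01/2025',          # jj/mm/aaaa, validated by mycommon.CheckisDate
    'periode_end_date': '30/06/2025',
    'tab_formation_ids': '66a1b2c3d4e5f60718293a4b,66a1b2c3d4e5f60718293a4c',  # comma-separated myclass _id strings (placeholders)
    'from_beginning': '0',
}

status, ret = factures_tbd_qries.Get_Qery_List_Factures_For_List_Class(diction)
if status:
    payload = json.loads(ret[0])                 # one JSON document per call
    for row in payload['data']:
        print(row['class_external_code'], row['TotalAmount_HT'], row['nb_line'])
```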
||||
"""
|
||||
Voici la requete pour le chiffre d'affaire groupé par client et par mois et année
|
||||
On obtient un resultat comme ca :
|
||||
|
|
|
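The m0/m1 branches above rely on `mycommon.Get_Current_Month_Start_End_Date` and `Get_Previous_Month_Start_End_Date`. A standalone sketch of an equivalent computation (not part of the commit), assuming those helpers produce dd/mm/yyyy strings, as the later `strptime(..., '%d/%m/%Y')` calls imply:

```python
# Editor's sketch: month-window computation equivalent to the 'm0' / 'm1' filter values.
import calendar
from datetime import date
from dateutil.relativedelta import relativedelta

def month_start_end(offset_months=0):
    """Return (start, end) of the month `offset_months` before the current one, as dd/mm/YYYY strings."""
    ref = date.today() - relativedelta(months=offset_months)
    last_day = calendar.monthrange(ref.year, ref.month)[1]
    return (ref.replace(day=1).strftime('%d/%m/%Y'),
            ref.replace(day=last_day).strftime('%d/%m/%Y'))

print(month_start_end(0))  # 'm0' -> current month window
print(month_start_end(1))  # 'm1' -> previous month window
```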
@@ -126,7 +126,7 @@ def Get_Qery_Formation_By_Session_By_Periode(diction):
            filt_session_start_date_ISODATE_work += relativedelta(months=1)

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status':'1'},
                               {"formateur_id": {"$exists": 'true'}}, {
                                   'mysy_date_debut_session': {'$gte': filt_session_start_date_ISODATE,
                                                               '$lte': filt_session_end_date_ISODATE}}, ]}

@@ -356,7 +356,7 @@ def Get_Qery_Formation_By_Session_By_Periode_Export_CSV(diction):
            filt_session_start_date_ISODATE_work += relativedelta(months=1)

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status':'1'},
                               {"formateur_id": {"$exists": 'true'}}, {
                                   'mysy_date_debut_session': {'$gte': filt_session_start_date_ISODATE,
                                                               '$lte': filt_session_end_date_ISODATE}}, ]}

@@ -640,7 +640,7 @@ def Get_Qery_Session_By_Periode_V2(diction):
        filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
        filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status':'1'},

                               {'mysy_session_date_debut': {'$gte': filt_session_start_date_ISODATE,
                                                            '$lte': filt_session_end_date_ISODATE}},

@@ -837,7 +837,7 @@ def Get_Qery_Session_By_Formation_V2(diction):
        filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
        filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status':'1'},

                               {'mysy_session_date_debut': {'$gte': filt_session_start_date_ISODATE,
                                                            '$lte': filt_session_end_date_ISODATE}},

@@ -1039,7 +1039,7 @@ def Get_Qery_Session_By_Periode_Cumule_V2(diction):
        filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
        filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status':'1'},

                               {'mysy_session_date_debut': {'$gte': filt_session_start_date_ISODATE,
                                                            '$lte': filt_session_end_date_ISODATE}},
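The five hunks above all make the same change: the top-level `$match` of these dashboard queries now also requires `session_status == '1'`. A minimal sketch of the resulting filter (not part of the commit; the recid value is a placeholder):

```python
# Editor's sketch: the dashboard match after this change only keeps valid, *active*
# sessions whose start date falls inside the requested window.
from datetime import datetime

filt_session_start_date_ISODATE = datetime.strptime('01/06/2025', '%d/%m/%Y')
filt_session_end_date_ISODATE = datetime.strptime('30/06/2025', '%d/%m/%Y')

qery_match = {'$and': [
    {"partner_owner_recid": "partner_recid_placeholder"},
    {"valide": '1', 'session_status': '1'},          # 'session_status' is the new condition
    {'mysy_session_date_debut': {'$gte': filt_session_start_date_ISODATE,
                                 '$lte': filt_session_end_date_ISODATE}},
]}
# used as: MYSY_GV.dbname['session_formation'].aggregate([..., {'$match': qery_match}, ...])
```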
@@ -1126,3 +1126,312 @@ def Get_Qery_Session_By_Periode_Cumule_V2(diction):
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de récupérer les données "


"""
Qualiopi indicator: number of training hours delivered over a period
"""
def Get_Qery_Session_For_List_Class(diction):
    try:
        diction = mycommon.strip_dictionary(diction)

        """
        Check the accepted input fields
        """
        field_list = ['token', 'session_start_date', 'session_end_date', 'filter_value',
                      'tab_formation_ids', 'from_beginning']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the mandatory fields
        """
        field_list_obligatoire = ['token', 'session_start_date', 'session_end_date']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and the authorisation of the entity calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        tab_formation_ids_split = []
        tab_formation_ids = []
        if ("tab_formation_ids" in diction.keys()):
            tab_formation_ids_split = str(diction['tab_formation_ids']).split(",")

            for tmp in tab_formation_ids_split:
                if (tmp):
                    tab_formation_ids.append((str(tmp)))

        filt_session_start_date = ""
        if ("session_start_date" in diction.keys() and diction['session_start_date']):
            filt_session_start_date = str(diction['session_start_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_session_start_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa."

        filt_session_end_date = ""
        if ("session_end_date" in diction.keys() and diction['session_end_date']):
            filt_session_end_date = str(diction['session_end_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_session_end_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa."

        """
        If 'filter_value' is m0 or m1, fetch the dates of the corresponding month
        and overwrite filt_session_start_date and filt_session_end_date.
        """
        if ('filter_value' in diction.keys()):
            if (str(diction['filter_value']) == "m0"):
                # Get the current month's dates
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_session_start_date = start_current_month_date
                filt_session_end_date = end_current_month_date

            elif (str(diction['filter_value']) == "m1"):
                # Get the previous month's dates
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_session_start_date = start_current_month_date
                filt_session_end_date = end_current_month_date

        RetObject = []
        val_tmp = 1

        filt_session_start_date_ISODATE = datetime.strptime(str(filt_session_start_date), '%d/%m/%Y')
        filt_session_end_date_ISODATE = datetime.strptime(str(filt_session_end_date), '%d/%m/%Y')

        todays_date = str(date.today().strftime("%Y-%m-%d"))
        filt_date_today_ISODATE = datetime.strptime(str(todays_date), '%Y-%m-%d')

        """
        Build the range of months between filt_session_start_date_ISODATE
        and filt_session_end_date_ISODATE
        """
        range_date_month = []
        start = filt_session_start_date_ISODATE
        end = filt_session_end_date_ISODATE
        while start <= end:
            node = {}
            node['month_year'] = '{:02d}'.format(start.month) + "_" + str(start.year)
            node['label'] = '{:02d}'.format(start.month) + "_" + str(start.year)
            node['TotalAmount'] = 0
            node['value'] = 0
            node['count'] = 0

            range_date_month.append(node)
            start += relativedelta(months=1)

        filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
        filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE

        if ("from_beginning" in diction.keys() and diction['from_beginning'] == "1"):
            filt_mysy_session_date = {
                'mysy_session_end_date': {'$lte': filt_date_today_ISODATE, }
            }

        else:
            filt_mysy_session_date = {
                'mysy_session_date_debut': {'$gte': filt_session_start_date_ISODATE},
                'mysy_session_end_date': {'$lte': filt_session_end_date_ISODATE, }
            }

        filt_list_class_id_mode2 = {}
        if (len(tab_formation_ids) > 0):
            filt_list_class_id_mode2 = {'class_id': {'$in': tab_formation_ids}}

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status': '1'},
                               filt_mysy_session_date, filt_list_class_id_mode2
                               ]}

        # print(" ### qery_match = ", qery_match)

        pipe_qry = ([
            {"$addFields": {
                "mysy_session_date_debut": {
                    '$dateFromString': {
                        'dateString': {"$substr": ["$date_debut", 0, 10]},
                        'format': "%d/%m/%Y"
                    }
                }
            }
            },
            {"$addFields": {
                "mysy_session_end_date": {
                    '$dateFromString': {
                        'dateString': {"$substr": ["$date_fin", 0, 10]},
                        'format': "%d/%m/%Y"
                    }
                }
            }
            },

            {'$match': qery_match},

            {
                "$lookup": {
                    'from': 'myclass',
                    'localField': 'class_internal_url',
                    'foreignField': 'internal_url',
                    "pipeline": [{'$project': {'title': 1, 'internal_url': 1, 'external_code': 1, 'published': 1,
                                               'duration': 1, 'duration_unit': 1}}
                                 ],
                    "as": "myclass_collection"
                }
            },

            {
                "$group": {
                    "_id": {
                        "class_id": "$myclass_collection._id",
                        "class_code": "$myclass_collection.external_code",
                        "class_title": "$myclass_collection.title",
                        "class_duration": "$myclass_collection.duration",
                        "class_duration_unit": "$myclass_collection.duration_unit",
                    },
                    "count": {
                        "$sum": 1
                    }
                }
            },
            {
                '$sort': {'count': -1}
            },

        ])

        print(" ### Get_Qery_Session_By_Formation_V2 ici pipe_qry = ", pipe_qry)

        duration_unit = ""
        volume_en_duration_unit = 0
        volume_ramene_en_heure = 0
        total_session_formation = 0
        total_nb_heure = 0

        axis_data = []
        my_data = []
        cpt = 0
        for retval in MYSY_GV.dbname['session_formation'].aggregate(pipe_qry):
            if (len(retval['_id']['class_code']) > 0):
                cpt = cpt + 1
                axis_data.append(str(retval['_id']['class_code'][0]))
                node = {}
                node['class_code'] = str(retval['_id']['class_code'][0])
                node['class_title'] = str(retval['_id']['class_title'][0])
                node['class_duration'] = str(retval['_id']['class_duration'][0])
                node['class_duration_unit'] = str(retval['_id']['class_duration_unit'][0])

                node['label'] = str(retval['_id']['class_code'][0])
                node['value'] = mycommon.tryFloat(str(retval['count']))
                node['count'] = mycommon.tryFloat(str(retval['count']))

                """
                Retrieve the hours/days/months conversion settings from the
                base_partner_setup collection: nb_heure_par_annee / nb_heure_par_mois / nb_heure_par_jour
                """
                if (str(retval['_id']['class_duration_unit'][0]) == "heure"):
                    volume_ramene_en_heure = mycommon.tryFloat(str(retval['_id']['class_duration'][0]))
                    local_session_volume = mycommon.tryFloat(str(volume_ramene_en_heure)) * mycommon.tryFloat(str(retval['count']))
                    total_session_formation = mycommon.tryFloat(str(total_session_formation)) + mycommon.tryFloat(str(local_session_volume))

                elif (str(retval['_id']['class_duration_unit'][0]) == "jour"):
                    nb_hour_converted = mycommon.Get_Partner_Hour_Per_Day(str(my_partner['recid']))
                    if (nb_hour_converted is False):
                        nb_hour_converted = "7"

                    volume_ramene_en_heure = mycommon.tryFloat(str(retval['_id']['class_duration'][0])) * mycommon.tryFloat(str(nb_hour_converted))
                    local_session_volume = mycommon.tryFloat(str(volume_ramene_en_heure)) * mycommon.tryFloat(str(retval['count']))
                    total_session_formation = mycommon.tryFloat(str(total_session_formation)) + mycommon.tryFloat(str(local_session_volume))

                elif (str(retval['_id']['class_duration_unit'][0]) == "semaine"):
                    nb_hour_converted = mycommon.Get_Partner_Hour_Per_Week(str(my_partner['recid']))
                    if (nb_hour_converted is False):
                        nb_hour_converted = "35"
                    volume_ramene_en_heure = mycommon.tryFloat(str(retval['_id']['class_duration'][0])) * mycommon.tryFloat(str(nb_hour_converted))
                    local_session_volume = mycommon.tryFloat(str(volume_ramene_en_heure)) * mycommon.tryFloat(
                        str(retval['count']))
                    total_session_formation = mycommon.tryFloat(str(total_session_formation)) + mycommon.tryFloat(
                        str(local_session_volume))

                elif (str(retval['_id']['class_duration_unit'][0]) == "mois"):
                    nb_hour_converted = mycommon.Get_Partner_Hour_Per_Month(str(my_partner['recid']))
                    if (nb_hour_converted is False):
                        nb_hour_converted = "152"
                    volume_ramene_en_heure = mycommon.tryFloat(str(retval['_id']['class_duration'][0])) * mycommon.tryFloat(str(nb_hour_converted))
                    local_session_volume = mycommon.tryFloat(str(volume_ramene_en_heure)) * mycommon.tryFloat(str(retval['count']))
                    total_session_formation = mycommon.tryFloat(str(total_session_formation)) + mycommon.tryFloat(str(local_session_volume))

                elif (str(retval['_id']['class_duration_unit'][0]) == "annee"):
                    nb_hour_converted = mycommon.Get_Partner_Hour_Per_Year(str(my_partner['recid']))
                    if (nb_hour_converted is False):
                        nb_hour_converted = "1820"
                    volume_ramene_en_heure = mycommon.tryFloat(
                        str(retval['_id']['class_duration'][0])) * mycommon.tryFloat(str(nb_hour_converted))
                    local_session_volume = mycommon.tryFloat(str(volume_ramene_en_heure)) * mycommon.tryFloat(
                        str(retval['count']))
                    total_session_formation = mycommon.tryFloat(str(total_session_formation)) + mycommon.tryFloat(
                        str(local_session_volume))

                my_data.append(node)

        RetObject = []
        json_retval = {}
        json_retval['data'] = my_data
        json_retval['axis_data'] = axis_data
        json_retval['total_nb_heure'] = str(total_session_formation)

        # print(" ### json_retval = ", json_retval)

        RetObject.append(mycommon.JSONEncoder().encode(json_retval))

        # print(" ### RetObject = ", RetObject)

        return True, RetObject
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de récupérer les données "

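The duration_unit branches above convert each course duration to hours, using partner-level settings with fall-back ratios (7 h/day, 35 h/week, 152 h/month, 1820 h/year). A condensed sketch of that conversion (not part of the commit), assuming the Get_Partner_Hour_Per_* helpers return a number or False exactly as used above:

```python
# Editor's sketch: duration -> hours conversion behind the Qualiopi
# "training hours delivered" indicator, with the same default ratios.
DEFAULT_HOURS = {'heure': 1, 'jour': 7, 'semaine': 35, 'mois': 152, 'annee': 1820}

def duration_in_hours(duration, unit, partner_ratio=None):
    """Convert `duration` expressed in `unit` to hours.
    `partner_ratio` stands in for mycommon.Get_Partner_Hour_Per_Day/Week/Month/Year,
    which return False when the partner has no specific setting."""
    if unit == 'heure':
        return float(duration)
    ratio = partner_ratio if partner_ratio not in (None, False) else DEFAULT_HOURS[unit]
    return float(duration) * float(ratio)

# A 3-day course, partner configured at 8 h/day, 5 sessions held in the period:
total_hours = duration_in_hours(3, 'jour', partner_ratio=8) * 5   # -> 120.0
print(total_hours)
```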
@@ -171,6 +171,7 @@ def Get_Qery_Inscription_By_Session_By_Periode_old(diction):
                                    }}]},

                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$session_status", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }

@@ -443,6 +444,7 @@ def Get_Qery_Inscription_By_Session_By_Periode(diction):
                                    }}]},

                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$session_status", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }

@@ -552,11 +554,11 @@ def Get_Qery_Inscription_By_Session_By_Periode(diction):
        json_retval['data'] = range_date_month
        json_retval['axis_data'] = axis_data

        print(" ### json_retval = ", json_retval)
        #print(" ### json_retval = ", json_retval)

        RetObject.append(mycommon.JSONEncoder().encode(json_retval))

        print(" ### RetObject = ", RetObject)
        #print(" ### RetObject = ", RetObject)

        return True, RetObject
    except Exception as e:

@@ -720,6 +722,7 @@ def Get_Qery_Inscription_By_Session_By_Periode_cumule(diction):
                                    }}]},

                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$session_status", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }

@@ -844,7 +847,7 @@ def Get_Qery_Inscription_By_Session_By_Periode_cumule(diction):

"""
TBD inscription groupé par Formation
TBD inscription groupé par Formation,
"""
def Get_Qery_Inscription_Group_By_Class(diction):
    try:

@@ -1001,6 +1004,7 @@ def Get_Qery_Inscription_Group_By_Class(diction):
                                    }}]},

                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$session_status", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }

@@ -1287,6 +1291,7 @@ def Get_Qery_Inscription_Group_By_Session(diction):
                                    }}]},

                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$session_status", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }

@@ -1553,7 +1558,7 @@ def Format_Inscription_Data(tab_diction):

"""
Cette fonction permet d'experter un dashbord en csv
Cette fonction permet d'exporter un dashbord en csv
"""

def TBD_Inscription_Export_Dashbord_To_Csv(diction):
@@ -1717,3 +1722,726 @@ def TBD_Inscription_Export_Dashbord_To_Csv(diction):
        return False, " Impossible d'exporter les données "


"""
Dashboard: number of validated registrations for a list of courses (Qualiopi indicator)
"""
def Get_Qery_Inscription_For_List_Class(diction):
    try:
        diction = mycommon.strip_dictionary(diction)

        """
        Check the accepted input fields
        """
        field_list = ['token', 'session_start_date', 'session_end_date', 'filter_value',
                      'tab_formation_ids', 'from_beginning']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the mandatory fields
        """
        field_list_obligatoire = ['token', 'session_start_date', 'session_end_date']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and the authorisation of the entity calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        tab_formation_ids_split = []
        tab_formation_ids = []
        if ("tab_formation_ids" in diction.keys()):
            tab_formation_ids_split = str(diction['tab_formation_ids']).split(",")

            for tmp in tab_formation_ids_split:
                if (tmp):
                    tab_formation_ids.append((str(tmp)))

        filt_session_start_date = ""
        if ("session_start_date" in diction.keys() and diction['session_start_date']):
            filt_session_start_date = str(diction['session_start_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_session_start_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa."

        filt_session_end_date = ""
        if ("session_end_date" in diction.keys() and diction['session_end_date']):
            filt_session_end_date = str(diction['session_end_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_session_end_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa."

        """
        If 'filter_value' is m0 or m1, fetch the dates of the corresponding month
        and overwrite filt_session_start_date and filt_session_end_date.
        """
        if ('filter_value' in diction.keys()):
            if (str(diction['filter_value']) == "m0"):
                # Get the current month's dates
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_session_start_date = start_current_month_date
                filt_session_end_date = end_current_month_date

            elif (str(diction['filter_value']) == "m1"):
                # Get the previous month's dates
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_session_start_date = start_current_month_date
                filt_session_end_date = end_current_month_date

        RetObject = []
        val_tmp = 1

        filt_session_start_date_ISODATE = datetime.strptime(str(filt_session_start_date), '%d/%m/%Y')
        filt_session_end_date_ISODATE = datetime.strptime(str(filt_session_end_date), '%d/%m/%Y')

        """
        Build the range of months between filt_session_start_date_ISODATE
        and filt_session_end_date_ISODATE
        """
        range_date_month = []
        start = filt_session_start_date_ISODATE
        end = filt_session_end_date_ISODATE
        while start <= end:
            node = {}
            node['month_year'] = '{:02d}'.format(start.month) + "_" + str(start.year)
            node['label'] = '{:02d}'.format(start.month) + "_" + str(start.year)
            node['TotalAmount'] = 0
            node['value'] = 0
            node['count'] = 0

            range_date_month.append(node)
            start += relativedelta(months=1)

        filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
        filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE

        if ("from_beginning" in diction.keys() and diction['from_beginning'] == "1"):
            filt_mysy_inscription_validation_date = {}
        else:
            filt_mysy_inscription_validation_date = {'mysy_inscription_validation_date': {'$gte': filt_session_start_date_ISODATE,
                                                                                          '$lte': filt_session_end_date_ISODATE}}

        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
                               {"apprenant_id": {"$exists": True}},
                               {"inscription_validation_date": {"$exists": True, '$ne': ""}},
                               {"status": "1"},
                               filt_mysy_inscription_validation_date,
                               ]}
        """
        qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
                               {"apprenant_id": {"$exists": True}},
                               ]}
        """

        # print(" ### qery_match = ", qery_match)

        pipe_qry = ([
            {"$addFields": {
                "mysy_inscription_validation_date": {
                    '$dateFromString': {
                        'dateString': {"$substr": ["$inscription_validation_date", 0, 10]},
                        'format': "%d/%m/%Y"
                    }
                }
            }
            },

            {'$match': qery_match},
            # ----
            {'$lookup': {
                'from': 'session_formation',
                'let': {'session_id': "$session_id", 'partner_owner_recid': '$partner_owner_recid'},
                'pipeline': [
                    {'$match':
                        {'$expr':
                            {'$and':
                                [
                                    {'$eq': ["$_id", {'$convert': {
                                        'input': "$$session_id",
                                        'to': "objectId",
                                        'onError': {'error': 'true'},
                                        'onNull': {'isnull': 'true'}
                                    }}]},
                                    {'$in': ["$class_id", tab_formation_ids]},
                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$session_status", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }
                        }
                    },
                ],
                'as': 'session_formation_collection'
            }
            },

            # ---
            {
                "$lookup": {
                    'from': 'myclass',
                    'localField': 'session_formation_collection.class_internal_url',
                    'foreignField': 'internal_url',
                    "pipeline": [{'$project': {'title': 1, 'internal_url': 1, 'external_code': 1, 'published': 1}}
                                 ],
                    "as": "myclass_collection"
                }
            },

            # --
            # ---
            {'$lookup': {
                'from': 'apprenant',
                "let": {'apprenant_id': "$apprenant_id", 'partner_owner_recid': '$partner_owner_recid'},
                'pipeline': [
                    {'$match':
                        {'$expr':
                            {'$and':
                                [
                                    {'$eq': ["$_id", {'$convert': {
                                        'input': "$$apprenant_id",
                                        'to': "objectId",
                                        'onError': {'error': 'true'},
                                        'onNull': {'isnull': 'true'}
                                    }}]},
                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
                                ]
                            }
                        }
                    },
                ],
                'as': 'apprenant_collection'
            }
            },
            {
                "$group": {
                    "_id": {
                        "class_id": "$myclass_collection._id",
                        "class_code": "$myclass_collection.external_code",
                        "class_title": "$myclass_collection.title",
                    },
                    "count": {
                        "$sum": 1
                    }
                }
            },
            {
                '$sort': {'count': -1}
            },

        ])

        """
        This query returns the full table of learners with their sessions.
        To group the result, add this stage:
        {'$group': {
            '_id': {
                "apprenant_collection_nom": "$apprenant_collection.nom",
                "apprenant_collection_prenom": "$apprenant_collection.prenom",
                "apprenant_collection_session": "$session_formation_collection.code_session",
            },
            'count': {'$count': {}
                      }
        }
        },
        """
        print(" ### Get_Qery_Inscription_Group_By_Class ici pipe_qry = ", pipe_qry)

        axis_data = []
        my_data = []
        cpt = 0
        tab_lines_inscription_data = []
        total_nb_inscrit = 0
        for retval in MYSY_GV.dbname['inscription'].aggregate(pipe_qry):
            if (len(retval['_id']['class_code']) > 0):
                cpt = cpt + 1
                axis_data.append(str(retval['_id']['class_code'][0]))
                node = {}
                node['class_code'] = str(retval['_id']['class_code'][0])
                node['class_title'] = str(retval['_id']['class_title'][0])
                node['label'] = str(retval['_id']['class_code'][0])
                node['value'] = mycommon.tryFloat(str(retval['count']))
                node['count'] = mycommon.tryFloat(str(retval['count']))
                total_nb_inscrit = mycommon.tryFloat(str(total_nb_inscrit)) + mycommon.tryFloat(str(retval['count']))
                my_data.append(node)

        RetObject = []
        json_retval = {}
        json_retval['data'] = my_data
        json_retval['axis_data'] = axis_data
        json_retval['total_nb_inscrit'] = total_nb_inscrit

        #print(" ### json_retval Groupe by class = ", json_retval)

        RetObject.append(mycommon.JSONEncoder().encode(json_retval))

        #print(" ### json_retval Groupe by class = ", RetObject)

        return True, RetObject
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de récupérer les données "

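The registration count above joins `inscription` to `session_formation` through a correlated `$lookup`: the string session_id is converted to an ObjectId and compared to `_id`, and the new `session_status` condition is applied inside the sub-pipeline. A stripped-down sketch of that join pattern (not part of the commit; the connection string and database name are placeholders):

```python
# Editor's sketch: minimal correlated $lookup with a string -> ObjectId conversion,
# the same pattern used by Get_Qery_Inscription_For_List_Class.
from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017/")["some_db"]   # placeholder connection

pipeline = [
    {'$lookup': {
        'from': 'session_formation',
        'let': {'session_id': '$session_id', 'partner_owner_recid': '$partner_owner_recid'},
        'pipeline': [
            {'$match': {'$expr': {'$and': [
                # join key: the registration stores session_id as a string
                {'$eq': ['$_id', {'$convert': {'input': '$$session_id', 'to': 'objectId',
                                               'onError': {'error': 'true'},
                                               'onNull': {'isnull': 'true'}}}]},
                {'$eq': ['$valide', '1']},
                {'$eq': ['$session_status', '1']},          # only active sessions
                {'$eq': ['$partner_owner_recid', '$$partner_owner_recid']},
            ]}}},
        ],
        'as': 'session_formation_collection',
    }},
    # keep only registrations whose session matched the sub-pipeline
    {'$match': {'session_formation_collection': {'$ne': []}}},
]

for doc in db['inscription'].aggregate(pipeline):
    print(doc['_id'])
```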
"""
|
||||
TBD : Calcul du taux de reussite à une formation.
|
||||
Le calcul se fait en prenant le nombre d'inscrit / sur le nombre d'inscrits ayant recu leur attestation
|
||||
"""
|
||||
def Get_Qery_Taux_Success_For_List_Class(diction):
|
||||
try:
|
||||
diction = mycommon.strip_dictionary(diction)
|
||||
|
||||
"""
|
||||
Verification des input acceptés
|
||||
"""
|
||||
field_list = ['token', 'session_start_date', 'session_end_date', 'filter_value',
|
||||
'tab_formation_ids', 'from_beginning']
|
||||
|
||||
incom_keys = diction.keys()
|
||||
for val in incom_keys:
|
||||
if val not in field_list and val.startswith('my_') is False:
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification des champs obligatoires
|
||||
"""
|
||||
field_list_obligatoire = ['token', 'session_start_date', 'session_end_date']
|
||||
for val in field_list_obligatoire:
|
||||
if val not in diction:
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification de l'identité et autorisation de l'entité qui
|
||||
appelle cette API
|
||||
"""
|
||||
token = ""
|
||||
if ("token" in diction.keys()):
|
||||
if diction['token']:
|
||||
token = diction['token']
|
||||
|
||||
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
||||
if (local_status is not True):
|
||||
return local_status, my_partner
|
||||
|
||||
tab_formation_ids_split = []
|
||||
tab_formation_ids = []
|
||||
if( "tab_formation_ids" in diction.keys() ):
|
||||
tab_formation_ids_split = str(diction['tab_formation_ids']).split(",")
|
||||
|
||||
for tmp in tab_formation_ids_split:
|
||||
if( tmp ):
|
||||
tab_formation_ids.append((str(tmp)))
|
||||
|
||||
|
||||
filt_session_start_date = ""
|
||||
if ("session_start_date" in diction.keys() and diction['session_start_date']):
|
||||
filt_session_start_date = str(diction['session_start_date'])[0:10]
|
||||
local_status = mycommon.CheckisDate(filt_session_start_date)
|
||||
if (local_status is False):
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][3]) + " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa.")
|
||||
return False, " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa."
|
||||
|
||||
filt_session_end_date = ""
|
||||
if ("session_end_date" in diction.keys() and diction['session_end_date']):
|
||||
filt_session_end_date = str(diction['session_end_date'])[0:10]
|
||||
local_status = mycommon.CheckisDate(filt_session_end_date)
|
||||
if (local_status is False):
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][3]) + " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa.")
|
||||
return False, " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa."
|
||||
|
||||
"""
|
||||
Si la valeur de 'filter_value' est m0 ou m1, on va aller recuperer les date du mois correspondant.
|
||||
On ecrase les valeur de filt_session_start_date et filt_session_end_date
|
||||
"""
|
||||
if ('filter_value' in diction.keys()):
|
||||
if (str(diction['filter_value']) == "m0"):
|
||||
# On recupere les date du mois en cours
|
||||
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
|
||||
if (local_status is False):
|
||||
return local_status, start_current_month_date
|
||||
|
||||
filt_session_start_date = start_current_month_date
|
||||
filt_session_end_date = end_current_month_date
|
||||
|
||||
elif (str(diction['filter_value']) == "m1"):
|
||||
# On recupere les date du mois en cours
|
||||
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
|
||||
if (local_status is False):
|
||||
return local_status, start_current_month_date
|
||||
|
||||
filt_session_start_date = start_current_month_date
|
||||
filt_session_end_date = end_current_month_date
|
||||
|
||||
RetObject = []
|
||||
val_tmp = 1
|
||||
|
||||
filt_session_start_date_ISODATE = datetime.strptime(str(filt_session_start_date), '%d/%m/%Y')
|
||||
filt_session_end_date_ISODATE = datetime.strptime(str(filt_session_end_date), '%d/%m/%Y')
|
||||
|
||||
todays_date = str(date.today().strftime("%Y-%m-%d"))
|
||||
filt_date_today_ISODATE = datetime.strptime(str(todays_date), '%Y-%m-%d')
|
||||
|
||||
"""
|
||||
Creation de la range des mois entre filt_periode_start_date_ISODATE et
|
||||
filt_periode_end_date_ISODATE
|
||||
"""
|
||||
range_date_month = []
|
||||
start = filt_session_start_date_ISODATE
|
||||
end = filt_session_end_date_ISODATE
|
||||
while start <= end:
|
||||
node = {}
|
||||
node['month_year'] = '{:02d}'.format(start.month) + "_" + str(start.year)
|
||||
node['label'] = '{:02d}'.format(start.month) + "_" + str(start.year)
|
||||
node['TotalAmount'] = 0
|
||||
node['value'] = 0
|
||||
node['count'] = 0
|
||||
|
||||
range_date_month.append(node)
|
||||
start += relativedelta(months=1)
|
||||
|
||||
|
||||
|
||||
filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
|
||||
filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE
|
||||
|
||||
|
||||
if("from_beginning" in diction.keys() and diction['from_beginning'] == "1"):
|
||||
filt_mysy_inscription_validation_date = {}
|
||||
else:
|
||||
filt_mysy_inscription_validation_date = {'mysy_inscription_validation_date': {'$gte': filt_session_start_date_ISODATE,
|
||||
'$lte': filt_session_end_date_ISODATE}}
|
||||
|
||||
|
||||
qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
|
||||
{"apprenant_id": {"$exists": True}},
|
||||
{"inscription_validation_date": {"$exists": True, '$ne': ""}},
|
||||
{"status": "1"},
|
||||
filt_mysy_inscription_validation_date,
|
||||
]}
|
||||
"""
|
||||
|
||||
qery_match = {'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1'},
|
||||
{"apprenant_id": {"$exists": True}},
|
||||
]}
|
||||
"""
|
||||
filt_list_class_id = {}
|
||||
if(len(tab_formation_ids) > 0 ):
|
||||
filt_list_class_id = {'$in': ["$class_id", tab_formation_ids]}
|
||||
|
||||
|
||||
# print(" ### qery_match = ", qery_match)
|
||||
|
||||
pipe_qry = ([
|
||||
{"$addFields": {
|
||||
"mysy_inscription_validation_date": {
|
||||
'$dateFromString': {
|
||||
'dateString': { "$substr": [ "$inscription_validation_date", 0, 10 ] },
|
||||
'format': "%d/%m/%Y"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
{'$match': qery_match},
|
||||
# ----
|
||||
{'$lookup': {
|
||||
'from': 'session_formation',
|
||||
'let': {'session_id': "$session_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
|
||||
{'$eq': ["$_id", {'$convert': {
|
||||
'input': "$$session_id",
|
||||
'to': "objectId",
|
||||
'onError': {'error': 'true'},
|
||||
'onNull': {'isnull': 'true'}
|
||||
}}]},
|
||||
filt_list_class_id,
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$session_status", "1"]},
|
||||
{'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
],
|
||||
'as': 'session_formation_collection'
|
||||
}
|
||||
},
|
||||
|
||||
# ---
|
||||
{
|
||||
"$lookup": {
|
||||
'from': 'myclass',
|
||||
'localField': 'session_formation_collection.class_internal_url',
|
||||
'foreignField': 'internal_url',
|
||||
"pipeline": [{'$project': {'title': 1, 'internal_url': 1, 'external_code': 1, 'published': 1}}
|
||||
|
||||
],
|
||||
"as": "myclass_collection"
|
||||
}
|
||||
},
|
||||
|
||||
# --
|
||||
# ---
|
||||
{'$lookup': {
|
||||
'from': 'apprenant',
|
||||
"let": {'apprenant_id': "$apprenant_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
|
||||
{'$eq': ["$_id", {'$convert': {
|
||||
'input': "$$apprenant_id",
|
||||
'to': "objectId",
|
||||
'onError': {'error': 'true'},
|
||||
'onNull': {'isnull': 'true'}
|
||||
}}]},
|
||||
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
|
||||
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
],
|
||||
'as': 'apprenant_collection'
|
||||
}
|
||||
},
|
||||
{
|
||||
"$group": {
|
||||
"_id": {
|
||||
"class_id": "$myclass_collection._id",
|
||||
"class_code": "$myclass_collection.external_code",
|
||||
"class_title": "$myclass_collection.title",
|
||||
},
|
||||
"count": {
|
||||
"$sum": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
'$sort': {'count': -1}
|
||||
},
|
||||
|
||||
])
|
||||
|
||||
|
||||
print(" ### Get_Qery_Inscription_Group_By_Class ici pipe_qry = ", pipe_qry)
|
||||
|
||||
axis_data = []
|
||||
my_data = []
|
||||
cpt = 0
|
||||
tab_lines_inscription_data = []
|
||||
total_nb_inscrit = 0
|
||||
for retval in MYSY_GV.dbname['inscription'].aggregate(pipe_qry):
|
||||
if( len(retval['_id']['class_code']) > 0 ):
|
||||
|
||||
cpt = cpt + 1
|
||||
axis_data.append(str(retval['_id']['class_code'][0]))
|
||||
node = {}
|
||||
node['class_code'] = str(retval['_id']['class_code'][0])
|
||||
node['class_title'] = str(retval['_id']['class_title'][0])
|
||||
node['label'] = str(retval['_id']['class_code'][0])
|
||||
node['value'] = mycommon.tryFloat(str(retval['count']))
|
||||
node['count'] = mycommon.tryFloat(str(retval['count']))
|
||||
total_nb_inscrit = mycommon.tryFloat(str(total_nb_inscrit)) + mycommon.tryFloat(str(retval['count']))
|
||||
my_data.append(node)
|
||||
|
||||
|
||||
RetObject = []
|
||||
json_retval = {}
|
||||
json_retval['data'] = my_data
|
||||
json_retval['axis_data'] = axis_data
|
||||
json_retval['total_nb_inscrit'] = total_nb_inscrit
|
||||
|
||||
|
||||
"""
|
||||
Aller chercher le nombre d'attestation delivré
|
||||
"""
|
||||
if ("from_beginning" in diction.keys() and diction['from_beginning'] == "1"):
|
||||
filt_mysy_session_date = {
|
||||
'mysy_session_end_date': {'$lte': filt_date_today_ISODATE, }
|
||||
}
|
||||
|
||||
else:
|
||||
filt_mysy_session_date = {
|
||||
'mysy_session_date_debut': {'$gte': filt_session_start_date_ISODATE},
|
||||
'mysy_session_end_date': {'$lte': filt_session_end_date_ISODATE, }
|
||||
}
|
||||
|
||||
filt_list_class_id_mode2 = {}
|
||||
if (len(tab_formation_ids) > 0):
|
||||
filt_list_class_id_mode2 = {'class_id': {'$in': tab_formation_ids}}
|
||||
|
||||
|
||||
qery_match_attestion_formation = {
|
||||
'$and': [{"partner_owner_recid": str(my_partner['recid'])}, {"valide": '1', 'session_status': '1'},
|
||||
filt_mysy_session_date, filt_list_class_id_mode2
|
||||
]}
|
||||
|
||||
pipe_qry_attestion_formation = ([
|
||||
{"$addFields": {
|
||||
"mysy_session_date_debut": {
|
||||
'$dateFromString': {
|
||||
'dateString': { "$substr": [ "$date_debut", 0, 10 ] },
|
||||
'format': "%d/%m/%Y"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
{"$addFields": {
|
||||
"mysy_session_end_date": {
|
||||
'$dateFromString': {
|
||||
'dateString': {"$substr": ["$date_fin", 0, 10]},
|
||||
'format': "%d/%m/%Y"
|
||||
}
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
|
||||
{'$match': qery_match_attestion_formation},
|
||||
# ----
|
||||
{'$lookup': {
|
||||
'from': 'attestation_formation',
|
||||
'let': {'session_id': {'$toString': '$_id'}, 'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
|
||||
{'$eq': ['$session_id', '$$session_id']},
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$statut", "1"]},
|
||||
{'$ne': ["$inscription_id", ""]},
|
||||
{'$eq': ["$partner_owner_recid", '$$partner_owner_recid']},
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
],
|
||||
'as': 'attestation_formation'
|
||||
}
|
||||
},
|
||||
{
|
||||
'$unwind': '$attestation_formation'
|
||||
},
|
||||
|
||||
|
||||
{
|
||||
"$group": {
|
||||
"_id": {
|
||||
"session_id": "$attestation_formation.session_id",
|
||||
"session_code": "$code_session",
|
||||
},
|
||||
"count": {
|
||||
"$sum": 1
|
||||
}
|
||||
}
|
||||
},
|
||||
{
|
||||
'$sort': {'count': -1}
|
||||
},
|
||||
|
||||
])
|
||||
|
||||
print(" ### Get_Qery_Inscription_Group_By_Class_attestation ici pipe_qry = ", pipe_qry_attestion_formation)
|
||||
|
||||
nb_attesation_delivred = 0
|
||||
for retval_attestation in MYSY_GV.dbname['session_formation'].aggregate(pipe_qry_attestion_formation):
|
||||
if( retval_attestation and "count" in retval_attestation.keys() ):
|
||||
nb_attesation_delivred = mycommon.tryFloat(str(nb_attesation_delivred)) + mycommon.tryFloat(str(retval_attestation['count']))
|
||||
|
||||
|
||||
|
||||
#print(" ### NB_attesation_delivrer = ", nb_attesation_delivred)
|
||||
#print(" ### Total inscrit = ", total_nb_inscrit)
|
||||
|
||||
taux_success = 0
|
||||
if( mycommon.tryInt(total_nb_inscrit) != 0 and mycommon.tryInt(total_nb_inscrit) != 0 ):
|
||||
taux_success = (mycommon.tryFloat(nb_attesation_delivred) / mycommon.tryFloat(total_nb_inscrit))*100
|
||||
taux_success = round(taux_success, 1)
|
||||
|
||||
print(" ### Taux reussite = ", taux_success)
|
||||
|
||||
json_retval['nb_attestation_delivred'] = str(nb_attesation_delivred)
|
||||
json_retval['taux_success'] = str(taux_success)
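# Minimal sketch of the success-rate computation above, written as plain Python
# for illustration (the real code goes through the tryFloat/tryInt helpers):
nb_attestation = 7.0            # assumed sample values
total_inscrits = 20.0
taux = round((nb_attestation / total_inscrits) * 100, 1) if total_inscrits else 0
print(taux)                     # 35.0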
|
||||
|
||||
|
||||
#print(" ### json_retval Groupe by class = ", json_retval)
|
||||
|
||||
RetObject.append(mycommon.JSONEncoder().encode(json_retval))
|
||||
|
||||
#print(" ### json_retval Groupe by class = ", RetObject)
|
||||
|
||||
return True, RetObject
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible de récupérer les données "
|
||||
|
|
|
@ -20,9 +20,9 @@ MYSY_ENV = ENV_DATA.MYSY_ENV
|
|||
CLIENT_URL_BASE = ""
|
||||
|
||||
if (MYSY_ENV == "PROD"):
|
||||
CONNECTION_STRING = "mongodb://localhost:27017/cherifdb_fi"
|
||||
CONNECTION_STRING = "mongodb://localhost:27017/cherifdb"
|
||||
client = MongoClient(CONNECTION_STRING)
|
||||
dbname = client['cherifdb_fi']
|
||||
dbname = client['cherifdb']
|
||||
CLIENT_URL_BASE = 'https://www.mysy-training.com/'
|
||||
MYSY_DEBUG_LEVEL = 0
|
||||
|
||||
|
@ -76,9 +76,9 @@ elif (MYSY_ENV == "REC"):
|
|||
|
||||
|
||||
elif (MYSY_ENV == "DEM"):
|
||||
CONNECTION_STRING = "mongodb://localhost:27017/cherifdb_demo_fi"
|
||||
CONNECTION_STRING = "mongodb://localhost:27017/cherifdb_demo"
|
||||
client = MongoClient(CONNECTION_STRING)
|
||||
dbname = client['cherifdb_demo_fi']
|
||||
dbname = client['cherifdb_demo']
|
||||
CLIENT_URL_BASE = 'https://demo.mysy-training.com/'
|
||||
MYSY_DEBUG_LEVEL = 1
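# Minimal sketch (assumed mapping, not the authoritative configuration) of how
# MYSY_ENV selects the MongoDB connection string and database used above:
from pymongo import MongoClient

ENV_TO_DB = {"PROD": "cherifdb", "DEM": "cherifdb_demo"}    # assumed mapping
env = "DEM"
client = MongoClient("mongodb://localhost:27017/" + ENV_TO_DB[env])
dbname = client[ENV_TO_DB[env]]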
|
||||
|
||||
|
@ -357,7 +357,7 @@ TEMPORARY_DIRECTORY = "./temp_direct"
|
|||
"""
|
||||
Nombre maximum de participants importable par csv
|
||||
"""
|
||||
MAX_PARTICIPANT_BY_CSV = 200
|
||||
MAX_PARTICIPANT_BY_CSV = 500
|
||||
|
||||
"""
|
||||
Nombre maximum de clients de partner importable par csv
|
||||
|
@ -943,4 +943,12 @@ Les types de session de formation :
|
|||
0 => 'session' pour les formations continues
1 => 'promotion' pour les formations continues
|
||||
"""
|
||||
TYPE_SESSION = ['0', '1']
|
||||
TYPE_SESSION = ['0', '1']
|
||||
|
||||
"""
|
||||
Les types de ressources humaines sont :
1 ==> ressources non employées, comme les parents d'élèves, les tuteurs, etc.
'vide' ou rien ==> Ressources internes (enseignant, vacataire, etc.)
|
||||
|
||||
"""
|
||||
TYPE_RH = ['', '1']
|
|
@ -232,14 +232,13 @@ def AddStagiairetoClass(diction):
|
|||
# Verifier que le type d'apprenant est bien valide
|
||||
type_apprenant = "0"
|
||||
|
||||
if ("type_apprenant" in diction.keys() and diction['type_apprenant'] ):
|
||||
type_apprenant = str(mycommon.tryInt(diction['type_apprenant']))
|
||||
if( type_apprenant not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " Le type d'apprenant est invalide ")
|
||||
return False, " Le type d'apprenant est invalide. Valeurs autorisées"+str(MYSY_GV.INSCRIPTION_TYPE_APPRENANT)
|
||||
|
||||
if ("type_apprenant" in diction.keys() and str(diction['type_apprenant']) not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " Le type d'apprenant est invalide ")
|
||||
return False, " Le type d'apprenant est invalide "
|
||||
|
||||
elif ("type_apprenant" in diction.keys()) :
|
||||
type_apprenant = str(diction['type_apprenant'])
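# Minimal sketch of the normalisation/validation pattern used above.
# tryInt is the project's helper; int(float(...)) stands in for it here,
# and the allowed values are assumed for illustration only.
INSCRIPTION_TYPE_APPRENANT = ['0', '1', '2', '3']           # assumed values
raw = "1 "                                                  # assumed input
type_apprenant = str(int(float(raw)))                       # "1"
if type_apprenant not in INSCRIPTION_TYPE_APPRENANT:
    raise ValueError("Le type d'apprenant est invalide")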
|
||||
|
||||
|
||||
new_price = "-1" # 0 ==> par defaut
|
||||
|
@ -746,7 +745,7 @@ def AddStagiairetoClass(diction):
|
|||
new_apprenant_diction = diction
|
||||
champ_to_delete = ['class_internal_url', 'session_id', 'inscription_validation_date', 'apprenant_id',
|
||||
'status', 'modefinancement', 'type_apprenant', 'financeur_rattachement_id',
|
||||
'quotation_id', 'facture_client_rattachement_id']
|
||||
'quotation_id', 'facture_client_rattachement_id', 'price']
|
||||
|
||||
for val in champ_to_delete:
|
||||
if (val in new_apprenant_diction.keys()):
|
||||
|
@ -758,6 +757,9 @@ def AddStagiairetoClass(diction):
|
|||
if( "tab_ue_ids" in new_apprenant_diction.keys()):
|
||||
del new_apprenant_diction['tab_ue_ids']
|
||||
|
||||
if ("price" in new_apprenant_diction.keys()):
|
||||
del new_apprenant_diction['price']
|
||||
|
||||
local_status, local_retval = apprenant_mgt.Update_Apprenant(new_apprenant_diction)
|
||||
if (local_status is False):
|
||||
return local_status, local_retval
|
||||
|
@ -767,11 +769,13 @@ def AddStagiairetoClass(diction):
|
|||
new_apprenant_diction = diction
|
||||
champ_to_delete = [ 'class_internal_url', 'session_id', 'inscription_validation_date', 'apprenant_id', 'status',
|
||||
'modefinancement', 'type_apprenant', 'financeur_rattachement_id',
|
||||
'quotation_id', 'facture_client_rattachement_id', 'tab_ue_ids']
|
||||
'quotation_id', 'facture_client_rattachement_id', 'tab_ue_ids', 'price']
|
||||
for val in champ_to_delete:
|
||||
if( val in new_apprenant_diction.keys()):
|
||||
del new_apprenant_diction[str(val)]
|
||||
|
||||
|
||||
|
||||
local_status, local_retval = apprenant_mgt.Add_Apprenant(new_apprenant_diction)
|
||||
if (local_status is False):
|
||||
return local_status, local_retval
|
||||
|
@ -1084,13 +1088,15 @@ def UpdateStagiairetoClass(diction):
|
|||
mydata['tuteur2_include_com'] = str(diction['tuteur2_include_com']).strip()
|
||||
|
||||
# Verifier que le type d'apprenant est bien valide
|
||||
if( "type_apprenant" in diction.keys() ):
|
||||
if (str(diction['type_apprenant']) not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
if ("type_apprenant" in diction.keys() and diction['type_apprenant']):
|
||||
type_apprenant = str(mycommon.tryInt(diction['type_apprenant']))
|
||||
if (type_apprenant not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " Le type d'apprenant est invalide ")
|
||||
return False, " Le type d'apprenant est invalide "
|
||||
else:
|
||||
mydata['type_apprenant'] = str(diction['type_apprenant'])
|
||||
str(inspect.stack()[0][3]) + " Le type d'apprenant est invalide ")
|
||||
return False, " Le type d'apprenant est invalide. Valeurs autorisées" + str(
|
||||
MYSY_GV.INSCRIPTION_TYPE_APPRENANT)
|
||||
mydata['type_apprenant'] = str(type_apprenant)
|
||||
|
||||
|
||||
|
||||
user_prenom = ""
|
||||
|
@ -2759,6 +2765,8 @@ def AddStagiairetoClass_mass(file=None, Folder=None, diction=None):
|
|||
if ("type_apprenant" in df.keys()):
|
||||
if (str(df['type_apprenant'].values[n])):
|
||||
type_apprenant = str(df['type_apprenant'].values[n])
|
||||
type_apprenant = str(mycommon.tryInt(type_apprenant))
|
||||
|
||||
mydata['type_apprenant'] = type_apprenant
|
||||
|
||||
|
||||
|
@ -3151,7 +3159,17 @@ def Controle_AddStagiairetoClass_mass(saved_file=None, Folder=None, diction=None
|
|||
if ("type_apprenant" in df.keys()):
|
||||
if (str(df['type_apprenant'].values[n])):
|
||||
type_apprenant = str(df['type_apprenant'].values[n])
|
||||
mydata['type_apprenant'] = type_apprenant
|
||||
type_apprenant = str(mycommon.tryInt(diction['type_apprenant']))
|
||||
|
||||
if (type_apprenant not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + "Ligne : " + str(
|
||||
n) + ". Le type d'apprenant est invalide ")
|
||||
return False, " Ligne : " + str(
|
||||
n) + ". Le type d'apprenant est invalide. Valeurs autorisées" + str(
|
||||
MYSY_GV.INSCRIPTION_TYPE_APPRENANT)
|
||||
|
||||
|
||||
|
||||
adresse = ""
|
||||
if ("adresse" in df.keys()):
|
||||
|
@ -3739,6 +3757,7 @@ def AddStagiairetoClass_mass_for_many_session(file=None, Folder=None, diction=No
|
|||
if ("type_apprenant" in df.keys()):
|
||||
if (str(df['type_apprenant'].values[n])):
|
||||
type_apprenant = str(df['type_apprenant'].values[n]).strip()
|
||||
type_apprenant = str(mycommon.tryInt(type_apprenant))
|
||||
|
||||
if( type_apprenant not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
mycommon.myprint(
|
||||
|
@ -4097,13 +4116,16 @@ def Controle_AddStagiairetoClass_mass_for_many_session(saved_file=None, Folder=N
|
|||
|
||||
|
||||
mydata['status'] = str(df['status'].values[n]).strip()
|
||||
if( str(mydata['status']) != "0" and str(mydata['status']) != "1" and str(mydata['status']) != "2"):
|
||||
mystatus = mycommon.tryInt(str(mydata['status']))
|
||||
|
||||
|
||||
if( str(mystatus) != "0" and str(mystatus) != "1" and str(mystatus) != "2"):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " Le champ 'status' de la ligne " + str(n) + " est incorrecte. Valeurs acceptées : 0,1,2")
|
||||
return False, " Le champ status de la ligne " + str(n) + " est incorrecte. Valeurs acceptées : 0,1,2"
|
||||
|
||||
|
||||
if( str(df['status'].values[n]).strip() == "1"):
|
||||
if( str(mystatus).strip() == "1"):
|
||||
mydata['inscription_validation_date'] = str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
|
||||
|
||||
|
||||
|
@ -4120,6 +4142,7 @@ def Controle_AddStagiairetoClass_mass_for_many_session(saved_file=None, Folder=N
|
|||
if ("type_apprenant" in df.keys()):
|
||||
if (str(df['type_apprenant'].values[n])):
|
||||
type_apprenant = str(df['type_apprenant'].values[n]).strip()
|
||||
type_apprenant = str(mycommon.tryInt(type_apprenant))
|
||||
|
||||
if (type_apprenant not in MYSY_GV.INSCRIPTION_TYPE_APPRENANT):
|
||||
mycommon.myprint(
|
||||
|
@ -6970,6 +6993,11 @@ def AcceptAttendeeInscription(diction):
|
|||
if ("tab_ue_ids" in new_apprenant_diction.keys()):
|
||||
del new_apprenant_diction['tab_ue_ids']
|
||||
|
||||
if( "price" in new_apprenant_diction.keys() ):
|
||||
del new_apprenant_diction['price']
|
||||
|
||||
|
||||
|
||||
local_status, local_retval = apprenant_mgt.Update_Apprenant(new_apprenant_diction)
|
||||
if (local_status is False):
|
||||
return local_status, local_retval
|
||||
|
@ -13848,11 +13876,12 @@ def Create_Emargement_Send_Email_From_Inscription(diction):
|
|||
|
||||
emargement_dictionnary_data['emargement_data'] = my_emargement_data
|
||||
|
||||
|
||||
body = {
|
||||
"params": emargement_dictionnary_data,
|
||||
}
|
||||
|
||||
#print( " ### emargement_dictionnary_data ===== ", emargement_dictionnary_data)
|
||||
print( " ### emargement_dictionnary_data ===== ", emargement_dictionnary_data)
|
||||
|
||||
# Traitement du sujet du mail
|
||||
sujet_mail_Template = jinja2.Template(str(courrier_data_retval['sujet']))
|
||||
|
@ -14840,7 +14869,7 @@ def unarchive_stagiaire(diction):
|
|||
|
||||
|
||||
# seules les formations avec locked = 0 et valide=1 sont modifiables
|
||||
ret_val = MYSY_GV.dbname['partner_client'].update_many(
|
||||
ret_val = MYSY_GV.dbname['inscription'].update_many(
|
||||
{'_id': {'$in': tab_stagiaire_ids_ObjectId}, 'valide': '1',
|
||||
'partner_owner_recid': str(my_partner['recid'])},
|
||||
{"$set": mydata},
|
||||
|
@ -14864,7 +14893,7 @@ def unarchive_stagiaire(diction):
|
|||
mycommon.myprint(
|
||||
" WARNING : Impossible de logguer l'historique pour l'évènement : " + str(history_event_dict))
|
||||
|
||||
return True, " Stagiaire(s) desarchivée(s) "
|
||||
return True, " Stagiaire(s) desarchivé(s) "
|
||||
|
||||
|
||||
except Exception as e:
|
||||
|
|
233
Job_Cron.py
|
@ -1205,137 +1205,146 @@ def Cron_Daily_Automatic_Qualiopi_Documents():
|
|||
"""
|
||||
local_session_target_date_added_nb_jour_action = datetime.strptime(str(todays_date).strip(),'%d/%m/%Y')
|
||||
|
||||
elif (data_automatic_traitement['action_target_date'] == "preinscription"):
|
||||
"""
|
||||
Il faut aller voir si on a une séquence en date d'aujourd'hui pour cette session.
Si oui, alors local_session_target_date_added_nb_jour_action = date du jour
|
||||
"""
|
||||
local_session_target_date_added_nb_jour_action = datetime.strptime(str(todays_date).strip(),
|
||||
'%d/%m/%Y')
|
||||
|
||||
|
||||
|
||||
print(" ### CMP 1 local_session_target_date_added_nb_jour_action = ", local_session_target_date_added_nb_jour_action)
|
||||
print(" ### CMP 2 datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') = ",
|
||||
datetime.strptime(str(todays_date).strip(), '%d/%m/%Y'))
|
||||
|
||||
if (data_automatic_traitement['action_target_date'] == "start_session"):
|
||||
print(" ### delta day (start_session) = ", datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') - local_session_start_date )
|
||||
if (data_automatic_traitement['action_target_date'] == "end_session"):
|
||||
print(" ### delta day (end_session) = ", datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') - local_session_end_date )
|
||||
if (data_automatic_traitement['action_target_date'] != "preinscription" and
|
||||
data_automatic_traitement['action_target_date'] != "inscription"):
|
||||
if (data_automatic_traitement['action_target_date'] == "start_session"):
|
||||
print(" ### delta day (start_session) = ", datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') - local_session_start_date )
|
||||
if (data_automatic_traitement['action_target_date'] == "end_session"):
|
||||
print(" ### delta day (end_session) = ", datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') - local_session_end_date )
|
||||
|
||||
if (local_session_target_date_added_nb_jour_action > datetime.strptime(str(todays_date).strip(), '%d/%m/%Y')):
|
||||
print(" ### SUPPPPPP : Action à faire dans le futur ")
|
||||
if (local_session_target_date_added_nb_jour_action > datetime.strptime(str(todays_date).strip(), '%d/%m/%Y')):
|
||||
print(" ### SUPPPPPP : Action à faire dans le futur ")
|
||||
|
||||
elif (local_session_target_date_added_nb_jour_action < datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') ):
|
||||
print(" ### INFFFFFF : Alert il y a un pb sur une action non realiser ")
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][
|
||||
3]) + " CRON WARNING * Cron_Daily_Document_Sending: Il a un CRON qui ne n'est pas exécuté à la date prévue."
|
||||
" Session Formation = " + str(
|
||||
my_session['_id']) + " Id du CRON courrier_template_type_document_id = " + str(
|
||||
data_automatic_traitement['courrier_template_type_document_id']))
|
||||
|
||||
courrier_template_type_document_data = MYSY_GV.dbname[
|
||||
'courrier_template_type_document'].find_one(
|
||||
{'ref_interne': data_automatic_traitement['courrier_template_type_document_ref_interne'],
|
||||
'valide': '1',
|
||||
'locked': '0'})
|
||||
|
||||
node_notification = {}
|
||||
node_notification['message'] = "Cette tâche n'a pas été exécutée. " \
|
||||
" Date Initialement prévue "+str(local_session_target_date_added_nb_jour_action_formated)
|
||||
node_notification['status'] = "Warning"
|
||||
node_notification['action'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
diction_for_notification_email['tab_message'].append(node_notification)
|
||||
diction_for_notification_email['global_status'].append("warning")
|
||||
|
||||
|
||||
elif (local_session_target_date_added_nb_jour_action == datetime.strptime(str(todays_date).strip(), '%d/%m/%Y')):
|
||||
|
||||
"""
|
||||
Effectuer l'action
|
||||
"""
|
||||
"""
|
||||
Recuperer le type de document concerné
|
||||
"""
|
||||
courrier_template_type_document_data = MYSY_GV.dbname['courrier_template_type_document'].find_one(
|
||||
{'ref_interne':data_automatic_traitement['courrier_template_type_document_ref_interne'],
|
||||
'valide':'1',
|
||||
'locked':'0'})
|
||||
|
||||
if( courrier_template_type_document_data and 'ref_interne' in courrier_template_type_document_data.keys() ):
|
||||
print(" ### Action à faire : Envoyer le document de type ", courrier_template_type_document_data['ref_interne'])
|
||||
else:
|
||||
elif (local_session_target_date_added_nb_jour_action < datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') ):
|
||||
print(" ### INFFFFFF : Alert il y a un pb sur une action non realiser ")
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][
|
||||
3]) + " CRON WARNING * Cron_Daily_Document_Sending: Impossible de trouver le type de document concerné "
|
||||
3]) + " CRON WARNING * Cron_Daily_Document_Sending: Il a un CRON qui ne n'est pas exécuté à la date prévue."
|
||||
" Session Formation = " + str(
|
||||
my_session['_id']) + " Id du CRON courrier_template_type_document_id = " + str(
|
||||
data_automatic_traitement['courrier_template_type_document_id']))
|
||||
|
||||
continue
|
||||
|
||||
#print(" ### avant action 1 courrier_template_type_document_data = ", courrier_template_type_document_data)
|
||||
#print(" ## data_automatic_traitement = ", data_automatic_traitement)
|
||||
|
||||
local_diction = {}
|
||||
local_diction['session_id'] = str(my_session['_id'])
|
||||
local_diction['document_ref_intern'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
local_diction['partner_owner_recid'] = str(my_session['partner_owner_recid'])
|
||||
local_diction['courrier_template_type_document_ref_interne'] = str(data_automatic_traitement['courrier_template_type_document_ref_interne'])
|
||||
local_diction['courrier_template_type_document_id'] = str( data_automatic_traitement['courrier_template_type_document_id'])
|
||||
|
||||
#print( " ### local_diction for Send_document_from_base_document_automatic_setup = ", local_diction)
|
||||
local_status, local_retval = Send_document_from_base_document_automatic_setup(local_diction)
|
||||
|
||||
node_notification = {}
|
||||
node_notification['message'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
|
||||
local_status_tanslate = ""
|
||||
if( local_status is True ):
|
||||
local_status_tanslate = "Ok"
|
||||
else:
|
||||
local_status_tanslate = "Erreur"
|
||||
|
||||
node_notification['status'] = str(local_status_tanslate)
|
||||
node_notification['action'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
diction_for_notification_email['tab_message'].append(node_notification)
|
||||
diction_for_notification_email['global_status'].append(str(local_status).lower())
|
||||
|
||||
|
||||
"""
|
||||
Mettre à jour le document
|
||||
|
||||
Important : 17/04/25 : Lorsqu'il s'agit d'une action d'émargement avec un target = 'every_session_sequence_day', alors
on ne désactive l'action que si la date du jour >= la date de fin de session
|
||||
|
||||
"""
|
||||
if( data_automatic_traitement['action_target_date'] != "every_session_sequence_day" or
|
||||
( data_automatic_traitement['action_target_date'] == "every_session_sequence_day" and
|
||||
local_session_end_date <= datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') )):
|
||||
|
||||
local_update = MYSY_GV.dbname['session_formation'].update_many(
|
||||
{'_id': ObjectId(str(my_session['_id'])),
|
||||
'partner_owner_recid':str(my_session['partner_owner_recid']),
|
||||
courrier_template_type_document_data = MYSY_GV.dbname[
|
||||
'courrier_template_type_document'].find_one(
|
||||
{'ref_interne': data_automatic_traitement['courrier_template_type_document_ref_interne'],
|
||||
'valide': '1',
|
||||
'automatic_traitement.courrier_template_type_document_id': str(data_automatic_traitement['courrier_template_type_document_id'])
|
||||
},
|
||||
{'$set':
|
||||
{
|
||||
'automatic_traitement.$[xxx].actif': "-1",
|
||||
'automatic_traitement.$[xxx].statut': "2",
|
||||
'automatic_traitement.$[xxx].date_traitement_cron': str(datetime.now()),
|
||||
'automatic_traitement.$[xxx].date_update': str(datetime.now()),
|
||||
'automatic_traitement.$[xxx].update_by': "Automatic Cron",
|
||||
}
|
||||
},
|
||||
upsert=False,
|
||||
array_filters=[
|
||||
{"xxx.courrier_template_type_document_id": str(data_automatic_traitement['courrier_template_type_document_id'])}
|
||||
]
|
||||
'locked': '0'})
|
||||
|
||||
)
|
||||
if( local_update.matched_count < 1 ):
|
||||
node_notification = {}
|
||||
node_notification['message'] = "Cette tâche n'a pas été exécutée. " \
|
||||
" Date Initialement prévue "+str(local_session_target_date_added_nb_jour_action_formated)
|
||||
node_notification['status'] = "Warning"
|
||||
node_notification['action'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
diction_for_notification_email['tab_message'].append(node_notification)
|
||||
diction_for_notification_email['global_status'].append("warning")
|
||||
|
||||
|
||||
elif (local_session_target_date_added_nb_jour_action == datetime.strptime(str(todays_date).strip(), '%d/%m/%Y')):
|
||||
|
||||
"""
|
||||
Effectuer l'action
|
||||
"""
|
||||
"""
|
||||
Recuperer le type de document concerné
|
||||
"""
|
||||
courrier_template_type_document_data = MYSY_GV.dbname['courrier_template_type_document'].find_one(
|
||||
{'ref_interne':data_automatic_traitement['courrier_template_type_document_ref_interne'],
|
||||
'valide':'1',
|
||||
'locked':'0'})
|
||||
|
||||
if( courrier_template_type_document_data and 'ref_interne' in courrier_template_type_document_data.keys() ):
|
||||
print(" ### Action à faire : Envoyer le document de type ", courrier_template_type_document_data['ref_interne'])
|
||||
else:
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][
|
||||
3]) + " CRON WARNING * Cron_Daily_Document_Sending: La mise à jour du statut (actif = -1) du cron ne s'est "
|
||||
" PAS bien passé : Session Formation = "+ str(my_session['_id'])+
|
||||
" Id du CRON courrier_template_type_document_id = "+
|
||||
str(data_automatic_traitement['courrier_template_type_document_id']))
|
||||
3]) + " CRON WARNING * Cron_Daily_Document_Sending: Impossible de trouver le type de document concerné "
|
||||
" Session Formation = " + str(
|
||||
my_session['_id']) + " Id du CRON courrier_template_type_document_id = " + str(
|
||||
data_automatic_traitement['courrier_template_type_document_id']))
|
||||
|
||||
continue
|
||||
|
||||
#print(" ### avant action 1 courrier_template_type_document_data = ", courrier_template_type_document_data)
|
||||
#print(" ## data_automatic_traitement = ", data_automatic_traitement)
|
||||
|
||||
local_diction = {}
|
||||
local_diction['session_id'] = str(my_session['_id'])
|
||||
local_diction['document_ref_intern'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
local_diction['partner_owner_recid'] = str(my_session['partner_owner_recid'])
|
||||
local_diction['courrier_template_type_document_ref_interne'] = str(data_automatic_traitement['courrier_template_type_document_ref_interne'])
|
||||
local_diction['courrier_template_type_document_id'] = str( data_automatic_traitement['courrier_template_type_document_id'])
|
||||
|
||||
#print( " ### local_diction for Send_document_from_base_document_automatic_setup = ", local_diction)
|
||||
local_status, local_retval = Send_document_from_base_document_automatic_setup(local_diction)
|
||||
|
||||
node_notification = {}
|
||||
node_notification['message'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
|
||||
local_status_tanslate = ""
|
||||
if( local_status is True ):
|
||||
local_status_tanslate = "Ok"
|
||||
else:
|
||||
local_status_tanslate = "Erreur"
|
||||
|
||||
node_notification['status'] = str(local_status_tanslate)
|
||||
node_notification['action'] = str(courrier_template_type_document_data['ref_interne'])
|
||||
diction_for_notification_email['tab_message'].append(node_notification)
|
||||
diction_for_notification_email['global_status'].append(str(local_status).lower())
|
||||
|
||||
|
||||
"""
|
||||
Mettre à jour le document
|
||||
|
||||
Important : 17/04/25 : Lorsqu'il s'agit d'une action d'émargement avec un target = 'every_session_sequence_day', alors
on ne désactive l'action que si la date du jour >= la date de fin de session
|
||||
|
||||
"""
|
||||
if( data_automatic_traitement['action_target_date'] != "every_session_sequence_day" or
|
||||
( data_automatic_traitement['action_target_date'] == "every_session_sequence_day" and
|
||||
local_session_end_date <= datetime.strptime(str(todays_date).strip(), '%d/%m/%Y') )):
|
||||
|
||||
local_update = MYSY_GV.dbname['session_formation'].update_many(
|
||||
{'_id': ObjectId(str(my_session['_id'])),
|
||||
'partner_owner_recid':str(my_session['partner_owner_recid']),
|
||||
'valide': '1',
|
||||
'automatic_traitement.courrier_template_type_document_id': str(data_automatic_traitement['courrier_template_type_document_id'])
|
||||
},
|
||||
{'$set':
|
||||
{
|
||||
'automatic_traitement.$[xxx].actif': "-1",
|
||||
'automatic_traitement.$[xxx].statut': "2",
|
||||
'automatic_traitement.$[xxx].date_traitement_cron': str(datetime.now()),
|
||||
'automatic_traitement.$[xxx].date_update': str(datetime.now()),
|
||||
'automatic_traitement.$[xxx].update_by': "Automatic Cron",
|
||||
}
|
||||
},
|
||||
upsert=False,
|
||||
array_filters=[
|
||||
{"xxx.courrier_template_type_document_id": str(data_automatic_traitement['courrier_template_type_document_id'])}
|
||||
]
|
||||
|
||||
)
|
||||
if( local_update.matched_count < 1 ):
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][
|
||||
3]) + " CRON WARNING * Cron_Daily_Document_Sending: La mise à jour du statut (actif = -1) du cron ne s'est "
|
||||
" PAS bien passé : Session Formation = "+ str(my_session['_id'])+
|
||||
" Id du CRON courrier_template_type_document_id = "+
|
||||
str(data_automatic_traitement['courrier_template_type_document_id']))
|
||||
|
||||
|
||||
|
||||
|
|
23365
Log/log_file.log
File diff suppressed because one or more lines are too long
|
@ -1823,6 +1823,9 @@ def GetAllValideSessionPartner_List(diction):
|
|||
elif ("archive" in diction.keys() and diction["archive"] == "1"):
|
||||
filt_archive = {'archive': '1'}
|
||||
|
||||
elif ("archive" not in diction.keys() ):
|
||||
filt_archive = {'archive': {'$ne': '1'}}
|
||||
|
||||
filt_type_session = {}
|
||||
if ("type_session" in diction.keys()):
|
||||
filt_type_session = {'type_session': str(diction['type_session']).strip()}
|
||||
|
@ -2124,7 +2127,7 @@ def GetAllValideSessionPartner_List(diction):
|
|||
local_diction['session_id'] = str(retVal['_id'])
|
||||
|
||||
"""
|
||||
25/06/2025 - zzzz
|
||||
25/06/2025 -
|
||||
On ne fait le contrôle d'alerte que sur les sessions dont la date de début est < today - 5 jours ou > today + 30 jours
|
||||
|
||||
"""
|
||||
|
@ -2297,7 +2300,6 @@ def GetAllValideSessionPartner_List_filter_like(diction):
|
|||
for retVal in MYSY_GV.dbname['session_formation'].aggregate(query):
|
||||
if( 'myclass_collection' in retVal.keys() and len(retVal['myclass_collection']) > 0 ):
|
||||
|
||||
|
||||
val = {}
|
||||
val['id'] = str(cpt)
|
||||
cpt = cpt + 1
|
||||
|
@ -5496,9 +5498,17 @@ def Duplicate_List_Session_Formation(diction):
|
|||
'locked':'0',
|
||||
'session_step_name':str(session_etape)})
|
||||
if( is_valide_etape != 1):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " L'étape de La promotion est invalide ")
|
||||
return False, " L'étape de La promotion est invalide "
|
||||
# Verifier si l'etape est dans les étapes de sessions par defaut
|
||||
is_valide_etape_default = MYSY_GV.dbname['base_partner_session_step'].count_documents(
|
||||
{'partner_owner_recid': "default",
|
||||
'valide': '1',
|
||||
'locked': '0',
|
||||
'session_step_name': str(session_etape)})
|
||||
|
||||
if (is_valide_etape_default != 1):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " L'étape de La promotion est invalide ")
|
||||
return False, " L'étape de La promotion est invalide "
|
||||
|
||||
|
||||
# Verification des date et des cohérences de date
|
||||
|
@ -6812,7 +6822,7 @@ def Prepare_and_Send_Convention_From_Session_For_Selected_Inscrit_By_PDF(diction
|
|||
local_diction['courrier_template_id'] = diction['courrier_template_id']
|
||||
local_diction['client_id'] = str(val)
|
||||
|
||||
print(" ## ZZZ1 client_id PRRRR = ", str(val))
|
||||
|
||||
local_status, local_full_file_name = Create_Convention_By_Client_PDF(local_diction)
|
||||
if( local_status is False):
|
||||
return local_status, local_full_file_name
|
||||
|
@ -7752,6 +7762,140 @@ def Prepare_and_Send_Convocation_From_Session_For_Selected_Inscrit_By_PDF(dictio
|
|||
return False, " Impossible de générer les conventions par pdf PDF "
|
||||
|
||||
|
||||
"""
|
||||
Cette fonction permet de télécharger les attestations de formation
au format PDF pour une liste d'attestations (attestation_formation)
|
||||
"""
|
||||
def Prepare_and_Send_Attestation_From_Session_By_PDF( diction):
|
||||
try:
|
||||
|
||||
field_list_obligatoire = ['token', 'session_id', 'tab_attestation_formation_ids']
|
||||
|
||||
for val in field_list_obligatoire:
|
||||
if val not in diction:
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||||
return False, " La valeur '" + val + "' n'est pas presente dans la liste des arguments"
|
||||
|
||||
my_token = ""
|
||||
if ("token" in diction.keys()):
|
||||
if diction['token']:
|
||||
my_token = diction['token']
|
||||
|
||||
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
||||
if (local_status is not True):
|
||||
return local_status, my_partner
|
||||
|
||||
# Verifier que La promotion est valide
|
||||
is_session_valide = MYSY_GV.dbname['session_formation'].count_documents(
|
||||
{'_id': ObjectId(str(diction['session_id'])),
|
||||
'valide': '1',
|
||||
'partner_owner_recid': str(my_partner['recid'])})
|
||||
|
||||
if (is_session_valide != 1):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " L'identifiant de La promotion est invalide ")
|
||||
return False, " L'identifiant de La promotion est invalide "
|
||||
|
||||
# Stokage des nom de fichier à zipper
|
||||
list_file_name_to_zip = []
|
||||
|
||||
tab_attestation_formation_ids_splited_obj = []
|
||||
tab_attestation_formation_ids = []
|
||||
|
||||
tab_attestation_formation_ids_splited = str(diction['tab_attestation_formation_ids']).split(",")
|
||||
for tmp in tab_attestation_formation_ids_splited :
|
||||
if( tmp ):
|
||||
tab_attestation_formation_ids.append(tmp)
|
||||
tab_attestation_formation_ids_splited_obj.append(ObjectId(str(tmp)))
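# Minimal sketch of the id-splitting step above: a comma-separated string of
# attestation ids is turned into ObjectId values for the $in query
# (the ids below are illustrative 24-character hex strings):
from bson import ObjectId

tab_ids = "665f1c2a9b3e4d5a6f7b8c9d,665f1c2a9b3e4d5a6f7b8c9e"
object_ids = [ObjectId(t) for t in tab_ids.split(",") if t]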
|
||||
|
||||
|
||||
is_warning_message = "0"
|
||||
warning_message = "Les attestation ont été correctement envoyées par emails avec l'attention suivante : "
|
||||
for attestation_formation_data in MYSY_GV.dbname['attestation_formation'].find({
|
||||
'partner_owner_recid':str(my_partner['recid']),
|
||||
'valide':"1",
|
||||
"locked":'0',
|
||||
'_id': { '$in': tab_attestation_formation_ids_splited_obj}
|
||||
}):
|
||||
|
||||
|
||||
|
||||
|
||||
# Recupération des données du modèle de document
|
||||
is_convention_by_client = "0"
|
||||
courrier_template_count = MYSY_GV.dbname['courrier_template'].count_documents(
|
||||
{'_id': ObjectId(str(attestation_formation_data['courrier_template_id'])),
|
||||
'valide': '1',
|
||||
'locked': '0',
|
||||
'partner_owner_recid': str(my_partner['recid'])}
|
||||
)
|
||||
|
||||
if( courrier_template_count != 1 ):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " L'identifiant du modèle de courrier "+str(attestation_formation_data['courrier_template_id'])+" est invalide")
|
||||
return False, " L'identifiant du modèle de courrier "+str(attestation_formation_data['courrier_template_id'])+" est invalide"
|
||||
|
||||
|
||||
|
||||
#field_list_obligatoire = [ 'token', 'inscription_id', 'courrier_template_id', 'email_test', 'email_production' ]
|
||||
new_diction_no_client = {}
|
||||
new_diction_no_client['token'] = str(diction['token'])
|
||||
new_diction_no_client['attestation_formation_id'] = str(attestation_formation_data['_id'])
|
||||
new_diction_no_client['courrier_template_id'] = str(attestation_formation_data['courrier_template_id'])
|
||||
new_diction_no_client['session_id'] = diction['session_id']
|
||||
|
||||
|
||||
print(" ##### new_diction_no_client (attestation pdf) = ", new_diction_no_client)
|
||||
tab_saved_file_full_path = []
|
||||
local_status, local_full_file_name = Create_Attestation_By_Stagiaire_PDF( new_diction_no_client)
|
||||
|
||||
if (local_status is False):
|
||||
mycommon.myprint(" WARNING impossible de créer l'attestation de formation (PDF) a l'apprenant : " + str(attestation_formation_data['inscription_id']) )
|
||||
is_warning_message = "1"
|
||||
warning_message = str(warning_message)+" -- "+str(" WARNING impossible de créer l'attestation de formation (PDF) a l'apprenant : " + str(attestation_formation_data['inscription_id']) )
|
||||
|
||||
else:
|
||||
list_file_name_to_zip.append(str(local_full_file_name))
|
||||
# Mettre à jour avec la date d'envoi de l'attestation
|
||||
updata_data = {}
|
||||
updata_data['date_update'] = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
|
||||
updata_data['update_by'] = str(my_partner['recid'])
|
||||
updata_data['statut'] = "1"
|
||||
updata_data['date_envoie'] = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
|
||||
|
||||
MYSY_GV.dbname['attestation_formation'].find_one_and_update(
|
||||
{'_id': ObjectId(str(attestation_formation_data['_id'])),
|
||||
'partner_owner_recid': str(my_partner['recid'])},
|
||||
{'$set': updata_data})
|
||||
|
||||
|
||||
# Create a ZipFile Object
|
||||
todays_date = str(date.today().strftime("%d/%m/%Y"))
|
||||
ts = datetime.now().timestamp()
|
||||
ts = str(ts).replace(".", "").replace(",", "")[-3:]
|
||||
zip_file_name = str(MYSY_GV.TEMPORARY_DIRECTORY_V2) + "List_Attestation_"+ str(ts) + ".zip"
|
||||
|
||||
with ZipFile(zip_file_name, 'w') as zip_object:
|
||||
for pdf_files in list_file_name_to_zip:
|
||||
# print(" ### fichier a zipper = ", pdf_files)
|
||||
zip_object.write(str(pdf_files))
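# Minimal sketch of the zipping step above. Passing arcname is a suggestion,
# not in the original code: it stores only the file name in the archive
# instead of the full temporary path. Paths below are assumed.
import os
from zipfile import ZipFile

pdf_files_to_zip = ["/tmp/Attestation_1.pdf"]            # assumed paths
with ZipFile("/tmp/List_Attestation_demo.zip", "w") as zip_object:
    for pdf in pdf_files_to_zip:
        if os.path.exists(pdf):                          # guard for the sketch
            zip_object.write(pdf, arcname=os.path.basename(pdf))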
|
||||
|
||||
if os.path.exists(zip_file_name):
|
||||
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
|
||||
|
||||
return True, send_file(zip_file_name, as_attachment=True)
|
||||
|
||||
return False, " Impossible de générer les convocations par PDF (1) "
|
||||
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible d'envoyer les conventions par email "
|
||||
|
||||
|
||||
|
||||
|
@ -7935,6 +8079,193 @@ def Create_Convocation_By_Stagiaire_PDF(diction):
|
|||
return False, " Impossible de créer le fichier pdf de convocation par stagiaire "
|
||||
|
||||
|
||||
"""
|
||||
Cette fonction crée une attestation de formation en PDF par participant à une session de formation,
peu importe le rattachement client ou pas.
|
||||
"""
|
||||
def Create_Attestation_By_Stagiaire_PDF(diction):
|
||||
try:
|
||||
field_list_obligatoire = ['token', 'session_id', 'courrier_template_id', 'attestation_formation_id']
|
||||
|
||||
for val in field_list_obligatoire:
|
||||
if val not in diction:
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||||
return False, " La valeur '" + val + "' n'est pas presente dans la liste des arguments"
|
||||
|
||||
my_token = ""
|
||||
if ("token" in diction.keys()):
|
||||
if diction['token']:
|
||||
my_token = diction['token']
|
||||
|
||||
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
||||
if (local_status is not True):
|
||||
return local_status, my_partner
|
||||
|
||||
qry = {'_id':ObjectId(str(diction['courrier_template_id'])),
|
||||
'valide':'1',
|
||||
'locked':'0',
|
||||
'ref_interne': 'CONVOCATION_STAGIAIRE',
|
||||
'partner_owner_recid':str(my_partner['recid'])}
|
||||
|
||||
#print(" ##### qry = ", qry)
|
||||
|
||||
# 1 - Verifier que le modele de courrier est bien editable par individu
|
||||
template_courrier_data = MYSY_GV.dbname['courrier_template'].find_one({'_id':ObjectId(str(diction['courrier_template_id'])),
|
||||
'valide':'1',
|
||||
'locked':'0',
|
||||
'partner_owner_recid':str(my_partner['recid'])})
|
||||
|
||||
if( template_courrier_data is None ):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " L'identifiant du modèle de courrier est invalide ")
|
||||
return False, " L'identifiant du modèle de courrier est invalide "
|
||||
|
||||
if ("edit_by_client" in template_courrier_data.keys() and str(template_courrier_data['edit_by_client']) == "1"):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " Le modèle de courrier n'est pas éditable par stagiaire. ")
|
||||
return False, " Le modèle de courrier n'est pas éditable par stagiaire "
|
||||
|
||||
|
||||
# Verifier que l'attestation de formation est valide
|
||||
|
||||
|
||||
attestation_formation_data = MYSY_GV.dbname['attestation_formation'].find_one({'_id':ObjectId(str(diction['attestation_formation_id'])), 'valide':'1', 'locked':'0',
|
||||
'partner_owner_recid':str(my_partner['recid'])})
|
||||
|
||||
if (attestation_formation_data is None ):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " L'identifiant de l'attestation est invalide ")
|
||||
return False, " L'identifiant de l'attestation est invalide "
|
||||
|
||||
tab_stagiaire = []
|
||||
tab_stagiaire.append(ObjectId(str(attestation_formation_data['inscription_id'])))
|
||||
|
||||
|
||||
|
||||
inscription_data = MYSY_GV.dbname['inscription'].find_one({'_id':ObjectId(attestation_formation_data['inscription_id']),
|
||||
'partner_owner_recid':str(my_partner['recid'])})
|
||||
|
||||
tab_apprenant = []
|
||||
tab_apprenant.append(ObjectId(str(inscription_data['apprenant_id'])))
|
||||
|
||||
# Verifier que La promotion est valide
|
||||
session_data = MYSY_GV.dbname['session_formation'].find_one(
|
||||
{'_id': ObjectId(str(diction['session_id'])), 'valide': '1', 'partner_owner_recid': str(my_partner['recid'])})
|
||||
|
||||
if (session_data is None):
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][
|
||||
3]) + " L'identifiant de La promotion/session est invalide ")
|
||||
return False, " L'identifiant de La promotion/session est invalide "
|
||||
|
||||
tab_session = []
|
||||
tab_session.append(session_data['_id'])
|
||||
|
||||
# Recuperation du titre de la formation
|
||||
class_data = MYSY_GV.dbname['myclass'].find_one(
|
||||
{'internal_url': str(session_data['class_internal_url']), 'valide': '1',
|
||||
'partner_owner_recid': str(my_partner['recid']), 'locked': '0'})
|
||||
|
||||
contenu_attestation = ""
|
||||
if (class_data and "contenu_attestation" in class_data.keys() and class_data['contenu_attestation']):
|
||||
contenu_attestation = class_data['contenu_attestation']
|
||||
|
||||
tab_class = []
|
||||
tab_class.append(class_data['_id'])
|
||||
|
||||
|
||||
# Creation du dictionnaire d'information à utiliser pour la creation du doc
|
||||
convention_dictionnary_data = {}
|
||||
new_diction = {}
|
||||
new_diction['token'] = diction['token']
|
||||
new_diction['list_stagiaire_id'] = tab_stagiaire
|
||||
new_diction['list_session_id'] = tab_session
|
||||
new_diction['list_class_id'] = tab_class
|
||||
new_diction['list_client_id'] = []
|
||||
new_diction['list_apprenant_id'] = tab_apprenant
|
||||
new_diction['list_sequence_session_id'] = []
|
||||
|
||||
|
||||
local_status, local_retval = mycommon.Get_Dictionnary_data_For_Template(new_diction)
|
||||
|
||||
if (local_status is False):
|
||||
return local_status, local_retval
|
||||
|
||||
convention_dictionnary_data = local_retval
|
||||
convention_dictionnary_data['contenu_attestation'] = contenu_attestation
|
||||
|
||||
body = {
|
||||
"params": convention_dictionnary_data,
|
||||
}
|
||||
|
||||
"""
|
||||
Creation du ficier PDF
|
||||
"""
|
||||
contenu_doc_Template = jinja2.Template(str(template_courrier_data['contenu_doc']))
|
||||
|
||||
sourceHtml = contenu_doc_Template.render(params=body["params"])
|
||||
|
||||
todays_date = str(date.today().strftime("%d/%m/%Y"))
|
||||
ts = datetime.now().timestamp()
|
||||
ts = str(ts).replace(".", "").replace(",", "")[-5:]
|
||||
|
||||
orig_file_name = "Attestation_Ftion" + str(my_partner['recid'])[0:3] + "_" + str(ts) + ".pdf"
|
||||
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
|
||||
|
||||
# open output file for writing (truncated binary)
|
||||
resultFile = open(outputFilename, "w+b")
|
||||
|
||||
# convert HTML to PDF
|
||||
pisaStatus = pisa.CreatePDF(
|
||||
src=sourceHtml, # the HTML to convert
|
||||
dest=resultFile) # file handle to receive result
|
||||
|
||||
# close output file
|
||||
resultFile.close()
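# Minimal, self-contained sketch of the HTML -> PDF step above.
# jinja2 and xhtml2pdf/pisa are the libraries already used by this module;
# the template string and output path are illustrative.
import jinja2
from xhtml2pdf import pisa

template = jinja2.Template("<h1>Attestation de {{ params.nom }}</h1>")
source_html = template.render(params={"nom": "DUPONT"})
with open("/tmp/attestation_demo.pdf", "w+b") as result_file:
    pisa_status = pisa.CreatePDF(src=source_html, dest=result_file)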
|
||||
|
||||
|
||||
"""
|
||||
25/01/2024 : pour loger une action dans la collection ==> courrier_template_tracking_history
|
||||
"""
|
||||
#print(" str(diction['inscription_id']) == ", str(diction['inscription_id']))
|
||||
""" local_status, local_retval = module_editique.Editic_Log_History_Action(my_partner, template_courrier_data,
|
||||
str(diction['session_id']), 'inscription', str(diction['inscription_id']))
|
||||
"""
|
||||
|
||||
local_status, local_retval = module_editique.Editic_Log_History_Action_From_courrier_template_type_document_ref_interne(
|
||||
my_partner, "ATTESTATION_FORMATION", str(diction['session_id']), 'inscription',
|
||||
str(attestation_formation_data['inscription_id']),
|
||||
str(attestation_formation_data['courrier_template_id']),
|
||||
"Telecharg. PDF")
|
||||
|
||||
# Mettre à jour avec la date d'envoi de l'attestation
|
||||
updata_data = {}
|
||||
updata_data['date_update'] = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
|
||||
updata_data['update_by'] = str(my_partner['recid'])
|
||||
updata_data['statut'] = "1"
|
||||
updata_data['date_envoie'] = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
|
||||
|
||||
MYSY_GV.dbname['attestation_formation'].find_one_and_update(
|
||||
{'_id': ObjectId(str(attestation_formation_data['_id'])),
|
||||
'partner_owner_recid': str(my_partner['recid'])},
|
||||
{'$set': updata_data})
|
||||
|
||||
|
||||
|
||||
return True, outputFilename
|
||||
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible de créer le fichier pdf de convocation par stagiaire "
|
||||
|
||||
|
||||
|
||||
"""
|
||||
Cette fonction prépare et envoie les convocations à chaque
participant à la promotion de formation
|
||||
|
@ -8720,8 +9051,6 @@ def Prepare_and_Send_Attestation_From_Session_By_Email(tab_files, Folder, dictio
|
|||
|
||||
|
||||
|
||||
|
||||
|
||||
is_warning_message = "0"
|
||||
warning_message = "Les attestation ont été correctement envoyées par emails avec l'attention suivante : "
|
||||
for attestation_formation_data in MYSY_GV.dbname['attestation_formation'].find({"session_id":str(diction['session_id']),
|
||||
|
@ -8993,11 +9322,11 @@ def Sent_Attestation_Stagiaire_By_Email(tab_files, Folder, diction):
|
|||
stagiaire_client_id = str(inscription_data['client_rattachement_id'])
|
||||
local_diction = {"token":str(diction['token']), "_id":stagiaire_client_id }
|
||||
|
||||
print(" ##### local_diction pr Get_Partner_Client_Communication_Contact= ", local_diction)
|
||||
#print(" ##### local_diction pr Get_Partner_Client_Communication_Contact= ", local_diction)
|
||||
local_status, partner_client_contact_communication = partner_client.Get_Partner_Client_Communication_Contact(local_diction)
|
||||
|
||||
if (local_status is True):
|
||||
print(" ### partner_client_contact_communication = ", partner_client_contact_communication)
|
||||
#print(" ### partner_client_contact_communication = ", partner_client_contact_communication)
|
||||
tab_local_email_production = []
|
||||
for tmp in partner_client_contact_communication:
|
||||
tmp_JSON = ast.literal_eval(tmp)
|
||||
|
@ -9035,6 +9364,12 @@ def Sent_Attestation_Stagiaire_By_Email(tab_files, Folder, diction):
|
|||
{'internal_url': str(session_data['class_internal_url']), 'valide': '1',
|
||||
'partner_owner_recid': str(my_partner['recid']), 'locked': '0'})
|
||||
|
||||
|
||||
contenu_attestation = ""
|
||||
if( class_data and "contenu_attestation" in class_data.keys() and class_data['contenu_attestation']):
|
||||
contenu_attestation = class_data['contenu_attestation']
|
||||
|
||||
|
||||
tab_class = []
|
||||
tab_class.append(class_data['_id'])
|
||||
|
||||
|
@ -9054,6 +9389,8 @@ def Sent_Attestation_Stagiaire_By_Email(tab_files, Folder, diction):
|
|||
return local_status, local_retval
|
||||
|
||||
convention_dictionnary_data = local_retval
|
||||
convention_dictionnary_data['contenu_attestation'] = contenu_attestation
|
||||
|
||||
|
||||
body = {
|
||||
"params": convention_dictionnary_data,
|
||||
|
@ -10399,7 +10736,7 @@ def Invoice_Partner_From_Session( diction):
|
|||
list_partner_invoice_line_champ = ['order_line_formation', 'order_line_qty', 'order_line_prix_unitaire', 'order_line_tax', 'order_line_tax_amount', 'order_line_montant_toutes_taxes',
|
||||
'order_line_montant_hors_taxes', 'order_line_type_reduction', 'order_line_type_valeur', 'order_line_montant_reduction', 'order_header_ref_interne',
|
||||
'order_line_comment', 'order_header_id', 'valide', 'locked', 'date_update', 'partner_owner_recid', 'invoice_header_ref_interne', 'invoice_line_type',
|
||||
'invoice_date', 'invoice_header_id']
|
||||
'invoice_date', 'invoice_header_id', 'order_line_class_id']
|
||||
|
||||
|
||||
# PreRemplir les champs
|
||||
|
@ -10428,6 +10765,8 @@ def Invoice_Partner_From_Session( diction):
|
|||
nom_prenom_email_participant += local_nom+" "+local_prenom+" "+local_email+"\n"
|
||||
|
||||
partner_invoice_line_data['order_line_formation'] = class_data[0]['internal_url']
|
||||
partner_invoice_line_data['order_line_class_id'] = str(class_data[0]['_id'])
|
||||
|
||||
partner_invoice_line_data['order_line_qty'] = str(nb_participant_du_client)
|
||||
partner_invoice_line_data['order_line_prix_unitaire'] = str(prix_session)
|
||||
partner_invoice_line_data['order_line_montant_hors_taxes'] = str(total_ht)
|
||||
|
@ -11102,9 +11441,13 @@ def Invoice_Inscrption_With_Split_Session_By_Inscription_Id( tab_files, diction)
|
|||
MYSY_GV.TEMPORARY_DIRECTORY_V2,
|
||||
local_diction_for_NOT_INVOICE_SPLIT)
|
||||
|
||||
|
||||
print(" ### status = ", status)
|
||||
print(" ### retval = ", retval)
|
||||
print(" ### local_diction_for_NOT_INVOICE_SPLIT invoice_ref = ", invoice_ref)
|
||||
if (status is False):
|
||||
return False, str(retval), False
|
||||
|
||||
for tmp in invoice_ref:
|
||||
list_non_splited_invoice.append(tmp)
|
||||
|
||||
|
@ -12721,16 +13064,16 @@ def Invoice_Partner_From_Session_By_Inscription_Id( diction):
|
|||
taux_tva_statuts, taux_tva_retval = partner_base_setup.Get_Given_Partner_Basic_Setup({'token':str(diction['token']), 'config_name':'tva'})
|
||||
|
||||
if( taux_tva_statuts is False ):
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " Facturation : Impossible de récupérer le taux de TVA ")
|
||||
return False, " Facturation : Impossible de récupérer le taux de TVA ", False
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + "Impossible de récupérer le taux de TVA, Verifiez votre paramétrage ")
|
||||
return False, " Impossible de récupérer le taux de TVA, Verifiez votre paramétrage ", False
|
||||
|
||||
tmp = ast.literal_eval(taux_tva_retval[0])
|
||||
taux_tva_retval = tmp['config_value']
|
||||
print(" ### taux_tva_retval = ", taux_tva_retval)
|
||||
tva_status, tva_value = mycommon.IsFloat(str(taux_tva_retval))
|
||||
if (tva_status is False):
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " Facturation : Le taux de TVA est invalide ")
|
||||
return False, " Facturation : Le taux de TVA est invalide ", False
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " Le taux de TVA est invalide. Verifiez votre paramétrage ")
|
||||
return False, " Le taux de TVA est invalide. Verifiez votre paramétrage ", False
|
||||
|
||||
partner_invoice_header_data['order_header_tax'] = taux_tva_retval
|
||||
partner_invoice_header_data['order_header_tax_amount'] = str(round(tva_value * total_ht/100, 2))
|
||||
|
@ -12810,7 +13153,7 @@ def Invoice_Partner_From_Session_By_Inscription_Id( diction):
|
|||
list_partner_invoice_line_champ = ['order_line_formation', 'order_line_qty', 'order_line_prix_unitaire', 'order_line_tax', 'order_line_tax_amount', 'order_line_montant_toutes_taxes',
|
||||
'order_line_montant_hors_taxes', 'order_line_type_reduction', 'order_line_type_valeur', 'order_line_montant_reduction', 'order_header_ref_interne',
|
||||
'order_line_comment', 'order_header_id', 'valide', 'locked', 'date_update', 'partner_owner_recid', 'invoice_header_ref_interne', 'invoice_line_type',
|
||||
'invoice_date', 'invoice_header_id', 'order_line_is_include_bpf']
|
||||
'invoice_date', 'invoice_header_id', 'order_line_is_include_bpf', 'order_line_class_id']
|
||||
|
||||
|
||||
# PreRemplir les champs
|
||||
|
@ -12839,6 +13182,7 @@ def Invoice_Partner_From_Session_By_Inscription_Id( diction):
|
|||
nom_prenom_email_participant += local_nom+" "+local_prenom+" "+local_email+"\n"
|
||||
|
||||
partner_invoice_line_data['order_line_formation'] = class_data[0]['internal_url']
|
||||
partner_invoice_line_data['order_line_class_id'] = str(class_data[0]['_id'])
|
||||
partner_invoice_line_data['order_line_qty'] = str(nb_participant_du_client)
|
||||
partner_invoice_line_data['order_line_prix_unitaire'] = str(prix_session)
|
||||
partner_invoice_line_data['order_line_montant_hors_taxes'] = str(total_ht)
|
||||
|
@ -12893,6 +13237,8 @@ def Invoice_Partner_From_Session_By_Inscription_Id( diction):
|
|||
partner_invoice_line_data_detail['order_line_inscription_modefinancement'] = ""
|
||||
|
||||
partner_invoice_line_data_detail['order_line_formation'] = class_data[0]['internal_url']
|
||||
partner_invoice_line_data_detail['order_line_class_id'] = str(class_data[0]['_id'])
|
||||
|
||||
partner_invoice_line_data_detail['order_line_prix_unitaire'] = str(prix_session)
|
||||
partner_invoice_line_data_detail['order_line_qty'] = "1"
|
||||
partner_invoice_line_data_detail['order_line_montant_hors_taxes'] = str(total_ht)
|
||||
|
@ -13069,9 +13415,9 @@ def Invoice_Splited_Partner_From_Session_By_Inscription_Id( diction):
|
|||
|
||||
|
||||
for one_tab_split in invoice_split['tab_split'] :
|
||||
print(" ### one_tab_split = ", one_tab_split)
|
||||
print(" ### split_type = ", split_type)
|
||||
print(" ### invoice_part = ", one_tab_split['invoice_part'])
|
||||
#print(" ### one_tab_split = ", one_tab_split)
|
||||
#print(" ### split_type = ", split_type)
|
||||
#print(" ### invoice_part = ", one_tab_split['invoice_part'])
|
||||
split_invoice_part = one_tab_split['invoice_part']
|
||||
|
||||
|
||||
|
@ -13278,8 +13624,8 @@ def Invoice_Splited_Partner_From_Session_By_Inscription_Id( diction):
|
|||
taux_tva_statuts, taux_tva_retval = partner_base_setup.Get_Given_Partner_Basic_Setup({'token':str(diction['token']), 'config_name':'tva'})
|
||||
|
||||
if( taux_tva_statuts is False ):
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " Facturation : Impossible de récupérer le taux de TVA ")
|
||||
return False, " Facturation : Impossible de récupérer le taux de TVA ", False
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + "Impossible de récupérer le taux de TVA, Verifiez votre paramétrage ")
|
||||
return False, " Impossible de récupérer le taux de TVA, Verifiez votre paramétrage ", False
|
||||
|
||||
tmp = ast.literal_eval(taux_tva_retval[0])
|
||||
taux_tva_retval = tmp['config_value']
|
||||
|
@ -13287,8 +13633,8 @@ def Invoice_Splited_Partner_From_Session_By_Inscription_Id( diction):
|
|||
print(" ### taux_tva_retval = ", taux_tva_retval)
|
||||
tva_status, tva_value = mycommon.IsFloat(str(taux_tva_retval))
|
||||
if (tva_status is False):
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " Facturation : Le taux de TVA est invalide ")
|
||||
return False, " Facturation : Le taux de TVA est invalide ", False
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " Le taux de TVA est invalide, Verifiez votre paramétrage ")
|
||||
return False, "Le taux de TVA est invalide, Verifiez votre paramétrage ", False
|
||||
|
||||
split_invoice_part_FLOAT = mycommon.tryFloat(str(split_invoice_part))
|
||||
|
||||
|
@ -13394,7 +13740,7 @@ def Invoice_Splited_Partner_From_Session_By_Inscription_Id( diction):
|
|||
list_partner_invoice_line_champ = ['order_line_formation', 'order_line_qty', 'order_line_prix_unitaire', 'order_line_tax', 'order_line_tax_amount', 'order_line_montant_toutes_taxes',
|
||||
'order_line_montant_hors_taxes', 'order_line_type_reduction', 'order_line_type_valeur', 'order_line_montant_reduction', 'order_header_ref_interne',
|
||||
'order_line_comment', 'order_header_id', 'valide', 'locked', 'date_update', 'partner_owner_recid', 'invoice_header_ref_interne', 'invoice_line_type',
|
||||
'invoice_date', 'invoice_header_id']
|
||||
'invoice_date', 'invoice_header_id', 'order_line_class_id']
|
||||
|
||||
|
||||
# PreRemplir les champs
|
||||
|
@ -13423,6 +13769,8 @@ def Invoice_Splited_Partner_From_Session_By_Inscription_Id( diction):
|
|||
nom_prenom_email_participant += local_nom+" "+local_prenom+" "+local_email+"\n"
|
||||
|
||||
partner_invoice_line_data['order_line_formation'] = class_data[0]['internal_url']
|
||||
partner_invoice_line_data['order_line_class_id'] = str(class_data[0]['_id'])
|
||||
|
||||
partner_invoice_line_data['order_line_qty'] = str(nb_participant_du_client)
|
||||
partner_invoice_line_data['order_line_prix_unitaire'] = str(prix_session)
|
||||
partner_invoice_line_data['order_line_montant_hors_taxes'] = str(total_ht)
|
||||
|
@ -13474,6 +13822,8 @@ def Invoice_Splited_Partner_From_Session_By_Inscription_Id( diction):
|
|||
partner_invoice_line_data_detail['order_line_inscription_type_apprenant'] = str(tmp_inscription_dat['type_apprenant'])
|
||||
partner_invoice_line_data_detail['order_line_inscription_modefinancement'] = str(tmp_inscription_dat['modefinancement'])
|
||||
partner_invoice_line_data_detail['order_line_formation'] = class_data[0]['internal_url']
|
||||
partner_invoice_line_data_detail['order_line_class_id'] = str(class_data[0]['_id'])
|
||||
|
||||
partner_invoice_line_data_detail['order_line_prix_unitaire'] = str(prix_session)
|
||||
partner_invoice_line_data_detail['order_line_montant_hors_taxes'] = str(total_ht)
|
||||
partner_invoice_line_data_detail['order_line_qty'] = "1"
|
||||
|
@ -13723,6 +14073,8 @@ def Invoice_Create_Secure_E_Document(diction):
|
|||
user['_id'] = retval['_id']
|
||||
|
||||
user['order_line_formation'] = retval['order_line_formation']
|
||||
user['order_line_class_id'] = str(retval['myclass_collection'][0]['_id'])
|
||||
|
||||
user['order_line_qty'] = retval['order_line_qty']
|
||||
user['order_line_prix_unitaire'] = retval['order_line_prix_unitaire']
|
||||
user['order_header_id'] = retval['order_header_id']
|
||||
|
@ -13778,7 +14130,7 @@ def Invoice_Create_Secure_E_Document(diction):
|
|||
else:
|
||||
user['order_line_comment'] = ""
|
||||
|
||||
user['class_id'] = retval['myclass_collection'][0]['_id']
|
||||
user['class_id'] = str(retval['myclass_collection'][0]['_id'])
|
||||
user['title'] = retval['myclass_collection'][0]['title']
|
||||
user['order_line_formation_external_code'] = retval['myclass_collection'][0]['external_code']
|
||||
|
||||
|
@ -13864,6 +14216,8 @@ def Invoice_Create_Secure_E_Document(diction):
|
|||
val_tmp = val_tmp + 1
|
||||
user['_id'] = retval['_id']
|
||||
user['order_line_formation'] = retval['order_line_formation']
|
||||
|
||||
|
||||
user['order_line_qty'] = retval['order_line_qty']
|
||||
user['order_line_prix_unitaire'] = retval['order_line_prix_unitaire']
|
||||
user['invoice_header_id'] = retval['invoice_header_id']
|
||||
|
@ -14078,7 +14432,7 @@ def Invoice_Create_Secure_E_Document(diction):
|
|||
)
|
||||
|
||||
|
||||
# zzzzz
|
||||
|
||||
return True, str(local_retval_e_doc)
|
||||
|
||||
|
||||
|
|
|
@ -204,6 +204,8 @@ def Add_Session_Sequence(diction):
|
|||
return False, " La date de fin de séquence " + str(
|
||||
date_debut_seq) + " chevauche une autre séquence"
|
||||
|
||||
|
||||
|
||||
sequence_type = "planning"
|
||||
if ("type" in diction.keys()):
|
||||
sequence_type = str(diction['type'])
|
||||
|
@ -286,7 +288,10 @@ def Add_Session_Sequence(diction):
|
|||
return False, " La ligne de planification de unité d'enseignement est actuellement utilisé. Impossible d'associer à cette ligne à une nouvelle séquence "
|
||||
"""
|
||||
|
||||
delta_time = datetime.strptime(str(date_fin_seq), '%d/%m/%Y %H:%M') - datetime.strptime(str(date_debut_seq),
|
||||
'%d/%m/%Y %H:%M')
|
||||
|
||||
delta_time_converted_in_hour = round(float(delta_time.total_seconds() / 3600), 2)
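# Minimal sketch of the sequence-duration computation above
# (dates are illustrative, format '%d/%m/%Y %H:%M' as in the code):
from datetime import datetime

date_debut_seq = "17/04/2025 09:00"
date_fin_seq = "17/04/2025 12:30"
delta = datetime.strptime(date_fin_seq, '%d/%m/%Y %H:%M') \
        - datetime.strptime(date_debut_seq, '%d/%m/%Y %H:%M')
print(round(delta.total_seconds() / 3600, 2))   # 3.5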
|
||||
|
||||
my_data = {}
|
||||
my_data['session_id'] = str(diction['session_id'])
|
||||
|
@ -294,6 +299,7 @@ def Add_Session_Sequence(diction):
|
|||
my_data['sequence_title'] = sequence_title
|
||||
my_data['sequence_start'] = date_debut_seq
|
||||
my_data['sequence_end'] = date_fin_seq
|
||||
my_data['time_in_hour'] = str(delta_time_converted_in_hour)
|
||||
|
||||
my_data['agenda'] = agenda
|
||||
my_data['objectif'] = objectif
|
||||
|
@ -592,12 +598,19 @@ def Add_Session_Sequence_Return_New_Seq_Data(diction):
|
|||
return False, " La ligne de planification de unité d'enseignement est actuellement utilisé. Impossible d'associer à cette ligne à une nouvelle séquence "
|
||||
"""
|
||||
|
||||
delta_time = datetime.strptime(str(date_fin_seq), '%d/%m/%Y %H:%M') - datetime.strptime(str(date_debut_seq),
|
||||
'%d/%m/%Y %H:%M')
|
||||
|
||||
delta_time_converted_in_hour = round(float(delta_time.total_seconds()/3600), 2)
|
||||
|
||||
|
||||
my_data = {}
|
||||
my_data['session_id'] = str(diction['session_id'])
|
||||
my_data['type'] = sequence_type
|
||||
my_data['sequence_title'] = sequence_title
|
||||
my_data['sequence_start'] = date_debut_seq
|
||||
my_data['sequence_end'] = date_fin_seq
|
||||
my_data['time_in_hour'] = str(delta_time_converted_in_hour)
|
||||
|
||||
my_data['agenda'] = agenda
|
||||
my_data['objectif'] = objectif
|
||||
|
|
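The two Add_Session_Sequence hunks above derive the new 'time_in_hour' field by subtracting two 'dd/mm/YYYY HH:MM' timestamps and rounding to hours. A minimal standalone sketch of that conversion (the helper name is illustrative, not from the repository):

from datetime import datetime

def sequence_duration_in_hours(date_debut_seq: str, date_fin_seq: str) -> float:
    """Duration between two 'dd/mm/YYYY HH:MM' timestamps, in hours, rounded to 2 decimals."""
    fmt = '%d/%m/%Y %H:%M'
    delta = datetime.strptime(date_fin_seq, fmt) - datetime.strptime(date_debut_seq, fmt)
    return round(delta.total_seconds() / 3600, 2)

# Example: a sequence running from 09:00 to 12:30 lasts 3.5 hours.
print(sequence_duration_in_hours("12/07/2025 09:00", "12/07/2025 12:30"))  # 3.5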
apprenant_mgt.py
@@ -203,6 +203,7 @@ def Add_Apprenant(diction):
new_data['valide'] = '1'
new_data['locked'] = '0'
new_data['archive'] = '0'
new_data['partner_owner_recid'] = str(my_partner['recid'])
new_data['date_update'] = str(datetime.now())
new_data['update_by'] = str(my_partner['_id'])

@@ -479,6 +480,9 @@ def Get_Given_Apprenant_Data(diction):
user['id'] = str(val_tmp)
val_tmp = val_tmp + 1
if ("archive" not in val.keys()):
user['archive'] = "0"
if( "tuteur1_civilite" not in val.keys() ):
user['tuteur1_civilite'] = ""
elif(val['tuteur1_civilite'] not in MYSY_GV.CIVILITE ):

@@ -773,7 +777,7 @@ def Get_List_Partner_Apprenant(diction):
"""
Verification des input acceptés
"""
field_list = ['token']
field_list = ['token', 'archive']
incom_keys = diction.keys()
for val in incom_keys:

@@ -806,15 +810,28 @@ def Get_List_Partner_Apprenant(diction):
if (local_status is not True):
return local_status, my_partner
filt_archive = {}
if ("archive" in diction.keys() and (diction["archive"] == "0" or diction["archive"] == "")):
filt_archive = {'archive': {'$ne': '1'}}
elif ("archive" in diction.keys() and diction["archive"] == "1"):
filt_archive = {'archive': '1'}
elif ("archive" not in diction.keys()):
filt_archive = {'archive': {'$ne': '1'}}
RetObject = []
val_tmp = 1
for val in MYSY_GV.dbname['apprenant'].find({'partner_owner_recid': str(my_partner['recid']),
for val in MYSY_GV.dbname['apprenant'].find({ '$and' : [filt_archive, {'partner_owner_recid': str(my_partner['recid']),
'valide': '1',
'locked': '0'}).sort([("_id", pymongo.DESCENDING), ]):
'locked': '0'}]}).sort([("_id", pymongo.DESCENDING), ]):
user = val
user['id'] = str(val_tmp)
val_tmp = val_tmp + 1
if ("archive" not in val.keys()):
user['archive'] = "0"
# Recuperer les données du client
client_nom = ""
if ("client_rattachement_id" in val.keys() and val['client_rattachement_id']):

@@ -870,7 +887,7 @@ Recuperer la liste des apprenant avec des filtrer
def Get_Apprenant_List_Partner_with_filter(diction):
try:
field_list = ['token', 'nom', 'prenom', 'email']
field_list = ['token', 'nom', 'prenom', 'email', 'archive']
incom_keys = diction.keys()
for val in incom_keys:
if val not in field_list and val.startswith('my_') is False:

@@ -911,9 +928,19 @@ def Get_Apprenant_List_Partner_with_filter(diction):
if ("prenom" in diction.keys()):
filt_prenom = {'prenom': {'$regex': str(diction['prenom']), "$options": "i"}}
filt_archive = {}
if ("archive" in diction.keys() and (diction["archive"] == "0" or diction["archive"] == "")):
filt_archive = {'archive': {'$ne': '1'}}
elif ("archive" in diction.keys() and diction["archive"] == "1"):
filt_archive = {'archive': '1'}
elif ("archive" not in diction.keys()):
filt_archive = {'archive': {'$ne': '1'}}
filt_class_partner_recid = {'partner_owner_recid': str(my_partner['recid'])}
query = [{'$match': {'$and': [filt_email, filt_nom, filt_prenom, {'partner_owner_recid': str(my_partner['recid'])}]}},
query = [{'$match': {'$and': [filt_archive, filt_email, filt_nom, filt_prenom, {'partner_owner_recid': str(my_partner['recid'])}]}},
{'$sort': {'_id': -1}},
]
#print("#### Get_Apprenant_List_Partner_with_filter laa 01 : query = ", query)

@@ -924,6 +951,9 @@ def Get_Apprenant_List_Partner_with_filter(diction):
val['id'] = str(cpt)
cpt = cpt + 1
if ("archive" not in retVal.keys()):
val['archive'] = "0"
if ("date_naissance" not in val.keys() or str(val['date_naissance']) == ""):
val['date_naissance'] = "01/01/1900"

@@ -3242,3 +3272,189 @@ def ENT_Enable_Apprenant_Account(diction):
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
return False, " Impossible désactiver les accès ENT "
"""
02/06/2025 : Archivage d'un apprenant
"""
def archive_apprenant(diction):
try:
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
# field_list.
'''
field_list = ['tab_apprenant_ids', 'token']
incom_keys = diction.keys()
for val in incom_keys:
if val not in field_list and val.startswith('my_') is False:
mycommon.myprint(
str(inspect.stack()[0][3])+" - Le champ '" + val + "' n'existe pas")
return False, " Le champ '" + val + "' n'existe pas"
'''
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
On controle que les champs obligatoires sont presents dans la liste
'''
field_list_obligatoire = ['tab_apprenant_ids', 'token']
for val in field_list_obligatoire:
if val not in diction:
mycommon.myprint(str(inspect.stack()[0][3])+" - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
return False, " Impossible de mettre à jour la formation"
# recuperation des paramettre
mydata = {}
my_internal_url = ""
if ("token" in diction.keys()):
if diction['token']:
mydata['token'] = diction['token']
# Verification de la validité du token
tab_apprenant_ids = []
tab_apprenant_ids_ObjectId = []
if ("tab_apprenant_ids" in diction.keys()):
tab_apprenant_ids = str(diction['tab_apprenant_ids']).split(",")
for tmp in tab_apprenant_ids:
if (tmp):
tab_apprenant_ids_ObjectId.append(ObjectId(tmp))
# Verification de la validité du token
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
if (local_status is not True):
return local_status, my_partner
mydata['date_update'] = str(datetime.now())
mydata['update_by'] = str(my_partner['_id'])
mydata['archive'] = '1'
# seules les formations avec locked = 0 et valide=1 sont modifiables
ret_val = MYSY_GV.dbname['apprenant'].update_many(
{'_id': {'$in': tab_apprenant_ids_ObjectId}, 'valide': '1',
'partner_owner_recid': str(my_partner['recid'])},
{"$set": mydata},
)
for apprenant_id in tab_apprenant_ids:
# pour la collection 'myclass'
now = str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
# print(" ####### laaaa diction = ", diction)
history_event_dict = {}
history_event_dict['token'] = str(diction['token'])
history_event_dict['related_collection'] = "inscription"
history_event_dict['related_collection_recid'] = str(apprenant_id)
history_event_dict['action_date'] = str(now)
history_event_dict['action_description'] = "Archivage "
local_status, local_retval = mycommon.Collection_Historique.Add_Historique_Event(history_event_dict)
if (local_status is False):
mycommon.myprint(
" WARNING : Impossible de logguer l'historique pour l'évènement : " + str(history_event_dict))
return True, " Apprenants(s) archivé(s) "
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
return False, " Impossible d'archiver les apprenants "
"""
02/06/2025 : De-Archivage d'un apprenant
"""
def unarchive_apprenant(diction):
try:
'''
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
# field_list.
'''
field_list = ['tab_apprenant_ids', 'token']
incom_keys = diction.keys()
for val in incom_keys:
if val not in field_list and val.startswith('my_') is False:
mycommon.myprint(
str(inspect.stack()[0][3])+" - Le champ '" + val + "' n'existe pas")
return False, " Le champ '" + val + "' n'existe pas"
'''
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
On controle que les champs obligatoires sont presents dans la liste
'''
field_list_obligatoire = ['tab_apprenant_ids', 'token']
for val in field_list_obligatoire:
if val not in diction:
mycommon.myprint(str(inspect.stack()[0][3])+" - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
return False, " Impossible de mettre à jour la formation"
# recuperation des paramettre
mydata = {}
my_internal_url = ""
if ("token" in diction.keys()):
if diction['token']:
mydata['token'] = diction['token']
# Verification de la validité du token
tab_apprenant_ids = []
tab_apprenant_ids_ObjectId = []
if ("tab_apprenant_ids" in diction.keys()):
tab_apprenant_ids = str(diction['tab_apprenant_ids']).split(",")
for tmp in tab_apprenant_ids:
if (tmp):
tab_apprenant_ids_ObjectId.append(ObjectId(tmp))
# Verification de la validité du token
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
if (local_status is not True):
return local_status, my_partner
mydata['date_update'] = str(datetime.now())
mydata['update_by'] = str(my_partner['_id'])
mydata['archive'] = '0'
# seules les formations avec locked = 0 et valide=1 sont modifiables
ret_val = MYSY_GV.dbname['apprenant'].update_many(
{'_id': {'$in': tab_apprenant_ids_ObjectId}, 'valide': '1',
'partner_owner_recid': str(my_partner['recid'])},
{"$set": mydata},
)
for apprenant_id in tab_apprenant_ids:
# pour la collection 'myclass'
now = str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
# print(" ####### laaaa diction = ", diction)
history_event_dict = {}
history_event_dict['token'] = str(diction['token'])
history_event_dict['related_collection'] = "apprenant"
history_event_dict['related_collection_recid'] = str(apprenant_id)
history_event_dict['action_date'] = str(now)
history_event_dict['action_description'] = "Desarchivage "
local_status, local_retval = mycommon.Collection_Historique.Add_Historique_Event(history_event_dict)
if (local_status is False):
mycommon.myprint(
" WARNING : Impossible de logguer l'historique pour l'évènement : " + str(history_event_dict))
return True, " Apprenants(s) desarchivé(s) "
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
return False, " Impossible desarchiver les apprenants"
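The archive handling added above builds a small `filt_archive` sub-filter and AND-combines it with the partner/valide/locked criteria; documents with no `archive` key are treated as not archived. A hedged sketch of that filter-building logic, written as a free function for illustration only:

def build_archive_filter(diction: dict) -> dict:
    """Mirror of the archive filter used by the listing queries in this diff:
    missing key, '' or '0' -> exclude archived rows; '1' -> archived rows only."""
    if diction.get("archive") == "1":
        return {'archive': '1'}
    # Missing key or '' / '0': keep everything not explicitly archived.
    return {'archive': {'$ne': '1'}}

# The final query AND-combines the sub-filters, exactly like the diff does:
# MYSY_GV.dbname['apprenant'].find({'$and': [build_archive_filter(diction),
#     {'partner_owner_recid': recid, 'valide': '1', 'locked': '0'}]})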
@@ -458,6 +458,14 @@ def CreateTableauEmargement_From_Sequence(diction):
new_my_local_data['matin'] = False
new_my_local_data['apresmidi'] = False
if( "time_in_hour" in session_sequence.keys() ):
new_my_local_data['time_in_hour'] = session_sequence['time_in_hour']
else:
delta_time = datetime.strptime(str(session_sequence['sequence_end']), '%d/%m/%Y %H:%M') - datetime.strptime(str(session_sequence['sequence_start']),'%d/%m/%Y %H:%M')
delta_time_converted_in_hour = round(float(delta_time.total_seconds() / 3600), 2)
new_my_local_data['time_in_hour'] = str(delta_time_converted_in_hour)
new_my_local_data['statut'] = "0"
new_my_local_data['date_envoi'] = ""

@@ -559,6 +567,13 @@ def GetTableauEmargement(diction):
if ("date_emargement" not in retval.keys()):
user['date_emargement'] = ""
if ("time_in_hour" not in retval.keys()):
delta_time = datetime.strptime(str(user['sequence_end']), '%d/%m/%Y %H:%M') - datetime.strptime(
str(user['sequence_start']),'%d/%m/%Y %H:%M')
delta_time_converted_in_hour = round(float(delta_time.total_seconds() / 3600), 2)
user['time_in_hour'] = str(delta_time_converted_in_hour)
if ("mysy_manual_signature_img" not in retval.keys()):
user['mysy_manual_signature_img'] = ""

@@ -855,6 +870,14 @@ def Get_Given_Emargement_Data(diction):
user['id'] = str(val_tmp)
val_tmp = val_tmp + 1
if ("time_in_hour" not in retval.keys()):
delta_time = datetime.strptime(str(user['sequence_end']), '%d/%m/%Y %H:%M') - datetime.strptime(
str(user['sequence_start']),'%d/%m/%Y %H:%M')
delta_time_converted_in_hour = round(float(delta_time.total_seconds() / 3600), 2)
user['time_in_hour'] = str(delta_time_converted_in_hour)
RetObject.append(mycommon.JSONEncoder().encode(user))
return True, RetObject

@@ -978,7 +1001,7 @@ def GerneratePDFEmargementList(diction):
mydate = diction['date']
query_get_data['date'] = diction['date']
total_heure = 0
tab_users = []
for val_tmp in MYSY_GV.dbname['emargement'].find({"$and":[query_get_data, filt_inscrit]}).sort([("date", pymongo.ASCENDING), ("sequence_start", pymongo.ASCENDING), ("sequence_end", pymongo.ASCENDING)]):
local_tmp = {}

@@ -991,6 +1014,20 @@ def GerneratePDFEmargementList(diction):
local_tmp['sequence_end'] = val_tmp['sequence_end']
local_tmp['is_present'] = val_tmp['is_present']
# Comptabiliser les heures de présence
if( str(val_tmp['is_present']) == "True"):
if ("time_in_hour" not in val_tmp.keys()):
delta_time = datetime.strptime(str(val_tmp['sequence_end']), '%d/%m/%Y %H:%M') - datetime.strptime(
str(val_tmp['sequence_start']), '%d/%m/%Y %H:%M')
delta_time_converted_in_hour = round(float(delta_time.total_seconds() / 3600), 2)
local_tmp['time_in_hour'] = str(delta_time_converted_in_hour)
else:
local_tmp['time_in_hour'] = val_tmp['time_in_hour']
total_heure = total_heure + mycommon.tryFloat(local_tmp['time_in_hour'])
mysy_manual_signature_img = ""
if( "mysy_manual_signature_img" in val_tmp.keys() ):
mysy_manual_signature_img = val_tmp['mysy_manual_signature_img']

@@ -1001,6 +1038,7 @@ def GerneratePDFEmargementList(diction):
tab_users.append(local_tmp)
total_heure = round(total_heure, 2)
#print(' ### tab_users = ' + str(tab_users))
date_for_tmplate = {}

@@ -1130,9 +1168,11 @@ def GerneratePDFEmargementList(diction):
#print(json.dumps(tab_users, indent=1))
# This data can come from database query
body = {
"data": date_for_tmplate,
"user": tab_users,
"total_heure": str(total_heure),
"company_data" :company_data
}

@@ -1154,7 +1194,8 @@ def GerneratePDFEmargementList(diction):
contenu_doc_Template = jinja2.Template(str(partner_document_data['contenu_doc']))
sourceHtml = contenu_doc_Template.render(json_data=body["data"], users=body["user"], company_data=body["company_data"])
sourceHtml = contenu_doc_Template.render(json_data=body["data"], users=body["user"], company_data=body["company_data"],
total_heure=body["total_heure"])
orig_file_name = "Emargement.pdf"
outputFilename = str(MYSY_GV.EMARGEMENT_DIRECTORY) + str(orig_file_name)
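The GerneratePDFEmargementList hunks above accumulate the presence hours into total_heure and expose it to the Jinja2 template as an extra render variable. A small self-contained sketch of that pattern, with a hypothetical template string standing in for the partner's 'contenu_doc':

import jinja2

# Hypothetical template text; the real one is stored in the partner document.
template_text = "Total heures de présence : {{ total_heure }} h pour {{ users | length }} émargements."
tpl = jinja2.Template(template_text)

users = [{"time_in_hour": "3.5", "is_present": "True"},
         {"time_in_hour": "2.0", "is_present": "True"}]
total_heure = round(sum(float(u["time_in_hour"]) for u in users), 2)

print(tpl.render(users=users, total_heure=str(total_heure)))
# Total heures de présence : 5.5 h pour 2 émargements.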
@@ -774,7 +774,7 @@ def Add_Update_Groupe_Inscrit_Membres(diction):
"""
Verification des input acceptés
"""
field_list = ['token', '_id', 'tab_inscriptions_ids']
field_list = ['token', '_id', 'tab_inscriptions_ids', 'delegue_level']
incom_keys = diction.keys()
for val in incom_keys:

@@ -896,6 +896,87 @@ def Add_Update_Groupe_Inscrit_Membres(diction):
return False, " Impossible d'inscrire les personnes au groupe "
"""
Mise à uniquement du 'delegue_level'
"""
def Update_Groupe_Inscrit_Membres_From_Id(diction):
try:
diction = mycommon.strip_dictionary(diction)
"""
Verification des input acceptés
"""
field_list = ['token', 'tab_ids', 'delegue_level']
incom_keys = diction.keys()
for val in incom_keys:
if val not in field_list and val.startswith('my_') is False:
mycommon.myprint(str(
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
return False, " Les informations fournies sont incorrectes"
"""
Verification des champs obligatoires
"""
field_list_obligatoire = ['token', 'tab_ids', 'delegue_level']
for val in field_list_obligatoire:
if val not in diction:
mycommon.myprint(
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
return False, " Les informations fournies sont incorrectes"
"""
Verification de l'identité et autorisation de l'entité qui
appelle cette API
"""
token = ""
if ("token" in diction.keys()):
if diction['token']:
token = diction['token']
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
if (local_status is not True):
return local_status, my_partner
tab_ids = []
tab_ids_ObjectId = []
tab_ids_splited = str(diction['tab_ids']).split(",")
# Controle de validité des inscriptions
for tmp in tab_ids_splited:
if( tmp):
tab_ids.append(str(tmp))
tab_ids_ObjectId.append(ObjectId(str(tmp)))
now = str(datetime.now())
mytoday = datetime.today()
new_data = {}
new_data['delegue_level'] = str(diction['delegue_level'])
new_data['date_update'] = now
new_data['update_by'] = str(my_partner['_id'])
ret_val = MYSY_GV.dbname['groupe_inscription_membre'].update_many(
{'_id': {'$in': tab_ids_ObjectId}, 'valide': '1',
'partner_owner_recid': str(my_partner['recid'])},
{"$set": new_data},
)
return True, "Mise à faite"
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
return False, " Impossible de faire la mise à jour "
"""
Recuperer les membres d'un groupe
"""

@@ -955,6 +1036,10 @@ def Get_Given_Groupe_Membres(diction):
local_new_dict['token'] = diction['token']
local_new_dict['inscrit_id'] = retval['inscription_id']
if( "delegue_level" not in user.keys()):
user['delegue_level'] = ""
local_inscrit_data_status, local_inscrit_data_retval = mycommon.Get_Inscrit_And_Apprenant_Data(
local_new_dict)
if (local_inscrit_data_status is False):
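Update_Groupe_Inscrit_Membres_From_Id above is a bulk update: it splits a comma-separated id list, converts each entry to ObjectId and runs a single update_many with $in. A minimal sketch of that core step, assuming a plain pymongo connection instead of MYSY_GV.dbname (connection string and function name are placeholders):

from datetime import datetime
from bson import ObjectId
from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017/")["demo_db"]  # placeholder connection

def set_delegue_level(tab_ids_csv: str, delegue_level: str, partner_recid: str, updated_by: str) -> int:
    """Bulk-update 'delegue_level' on a comma-separated list of membership ids."""
    ids = [ObjectId(t) for t in tab_ids_csv.split(",") if t]
    result = db["groupe_inscription_membre"].update_many(
        {'_id': {'$in': ids}, 'valide': '1', 'partner_owner_recid': partner_recid},
        {'$set': {'delegue_level': delegue_level,
                  'date_update': str(datetime.now()),
                  'update_by': updated_by}},
    )
    return result.modified_count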
main.py
@@ -8199,6 +8199,21 @@ def Get_Qery_Session_By_Formation_V2():
"""
API/ TBD : Indicateur Qualiopi : Recuperation du nombre d'heure de formation dispensé sur une période
"""
@app.route('/myclass/api/Get_Qery_Session_For_List_Class/', methods=['POST','GET'])
@crossdomain(origin='*')
def Get_Qery_Session_For_List_Class():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### Get_Qery_Session_For_List_Class payload = ",payload)
status, retval = formation_tbd_qries.Get_Qery_Session_For_List_Class(payload)
return jsonify(status=status, message=retval)
"""
API/ TBD / QERY / session par periode cumule
"""

@@ -8718,6 +8733,34 @@ def Get_Qery_Inscription_Group_By_Session():
return jsonify(status=status, message=retval)
"""
API / TBD : pour connaitre le nombre inscription validé pour une liste de formation (indicateur qualiopi)
"""
@app.route('/myclass/api/Get_Qery_Inscription_For_List_Class/', methods=['POST','GET'])
@crossdomain(origin='*')
def Get_Qery_Inscription_For_List_Class():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### Get_Qery_Inscription_For_List_Class payload = ",payload)
status, retval = inscription_tdb_qries.Get_Qery_Inscription_For_List_Class(payload)
return jsonify(status=status, message=retval)
"""
API / TBD : pour connaitre le nombre inscription validé pour une liste de formation (indicateur qualiopi)
avec le taux de success
"""
@app.route('/myclass/api/Get_Qery_Taux_Success_For_List_Class/', methods=['POST','GET'])
@crossdomain(origin='*')
def Get_Qery_Taux_Success_For_List_Class():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### Get_Qery_Taux_Success_For_List_Class payload = ",payload)
status, retval = inscription_tdb_qries.Get_Qery_Taux_Success_For_List_Class(payload)
return jsonify(status=status, message=retval)
"""

@@ -8919,6 +8962,30 @@ def Prepare_and_Send_Convocation_From_Session_By_PDF(token, session_id, courrier
"""
API : pour préprer et générer un zip de fichier PDF pour les attestations de formation en partant d'une session
"""
@app.route('/myclass/api/Prepare_and_Send_Attestation_From_Session_By_PDF/<token>/<session_id>/<tab_attestation_formation_ids>', methods=['POST','GET'])
@crossdomain(origin='*')
def Prepare_and_Send_Attestation_From_Session_By_PDF(token, session_id, tab_attestation_formation_ids):
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
payload = {}
payload['token'] = str(token)
payload['session_id'] = str(session_id)
payload['tab_attestation_formation_ids'] = str(tab_attestation_formation_ids)
print(" ### Prepare_and_Send_Attestation_From_Session_By_PDF : payload = ",str(payload))
localStatus, response= SF.Prepare_and_Send_Attestation_From_Session_By_PDF(payload)
if(localStatus ):
return response
else:
return False
"""
API : pour préprer et générer un zip de fichier PDF pour les convocation en partant d'une session
pour une liste d'inscrit bien definie

@@ -9949,6 +10016,18 @@ def Get_Qery_List_Facture_Previsionnelle_Data_By_Periode():
return jsonify(status=status, message=retval)
"""
API / TBD / QRY : Recuperation du CA pour liste de formation sur une periode
"""
@app.route('/myclass/api/Get_Qery_List_Factures_For_List_Class/', methods=['POST','GET'])
@crossdomain(origin='*')
def Get_Qery_List_Factures_For_List_Class():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### Get_Qery_List_Factures_For_List_Class payload = ",payload)
status, retval = factures_tbd_qries.Get_Qery_List_Factures_For_List_Class(payload)
return jsonify(status=status, message=retval)

@@ -11165,6 +11244,24 @@ def Add_Update_Groupe_Inscrit_Membres():
"""
API pour mettre à jour le champ : 'delegue_level'
"""
@app.route('/myclass/api/Update_Groupe_Inscrit_Membres_From_Id/', methods=['POST','GET'])
@crossdomain(origin='*')
def Update_Groupe_Inscrit_Membres_From_Id():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### Update_Groupe_Inscrit_Membres_From_Id payload = ",payload)
status, retval = groupe_inscrit_mgt.Update_Groupe_Inscrit_Membres_From_Id(payload)
return jsonify(status=status, message=retval)
"""
API pour recuperer la liste des membre d'un groupe d'inscrits
"""

@@ -13524,6 +13621,38 @@ def ENT_Enable_Apprenant_Account():
return jsonify(status=status, message=retval)
"""
API pour archiver une liste d'apprenants
"""
@app.route('/myclass/api/archive_apprenant/', methods=['POST','GET'])
@crossdomain(origin='*')
def archive_apprenant():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### archive_apprenant payload = ",payload)
status, retval = apprenant_mgt.archive_apprenant(payload)
return jsonify(status=status, message=retval)
"""
API pour desarchiver une liste d'apprenants
"""
@app.route('/myclass/api/unarchive_apprenant/', methods=['POST','GET'])
@crossdomain(origin='*')
def unarchive_apprenant():
# On recupere le corps (payload) de la requete
payload = mycommon.strip_dictionary (request.form.to_dict())
print(" ### unarchive_apprenant payload = ",payload)
status, retval = apprenant_mgt.unarchive_apprenant(payload)
return jsonify(status=status, message=retval)
"""
API de creation d'un graphique et sauvegade en PNG
"""

@@ -13663,7 +13792,6 @@ def Get_List_Message_To_Mail_Queue():
if __name__ == '__main__':
print(" debut api")
print(" sdqsdqs app.config['MAX_CONTENT_LENGTH'] = ", app.config['MAX_CONTENT_LENGTH'])
context = SSL.Context(SSL.SSLv23_METHOD)
if (MYSY_GV.MYSY_ENV == "PROD"):
mycommon.myprint(" ++++ ENVIRONNEMENT PRODUCTION ++++ ")
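The new routes above all follow the same pattern: form-encoded POST, token in the payload, JSON answer of the form {"status": ..., "message": ...}. A hedged client-side sketch for the archive endpoints (base URL, port and token are placeholders; the route paths come from the diff):

import requests

BASE = "https://localhost:5001/myclass/api"  # assumed host/port, self-signed dev certificate

payload = {"token": "<user_token>", "tab_apprenant_ids": "<id1>,<id2>"}

# Archive a list of apprenants, then undo it.
r = requests.post(f"{BASE}/archive_apprenant/", data=payload, verify=False)
print(r.json())  # expected shape: {"status": true, "message": " Apprenants(s) archivé(s) "}

r = requests.post(f"{BASE}/unarchive_apprenant/", data=payload, verify=False)
print(r.json()["status"])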
@@ -1182,7 +1182,6 @@ def Editic_Log_History_Action_From_courrier_template_type_document_ref_interne(m
'locked':'0',
'courrier_template_type_document_ref_interne':str(courrier_template_type_document_ref_interne)}
print(" #### qry = ,", qry)
courrier_template_tracking_data = MYSY_GV.dbname['courrier_template_tracking'].find_one({'partner_owner_recid':str(my_partner['recid']),
'valide':'1',
@@ -358,6 +358,22 @@ def Invoice_Partner_Order(diction):
partner_invoice_line_data_detail['order_line_inscription_type_apprenant'] = str(new_invoice_data_line['order_line_type_apprenant'])
partner_invoice_line_data_detail['order_line_inscription_modefinancement'] = ""
partner_invoice_line_data_detail['order_line_formation'] = str(new_invoice_data_line['order_line_formation'])
"""
27/07/25 : aller recuperer le 'class_id'
"""
local_class_id = ""
if( "order_line_formation" in new_invoice_data_line.keys() and new_invoice_data_line['order_line_formation']):
class_data = MYSY_GV.dbname['myclass'].find_one({'internal_url':str(new_invoice_data_line['order_line_formation']),
'partner_owner_recid':str(my_partner['recid'])}, {'_id':1})
if( class_data and '_id' in class_data.keys() ):
local_class_id = str(class_data['_id'])
partner_invoice_line_data_detail['order_line_class_id'] = local_class_id
partner_invoice_line_data_detail['order_line_prix_unitaire'] = str(new_invoice_data_line['order_line_prix_unitaire'])
partner_invoice_line_data_detail['order_line_montant_hors_taxes'] = str(new_invoice_data_line['order_line_montant_hors_taxes'])
partner_invoice_line_data_detail['order_line_invoiced_amount'] = str(new_invoice_data_line['order_line_montant_hors_taxes'])

@@ -516,6 +532,13 @@ def Get_Given_Partner_Invoice(diction):
user['id'] = str(val_tmp)
val_tmp = val_tmp + 1
if ("invoice_header_origin" not in user.keys()):
user['invoice_header_origin'] = ""
if ("credit_note_ref" not in user.keys()):
user['credit_note_ref'] = ""
# Si le champ 'order_header_condition_paiement_id' alors on va chercher le code de la condition de paiement
paiement_ction_code = ""
if ('order_header_condition_paiement_id' in retval.keys() and retval[

@@ -640,7 +663,7 @@ def Get_Given_Partner_Invoice_Lines(diction):
'localField': 'order_line_formation',
'foreignField': 'internal_url',
'pipeline': [{'$match': {'$and': [ filt_class_partner_recid]}},
{'$project': {'title': 1, 'domaine': 1,
{'$project': {'title': 1, 'domaine': 1, '_id':1,
'duration': 1,
'duration_unit': 1,'external_code':1}}],
'as': 'myclass_collection'

@@ -656,6 +679,7 @@ def Get_Given_Partner_Invoice_Lines(diction):
val_tmp = val_tmp + 1
user['_id'] = retval['_id']
user['order_line_formation'] = retval['order_line_formation']
user['order_line_qty'] = retval['order_line_qty']
user['order_line_prix_unitaire'] = retval['order_line_prix_unitaire']
user['invoice_header_id'] = retval['invoice_header_id']

@@ -1353,6 +1377,9 @@ def Get_List_Partner_Invoice_no_filter(diction):
if( "credit_note_ref" not in New_retVal.keys() ):
user['credit_note_ref'] = ""
if ("invoice_header_origin" not in New_retVal.keys()):
user['invoice_header_origin'] = ""
RetObject.append(mycommon.JSONEncoder().encode(user))

@@ -1448,7 +1475,7 @@ def Get_List_Partner_Invoice_with_filter(diction):
'partner_owner_recid': str(my_partner['recid']), 'valide': '1', 'locked': '0'}
# Recuperation des '_id' des formation dont le nom match en regexp
print(" ### sub_filt_formation_external_code = ", sub_filt_formation_external_code)
#print(" ### sub_filt_formation_external_code = ", sub_filt_formation_external_code)
for Lists_partner_formation_Data in MYSY_GV.dbname['myclass'].find(sub_filt_formation_external_code, {'internal_url': 1}):
Lists_partner_formation_internal_url.append(str(Lists_partner_formation_Data['internal_url']))

@@ -1594,6 +1621,9 @@ def Get_List_Partner_Invoice_with_filter(diction):
if ("credit_note_ref" not in New_retVal.keys()):
user['credit_note_ref'] = ""
if ("invoice_header_origin" not in New_retVal.keys()):
user['invoice_header_origin'] = ""
if( filter_date_debut and filter_date_fin ):
if ( datetime.strptime(str(New_retVal['invoice_date'])[0:10], '%d/%m/%Y') >= datetime.strptime(str(filter_date_debut)[0:10], '%d/%m/%Y') and

@@ -2140,7 +2170,7 @@ def GerneratePDF_Partner_Invoice(diction):
'localField': 'order_line_formation',
'foreignField': 'internal_url',
'pipeline': [{'$match': {'$and': [filt_class_partner_recid]}},
{'$project': {'title': 1, 'domaine': 1,
{'$project': {'title': 1, 'domaine': 1, '_id':1,
'duration': 1,
'duration_unit': 1,
'external_code':1}}],

@@ -2158,6 +2188,9 @@ def GerneratePDF_Partner_Invoice(diction):
val_tmp = val_tmp + 1
user['_id'] = retval['_id']
user['order_line_formation'] = retval['order_line_formation']
user['order_line_class_id'] = str(retval['myclass_collection'][0]['_id'])
user['order_line_qty'] = retval['order_line_qty']
user['order_line_prix_unitaire'] = retval['order_line_prix_unitaire']
user['order_header_id'] = retval['order_header_id']

@@ -2580,7 +2613,7 @@ def Send_Partner_Invoice_By_Email(tab_files, Folder, diction):
'localField': 'order_line_formation',
'foreignField': 'internal_url',
'pipeline': [{'$match': {'$and': [filt_class_partner_recid]}},
{'$project': {'title': 1, 'domaine': 1,
{'$project': {'title': 1, 'domaine': 1, '_id':1,
'duration': 1,
'duration_unit': 1,
'external_code':1}}],

@@ -2598,6 +2631,9 @@ def Send_Partner_Invoice_By_Email(tab_files, Folder, diction):
val_tmp = val_tmp + 1
user['_id'] = retval['_id']
user['order_line_formation'] = retval['order_line_formation']
user['order_line_class_id'] = str(retval['myclass_collection'][0]['_id'])
user['order_line_qty'] = retval['order_line_qty']
user['order_line_prix_unitaire'] = retval['order_line_prix_unitaire']
user['order_header_id'] = retval['order_header_id']
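The invoice hunks above all make the same change to the $lookup sub-pipeline: the projection now includes '_id', so the joined class id can be exposed as order_line_class_id without a second query. A sketch of the resulting lookup stage as a plain dict (the partner recid is a placeholder; field and collection names follow the diff):

# One stage of the invoice-line aggregate after this change.
invoice_lines_lookup = {
    '$lookup': {
        'from': 'myclass',
        'localField': 'order_line_formation',
        'foreignField': 'internal_url',
        'pipeline': [
            {'$match': {'$and': [{'partner_owner_recid': '<partner_recid>'}]}},
            {'$project': {'title': 1, 'domaine': 1, '_id': 1,
                          'duration': 1, 'duration_unit': 1, 'external_code': 1}},
        ],
        'as': 'myclass_collection',
    }
}
# The caller then reads str(retval['myclass_collection'][0]['_id']) for order_line_class_id.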
prj_common.py
@@ -4806,7 +4806,7 @@ def Get_Dictionnary_data_For_Template(diction):
list_myclass_data = []
tab_myclass_id = diction['list_class_id']
myclass_liste_champ = ['certif', 'cpf', 'metier', 'objectif', 'title', 'duration', 'duration_unit']
myclass_liste_champ = ['certif', 'cpf', 'metier', 'objectif', 'title', 'duration', 'duration_unit', ]
for myclass_data in MYSY_GV.dbname['myclass'].find({'_id': {'$in': tab_myclass_id, },
'valide': '1',

@@ -5145,7 +5145,7 @@ def Get_Partner_Hour_Per_Day(partner_owner_recid):
hour_per_day_value = "0"
hour_per_day = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"partner_jour_heure",
hour_per_day = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"nb_heure_par_jour",
'valide':'1',
'locked':'0',
'partner_owner_recid':str(partner_owner_recid)})

@@ -5156,7 +5156,7 @@ def Get_Partner_Hour_Per_Day(partner_owner_recid):
" Utilisation de la convertion par defaut ")
hour_per_day = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "partner_jour_heure",
hour_per_day = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "nb_heure_par_jour",
'valide': '1',
'locked': '0',
'partner_owner_recid': "default"})

@@ -5181,6 +5181,140 @@ def Get_Partner_Hour_Per_Day(partner_owner_recid):
return False
"""
Recuperation du nombre d'heure par semaine depuis la confif du partner
"""
def Get_Partner_Hour_Per_Week(partner_owner_recid):
try:
hour_per_week_value = "0"
hour_per_week = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"nb_heure_par_semaine",
'valide':'1',
'locked':'0',
'partner_owner_recid':str(partner_owner_recid)})
if(hour_per_week is None ):
myprint(str(
inspect.stack()[0][3]) + " WARNING : Le partner (partner_owner_recid = "+str(partner_owner_recid)+" ) n'a pas de configuration pour la convertion semaine / heure."
" Utilisation de la convertion par defaut ")
hour_per_week = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "nb_heure_par_semaine",
'valide': '1',
'locked': '0',
'partner_owner_recid': "default"})
if (hour_per_week is None):
myprint(str(
inspect.stack()[0][3]) + " WARNING : Auncune configuration par defaut pour la convertion semaine / heure."
" Fixation unilaterale à 1 semaine = 35 h ")
hour_per_week_value = "35"
else:
hour_per_week_value = str(hour_per_week['config_value'])
else:
hour_per_week_value = str(hour_per_week['config_value'])
return hour_per_week_value
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
return False
"""
Recuperation du nombre d'heure par mois depuis la confif du partner
"""
def Get_Partner_Hour_Per_Month(partner_owner_recid):
try:
hour_per_month_value = "0"
hour_per_month = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"nb_heure_par_mois",
'valide':'1',
'locked':'0',
'partner_owner_recid':str(partner_owner_recid)})
if(hour_per_month is None ):
myprint(str(
inspect.stack()[0][3]) + " WARNING : Le partner (partner_owner_recid = "+str(partner_owner_recid)+" ) n'a pas de configuration pour la convertion mois / heure."
" Utilisation de la convertion par defaut ")
hour_per_month = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "nb_heure_par_mois",
'valide': '1',
'locked': '0',
'partner_owner_recid': "default"})
if (hour_per_month is None):
myprint(str(
inspect.stack()[0][3]) + " WARNING : Auncune configuration par defaut pour la convertion semaine / heure."
" Fixation unilaterale à 1 mois = 140 h ")
hour_per_month_value = "152"
else:
hour_per_month_value = str(hour_per_month['config_value'])
else:
hour_per_month_value = str(hour_per_month['config_value'])
return hour_per_month_value
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
return False
"""
Recuperation du nombre d'heure par an depuis la confif du partner
"""
def Get_Partner_Hour_Per_Year(partner_owner_recid):
try:
hour_per_year_value = "0"
hour_per_year = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"nb_heure_par_annee",
'valide':'1',
'locked':'0',
'partner_owner_recid':str(partner_owner_recid)})
if(hour_per_year is None ):
myprint(str(
inspect.stack()[0][3]) + " WARNING : Le partner (partner_owner_recid = "+str(partner_owner_recid)+" ) n'a pas de configuration pour la convertion mois / heure."
" Utilisation de la convertion par defaut ")
hour_per_year = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "nb_heure_par_annee",
'valide': '1',
'locked': '0',
'partner_owner_recid': "default"})
if (hour_per_year is None):
myprint(str(
inspect.stack()[0][3]) + " WARNING : Auncune configuration par defaut pour la convertion semaine / heure."
" Fixation unilaterale à 1 mois = 140 h ")
hour_per_year_value = "1820"
else:
hour_per_year_value = str(hour_per_year['config_value'])
else:
hour_per_year_value = str(hour_per_year['config_value'])
return hour_per_year_value
except Exception as e:
exc_type, exc_obj, exc_tb = sys.exc_info()
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
return False
"""
Recuperation de la devise du partner dans jour depuis la confif du partner
"""
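The Get_Partner_Hour_Per_Week/Month/Year helpers above all use the same three-level fallback: the partner's own base_partner_setup row, then the 'default' row, then a hard-coded value. A hedged sketch of that pattern factored into one function (the pymongo connection is a placeholder; the collection and field names follow the diff):

from pymongo import MongoClient

db = MongoClient("mongodb://localhost:27017/")["demo_db"]  # placeholder connection

def get_hours_config(config_name: str, partner_owner_recid: str, hard_default: str) -> str:
    """Partner-specific row, then the 'default' row, then a hard-coded fallback."""
    base = {'config_name': config_name, 'valide': '1', 'locked': '0'}
    row = db['base_partner_setup'].find_one({**base, 'partner_owner_recid': partner_owner_recid})
    if row is None:
        row = db['base_partner_setup'].find_one({**base, 'partner_owner_recid': 'default'})
    if row is None:
        return hard_default
    return str(row['config_value'])

# get_hours_config("nb_heure_par_semaine", "<recid>", "35")  -> weekly hours as a string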
@@ -328,6 +328,18 @@ def PROD_Add_Partner_Admin_Ressource_Humaine_No_Toke(diction):
####
"""
Pour distinquer les employé des autres ressource humaines,
on va utiliser le champs 'type_rh'
Les type de ressources humaines sont :
1 ==> ressouces non employé, comme les parents d'eleves, les tuteures, etc
'vide' ou rien ==> Ressource internes (enseignant, vacataire, etc)
TYPE_RH = ['', '1']
"""
def Add_Ressource_Humaine(diction):
try:

@@ -340,7 +352,7 @@ def Add_Ressource_Humaine(diction):
'adr_ville', 'adr_pays', 'profil', 'telephone_mobile', 'linkedin',
'facebook', 'twitter', 'prenom', 'fonction', 'civilite', 'superieur_hierarchie_id', 'ismanager',
'groupe_prix_achat_id', 'prix_achat', 'type_contrat', 'gategorie', 'date_naissance', 'competence',
'diffusion_mail'
'diffusion_mail', 'type_rh'
]
incom_keys = diction.keys()

@@ -473,6 +485,21 @@ def Add_Ressource_Humaine(diction):
gategorie = diction['gategorie']
data['gategorie'] = gategorie
type_rh = ""
if ("type_rh" in diction.keys()):
if diction['type_rh']:
type_rh = diction['type_rh']
type_rh = str(mycommon.tryInt(str(type_rh)))
if( type_rh not in MYSY_GV.TYPE_RH):
mycommon.myprint(
str(inspect.stack()[0][
3]) + " Le type RH est invalide. Les valeurs acceptées :"+str(MYSY_GV.TYPE_RH))
return False, " Le type RH est invalide. Les valeurs acceptées :"+str(MYSY_GV.TYPE_RH)
data['type_rh'] = type_rh
diffusion_mail = ""
if ("diffusion_mail" in diction.keys()):
if (diction['diffusion_mail']):

@@ -696,7 +723,9 @@ def Add_Ressource_Humaine(diction):
"""
Mise à jour d'une personne en se basant sur sont _id
Mise à jour d'une personne en se basant sur sont _id.
important :
on ne modifie pas le TYPE_RH
"""
def Update_Ressource_Humaine(diction):
try:

@@ -1289,6 +1318,9 @@ def Get_List_Ressource_Humaine(diction):
if( str(user['civilite']) == "" ):
user['civilite'] = "neutre"
if( "type_rh" not in user.keys() ):
user['type_rh'] = ""
"""
Si l'employé a un superieur_hierarchie_id, alors on va aller récupérer son nom et prenom.
ce superieur est une ressource humaime

@@ -1418,6 +1450,8 @@ def Get_List_Profil_Ressource_Humaine(diction):
user['id'] = str(val_tmp)
val_tmp = val_tmp + 1
if ("type_rh" not in user.keys()):
user['type_rh'] = ""
RetObject.append(mycommon.JSONEncoder().encode(user))

@@ -1443,7 +1477,8 @@ def Get_List_Ressource_Humaine_with_filter(diction):
"""
Verification des input acceptés
"""
field_list = ['token', 'nom', 'email', 'formation', 'session']
field_list = ['token', 'nom', 'email', 'formation',
'session', 'type_rh']
incom_keys = diction.keys()
for val in incom_keys:

@@ -1487,6 +1522,11 @@ def Get_List_Ressource_Humaine_with_filter(diction):
if ("nom" in diction.keys()):
filt_nom = {'nom': {'$regex': str(diction['nom']), "$options": "i"}}
filt_type_rh = {}
if ("type_rh" in diction.keys()):
filt_type_rh = {'type_rh': str(diction['type_rh'])}
filt_email = {}
if ("email" in diction.keys()):
filt_email = {

@@ -1548,7 +1588,8 @@ def Get_List_Ressource_Humaine_with_filter(diction):
data_cle['locked'] = "0"
data_cle['valide'] = "1"
find_qry = {'$and':[{'partner_recid': str(my_partner['recid']), 'valide':'1', 'locked':'0', 'is_partner_admin_account':'0' }, filt_nom, filt_email] }
find_qry = {'$and':[{'partner_recid': str(my_partner['recid']), 'valide':'1', 'locked':'0', 'is_partner_admin_account':'0' }, filt_nom, filt_email,
filt_type_rh] }
new_myquery = [{'$match': find_qry},

@@ -1594,6 +1635,13 @@ def Get_List_Ressource_Humaine_with_filter(diction):
user['nom'] = New_retVal['nom']
user['email'] = New_retVal['email']
if ("type_rh" in New_retVal.keys()):
user['type_rh'] = New_retVal['type_rh']
else:
user['type_rh'] = ""
if ("prenom" in New_retVal.keys()):
user['prenom'] = New_retVal['prenom']
else:

@@ -1786,7 +1834,7 @@ def Get_List_Ressource_Humaine_no_filter(diction):
"""
Verification des input acceptés
"""
field_list = ['token', ]
field_list = ['token', 'type_rh']
incom_keys = diction.keys()
for val in incom_keys:

@@ -1837,8 +1885,14 @@ def Get_List_Ressource_Humaine_no_filter(diction):
data_cle['locked'] = "0"
data_cle['valide'] = "1"
find_qry = None
find_qry = {'partner_recid': str(my_partner['recid']), 'valide':'1', 'locked':'0' }
if( "type_rh" in diction.keys()):
find_qry = {'partner_recid': str(my_partner['recid']), 'valide':'1', 'locked':'0',
'type_rh':diction['type_rh']}
else:
find_qry = {'partner_recid': str(my_partner['recid']), 'valide': '1', 'locked': '0',
'type_rh':{'$nin':['1']}}
RetObject = []

@@ -1847,6 +1901,12 @@ def Get_List_Ressource_Humaine_no_filter(diction):
for retval in MYSY_GV.dbname['ressource_humaine'].find(find_qry).sort([("_id", pymongo.DESCENDING), ]):
user = retval
user['id'] = str(val_tmp)
if ("type_rh" in retval.keys()):
user['type_rh'] = retval['type_rh']
else:
user['type_rh'] = ""
if ("prenom" in retval.keys()):
user['prenom'] = retval['prenom']
else:

@@ -1992,6 +2052,9 @@ def Get_Given_Ressource_Humaine(diction):
user['id'] = str(val_tmp)
val_tmp = val_tmp + 1
if ("type_rh" not in retval.keys() ):
user['type_rh'] = "neutre"
if ("civilite" not in retval.keys() ):
user['civilite'] = "neutre"

@@ -2525,7 +2588,8 @@ def Add_Ressource_Humaine_mass(file=None, Folder=None, diction=None):
# Verification que les noms des colonne sont bien corrects"
'''
field_list = ['nom', 'prenom', 'email', 'telephone_mobile', 'telephone', 'profil', 'ismanager', 'superieur_hierarchie_email', 'civilite',
'adresse', 'code_postal', 'ville', 'pays', 'facebook', 'fonction', 'linkedin', 'twitter', 'date_naissance']
'adresse', 'code_postal', 'ville', 'pays', 'facebook', 'fonction', 'linkedin', 'twitter',
'date_naissance', 'type_rh']
# Controle du nombre de lignes dans le fichier.

@@ -2636,6 +2700,14 @@ def Add_Ressource_Humaine_mass(file=None, Folder=None, diction=None):
telephone_mobile = str(df['telephone_mobile'].values[n])
mydata['telephone_mobile'] = telephone_mobile
type_rh = ""
if ("type_rh" in df.keys()):
if (str(df['type_rh'].values[n])):
type_rh = str(df['type_rh'].values[n])
type_rh = str(mycommon.tryInt(str(type_rh)))
mydata['type_rh'] = type_rh
date_naissance = ""
if ("date_naissance" in df.keys()):
if (str(df['date_naissance'].values[n])):

@@ -2821,7 +2893,8 @@ def Controle_Add_Ressource_Humaine_mass(saved_file=None, Folder=None, diction=No
# Verification que les noms des colonne sont bien corrects"
'''
field_list = ['nom', 'prenom', 'email', 'telephone_mobile', 'telephone', 'profil', 'ismanager', 'superieur_hierarchie_email',
'civilite', 'adresse', 'code_postal', 'ville', 'pays', 'facebook', 'fonction', 'linkedin', 'twitter', 'date_naissance']
'civilite', 'adresse', 'code_postal', 'ville', 'pays', 'facebook', 'fonction', 'linkedin', 'twitter',
'date_naissance', 'type_rh']
# Controle du nombre de lignes dans le fichier.

@@ -2922,6 +2995,21 @@ def Controle_Add_Ressource_Humaine_mass(saved_file=None, Folder=None, diction=No
mydata['token'] = str(my_token)
type_rh = ""
if ("type_rh" in df.keys()):
if (str(df['type_rh'].values[n])):
type_rh = str(df['type_rh'].values[n])
type_rh = str(mycommon.tryInt(str(type_rh)))
if( type_rh not in MYSY_GV.TYPE_RH):
mycommon.myprint(
str(inspect.stack()[0][3]) + " Le champ type_rh " + str(
df['type_rh'].values[
n]) + " est invalide. Les valeurs autorisées : "+str(MYSY_GV.TYPE_RH))
return False, " Le champ type_rh -" + str(df['type_rh'].values[n]) + "- est invalide. Les valeurs autorisées : "++str(MYSY_GV.TYPE_RH)
telephone_mobile = ""
if ("telephone_mobile" in df.keys()):
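The type_rh hunks above state the convention in the docstring (TYPE_RH = ['', '1'], '' for internal staff, '1' for non-employees) and validate every incoming value against it, both in the API and in the mass-import path. A hedged standalone sketch of that normalisation/validation (assuming mycommon.tryInt behaves roughly like an int coercion with a fallback; the helper name is illustrative):

TYPE_RH = ['', '1']  # convention from the docstring in this diff

def normalize_type_rh(raw_value) -> str:
    """Coerce an incoming type_rh to the stored convention and validate it."""
    type_rh = str(raw_value).strip() if raw_value not in (None, "") else ""
    if type_rh:
        type_rh = str(int(float(type_rh)))  # '1.0' from an Excel import becomes '1'
    if type_rh not in TYPE_RH:
        raise ValueError(f"Le type RH est invalide. Valeurs acceptées : {TYPE_RH}")
    return type_rh

print(repr(normalize_type_rh("")))   # '' -> ressource interne
print(repr(normalize_type_rh("1")))  # '1' -> ressource non employée (parent, tuteur, ...)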
@@ -1806,9 +1806,7 @@ def Send_Survey_TabIds(diction):
'status': '1',
'partner_owner_recid': str(my_partner['recid'])}
#print(" ### qry 010101 = ", qry)
#print(" #### str(local_survey_retval['related_collection']) = ", str(local_survey_retval['related_collection']))
partner_owner_recid = "partner_owner_recid"
@@ -879,7 +879,7 @@ def test_web_service(diction):
try:
diction = mycommon.strip_dictionary(diction)
C1_pipe_qry = ([
"""C1_pipe_qry = ([
{'$match': {'$and': [ {'valide':'1'}, {'automatic_traitement.actif':'1'},
]}

@@ -888,9 +888,28 @@ def test_web_service(diction):
for retval in MYSY_GV.dbname['session_formation'].aggregate(C1_pipe_qry) :
print( " ### retval == ", retval)
"""
cpt = 0
for retval in MYSY_GV.dbname['session_formation'].find({'date_by':'automatic'}):
print(" ### retval == ", retval)
if( "class_internal_url" in retval.keys() and retval['class_internal_url']):
class_data = MYSY_GV.dbname['myclass'].find_one({'internal_url':str(retval['class_internal_url'])})
return True, "Test WebService OK Diction "
if( class_data ):
ret_val2 = MYSY_GV.dbname['session_formation'].find_one_and_update(
{'_id': ObjectId(str(retval['_id'])), 'partner_owner_recid':str(retval['partner_owner_recid'])},
{"$set": {'class_id':str(class_data['_id']),
'date_update':str(datetime.now().strftime("%d/%m/%Y")),
'date_by':'automatic'}},
return_document=ReturnDocument.AFTER,
upsert=False,
)
cpt = cpt +1
print("### changement fait = ", str(cpt))
return True, "Test WebService OK Diction : "+str(cpt)
except Exception as e:
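The reworked test_web_service above is effectively a one-off backfill: for each session_formation flagged date_by = 'automatic', it resolves class_internal_url to the myclass document and stores its _id as class_id. A hedged sketch of that loop with a plain pymongo connection (connection string and function name are placeholders; collection and field names follow the diff):

from datetime import datetime
from pymongo import MongoClient, ReturnDocument

db = MongoClient("mongodb://localhost:27017/")["demo_db"]  # placeholder connection

def backfill_session_class_id() -> int:
    """Resolve class_internal_url to the myclass _id and store it on the session."""
    fixed = 0
    for session in db['session_formation'].find({'date_by': 'automatic'}):
        internal_url = session.get('class_internal_url')
        if not internal_url:
            continue
        class_data = db['myclass'].find_one({'internal_url': internal_url})
        if not class_data:
            continue
        db['session_formation'].find_one_and_update(
            {'_id': session['_id']},
            {'$set': {'class_id': str(class_data['_id']),
                      'date_update': datetime.now().strftime("%d/%m/%Y")}},
            return_document=ReturnDocument.AFTER,
            upsert=False,
        )
        fixed += 1
    return fixed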