Commit cccb5b8540 (parent e432dcfd84), commit message: "09/01/24 - 19h30"

.idea/workspace.xml
@@ -1,10 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
|
||||
<project version="4">
|
||||
<component name="ChangeListManager">
|
||||
<list default="true" id="c6d0259a-16e1-410d-91a1-830590ee2a08" name="Changes" comment="06/01/24 - 20h30">
|
||||
<list default="true" id="c6d0259a-16e1-410d-91a1-830590ee2a08" name="Changes" comment="08/01/24 - 22h30">
|
||||
<change beforePath="$PROJECT_DIR$/.idea/workspace.xml" beforeDir="false" afterPath="$PROJECT_DIR$/.idea/workspace.xml" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Inscription_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/Inscription_mgt.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/Log/log_file.log" beforeDir="false" afterPath="$PROJECT_DIR$/Log/log_file.log" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/ressources_humaines.py" beforeDir="false" afterPath="$PROJECT_DIR$/ressources_humaines.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/apprenant_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/apprenant_mgt.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/class_mgt.py" beforeDir="false" afterPath="$PROJECT_DIR$/class_mgt.py" afterDir="false" />
|
||||
<change beforePath="$PROJECT_DIR$/main.py" beforeDir="false" afterPath="$PROJECT_DIR$/main.py" afterDir="false" />
|
||||
</list>
|
||||
<option name="SHOW_DIALOG" value="false" />
|
||||
<option name="HIGHLIGHT_CONFLICTS" value="true" />
@@ -74,13 +77,6 @@
<option name="presentableId" value="Default" />
|
||||
<updated>1680804787304</updated>
|
||||
</task>
|
||||
<task id="LOCAL-00142" summary="sdds">
|
||||
<created>1699654387810</created>
|
||||
<option name="number" value="00142" />
|
||||
<option name="presentableId" value="LOCAL-00142" />
|
||||
<option name="project" value="LOCAL" />
|
||||
<updated>1699654387811</updated>
|
||||
</task>
|
||||
<task id="LOCAL-00143" summary="11/11/2023 - 17h">
|
||||
<created>1699718029739</created>
|
||||
<option name="number" value="00143" />
@@ -417,7 +413,14 @@
<option name="project" value="LOCAL" />
|
||||
<updated>1704570649680</updated>
|
||||
</task>
|
||||
<option name="localTasksCounter" value="191" />
|
||||
<task id="LOCAL-00191" summary="08/01/24 - 22h30">
|
||||
<created>1704750040591</created>
|
||||
<option name="number" value="00191" />
|
||||
<option name="presentableId" value="LOCAL-00191" />
|
||||
<option name="project" value="LOCAL" />
|
||||
<updated>1704750040592</updated>
|
||||
</task>
|
||||
<option name="localTasksCounter" value="192" />
|
||||
<servers />
|
||||
</component>
|
||||
<component name="Vcs.Log.Tabs.Properties">
@@ -432,7 +435,6 @@
</option>
|
||||
</component>
|
||||
<component name="VcsManagerConfiguration">
|
||||
<MESSAGE value="07/12/2023 - 21h30" />
|
||||
<MESSAGE value="08/12/2023 - 22h30" />
|
||||
<MESSAGE value="11/12/2023 - 20h30" />
|
||||
<MESSAGE value="12/12/2023 - 22h30" />
@@ -457,6 +459,7 @@
<MESSAGE value="5555" />
|
||||
<MESSAGE value="05/01/24 - 20h30" />
|
||||
<MESSAGE value="06/01/24 - 20h30" />
|
||||
<option name="LAST_COMMIT_MESSAGE" value="06/01/24 - 20h30" />
|
||||
<MESSAGE value="08/01/24 - 22h30" />
|
||||
<option name="LAST_COMMIT_MESSAGE" value="08/01/24 - 22h30" />
|
||||
</component>
|
||||
</project>

Inscription_mgt.py
@@ -2,6 +2,7 @@
Ce fichier permets de créer les inscription des stagiaires à une formation
|
||||
"""
|
||||
import smtplib
|
||||
import xlsxwriter
|
||||
from email import encoders
|
||||
from email.mime.base import MIMEBase
|
||||
from email.mime.multipart import MIMEMultipart
@@ -6252,7 +6253,7 @@ def GetAttendeeDetail_perSession_from_line_id(diction):
},
|
||||
|
||||
],
|
||||
'as': 'inscription_collectoin'
|
||||
'as': 'inscription_collection'
|
||||
}
|
||||
},
|
||||
{'$lookup': {'from': 'apprenant', 'let': {'apprenant_id': '$apprenant_id',
@@ -6285,7 +6286,7 @@ def GetAttendeeDetail_perSession_from_line_id(diction):
mycommon.myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire (2)")
|
||||
return False, " Les informations d'identifier la session (2) "
|
||||
|
||||
if ('inscription_collectoin' in local_Insc_retval.keys() and len(local_Insc_retval['inscription_collectoin']) > 0):
|
||||
if ('inscription_collection' in local_Insc_retval.keys() and len(local_Insc_retval['inscription_collection']) > 0):
|
||||
my_retrun_dict = {}
|
||||
|
||||
if ("session_id" in local_Insc_retval.keys()):
@@ -6297,14 +6298,14 @@ def GetAttendeeDetail_perSession_from_line_id(diction):
my_retrun_dict['apprenant_id'] = ""
|
||||
|
||||
|
||||
if ("code_session" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||||
my_retrun_dict['code_session'] = str(local_Insc_retval['inscription_collectoin'][0]['code_session'])
|
||||
if ("code_session" in local_Insc_retval['inscription_collection'][0].keys()):
|
||||
my_retrun_dict['code_session'] = str(local_Insc_retval['inscription_collection'][0]['code_session'])
|
||||
|
||||
if ("date_debut" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||||
my_retrun_dict['date_du'] = str(local_Insc_retval['inscription_collectoin'][0]['date_debut'])[0:10]
|
||||
if ("date_debut" in local_Insc_retval['inscription_collection'][0].keys()):
|
||||
my_retrun_dict['date_du'] = str(local_Insc_retval['inscription_collection'][0]['date_debut'])[0:10]
|
||||
|
||||
if ("date_fin" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||||
my_retrun_dict['date_au'] = str(local_Insc_retval['inscription_collectoin'][0]['date_fin'])[0:10]
|
||||
if ("date_fin" in local_Insc_retval['inscription_collection'][0].keys()):
|
||||
my_retrun_dict['date_au'] = str(local_Insc_retval['inscription_collection'][0]['date_fin'])[0:10]
|
||||
|
||||
if ("ville" in local_Insc_retval.keys()):
|
||||
my_retrun_dict['ville'] = local_Insc_retval['ville']
@@ -9122,3 +9123,307 @@ def Get_Statgiaire_Communication_Contact(diction):
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible de récupérer la liste des contacts du stagiaire "
|
||||
|
||||
"""
|
||||
Fonction permet d'exporter des inscription dans un fichier excel
|
||||
"""
|
||||
def Export_Inscription_To_Excel_From_from_List_Id(diction):
|
||||
try:
|
||||
diction = mycommon.strip_dictionary(diction)
|
||||
|
||||
"""
|
||||
Verification des input acceptés
|
||||
"""
|
||||
field_list = ['token', 'tab_id']
|
||||
|
||||
incom_keys = diction.keys()
|
||||
for val in incom_keys:
|
||||
if val not in field_list and val.startswith('my_') is False:
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification des champs obligatoires
|
||||
"""
|
||||
field_list_obligatoire = ['token', 'tab_id']
|
||||
for val in field_list_obligatoire:
|
||||
if val not in diction:
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification de l'identité et autorisation de l'entité qui
|
||||
appelle cette API
|
||||
"""
|
||||
token = ""
|
||||
if ("token" in diction.keys()):
|
||||
if diction['token']:
|
||||
token = diction['token']
|
||||
|
||||
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
||||
if (local_status is not True):
|
||||
return local_status, my_partner
|
||||
|
||||
tab_id = []
|
||||
tab_id_tmp = str(diction['tab_id']).split(",")
|
||||
for val in tab_id_tmp:
|
||||
tab_id.append(ObjectId(str(val)))
|
||||
|
||||
qery_match = {'_id': {'$in': tab_id}, 'partner_owner_recid': str(my_partner['recid']), 'valide': '1',
|
||||
'locked': '0'}
|
||||
|
||||
print(" #### qry = ", qery_match)
|
||||
list_class_datas = MYSY_GV.dbname['myclass'].find({'_id': {'$in': tab_id},
|
||||
'partner_owner_recid': str(my_partner['recid']),
|
||||
'valide': '1', 'locked': '0'}, {'_id': 0,
|
||||
'valide': 0, 'locked': 0})
|
||||
|
||||
pipe_qry = ([
|
||||
{'$match': qery_match},
|
||||
{'$project': {'_id': 0, 'valide': 0, 'locked': 0, }},
|
||||
{'$lookup': {
|
||||
'from': 'partner_client',
|
||||
"let": {'client_rattachement_id': "$client_rattachement_id",
|
||||
'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
|
||||
{'$eq': ["$_id", {'$convert': {
|
||||
'input': "$$client_rattachement_id",
|
||||
'to': "objectId",
|
||||
'onError': {'error': 'true'},
|
||||
'onNull': {'isnull': 'true'}
|
||||
}}]},
|
||||
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$partner_recid", '$$partner_owner_recid']}
|
||||
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{'$project': {'nom': 1, 'raison_sociale': 1, '_id': 0}},
|
||||
|
||||
],
|
||||
'as': 'partner_client'
|
||||
}
|
||||
},
|
||||
{'$lookup':
|
||||
{
|
||||
'from': 'session_formation',
|
||||
'let': {'session_id': "$session_id", 'class_internal_url': '$class_internal_url',
|
||||
'partner_owner_recid': '$partner_owner_recid'},
|
||||
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
{'$eq': ['$_id', {'$toObjectId': '$$session_id'}]},
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{'$project': { '_id': 0, 'testdate':0}},
|
||||
|
||||
],
|
||||
'as': 'session_collection'
|
||||
}
|
||||
},
|
||||
|
||||
{'$lookup':
|
||||
{
|
||||
'from': 'apprenant',
|
||||
"let": {'apprenant_id': "$apprenant_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr': {'$and': [
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$_id", {'$convert': {
|
||||
'input': "$$apprenant_id",
|
||||
'to': "objectId",
|
||||
'onError': {'error': 'true'},
|
||||
'onNull': {'isnull': 'true'}
|
||||
}}]},
|
||||
|
||||
]}}},
|
||||
{'$project': {'_id': 0}},
|
||||
], 'as': 'apprenant_collection'}}
|
||||
|
||||
])
|
||||
|
||||
print(" #### pipe_qry_inscrit = ", pipe_qry)
|
||||
list_class_datas = MYSY_GV.dbname['inscription'].aggregate(pipe_qry)
|
||||
|
||||
# print(" ### list_class_datas = ", str(list_class_datas))
|
||||
todays_date = str(datetime.today().strftime("%d/%m/%Y"))
|
||||
ts = datetime.now().timestamp()
|
||||
ts = str(ts).replace(".", "").replace(",", "")[-5:]
|
||||
|
||||
orig_file_name = "Export_Inscription_csv_" + str(my_partner['recid']) + "_" + str(ts) + ".xlsx"
|
||||
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
|
||||
|
||||
tab_exported_fields_header = ["apprenant_id", "nom", "email", "prenom", "civilite", "date_naissance", "telephone", "employeur", "client_rattachement_id", "adresse", "code_postal", "ville", "pays", "tuteur1_nom", "tuteur1_prenom",
|
||||
"tuteur1_email", "tuteur1_telephone", "tuteur2_nom", "tuteur2_prenom", "tuteur2_email", "tuteur2_telephone", "opco", "comment", "tuteur1_adresse", "tuteur1_cp", "tuteur1_ville", "tuteur1_pays",
|
||||
"tuteur1_include_com", "tuteur2_adresse", "tuteur2_cp", "tuteur2_ville", "tuteur2_pays", "tuteur2_include_com",
|
||||
"client_nom", "client_raison_sociale", "Session_titre", "code_session", "session_date_debut", "session_date_fin"]
|
||||
|
||||
|
||||
tab_exported_fields = ["nom", "email", "prenom", "civilite", "date_naissance", "telephone", "employeur", "client_rattachement_id", "adresse", "code_postal", "ville", "pays", "tuteur1_nom", "tuteur1_prenom",
|
||||
"tuteur1_email", "tuteur1_telephone", "tuteur2_nom", "tuteur2_prenom", "tuteur2_email", "tuteur2_telephone", "opco", "comment", "tuteur1_adresse", "tuteur1_cp", "tuteur1_ville", "tuteur1_pays",
|
||||
"tuteur1_include_com", "tuteur2_adresse", "tuteur2_cp", "tuteur2_ville", "tuteur2_pays", "tuteur2_include_com"]
|
||||
|
||||
# Create a workbook and add a worksheet.
|
||||
workbook = xlsxwriter.Workbook(outputFilename)
|
||||
worksheet = workbook.add_worksheet()
|
||||
|
||||
row = 0
|
||||
column = 0
|
||||
|
||||
for header_item in tab_exported_fields_header:
|
||||
worksheet.write(row, column, header_item)
|
||||
column += 1
|
||||
|
||||
for class_data in list_class_datas:
|
||||
column = 0
|
||||
row = row + 1
|
||||
"""
|
||||
for local_fiels in tab_exported_fields:
|
||||
print(" #### class_data = ", class_data)
|
||||
answers_record_JSON = ast.literal_eval(str(class_data))
|
||||
if (str(local_fiels) in answers_record_JSON.keys()):
|
||||
local_status, local_retval = mycommon.IsFloat(str(answers_record_JSON[str(local_fiels)]).strip())
|
||||
if (local_status is True):
|
||||
no_html = answers_record_JSON[str(local_fiels)]
|
||||
else:
|
||||
no_html = mycommon.cleanhtml(answers_record_JSON[str(local_fiels)])
|
||||
else:
|
||||
no_html = ""
|
||||
|
||||
worksheet.write(row, column, no_html)
|
||||
column += 1
|
||||
|
||||
"""
|
||||
if ("apprenant_id" in class_data.keys() and str(class_data['apprenant_id']).strip() != "" and
|
||||
"apprenant_collection" in class_data.keys() and len(class_data['apprenant_collection']) > 0):
|
||||
|
||||
worksheet.write(row, column, str(class_data['apprenant_id']))
|
||||
column += 1
|
||||
|
||||
#print(" ### str(class_data['apprenant_id']) = ", str(class_data['apprenant_id']) )
|
||||
answers_record_JSON = ast.literal_eval(str(class_data['apprenant_collection'][0]))
|
||||
#print(" ### answers_record_JSON = ", answers_record_JSON)
|
||||
for local_fiels in tab_exported_fields:
|
||||
if (str(local_fiels) in answers_record_JSON.keys()):
|
||||
local_status, local_retval = mycommon.IsFloat(
|
||||
str(answers_record_JSON[str(local_fiels)]).strip())
|
||||
if (local_status is True):
|
||||
no_html = answers_record_JSON[str(local_fiels)]
|
||||
else:
|
||||
no_html = mycommon.cleanhtml(answers_record_JSON[str(local_fiels)])
|
||||
else:
|
||||
no_html = ""
|
||||
|
||||
worksheet.write(row, column, no_html)
|
||||
column += 1
|
||||
|
||||
|
||||
elif ( "apprenant_id" not in class_data.keys() or str(class_data['apprenant_id']).strip() == ""):
|
||||
worksheet.write(row, column, "--")
|
||||
column += 1
|
||||
|
||||
for local_fiels in tab_exported_fields:
|
||||
#print(" #### class_data = ", class_data)
|
||||
answers_record_JSON = ast.literal_eval(str(class_data))
|
||||
if (str(local_fiels) in answers_record_JSON.keys()):
|
||||
local_status, local_retval = mycommon.IsFloat(
|
||||
str(answers_record_JSON[str(local_fiels)]).strip())
|
||||
if (local_status is True):
|
||||
no_html = answers_record_JSON[str(local_fiels)]
|
||||
else:
|
||||
no_html = mycommon.cleanhtml(answers_record_JSON[str(local_fiels)])
|
||||
else:
|
||||
no_html = ""
|
||||
|
||||
worksheet.write(row, column, no_html)
|
||||
column += 1
|
||||
|
||||
|
||||
# Récuperation des données du client de rattachement
|
||||
if ("partner_client" in class_data.keys() and len(class_data['partner_client']) > 0):
|
||||
if ("nom" in class_data['partner_client'][0].keys()):
|
||||
no_html_formateur_nom = class_data['partner_client'][0]['nom']
|
||||
worksheet.write(row, column, no_html_formateur_nom)
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
|
||||
if ("raison_sociale" in class_data['partner_client'][0].keys()):
|
||||
no_html_formateur_raison_sociale = class_data['partner_client'][0]['raison_sociale']
|
||||
worksheet.write(row, column, no_html_formateur_raison_sociale)
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
|
||||
# Récuperation des données de la session de formation
|
||||
if ("session_collection" in class_data.keys() and len(class_data['session_collection']) > 0):
|
||||
if ("titre" in class_data['session_collection'][0].keys()):
|
||||
no_html_session_titre = class_data['session_collection'][0]['titre']
|
||||
worksheet.write(row, column, no_html_session_titre)
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
|
||||
if ("code_session" in class_data['session_collection'][0].keys()):
|
||||
no_html_code_session = class_data['session_collection'][0]['code_session']
|
||||
worksheet.write(row, column, no_html_code_session)
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
|
||||
|
||||
|
||||
if ("date_debut" in class_data['session_collection'][0].keys()):
|
||||
no_html_session_debut = class_data['session_collection'][0]['date_debut']
|
||||
worksheet.write(row, column, no_html_session_debut)
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
|
||||
if ("date_fin" in class_data['session_collection'][0].keys()):
|
||||
no_html_session_date_fin = class_data['session_collection'][0]['date_fin']
|
||||
worksheet.write(row, column, no_html_session_date_fin)
|
||||
column += 1
|
||||
else:
|
||||
worksheet.write(row, column, "")
|
||||
column += 1
|
||||
|
||||
|
||||
workbook.close()
|
||||
if os.path.exists(outputFilename):
|
||||
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
|
||||
return True, send_file(outputFilename, as_attachment=True)
|
||||
|
||||
return False, "Impossible de générer l'export csv des inscrits (2) "
|
||||
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible d'exporter les inscrits "

Log/log_file.log (2172 changed lines): file diff suppressed because it is too large

apprenant_mgt.py (170 changed lines)
@@ -5,7 +5,7 @@ Un apprenant est crée apres la validation d'une inscription ou sans.
|
||||
"""
|
||||
import ast
|
||||
|
||||
import xlsxwriter
|
||||
import jinja2
|
||||
import pymongo
|
||||
from flask import send_file
@@ -2294,3 +2294,171 @@ def Get_List_Fiche_Apprenant(diction):
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible de récupérer la liste des modèles de fiche"
|
||||
|
||||
|
||||
"""
|
||||
Fonction permet d'exporter les apprenant dans un fichier excel
|
||||
"""
|
||||
def Export_Apprenant_To_Excel_From_from_List_Id(diction):
|
||||
try:
|
||||
diction = mycommon.strip_dictionary(diction)
|
||||
|
||||
"""
|
||||
Verification des input acceptés
|
||||
"""
|
||||
field_list = ['token', 'tab_id']
|
||||
|
||||
incom_keys = diction.keys()
|
||||
for val in incom_keys:
|
||||
if val not in field_list and val.startswith('my_') is False:
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification des champs obligatoires
|
||||
"""
|
||||
field_list_obligatoire = ['token', 'tab_id']
|
||||
for val in field_list_obligatoire:
|
||||
if val not in diction:
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification de l'identité et autorisation de l'entité qui
|
||||
appelle cette API
|
||||
"""
|
||||
token = ""
|
||||
if ("token" in diction.keys()):
|
||||
if diction['token']:
|
||||
token = diction['token']
|
||||
|
||||
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
||||
if (local_status is not True):
|
||||
return local_status, my_partner
|
||||
|
||||
tab_id = []
|
||||
tab_id_tmp = str(diction['tab_id']).split(",")
|
||||
for val in tab_id_tmp:
|
||||
tab_id.append(ObjectId(str(val)))
|
||||
|
||||
qery_match = {'_id': {'$in': tab_id}, 'partner_owner_recid': str(my_partner['recid']), 'valide': '1',
|
||||
'locked': '0'}
|
||||
|
||||
print(" #### qry = ", qery_match)
|
||||
list_class_datas = MYSY_GV.dbname['myclass'].find({'_id': {'$in': tab_id},
|
||||
'partner_owner_recid': str(my_partner['recid']),
|
||||
'valide': '1', 'locked': '0'}, {'_id': 0,
|
||||
'valide': 0, 'locked': 0})
|
||||
|
||||
pipe_qry = ([
|
||||
{'$match': qery_match},
|
||||
{'$project': {'_id': 0, 'valide': 0, 'locked': 0}},
|
||||
{'$lookup': {
|
||||
'from': 'partner_client',
|
||||
"let": {'client_rattachement_id': "$client_rattachement_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
|
||||
{'$eq': ["$_id", {'$convert': {
|
||||
'input': "$$client_rattachement_id",
|
||||
'to': "objectId",
|
||||
'onError': {'error': 'true'},
|
||||
'onNull': {'isnull': 'true'}
|
||||
}}]},
|
||||
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$partner_recid", '$$partner_owner_recid']}
|
||||
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{'$project': {'nom': 1, 'raison_sociale': 1, '_id': 0}},
|
||||
|
||||
],
|
||||
'as': 'partner_client'
|
||||
}
|
||||
},
|
||||
|
||||
])
|
||||
|
||||
print(" #### pipe_qry_apprenant = ", pipe_qry)
|
||||
list_class_datas = MYSY_GV.dbname['apprenant'].aggregate(pipe_qry)
|
||||
|
||||
# print(" ### list_class_datas = ", str(list_class_datas))
|
||||
todays_date = str(datetime.today().strftime("%d/%m/%Y"))
|
||||
ts = datetime.now().timestamp()
|
||||
ts = str(ts).replace(".", "").replace(",", "")[-5:]
|
||||
|
||||
orig_file_name = "Export_Apprenant_csv_" + str(my_partner['recid']) + "_" + str(ts) + ".xlsx"
|
||||
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
|
||||
|
||||
tab_exported_fields_header = ["nom", "email", "prenom", "civilite", "date_naissance", "telephone", "employeur", "client_rattachement_id", "adresse", "code_postal", "ville", "pays", "tuteur1_nom", "tuteur1_prenom",
|
||||
"tuteur1_email", "tuteur1_telephone", "tuteur2_nom", "tuteur2_prenom", "tuteur2_email", "tuteur2_telephone", "opco", "comment", "tuteur1_adresse", "tuteur1_cp", "tuteur1_ville", "tuteur1_pays",
|
||||
"tuteur1_include_com", "tuteur2_adresse", "tuteur2_cp", "tuteur2_ville", "tuteur2_pays", "tuteur2_include_com", "client_nom", "client_raison_sociale"]
|
||||
|
||||
|
||||
tab_exported_fields = ["nom", "email", "prenom", "civilite", "date_naissance", "telephone", "employeur", "client_rattachement_id", "adresse", "code_postal", "ville", "pays", "tuteur1_nom", "tuteur1_prenom",
|
||||
"tuteur1_email", "tuteur1_telephone", "tuteur2_nom", "tuteur2_prenom", "tuteur2_email", "tuteur2_telephone", "opco", "comment", "tuteur1_adresse", "tuteur1_cp", "tuteur1_ville", "tuteur1_pays",
|
||||
"tuteur1_include_com", "tuteur2_adresse", "tuteur2_cp", "tuteur2_ville", "tuteur2_pays", "tuteur2_include_com"]
|
||||
|
||||
# Create a workbook and add a worksheet.
|
||||
workbook = xlsxwriter.Workbook(outputFilename)
|
||||
worksheet = workbook.add_worksheet()
|
||||
|
||||
row = 0
|
||||
column = 0
|
||||
|
||||
for header_item in tab_exported_fields_header:
|
||||
worksheet.write(row, column, header_item)
|
||||
column += 1
|
||||
|
||||
for class_data in list_class_datas:
|
||||
column = 0
|
||||
row = row + 1
|
||||
for local_fiels in tab_exported_fields:
|
||||
answers_record_JSON = ast.literal_eval(str(class_data))
|
||||
if (str(local_fiels) in answers_record_JSON.keys()):
|
||||
local_status, local_retval = mycommon.IsFloat(str(answers_record_JSON[str(local_fiels)]).strip())
|
||||
if (local_status is True):
|
||||
no_html = answers_record_JSON[str(local_fiels)]
|
||||
else:
|
||||
no_html = mycommon.cleanhtml(answers_record_JSON[str(local_fiels)])
|
||||
else:
|
||||
no_html = ""
|
||||
|
||||
worksheet.write(row, column, no_html)
|
||||
column += 1
|
||||
|
||||
|
||||
if ("partner_client" in class_data.keys() and len(class_data['partner_client']) > 0):
|
||||
if ("nom" in class_data['partner_client'][0].keys()):
|
||||
no_html_formateur_nom = class_data['partner_client'][0]['nom']
|
||||
worksheet.write(row, column, no_html_formateur_nom)
|
||||
column += 1
|
||||
|
||||
if ("raison_sociale" in class_data['partner_client'][0].keys()):
|
||||
no_html_formateur_raison_sociale = class_data['partner_client'][0]['raison_sociale']
|
||||
worksheet.write(row, column, no_html_formateur_raison_sociale)
|
||||
column += 1
|
||||
|
||||
workbook.close()
|
||||
if os.path.exists(outputFilename):
|
||||
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
|
||||
return True, send_file(outputFilename, as_attachment=True)
|
||||
|
||||
return False, "Impossible de générer l'export csv des apprenants (2) "
|
||||
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible d'exporter les apprenants "

class_mgt.py (258 changed lines)
@@ -2,7 +2,11 @@
Ce fichier traite tout ce qui est liée à la gestion des formations
|
||||
|
||||
'''
|
||||
import ast
|
||||
import hashlib
|
||||
import xlsxwriter
|
||||
import pymongo
|
||||
from flask import send_file
|
||||
from pymongo import MongoClient
|
||||
import json
|
||||
from bson import ObjectId
@@ -4684,10 +4688,36 @@ def Duplicate_Class(diction):
{'_id': 0}
|
||||
)
|
||||
|
||||
# Creation de la variable aleatoire en se basant sur le datetime now
|
||||
suffix = hashlib.md5(str(datetime.now()).encode()).hexdigest()
|
||||
i = 1
|
||||
new_title = str(class_to_duplicate['title']) + "_dup"
|
||||
new_internal_code = str(class_to_duplicate['internal_code'])+"_dup"
|
||||
new_internal_url = str(class_to_duplicate['internal_url'])+"_dup"
|
||||
new_external_code = str(class_to_duplicate['external_code'])+"_dup"
|
||||
new_internal_code = str(class_to_duplicate['internal_code'])+"_dup_" + str(suffix[0:i])
|
||||
new_internal_url = str(class_to_duplicate['internal_url'])+"_dup_" + str(suffix[0:i])
|
||||
new_external_code = str(class_to_duplicate['external_code'])+"_dup_" + str(suffix[0:i])
|
||||
|
||||
# Verifier qu'il n'existe pas de formation avec le nouvel internal url, ni le nouvel internal_code
|
||||
# Les nouveau codes doivent etre unique quelque soit le 'partner_owner_recid'
|
||||
is_exite_class_with_new_internal_url = 1
|
||||
|
||||
while( is_exite_class_with_new_internal_url > 0):
|
||||
is_exite_class_with_new_internal_url = MYSY_GV.dbname['myclass'].count_documents({'$or':[{'internal_url':str(new_internal_url)},
|
||||
{'internal_code':str(new_internal_code)}
|
||||
]})
|
||||
i = i + 1
|
||||
|
||||
new_title = str(class_to_duplicate['title']) + "_dup_"+str(i)
|
||||
new_internal_code = str(class_to_duplicate['internal_code']) + "_dup_" + str(suffix[0:i])
|
||||
new_internal_url = str(class_to_duplicate['internal_url']) + "_dup_" + str(suffix[0:i])
|
||||
new_external_code = str(class_to_duplicate['external_code']) + "_dup_" + str(suffix[0:1])
|
||||
|
||||
|
||||
print(" FINAL DUPLICATE DATA = ")
|
||||
print(" ### new_title = ", new_title)
|
||||
print(" ### new_internal_code = ", new_internal_code)
|
||||
print(" ### new_internal_url = ", new_internal_url)
|
||||
print(" ### new_external_code = ", new_external_code)
|
||||
|
||||
|
||||
new_class = class_to_duplicate
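
The uniqueness loop introduced above, which keeps extending an md5-derived "_dup_" suffix until no existing class matches the candidate codes, can be summarised with this small standalone sketch; make_unique_codes and its count_documents argument are hypothetical stand-ins, not functions from the codebase:

# Standalone sketch; count_documents stands in for MYSY_GV.dbname['myclass'].count_documents.
import hashlib
from datetime import datetime

def make_unique_codes(internal_code, internal_url, count_documents):
    # Pseudo-random suffix derived from the current timestamp.
    suffix = hashlib.md5(str(datetime.now()).encode()).hexdigest()
    i = 1
    new_code = internal_code + "_dup_" + suffix[0:i]
    new_url = internal_url + "_dup_" + suffix[0:i]
    # Keep lengthening the suffix until neither candidate collides with an existing class.
    while count_documents({"$or": [{"internal_url": new_url},
                                   {"internal_code": new_code}]}) > 0:
        i += 1
        new_code = internal_code + "_dup_" + suffix[0:i]
        new_url = internal_url + "_dup_" + suffix[0:i]
    return new_code, new_url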
@@ -4783,10 +4813,40 @@ def Duplicate_Class_from_internal_url(diction):
{'_id':0}
|
||||
)
|
||||
|
||||
# Creation de la variable aleatoire en se basant sur le datetime now
|
||||
suffix = hashlib.md5(str(datetime.now()).encode()).hexdigest()
|
||||
i = 1
|
||||
new_title = str(class_to_duplicate['title']) + "_dup"
|
||||
new_internal_code = str(class_to_duplicate['internal_code'])+"_dup"
|
||||
new_internal_url = str(class_to_duplicate['internal_url'])+"_dup"
|
||||
new_external_code = str(class_to_duplicate['external_code'])+"_dup"
|
||||
new_internal_code = str(class_to_duplicate['internal_code']) + "_dup_" + str(suffix[0:i])
|
||||
new_internal_url = str(class_to_duplicate['internal_url']) + "_dup_" + str(suffix[0:i])
|
||||
new_external_code = str(class_to_duplicate['external_code']) + "_dup_" + str(suffix[0:i])
|
||||
|
||||
# Verifier qu'il n'existe pas de formation avec le nouvel internal url, ni le nouvel internal_code
|
||||
# Les nouveau codes doivent etre unique quelque soit le 'partner_owner_recid'
|
||||
|
||||
is_exite_class_with_new_internal_url = MYSY_GV.dbname['myclass'].count_documents(
|
||||
{'$or': [{'internal_url': str(new_internal_url)},
|
||||
{'internal_code': str(new_internal_code)}
|
||||
]})
|
||||
|
||||
while (is_exite_class_with_new_internal_url > 0):
|
||||
|
||||
#print(" #### qry = ", qry)
|
||||
|
||||
|
||||
new_title = str(class_to_duplicate['title']) + "_dup_" + str(i)
|
||||
new_internal_code = str(class_to_duplicate['internal_code']) + "_dup_" + str(suffix[0:i])
|
||||
new_internal_url = str(class_to_duplicate['internal_url']) + "_dup_" + str(suffix[0:i])
|
||||
new_external_code = str(class_to_duplicate['external_code']) + "_dup_" + str(suffix[0:i])
|
||||
i = i + 1
|
||||
|
||||
"""print(" FINAL DUPLICATE DATA MASSE= ")
|
||||
print(" ### new_title = ", new_title)
|
||||
print(" ### new_internal_code = ", new_internal_code)
|
||||
print(" ### new_internal_url = ", new_internal_url)
|
||||
print(" ### new_external_code = ", new_external_code)
|
||||
"""
|
||||
|
||||
|
||||
new_class = class_to_duplicate
@@ -4804,14 +4864,14 @@ def Duplicate_Class_from_internal_url(diction):
|
||||
# Indexation Title de la nouvelle formation ajoutée
|
||||
training_to_index_title = {}
|
||||
training_to_index_title['internal_url'] = new_external_code['internal_url']
|
||||
training_to_index_title['internal_url'] = new_class['internal_url']
|
||||
training_to_index_title['reindex_all'] = '0'
|
||||
training_to_index_title['partner_owner_recid'] = str(my_partner['recid'])
|
||||
eibdd.ela_index_given_classes_title(training_to_index_title)
|
||||
|
||||
# Indexation Title des mots clées
|
||||
if (str(new_external_code['mots_cle']).strip() != ""):
|
||||
eibdd.ela_index_class_key_word(new_external_code['external_code'], "keyword", str(my_partner['recid']))
|
||||
if (str(new_class['mots_cle']).strip() != ""):
|
||||
eibdd.ela_index_class_key_word(new_class['external_code'], "keyword", str(my_partner['recid']))
|
||||
|
||||
|
||||
return True, " La formation a été dupliquée"
@@ -4889,3 +4949,183 @@ def Get_List_Class_Niveau_Formation(diction):
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible de récupérer la liste des types des niveaux de formation"
|
||||
|
||||
|
||||
"""
|
||||
Fonction permet d'exporter les formations dans un fichier Excel
|
||||
"""
|
||||
def Export_Class_To_Excel_From_from_List_Id(diction):
|
||||
try:
|
||||
diction = mycommon.strip_dictionary(diction)
|
||||
|
||||
"""
|
||||
Verification des input acceptés
|
||||
"""
|
||||
field_list = ['token', 'tab_id']
|
||||
|
||||
incom_keys = diction.keys()
|
||||
for val in incom_keys:
|
||||
if val not in field_list and val.startswith('my_') is False:
|
||||
mycommon.myprint(str(
|
||||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification des champs obligatoires
|
||||
"""
|
||||
field_list_obligatoire = ['token', 'tab_id']
|
||||
for val in field_list_obligatoire:
|
||||
if val not in diction:
|
||||
mycommon.myprint(
|
||||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||||
return False, " Les informations fournies sont incorrectes",
|
||||
|
||||
"""
|
||||
Verification de l'identité et autorisation de l'entité qui
|
||||
appelle cette API
|
||||
"""
|
||||
token = ""
|
||||
if ("token" in diction.keys()):
|
||||
if diction['token']:
|
||||
token = diction['token']
|
||||
|
||||
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
||||
if (local_status is not True):
|
||||
return local_status, my_partner
|
||||
|
||||
tab_id = []
|
||||
tab_id_tmp= str(diction['tab_id']).split(",")
|
||||
for val in tab_id_tmp:
|
||||
tab_id.append(ObjectId(str(val)))
|
||||
|
||||
qery_match = {'_id':{'$in':tab_id}, 'partner_owner_recid':str(my_partner['recid']), 'valide':'1', 'locked':'0'}
|
||||
|
||||
print(" #### qry = ", qery_match)
|
||||
list_class_datas = MYSY_GV.dbname['myclass'].find({'_id':{'$in':tab_id},
|
||||
'partner_owner_recid':str(my_partner['recid']),
|
||||
'valide':'1', 'locked':'0'},{'_id':0, 'internal_code':0,
|
||||
'freeacces':0, 'indexed':0,
|
||||
'indexed_desc':0, 'indexed_obj':0,
|
||||
'indexed_title':0, 'isalaune':0,
|
||||
'valide':0, 'locked':0})
|
||||
|
||||
pipe_qry = ([
|
||||
{'$match': qery_match},
|
||||
{'$project':{'_id':0, 'internal_code':0,'freeacces':0, 'indexed':0, 'indexed_desc':0, 'indexed_obj':0, 'indexed_title':0, 'isalaune':0,'valide':0, 'locked':0}},
|
||||
{'$lookup': {
|
||||
'from': 'ressource_humaine',
|
||||
"let": {'formateur_id': "$formateur_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||||
'pipeline': [
|
||||
{'$match':
|
||||
{'$expr':
|
||||
{'$and':
|
||||
[
|
||||
|
||||
{'$eq': ["$_id", {'$convert': {
|
||||
'input': "$$formateur_id",
|
||||
'to': "objectId",
|
||||
'onError': {'error': 'true'},
|
||||
'onNull': {'isnull': 'true'}
|
||||
}}]},
|
||||
|
||||
{'$eq': ["$valide", "1"]},
|
||||
{'$eq': ["$partner_recid", '$$partner_owner_recid']}
|
||||
|
||||
]
|
||||
}
|
||||
}
|
||||
},
|
||||
{'$project':{'nom':1, 'prenom':1, '_id':0}},
|
||||
|
||||
],
|
||||
'as': 'ressource_humaine'
|
||||
}
|
||||
},
|
||||
|
||||
])
|
||||
|
||||
|
||||
#print(" #### pipe_qry = ", pipe_qry)
|
||||
list_class_datas = MYSY_GV.dbname['myclass'].aggregate(pipe_qry)
|
||||
|
||||
#print(" ### list_class_datas = ", str(list_class_datas))
|
||||
todays_date = str(datetime.today().strftime("%d/%m/%Y"))
|
||||
ts = datetime.now().timestamp()
|
||||
ts = str(ts).replace(".", "").replace(",", "")[-5:]
|
||||
|
||||
orig_file_name = "Export_Formation_csv_" + str(my_partner['recid']) + "_" + str(ts) + ".xlsx"
|
||||
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
|
||||
|
||||
tab_exported_fields_header = ["external_code", "certif", "cpf", "title", "description", "objectif", "domaine",
|
||||
"duration", "duration_unit", "institut_formation", "metier", "mots_cle",
|
||||
"note",
|
||||
"plus_produit", "pourqui", "prerequis", "price", "programme", "published", "support",
|
||||
"presentiel", "distantiel", "formateur_nom", "formateur_prenom" ]
|
||||
|
||||
|
||||
tab_exported_fields = ["external_code", "certif", "cpf", "title", "description", "objectif", "domaine",
|
||||
"duration", "duration_unit", "institut_formation", "metier", "mots_cle",
|
||||
"note",
|
||||
"plus_produit", "pourqui", "prerequis", "price", "programme", "published", "support",]
|
||||
|
||||
|
||||
# Create a workbook and add a worksheet.
|
||||
workbook = xlsxwriter.Workbook(outputFilename)
|
||||
worksheet = workbook.add_worksheet()
|
||||
|
||||
row = 0
|
||||
column = 0
|
||||
|
||||
for header_item in tab_exported_fields_header:
|
||||
worksheet.write(row, column, header_item)
|
||||
column += 1
|
||||
|
||||
for class_data in list_class_datas:
|
||||
column = 0
|
||||
row = row + 1
|
||||
for local_fiels in tab_exported_fields:
|
||||
answers_record_JSON = ast.literal_eval(str(class_data))
|
||||
if (str(local_fiels) in answers_record_JSON.keys()):
|
||||
local_status, local_retval = mycommon.IsFloat(str(answers_record_JSON[str(local_fiels)]).strip())
|
||||
if( local_status is True ):
|
||||
no_html = answers_record_JSON[str(local_fiels)]
|
||||
else:
|
||||
no_html = mycommon.cleanhtml(answers_record_JSON[str(local_fiels)])
|
||||
else:
|
||||
no_html = ""
|
||||
|
||||
worksheet.write(row, column, no_html)
|
||||
column += 1
|
||||
|
||||
if( "presentiel" in class_data.keys()):
|
||||
no_html_presentiel = class_data['presentiel']['presentiel']
|
||||
worksheet.write(row, column, no_html_presentiel)
|
||||
column += 1
|
||||
no_html_distantiel = class_data['presentiel']['distantiel']
|
||||
worksheet.write(row, column, no_html_distantiel)
|
||||
column += 1
|
||||
|
||||
if( "ressource_humaine" in class_data.keys() and len(class_data['ressource_humaine'])> 0 ):
|
||||
if( "nom" in class_data['ressource_humaine'][0].keys() ):
|
||||
no_html_formateur_nom = class_data['ressource_humaine'][0]['nom']
|
||||
worksheet.write(row, column, no_html_formateur_nom)
|
||||
column += 1
|
||||
|
||||
if ("prenom" in class_data['ressource_humaine'][0].keys()):
|
||||
no_html_formateur_prenom = class_data['ressource_humaine'][0]['prenom']
|
||||
worksheet.write(row, column, no_html_formateur_prenom)
|
||||
column += 1
|
||||
|
||||
|
||||
workbook.close()
|
||||
if os.path.exists(outputFilename):
|
||||
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
|
||||
return True, send_file(outputFilename, as_attachment=True)
|
||||
|
||||
|
||||
return False, "Impossible de générer l'export csv des formation (2) "
|
||||
|
||||
except Exception as e:
|
||||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||||
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||||
return False, " Impossible d'exporter les formations "

main.py (63 changed lines)
@@ -7002,6 +7002,69 @@ def TBD_Inscription_Export_Dashbord_To_Csv(token, user_dashbord_id):
return False
|
||||
|
||||
|
||||
"""
|
||||
API Export des formations dans un fichier excel / csv
|
||||
"""
|
||||
@app.route('/myclass/api/Export_Class_To_Excel_From_from_List_Id/<token>/<tab_id>', methods=['POST','GET'])
|
||||
@crossdomain(origin='*')
|
||||
def Export_Class_To_Csv_From_from_List_Id(token, tab_id):
|
||||
# On recupere le corps (payload) de la requete
|
||||
payload = mycommon.strip_dictionary (request.form.to_dict())
|
||||
payload = {}
|
||||
payload['token'] = str(token)
|
||||
payload['tab_id'] = str(tab_id)
|
||||
|
||||
print(" ### Export_Class_To_Excel_From_from_List_Id payload = ",payload)
|
||||
|
||||
status, retval = cm.Export_Class_To_Excel_From_from_List_Id(payload)
|
||||
if(status ):
|
||||
return retval
|
||||
else:
|
||||
return False
|
||||
|
||||
"""
|
||||
API Export des apprenants dans un fichier excel / csv
|
||||
"""
|
||||
@app.route('/myclass/api/Export_Apprenant_To_Excel_From_from_List_Id/<token>/<tab_id>', methods=['POST','GET'])
|
||||
@crossdomain(origin='*')
|
||||
def Export_Apprenant_To_Excel_From_from_List_Id(token, tab_id):
|
||||
# On recupere le corps (payload) de la requete
|
||||
payload = mycommon.strip_dictionary (request.form.to_dict())
|
||||
payload = {}
|
||||
payload['token'] = str(token)
|
||||
payload['tab_id'] = str(tab_id)
|
||||
|
||||
print(" ### Export_Apprenant_To_Excel_From_from_List_Id payload = ",payload)
|
||||
|
||||
status, retval = apprenant_mgt.Export_Apprenant_To_Excel_From_from_List_Id(payload)
|
||||
if(status ):
|
||||
return retval
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
|
||||
"""
|
||||
API Export des inscrits dans un fichier excel / csv
|
||||
"""
|
||||
@app.route('/myclass/api/Export_Inscription_To_Excel_From_from_List_Id/<token>/<tab_id>', methods=['POST','GET'])
|
||||
@crossdomain(origin='*')
|
||||
def Export_Inscription_To_Excel_From_from_List_Id(token, tab_id):
|
||||
# On recupere le corps (payload) de la requete
|
||||
payload = mycommon.strip_dictionary (request.form.to_dict())
|
||||
payload = {}
|
||||
payload['token'] = str(token)
|
||||
payload['tab_id'] = str(tab_id)
|
||||
|
||||
print(" ### Export_Inscription_To_Excel_From_from_List_Id payload = ",payload)
|
||||
|
||||
status, retval = inscription.Export_Inscription_To_Excel_From_from_List_Id(payload)
|
||||
if(status ):
|
||||
return retval
|
||||
else:
|
||||
return False
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
print(" debut api")