"""
|
|
Ce fichier permet de traiter les tableaux de bord associées aux ressources humaines
|
|
"""
|
|
import ast
|
|
|
|
import dateutil
|
|
import pymongo
|
|
import xlsxwriter
|
|
from dateutil.relativedelta import relativedelta
|
|
from flask import send_file
|
|
from pymongo import MongoClient
|
|
import json
|
|
from bson import ObjectId
|
|
import re
|
|
from datetime import datetime, timezone, date
|
|
import prj_common as mycommon
|
|
import secrets
|
|
import inspect
|
|
import sys, os
|
|
import csv
|
|
import pandas as pd
|
|
from pymongo import ReturnDocument
|
|
import GlobalVariable as MYSY_GV
|
|
from math import isnan
|
|
import GlobalVariable as MYSY_GV
|
|
import GlobalVariable as MYSY_GV
|
|
from datetime import timedelta
|
|
from datetime import timedelta
|
|
from operator import itemgetter
|
|
|
|
|
|
"""
|
|
Recupération du tableau des ressources humaines en lien avec leur planification
|
|
"""
|
|
def Get_Humain_Ressource_With_Planning(diction):
|
|
try:
|
|
diction = mycommon.strip_dictionary(diction)
|
|
|
|
"""
|
|
Verification des input acceptés
|
|
"""
|
|
field_list = ['token', 'session_start_date', 'session_end_date', 'filter_value', 'tab_rh_id', 'event_type' ]
|
|
|
|
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification des champs obligatoires
|
|
"""
|
|
field_list_obligatoire = ['token', 'session_start_date', 'session_end_date' ]
|
|
for val in field_list_obligatoire:
|
|
if val not in diction:
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification de l'identité et autorisation de l'entité qui
|
|
appelle cette API
|
|
"""
|
|
token = ""
|
|
if ("token" in diction.keys()):
|
|
if diction['token']:
|
|
token = diction['token']
|
|
|
|
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
|
if (local_status is not True):
|
|
return local_status, my_partner
|
|
|
|
|
|
# Verfication de la validité du type d'evenement
|
|
|
|
event_type_filter = {}
|
|
if( "event_type" in diction.keys() and diction['event_type']):
|
|
if( diction['event_type'] not in MYSY_GV.AGENDA_EVENT_TYPE):
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " Le type d'évènement est invalide ")
|
|
return False, " Le type d'évènement est invalide "
|
|
|
|
event_type_filter = {'$eq': ["$event_type", str(diction['event_type'])]}
|
|
|
|
|
|
print(" ### event_type_filter = ", event_type_filter)
|
|
"""
|
|
/!\ : si 'tab_rh_id' == 'all', cela veut dire qu'on prend touts les employés
|
|
si non, on va aller filtrer selon les contenus de la tab tab_rh_id
|
|
"""
|
|
tab_rh_id = []
|
|
tab_rh_object_id = []
|
|
|
|
if( str(diction['tab_rh_id']) == "all" ):
|
|
for val in MYSY_GV.dbname['ressource_humaine'].find({'valide': '1',
|
|
'locked': '0',
|
|
'partner_recid': str(my_partner['recid'])}):
|
|
|
|
tab_rh_object_id.append(val['_id'])
|
|
|
|
|
|
|
|
|
|
else : # Verification de la validité de toutes les ressources humaines dans tab_rh_id
|
|
tab_rh_id = str(diction['tab_rh_id']).split(',')
|
|
for rh_id in tab_rh_id:
|
|
if (MYSY_GV.dbname['ressource_humaine'].count_documents({'_id': ObjectId(str(rh_id)),
|
|
'valide': '1',
|
|
'locked': '0',
|
|
'partner_recid': str(
|
|
my_partner['recid'])}) != 1):
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " La ressource humaine :" + str(rh_id) + " n'est pas valide ")
|
|
return False, " La session_id :" + str(rh_id) + " n'est pas valide "
|
|
|
|
tab_rh_object_id.append(ObjectId(str(rh_id)))
|
|
|
|
|
|
filt_session_start_date = ""
|
|
if ("session_start_date" in diction.keys() and diction['session_start_date']):
|
|
filt_session_start_date = str(diction['session_start_date'])[0:10]
|
|
local_status = mycommon.CheckisDate(filt_session_start_date)
|
|
if (local_status is False):
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa.")
|
|
return False, " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa."
|
|
|
|
filt_session_end_date = ""
|
|
if ("session_end_date" in diction.keys() and diction['session_end_date']):
|
|
filt_session_end_date = str(diction['session_end_date'])[0:10]
|
|
local_status = mycommon.CheckisDate(filt_session_end_date)
|
|
if (local_status is False):
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa.")
|
|
return False, " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa."
|
|
|
|
|
|
"""
|
|
Si la valeur de 'filter_value' est m0 ou m1, on va aller recuperer les date du mois correspondant.
|
|
On ecrase les valeur de filt_session_start_date et filt_session_end_date
|
|
"""
|
|
if( 'filter_value' in diction.keys() ):
|
|
if( str(diction['filter_value']) == "m0"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
filt_session_start_date = start_current_month_date
|
|
filt_session_end_date = end_current_month_date
|
|
|
|
elif (str(diction['filter_value']) == "m1"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
filt_session_start_date = start_current_month_date
|
|
filt_session_end_date = end_current_month_date
|
|
|
|
RetObject = []
|
|
val_tmp = 1
|
|
|
|
filt_session_start_date_ISODATE = datetime.strptime(str(filt_session_start_date), '%d/%m/%Y')
|
|
filt_session_end_date_ISODATE = datetime.strptime(str(filt_session_end_date), '%d/%m/%Y')
|
|
|
|
filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
|
|
filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE
|
|
|
|
interval_date = []
|
|
pipe_qry = ( [
|
|
{ "$match": {'_id':{'$in':tab_rh_object_id} }},
|
|
{'$lookup': {
|
|
'from': 'agenda',
|
|
"let": {"rh_id": {'$toString': "$_id"}, 'rh_partner_owner_recid':'$partner_recid'},
|
|
'pipeline': [
|
|
{'$match':
|
|
{'$expr':
|
|
{'$and':
|
|
[
|
|
|
|
event_type_filter,
|
|
{'$eq': ["$valide", "1"]},
|
|
{'$eq': ["$related_collection_recid", '$$rh_id']},
|
|
{'$eq': ["$partner_owner_recid", '$$rh_partner_owner_recid']},
|
|
{'$gte': [{'$toDate': "$event_start" },filt_session_start_date_ISODATE]},
|
|
{'$lte': [{'$toDate': "$event_start"}, filt_session_end_date_ISODATE]}
|
|
|
|
|
|
]
|
|
}
|
|
}
|
|
},
|
|
|
|
],
|
|
'as': 'collection_agenda'
|
|
}
|
|
}
|
|
])
|
|
|
|
print(" ### Get_Humain_Ressource_With_Planning ici pipe_qry = ", pipe_qry)
|
|
|
|
new_retval_titles = []
|
|
new_retval_data = []
|
|
local_id = 0
|
|
|
|
total_duration = 0
|
|
for retval in MYSY_GV.dbname['ressource_humaine'].aggregate(pipe_qry):
|
|
|
|
## Recuperation des agenda
|
|
if( "collection_agenda" in retval.keys() and len(retval['collection_agenda']) > 0 ):
|
|
|
|
for agenda_retval in retval['collection_agenda'] :
|
|
local_id = local_id + 1
|
|
new_node = {}
|
|
new_node['id'] = str(local_id)
|
|
new_node['_id'] = str(retval['_id'])
|
|
|
|
if ("civilite" in retval.keys()):
|
|
new_node['civilite'] = str(retval['civilite']).lower()
|
|
else:
|
|
new_node['civilite'] = ""
|
|
|
|
if ("nom" in retval.keys()):
|
|
new_node['nom'] = str(retval['nom'])
|
|
else:
|
|
new_node['nom'] = ""
|
|
|
|
if ("prenom" in retval.keys()):
|
|
new_node['prenom'] = str(retval['prenom'])
|
|
else:
|
|
new_node['prenom'] = ""
|
|
|
|
if ("email" in retval.keys()):
|
|
new_node['email'] = str(retval['email'])
|
|
else:
|
|
new_node['email'] = ""
|
|
|
|
if ("ismanager" in retval.keys()):
|
|
new_node['ismanager'] = str(retval['ismanager'])
|
|
else:
|
|
new_node['ismanager'] = "0"
|
|
|
|
if ("fonction" in retval.keys()):
|
|
new_node['fonction'] = str(retval['fonction'])
|
|
else:
|
|
new_node['fonction'] = ""
|
|
|
|
if ("_id" in agenda_retval.keys()):
|
|
new_node['agenda_id'] = str(agenda_retval['_id'])
|
|
else:
|
|
new_node['agenda_id'] = ""
|
|
|
|
if ("event_title" in agenda_retval.keys()):
|
|
new_node['agenda_event_title'] = str(agenda_retval['event_title'])
|
|
else:
|
|
new_node['agenda_event_title'] = ""
|
|
|
|
if ("comment" in agenda_retval.keys()):
|
|
new_node['agenda_event_code_session'] = str(agenda_retval['comment'])
|
|
else:
|
|
new_node['agenda_event_code_session'] = ""
|
|
|
|
if ("sequence_session_id" in agenda_retval.keys()):
|
|
new_node['agenda_event_sequence_session_id'] = str(agenda_retval['sequence_session_id'])
|
|
else:
|
|
new_node['agenda_event_sequence_session_id'] = ""
|
|
|
|
|
|
if ("event_type" in agenda_retval.keys()):
|
|
new_node['agenda_event_type'] = str(agenda_retval['event_type'])
|
|
else:
|
|
new_node['agenda_event_type'] = ""
|
|
|
|
if ("event_start" in agenda_retval.keys()):
|
|
new_node['agenda_date_jour'] = str(datetime.strptime(str(agenda_retval['event_start'])[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y"))
|
|
new_node['agenda_event_start'] = str(datetime.strptime(str(agenda_retval['event_start'])[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M"))
|
|
|
|
else:
|
|
new_node['agenda_date_jour'] = ""
|
|
new_node['agenda_event_start'] = ""
|
|
|
|
|
|
if ("event_end" in agenda_retval.keys()):
|
|
new_node['agenda_event_end'] = str(datetime.strptime(str(agenda_retval['event_end'])[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M"))
|
|
else:
|
|
new_node['agenda_event_end'] = ""
|
|
|
|
event_duration_second = datetime.strptime(str(agenda_retval['event_end'])[0:16], '%Y-%m-%dT%H:%M') - datetime.strptime(str(agenda_retval['event_start'])[0:16],'%Y-%m-%dT%H:%M')
|
|
|
|
"""
|
|
Gerer les eventuels cas d'erreur sur une date de fin event < debut event
|
|
"""
|
|
if (event_duration_second.total_seconds() < 0):
|
|
event_duration_second = 0
|
|
|
|
event_duration_second = event_duration_second.total_seconds()
|
|
|
|
event_duration_hour = round(divmod(event_duration_second, 3600)[0]+divmod(event_duration_second, 3600)[1]/3600, 2)
|
|
|
|
|
|
total_duration = total_duration + event_duration_hour
|
|
new_node['event_duration_second'] = str(event_duration_second)
|
|
new_node['event_duration_hour'] = str(event_duration_hour)
|
|
|
|
#print(" ### new_node = ", new_node)
|
|
RetObject.append(mycommon.JSONEncoder().encode(new_node))
|
|
|
|
node_duration = {"total_duration":str(total_duration)}
|
|
#print(' ### node_duration = ', node_duration)
|
|
|
|
RetObject.append(mycommon.JSONEncoder().encode(node_duration))
|
|
|
|
return True, RetObject
|
|
|
|
|
|
except Exception as e:
|
|
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
|
return False, " Impossible de récupérer les données "
|
|
|
|
|
|
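
# A minimal usage sketch (illustrative only). Assumptions: 'MY_PARTNER_TOKEN' is a
# placeholder for a valid partner token, the dates use the jj/mm/aaaa format expected
# by mycommon.CheckisDate, and 'tab_rh_id' is either "all" or a comma-separated list
# of ressource_humaine _id values:
#
#   status, rows = Get_Humain_Ressource_With_Planning({
#       'token': 'MY_PARTNER_TOKEN',
#       'session_start_date': '01/01/2024',
#       'session_end_date': '31/01/2024',
#       'tab_rh_id': 'all',
#   })
#   if status:
#       # every element is a JSON-encoded string; the last one carries the total_duration
#       for row in rows:
#           print(row)
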
"""
|
|
Cette fonction permet d'experter un dashbord en csv
|
|
"""
|
|
|
|
def TBD_RH_01_Export_Dashbord_To_Csv(diction):
|
|
try:
|
|
diction = mycommon.strip_dictionary(diction)
|
|
|
|
"""
|
|
Verification des input acceptés
|
|
"""
|
|
field_list = ['token', 'user_dashbord_id']
|
|
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification des champs obligatoires
|
|
"""
|
|
field_list_obligatoire = ['token', 'user_dashbord_id']
|
|
for val in field_list_obligatoire:
|
|
if val not in diction:
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification de l'identité et autorisation de l'entité qui
|
|
appelle cette API
|
|
"""
|
|
token = ""
|
|
if ("token" in diction.keys()):
|
|
if diction['token']:
|
|
token = diction['token']
|
|
|
|
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
|
if (local_status is not True):
|
|
return local_status, my_partner
|
|
|
|
# Recuperation des données du user_dashbord
|
|
my_user_dashbord = MYSY_GV.dbname['user_dashbord'].find_one({'_id': ObjectId(str(diction['user_dashbord_id'])),
|
|
'valide': '1',
|
|
'locked': '0',
|
|
'partner_owner_recid': str(my_partner['recid'])})
|
|
|
|
if (my_user_dashbord is None):
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " - L'identifiant du tableau de bord est invalide ")
|
|
return False, " L'identifiant du tableau de bord est invalide",
|
|
|
|
new_retval_data = {}
|
|
my_new_diction = {}
|
|
my_new_diction['token'] = diction['token']
|
|
|
|
session_start_date = ""
|
|
session_end_date = ""
|
|
|
|
local_default_filter = ast.literal_eval(str(my_user_dashbord['default_filter']))
|
|
if ("session_start_date" in local_default_filter.keys() and "session_end_date" in local_default_filter.keys()):
|
|
session_start_date = local_default_filter['session_start_date']
|
|
session_end_date = local_default_filter['session_end_date']
|
|
|
|
elif ("periode" in local_default_filter.keys()):
|
|
my_new_diction['filter_value'] = str(local_default_filter['periode'])
|
|
if (str(local_default_filter['periode']) == "m0"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
session_start_date = start_current_month_date
|
|
session_end_date = end_current_month_date
|
|
|
|
elif (str(local_default_filter['periode']) == "m1"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
session_start_date = start_current_month_date
|
|
session_end_date = end_current_month_date
|
|
|
|
my_new_diction['session_start_date'] = session_start_date
|
|
my_new_diction['session_end_date'] = session_end_date
|
|
my_new_diction['tab_rh_id'] = "all"
|
|
|
|
if (my_user_dashbord['dashbord_internal_code'] == "tbd_rh_01"):
|
|
local_status, local_retval = Get_Humain_Ressource_With_Planning(my_new_diction)
|
|
if (local_status is False):
|
|
return local_status, local_retval
|
|
new_retval_data = local_retval
|
|
|
|
elif (my_user_dashbord['dashbord_internal_code'] == "tbd_rh_02"):
|
|
local_status, local_retval = Get_Humain_Ressource_With_Planning_And_Cost(my_new_diction)
|
|
if (local_status is False):
|
|
return local_status, local_retval
|
|
new_retval_data = local_retval
|
|
|
|
|
|
else:
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " Requête inconnue ")
|
|
return False, " Requête inconnue ",
|
|
|
|
|
|
# Recuperation des colonne à exporter
|
|
base_config_dashbord_data = MYSY_GV.dbname['base_config_dashbord'].find_one(
|
|
{'dashbord_internal_code': str(my_user_dashbord['dashbord_internal_code'])})
|
|
|
|
tab_exported_fields = []
|
|
if ("exported_fields" in base_config_dashbord_data):
|
|
tab_exported_fields = base_config_dashbord_data['exported_fields']
|
|
|
|
# Remettres les dates de filtres en debut de liste
|
|
if ("filtre_date_fin" in tab_exported_fields):
|
|
tab_exported_fields.remove("filtre_date_fin")
|
|
tab_exported_fields.insert(0, "filtre_date_fin")
|
|
|
|
if ("filtre_date_debut" in tab_exported_fields):
|
|
tab_exported_fields.remove("filtre_date_debut")
|
|
tab_exported_fields.insert(0, "filtre_date_debut")
|
|
|
|
tab_exported_fields.insert(0, "date_extraction")
|
|
|
|
|
|
todays_date = str(date.today().strftime("%d/%m/%Y"))
|
|
ts = datetime.now().timestamp()
|
|
ts = str(ts).replace(".", "").replace(",", "")[-5:]
|
|
|
|
orig_file_name = "Export_csv_" + str(my_partner['recid']) + "_" + str(ts) + ".csv"
|
|
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
|
|
|
|
|
|
|
|
|
|
|
|
with open(outputFilename, 'w', newline='') as outfile:
|
|
fields = tab_exported_fields
|
|
write = csv.DictWriter(outfile, fieldnames=fields)
|
|
write.writeheader()
|
|
|
|
for answers_record in new_retval_data: # Here we are using 'cursor' as an iterator
|
|
flattened_record = {}
|
|
|
|
|
|
for local_fiels in tab_exported_fields:
|
|
answers_record_JSON = ast.literal_eval(str(answers_record))
|
|
if (str(local_fiels) in answers_record_JSON.keys()):
|
|
flattened_record[str(local_fiels)] = answers_record_JSON[str(local_fiels)]
|
|
|
|
|
|
flattened_record['filtre_date_debut'] = str(session_start_date)
|
|
flattened_record['filtre_date_fin'] = str(session_end_date)
|
|
flattened_record['date_extraction'] = str(todays_date)
|
|
|
|
|
|
write.writerow(flattened_record)
|
|
|
|
if os.path.exists(outputFilename):
|
|
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
|
|
return True, send_file(outputFilename, as_attachment=True)
|
|
|
|
return False, "Impossible de générer l'export csv (2) "
|
|
|
|
except Exception as e:
|
|
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
|
return False, " Impossible d'exporter les données "
|
|
|
|
|
|
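
# Sketch of the CSV layout produced by TBD_RH_01_Export_Dashbord_To_Csv (illustrative:
# the real columns after the first three come from the 'exported_fields' list of the
# 'base_config_dashbord' document, so the field names below are assumptions):
#
#   date_extraction,filtre_date_debut,filtre_date_fin,nom,prenom,event_duration_hour
#   05/01/2024,01/01/2024,31/01/2024,Dupont,Marie,3.5
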
"""
|
|
Cette fonction permet de calculer les cout
|
|
par employé, par tache, selon le contrat
|
|
"""
|
|
|
|
def Get_Humain_Ressource_With_Planning_And_Cost(diction):
|
|
try:
|
|
diction = mycommon.strip_dictionary(diction)
|
|
|
|
"""
|
|
Verification des input acceptés
|
|
"""
|
|
field_list = ['token', 'session_start_date', 'session_end_date', 'filter_value', 'tab_rh_id', 'event_type']
|
|
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification des champs obligatoires
|
|
"""
|
|
field_list_obligatoire = ['token', 'session_start_date', 'session_end_date']
|
|
for val in field_list_obligatoire:
|
|
if val not in diction:
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification de l'identité et autorisation de l'entité qui
|
|
appelle cette API
|
|
"""
|
|
token = ""
|
|
if ("token" in diction.keys()):
|
|
if diction['token']:
|
|
token = diction['token']
|
|
|
|
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
|
if (local_status is not True):
|
|
return local_status, my_partner
|
|
|
|
# Verfication de la validité du type d'evenement
|
|
|
|
event_type_filter = {}
|
|
if ("event_type" in diction.keys() and diction['event_type']):
|
|
if (diction['event_type'] not in MYSY_GV.AGENDA_EVENT_TYPE):
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " Le type d'évènement est invalide ")
|
|
return False, " Le type d'évènement est invalide "
|
|
|
|
event_type_filter = {'$eq': ["$event_type", str(diction['event_type'])]}
|
|
|
|
|
|
"""
|
|
/!\ : si 'tab_rh_id' == 'all', cela veut dire qu'on prend touts les employés
|
|
si non, on va aller filtrer selon les contenus de la tab tab_rh_id
|
|
"""
|
|
tab_rh_id = []
|
|
tab_rh_object_id = []
|
|
|
|
if (str(diction['tab_rh_id']) == "all"):
|
|
for val in MYSY_GV.dbname['ressource_humaine'].find({'valide': '1',
|
|
'locked': '0',
|
|
'partner_recid': str(my_partner['recid'])}):
|
|
tab_rh_object_id.append(val['_id'])
|
|
|
|
|
|
|
|
|
|
else: # Verification de la validité de toutes les ressources humaines dans tab_rh_id
|
|
tab_rh_id = str(diction['tab_rh_id']).split(',')
|
|
for rh_id in tab_rh_id:
|
|
if (MYSY_GV.dbname['ressource_humaine'].count_documents({'_id': ObjectId(str(rh_id)),
|
|
'valide': '1',
|
|
'locked': '0',
|
|
'partner_recid': str(
|
|
my_partner['recid'])}) != 1):
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " La ressource humaine :" + str(rh_id) + " n'est pas valide ")
|
|
return False, " La session_id :" + str(rh_id) + " n'est pas valide "
|
|
|
|
tab_rh_object_id.append(ObjectId(str(rh_id)))
|
|
|
|
filt_session_start_date = ""
|
|
if ("session_start_date" in diction.keys() and diction['session_start_date']):
|
|
filt_session_start_date = str(diction['session_start_date'])[0:10]
|
|
local_status = mycommon.CheckisDate(filt_session_start_date)
|
|
if (local_status is False):
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa.")
|
|
return False, " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa."
|
|
|
|
filt_session_end_date = ""
|
|
if ("session_end_date" in diction.keys() and diction['session_end_date']):
|
|
filt_session_end_date = str(diction['session_end_date'])[0:10]
|
|
local_status = mycommon.CheckisDate(filt_session_end_date)
|
|
if (local_status is False):
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa.")
|
|
return False, " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa."
|
|
|
|
"""
|
|
Si la valeur de 'filter_value' est m0 ou m1, on va aller recuperer les date du mois correspondant.
|
|
On ecrase les valeur de filt_session_start_date et filt_session_end_date
|
|
"""
|
|
if ('filter_value' in diction.keys()):
|
|
if (str(diction['filter_value']) == "m0"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
filt_session_start_date = start_current_month_date
|
|
filt_session_end_date = end_current_month_date
|
|
|
|
elif (str(diction['filter_value']) == "m1"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
filt_session_start_date = start_current_month_date
|
|
filt_session_end_date = end_current_month_date
|
|
|
|
"""
|
|
Algorithme :
|
|
|
|
1 - Pour chaque employé, aller chercher ses contrats valides de la periode. Completer ce tableaux avec les données du contrat
|
|
mettre dans une tableau en local
|
|
|
|
2 - Pour chaque tache de l'employée, aller chercher le contrat qui correspond à la periode de la tache
|
|
- Peupler ce tableau avec : tache (agenda), contrat (data), employé (data)
|
|
|
|
"""
|
|
|
|
tab_lines_employe_by_contract = []
|
|
|
|
"""
|
|
Etape 1: Récuperation des donnée employé avec les contrat associés
|
|
"""
|
|
#print(" #### tab_rh_object_id = ", tab_rh_object_id)
|
|
# { "$match": {'_id':{'$in':tab_rh_object_id} }},
|
|
|
|
# {'$match': {'_id': ObjectId('6509a1adaca6e5023d7af487')}},
|
|
qry_employe_contrat = ([
|
|
|
|
{"$match": {'_id': {'$in': tab_rh_object_id}}},
|
|
{'$lookup': {
|
|
'from': 'ressource_humaine_contrat',
|
|
'let': {'rh_id': {'$toString': '$_id'}, 'rh_partner_owner_recid': '$partner_recid'},
|
|
'pipeline': [
|
|
{'$match':
|
|
{'$expr':
|
|
{'$and':
|
|
[
|
|
|
|
{'$eq': ['$valide', '1']},
|
|
{'$eq': ['$rh_id', '$$rh_id']},
|
|
{'$eq': ['$partner_owner_recid', '$$rh_partner_owner_recid']}
|
|
|
|
]
|
|
}
|
|
}
|
|
},
|
|
{"$project": {"_id": 0, }}
|
|
|
|
],
|
|
|
|
'as': 'collection_ressource_humaine_contrat'
|
|
}
|
|
},
|
|
{
|
|
'$unwind': '$collection_ressource_humaine_contrat'
|
|
}
|
|
|
|
])
|
|
|
|
cpt = 0
|
|
for retval in MYSY_GV.dbname['ressource_humaine'].aggregate(qry_employe_contrat):
|
|
# print(" ### retval = ", retval)
|
|
|
|
cpt = cpt + 1
|
|
|
|
rh_id_Object = ObjectId(str(retval['_id']))
|
|
retval['_id'] = str(rh_id_Object)
|
|
|
|
"""
|
|
Si le contrat est lié à un groupe de prix, alors aller chercher les elements du groupe de prix
|
|
"""
|
|
if ("groupe_prix_achat_id" in retval['collection_ressource_humaine_contrat'].keys() and
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_id']):
|
|
price_grp_data = MYSY_GV.dbname['purchase_prices'].find_one(
|
|
{'_id': ObjectId(str(retval['collection_ressource_humaine_contrat']['groupe_prix_achat_id'])),
|
|
'valide': '1',
|
|
'locked': '0',
|
|
'partner_owner_recid': str(retval['collection_ressource_humaine_contrat']['partner_owner_recid']),
|
|
})
|
|
|
|
if (price_grp_data and "prix" in price_grp_data.keys() and "periodicite" in price_grp_data.keys()):
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_code'] = str(price_grp_data['code_groupe_prix'])
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_cout'] = str(
|
|
price_grp_data['prix'])
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_periodicite'] = str(
|
|
price_grp_data['periodicite'])
|
|
else:
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_code'] = ""
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_cout'] = ""
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_periodicite'] = ""
|
|
|
|
else:
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_code'] = ""
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_cout'] = ""
|
|
retval['collection_ressource_humaine_contrat']['groupe_prix_achat_periodicite'] = ""
|
|
|
|
retval['Tab_agenda'] = []
|
|
"""
|
|
Aller chercher toutes taches valides entre les date du contrat
|
|
"""
|
|
date_debut_contrat = ""
|
|
date_fin_contrat = ""
|
|
if ("collection_ressource_humaine_contrat" in retval.keys() and "date_debut" in retval[
|
|
'collection_ressource_humaine_contrat'].keys()):
|
|
date_debut_contrat = retval['collection_ressource_humaine_contrat']['date_debut']
|
|
|
|
if ("collection_ressource_humaine_contrat" in retval.keys() and "date_fin" in retval[
|
|
'collection_ressource_humaine_contrat'].keys()):
|
|
date_fin_contrat = retval['collection_ressource_humaine_contrat']['date_fin']
|
|
|
|
|
|
|
|
if (mycommon.CheckisDate(date_debut_contrat) is False or mycommon.CheckisDate(date_fin_contrat) is False):
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Les dates de debut et/ou fin de contrat de l'employé " + str(
|
|
retval['nom'] + " " + retval['prenom']) + " ne sont au format jj/mm/aaaa.")
|
|
return False, " Les dates de debut et/ou fin de contrat de l'employé " + str(
|
|
retval['nom'] + " " + retval['prenom']) + " ne sont au format jj/mm/aaaa. "
|
|
|
|
|
|
"""
|
|
Etape 2 :
|
|
Aller recuperer les taches dans les agendas
|
|
"""
|
|
|
|
filt_session_start_date_ISODATE = datetime.strptime(str(date_debut_contrat), '%d/%m/%Y')
|
|
filt_session_end_date_ISODATE = datetime.strptime(str(date_fin_contrat), '%d/%m/%Y')
|
|
|
|
local_qry_tache_agenda = ([
|
|
{"$match": {'_id': rh_id_Object}},
|
|
{'$lookup': {
|
|
'from': 'agenda',
|
|
"let": {"rh_id": {'$toString': "$_id"}, 'rh_partner_owner_recid': '$partner_recid'},
|
|
'pipeline': [
|
|
{'$match':
|
|
{'$expr':
|
|
{'$and':
|
|
[
|
|
|
|
event_type_filter,
|
|
{'$eq': ["$valide", "1"]},
|
|
{'$eq': ["$related_collection_recid", '$$rh_id']},
|
|
{'$eq': ["$partner_owner_recid", '$$rh_partner_owner_recid']},
|
|
{'$gte': [{'$toDate': "$event_start"}, filt_session_start_date_ISODATE]},
|
|
{'$lte': [{'$toDate': "$event_start"}, filt_session_end_date_ISODATE]}
|
|
|
|
]
|
|
}
|
|
}
|
|
},
|
|
|
|
],
|
|
'as': 'collection_agenda'
|
|
}
|
|
},
|
|
{
|
|
'$unwind': '$collection_agenda'
|
|
}
|
|
])
|
|
|
|
#print(" ### local_qry_tache_agenda ici pipe_qry = ", local_qry_tache_agenda)
|
|
for retval_agenda in MYSY_GV.dbname['ressource_humaine'].aggregate(local_qry_tache_agenda):
|
|
# print(" ### retval_agenda Trouvé = ", retval_agenda)
|
|
agenda_id_Object = ObjectId(str(retval_agenda['_id']))
|
|
|
|
retval_agenda['_id'] = str(agenda_id_Object)
|
|
retval_agenda['collection_agenda']['_id'] = str(retval_agenda['collection_agenda']['_id'])
|
|
|
|
if ("event_start" in retval_agenda['collection_agenda'].keys()):
|
|
retval_agenda['collection_agenda']['agenda_date_jour'] = str(
|
|
datetime.strptime(str(retval_agenda['collection_agenda']['event_end'])[0:16],
|
|
'%Y-%m-%dT%H:%M').strftime("%d/%m/%Y"))
|
|
|
|
else:
|
|
retval_agenda['collection_agenda']['agenda_date_jour'] = ""
|
|
|
|
event_duration_second = datetime.strptime(str(retval_agenda['collection_agenda']['event_end'])[0:16],
|
|
'%Y-%m-%dT%H:%M') - datetime.strptime(
|
|
str(retval_agenda['collection_agenda']['event_start'])[0:16], '%Y-%m-%dT%H:%M')
|
|
event_duration_second = event_duration_second.total_seconds()
|
|
|
|
event_duration_hour = round(
|
|
divmod(event_duration_second, 3600)[0] + divmod(event_duration_second, 3600)[1] / 3600, 2)
|
|
|
|
retval_agenda['collection_agenda']['event_duration_second'] = str(event_duration_second)
|
|
retval_agenda['collection_agenda']['event_duration_hour'] = str(event_duration_hour)
|
|
|
|
#json_agenda_formatted_str = json.dumps(retval_agenda, indent=2)
|
|
|
|
if ("collection_agenda" in retval_agenda.keys() and "partner_owner_recid" in retval_agenda[
|
|
'collection_agenda'].keys()):
|
|
new_node = retval_agenda['collection_agenda']
|
|
retval['Tab_agenda'].append(new_node)
|
|
# print(" ### NODE ADDED = ", new_node)
|
|
|
|
|
|
|
|
#json_formatted_str = json.dumps(retval, indent=2)
|
|
#print(" ### Final Line : ", json_formatted_str)
|
|
tab_lines_employe_by_contract.append(retval)
|
|
|
|
local_status, local_retval = Format_employee_contrat_agenda_events(tab_lines_employe_by_contract)
|
|
if (local_status is False):
|
|
return local_status, local_retval
|
|
|
|
|
|
RetObject = []
|
|
cpt = 0
|
|
for val in local_retval:
|
|
val['id'] = str(cpt)
|
|
RetObject.append(mycommon.JSONEncoder().encode(val))
|
|
cpt = cpt + 1
|
|
tab_lines_employe_by_contract_json_formatted_str = json.dumps(val, indent=2)
|
|
#print(" ### Final Line " + str(cpt) + " : ", tab_lines_employe_by_contract_json_formatted_str)
|
|
|
|
#print(" RetObject = ", RetObject)
|
|
#print(' on a ' + str(cpt) + ' lignes ')
|
|
|
|
return True, RetObject
|
|
|
|
except Exception as e:
|
|
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
|
return False, "Impossible de récupérer le tableau des tache et avec les cout"
|
|
|
|
|
|
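
# Shape of one entry of tab_lines_employe_by_contract built above (illustrative sketch,
# the values are invented for the example):
#
#   {
#       '_id': '6509a1ad...',                       # employee id (ressource_humaine)
#       'nom': 'Dupont', 'prenom': 'Marie',         # ... plus the other employee fields
#       'collection_ressource_humaine_contrat': {   # one contract per line ($unwind)
#           'date_debut': '01/01/2024', 'date_fin': '31/12/2024',
#           'cout': '35', 'periodicite': 'heure',
#           'groupe_prix_achat_code': '', 'groupe_prix_achat_cout': '',
#           'groupe_prix_achat_periodicite': '',
#       },
#       'Tab_agenda': [<agenda events of the contract period, with event_duration_hour>],
#   }
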
"""
|
|
Cette fonction prend un dictionnaire qui rattache chaque evenement (agenda : type planning) au
|
|
contrat en cours (+les données de l'employé) et retourne une dictionnaire reformaté de manière
|
|
à avoir une ligne par evenement + toutes les data du contrat en face
|
|
"""
|
|
|
|
|
|
def Format_employee_contrat_agenda_events(tab_diction):
|
|
try:
|
|
|
|
return_tab = []
|
|
|
|
total_duration = 0
|
|
total_cost = 0
|
|
|
|
for val in tab_diction:
|
|
#print(" #### Debut traitement de la ligne " + str(val))
|
|
|
|
for event_data in val['Tab_agenda']:
|
|
new_node = {}
|
|
new_node['rh_id'] = val['_id']
|
|
new_node['rh_partner_recid'] = val['partner_recid']
|
|
new_node['rh_civilite'] = val['civilite']
|
|
new_node['rh_nom'] = val['nom']
|
|
new_node['rh_prenom'] = val['prenom']
|
|
new_node['rh_email'] = val['email']
|
|
new_node['rh_telephone_mobile'] = val['telephone_mobile']
|
|
new_node['rh_ismanager'] = val['ismanager']
|
|
new_node['rh_fonction'] = val['fonction']
|
|
new_node['rh_telephone'] = val['telephone']
|
|
|
|
if( "purchase_price_group_id" in val.keys() ):
|
|
new_node['rh_purchase_price_group_id'] = val['purchase_price_group_id']
|
|
else:
|
|
new_node['rh_purchase_price_group_id'] = ""
|
|
|
|
if ("date_naissance" in val.keys()):
|
|
new_node['rh_date_naissance'] = val['date_naissance']
|
|
else:
|
|
new_node['rh_date_naissance'] = ""
|
|
|
|
if ("adr_adresse" in val.keys()):
|
|
new_node['rh_adr_adresse'] = val['adr_adresse']
|
|
else:
|
|
new_node['rh_date_naissance'] = ""
|
|
|
|
if ("adr_code_postal" in val.keys()):
|
|
new_node['rh_adr_code_postal'] = val['adr_code_postal']
|
|
else:
|
|
new_node['rh_date_naissance'] = ""
|
|
|
|
if ("adr_ville" in val.keys()):
|
|
new_node['rh_adr_ville'] = val['adr_ville']
|
|
else:
|
|
new_node['rh_adr_ville'] = ""
|
|
|
|
if ("adr_pays" in val.keys()):
|
|
new_node['rh_adr_pays'] = val['adr_pays']
|
|
else:
|
|
new_node['rh_adr_pays'] = ""
|
|
|
|
if ("collection_ressource_humaine_contrat" in val.keys()):
|
|
if ("date_debut" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_date_debut'] = val['collection_ressource_humaine_contrat']['date_debut']
|
|
else:
|
|
new_node['rh_contrat_date_debut'] = ""
|
|
|
|
if ("date_fin" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_date_fin'] = val['collection_ressource_humaine_contrat']['date_fin']
|
|
else:
|
|
new_node['rh_contrat_date_fin'] = ""
|
|
|
|
if ("type_contrat" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_type_contrat'] = val['collection_ressource_humaine_contrat'][
|
|
'type_contrat']
|
|
else:
|
|
new_node['rh_contrat_type_contrat'] = ""
|
|
|
|
if ("type_employe" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_type_employe'] = val['collection_ressource_humaine_contrat'][
|
|
'type_employe']
|
|
else:
|
|
new_node['rh_contrat_type_employe'] = ""
|
|
|
|
if ("cout" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_cout'] = val['collection_ressource_humaine_contrat']['cout']
|
|
else:
|
|
new_node['rh_contrat_cout'] = ""
|
|
|
|
if ("periodicite" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_periodicite'] = val['collection_ressource_humaine_contrat']['periodicite']
|
|
else:
|
|
new_node['rh_contrat_periodicite'] = ""
|
|
|
|
if ("quantite" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_quantite'] = val['collection_ressource_humaine_contrat']['quantite']
|
|
else:
|
|
new_node['rh_contrat_quantite'] = ""
|
|
|
|
if ("groupe_prix_achat_id" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_groupe_prix_achat_id'] = val['collection_ressource_humaine_contrat'][
|
|
'groupe_prix_achat_id']
|
|
else:
|
|
new_node['rh_contrat_groupe_prix_achat_id'] = ""
|
|
|
|
if ("comment" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_comment'] = val['collection_ressource_humaine_contrat']['comment']
|
|
else:
|
|
new_node['rh_contrat_comment'] = ""
|
|
|
|
|
|
|
|
if ("groupe_prix_achat_code" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_groupe_prix_achat_code'] = val['collection_ressource_humaine_contrat'][ 'groupe_prix_achat_code']
|
|
else:
|
|
new_node['rh_contrat_groupe_prix_achat_code'] = ""
|
|
|
|
if ("groupe_prix_achat_cout" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_groupe_prix_achat_cout'] = val['collection_ressource_humaine_contrat']['groupe_prix_achat_cout']
|
|
else:
|
|
new_node['rh_contrat_groupe_prix_achat_cout'] = ""
|
|
|
|
if ("groupe_prix_achat_periodicite" in val['collection_ressource_humaine_contrat'].keys()):
|
|
new_node['rh_contrat_groupe_prix_achat_periodicite'] = \
|
|
val['collection_ressource_humaine_contrat']['groupe_prix_achat_periodicite']
|
|
else:
|
|
new_node['rh_contrat_groupe_prix_achat_periodicite'] = ""
|
|
|
|
# Traitement des données de l'evement dans l'agenda
|
|
new_node['rh_event_planning_event_type'] = event_data['event_type']
|
|
new_node['rh_event_planning_event_title'] = event_data['event_title']
|
|
new_node['rh_event_planning_event_start'] = str(datetime.strptime(str(event_data['event_start'] )[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M"))
|
|
new_node['rh_event_planning_event_end'] = str(datetime.strptime(str(event_data['event_end'] )[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M"))
|
|
new_node['rh_event_planning_agenda_date_jour'] = event_data['agenda_date_jour']
|
|
new_node['rh_event_planning_event_duration_hour'] = event_data['event_duration_hour']
|
|
|
|
total_duration = total_duration + mycommon.tryFloat(str(event_data['event_duration_hour']))
|
|
|
|
new_node['rh_event_planning_event_duration_second'] = event_data['event_duration_second']
|
|
if ("comment" in event_data.keys()):
|
|
new_node['rh_event_planning_even_comment'] = event_data['comment']
|
|
else:
|
|
new_node['rh_event_planning_even_comment'] = ""
|
|
|
|
"""
|
|
# Calcul du cout total d'une tache
|
|
/!\ Le prix du contrat prend toujours le pas sur le prix rentré à la main.
|
|
C'est a dire que si un pour quelque raison que ce soit, sur la même période un employé
|
|
a un groupe de prix d'achat et une prix saisie à la main, alors les totaux seront calculés en se basant
|
|
sur le prix qui est dans le contrat
|
|
|
|
"""
|
|
if ("rh_contrat_groupe_prix_achat_id" in new_node.keys() and new_node['rh_contrat_groupe_prix_achat_id']
|
|
and "rh_contrat_groupe_prix_achat_periodicite" in new_node.keys() and new_node[
|
|
'rh_contrat_groupe_prix_achat_periodicite']
|
|
and "rh_event_planning_event_duration_hour" in new_node.keys() and new_node[
|
|
'rh_event_planning_event_duration_hour']
|
|
and "rh_contrat_groupe_prix_achat_cout" in new_node.keys() and new_node[
|
|
'rh_contrat_groupe_prix_achat_cout']):
|
|
|
|
if (str(new_node['rh_contrat_groupe_prix_achat_periodicite']) == "heure"):
|
|
new_node['rh_event_planning_event_cost'] = str( mycommon.tryFloat(new_node['rh_contrat_groupe_prix_achat_cout']) * mycommon.tryFloat( new_node['rh_event_planning_event_duration_hour']))
|
|
total_cost = total_cost + mycommon.tryFloat((str(new_node['rh_event_planning_event_cost'])))
|
|
|
|
elif (str(new_node['rh_contrat_groupe_prix_achat_periodicite']) == "fixe"):
|
|
new_node['rh_event_planning_event_cost'] = str((new_node['rh_contrat_groupe_prix_achat_cout']))
|
|
total_cost = total_cost + mycommon.tryFloat((str(new_node['rh_event_planning_event_cost'])))
|
|
|
|
|
|
elif ("rh_contrat_groupe_prix_achat_id" in new_node.keys() and str(
|
|
new_node['rh_contrat_groupe_prix_achat_id']) == ""
|
|
and "rh_contrat_periodicite" in new_node.keys() and new_node['rh_contrat_periodicite']
|
|
and "rh_event_planning_event_duration_hour" in new_node.keys() and new_node[
|
|
'rh_event_planning_event_duration_hour']
|
|
and "rh_contrat_cout" in new_node.keys() and new_node['rh_contrat_cout']):
|
|
|
|
if (str(new_node['rh_contrat_periodicite']) == "heure"):
|
|
new_node['rh_event_planning_event_cost'] = str(
|
|
mycommon.tryFloat(new_node['rh_contrat_cout']) * mycommon.tryFloat(
|
|
new_node['rh_event_planning_event_duration_hour']))
|
|
total_cost = total_cost + mycommon.tryFloat((str(new_node['rh_event_planning_event_cost'])))
|
|
|
|
elif (str(new_node['rh_contrat_periodicite']) == "fixe"):
|
|
new_node['rh_event_planning_event_cost'] = str((new_node['rh_contrat_cout']))
|
|
total_cost = total_cost + mycommon.tryFloat((str(new_node['rh_event_planning_event_cost'])))
|
|
|
|
|
|
else:
|
|
new_node['rh_event_planning_event_cost'] = "Impossible a calculer"
|
|
|
|
|
|
return_tab.append(new_node)
|
|
|
|
node_total_duration = {'total_duration':str(total_duration)}
|
|
return_tab.append(node_total_duration)
|
|
|
|
node_total_cost = {'total_cost':str(total_cost)}
|
|
return_tab.append(node_total_cost)
|
|
|
|
|
|
#print(" #### total_duration = ", total_duration)
|
|
#print(" #### total_cost = ", total_cost)
|
|
|
|
return True, return_tab
|
|
|
|
except Exception as e:
|
|
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
|
return False, " Impossible de reformater et de normer le tableau des tâches et coûts "
|
|
|
|
|
|
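
# Illustrative helper, not used by the code above: a self-contained restatement of the
# per-event cost rule applied in Format_employee_contrat_agenda_events (assumption: the
# same "heure"/"fixe" semantics for the 'periodicite' values).
def _example_event_cost(duration_hour, cout, periodicite):
    """Cost of one event: hourly rate * duration for 'heure', flat amount for 'fixe'."""
    if str(periodicite) == "heure":
        return float(cout) * float(duration_hour)
    if str(periodicite) == "fixe":
        return float(cout)
    raise ValueError("periodicite non gérée : " + str(periodicite))

# _example_event_cost(3.5, 40, "heure") -> 140.0
# _example_event_cost(3.5, 500, "fixe") -> 500.0
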
"""
|
|
Cette fonction permet d'experter un dashbord en Excel
|
|
"""
|
|
|
|
def TBD_RH_01_Export_Dashbord_To_Excel(diction):
|
|
try:
|
|
diction = mycommon.strip_dictionary(diction)
|
|
|
|
"""
|
|
Verification des input acceptés
|
|
"""
|
|
field_list = ['token', 'user_dashbord_id']
|
|
|
|
incom_keys = diction.keys()
|
|
for val in incom_keys:
|
|
if val not in field_list and val.startswith('my_') is False:
|
|
mycommon.myprint(str(
|
|
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification des champs obligatoires
|
|
"""
|
|
field_list_obligatoire = ['token', 'user_dashbord_id']
|
|
for val in field_list_obligatoire:
|
|
if val not in diction:
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
|
return False, " Les informations fournies sont incorrectes",
|
|
|
|
"""
|
|
Verification de l'identité et autorisation de l'entité qui
|
|
appelle cette API
|
|
"""
|
|
token = ""
|
|
if ("token" in diction.keys()):
|
|
if diction['token']:
|
|
token = diction['token']
|
|
|
|
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
|
|
if (local_status is not True):
|
|
return local_status, my_partner
|
|
|
|
# Recuperation des données du user_dashbord
|
|
my_user_dashbord = MYSY_GV.dbname['user_dashbord'].find_one({'_id': ObjectId(str(diction['user_dashbord_id'])),
|
|
'valide': '1',
|
|
'locked': '0',
|
|
'partner_owner_recid': str(my_partner['recid'])})
|
|
|
|
if (my_user_dashbord is None):
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " - L'identifiant du tableau de bord est invalide ")
|
|
return False, " L'identifiant du tableau de bord est invalide",
|
|
|
|
new_retval_data = {}
|
|
my_new_diction = {}
|
|
my_new_diction['token'] = diction['token']
|
|
|
|
session_start_date = ""
|
|
session_end_date = ""
|
|
|
|
local_default_filter = ast.literal_eval(str(my_user_dashbord['default_filter']))
|
|
if ("session_start_date" in local_default_filter.keys() and "session_end_date" in local_default_filter.keys()):
|
|
session_start_date = local_default_filter['session_start_date']
|
|
session_end_date = local_default_filter['session_end_date']
|
|
|
|
elif ("periode" in local_default_filter.keys()):
|
|
my_new_diction['filter_value'] = str(local_default_filter['periode'])
|
|
if (str(local_default_filter['periode']) == "m0"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
session_start_date = start_current_month_date
|
|
session_end_date = end_current_month_date
|
|
|
|
elif (str(local_default_filter['periode']) == "m1"):
|
|
# On recupere les date du mois en cours
|
|
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
|
|
if (local_status is False):
|
|
return local_status, start_current_month_date
|
|
|
|
session_start_date = start_current_month_date
|
|
session_end_date = end_current_month_date
|
|
|
|
my_new_diction['session_start_date'] = session_start_date
|
|
my_new_diction['session_end_date'] = session_end_date
|
|
my_new_diction['tab_rh_id'] = "all"
|
|
|
|
if (my_user_dashbord['dashbord_internal_code'] == "tbd_rh_01"):
|
|
local_status, local_retval = Get_Humain_Ressource_With_Planning(my_new_diction)
|
|
if (local_status is False):
|
|
return local_status, local_retval
|
|
new_retval_data = local_retval
|
|
|
|
if (my_user_dashbord['dashbord_internal_code'] == "tbd_rh_02"):
|
|
local_status, local_retval = Get_Humain_Ressource_With_Planning_And_Cost(my_new_diction)
|
|
if (local_status is False):
|
|
return local_status, local_retval
|
|
new_retval_data = local_retval
|
|
|
|
|
|
else:
|
|
mycommon.myprint(
|
|
str(inspect.stack()[0][3]) + " Requête inconnue ")
|
|
return False, " Requête inconnue ",
|
|
|
|
|
|
# Recuperation des colonne à exporter
|
|
base_config_dashbord_data = MYSY_GV.dbname['base_config_dashbord'].find_one(
|
|
{'dashbord_internal_code': str(my_user_dashbord['dashbord_internal_code'])})
|
|
|
|
tab_exported_fields = []
|
|
if ("exported_fields" in base_config_dashbord_data):
|
|
tab_exported_fields = base_config_dashbord_data['exported_fields']
|
|
|
|
# Remettres les dates de filtres en debut de liste
|
|
if ("filtre_date_fin" in tab_exported_fields):
|
|
tab_exported_fields.remove("filtre_date_fin")
|
|
tab_exported_fields.insert(0, "filtre_date_fin")
|
|
|
|
if ("filtre_date_debut" in tab_exported_fields):
|
|
tab_exported_fields.remove("filtre_date_debut")
|
|
tab_exported_fields.insert(0, "filtre_date_debut")
|
|
|
|
tab_exported_fields.insert(0, "date_extraction")
|
|
|
|
|
|
todays_date = str(date.today().strftime("%d/%m/%Y"))
|
|
ts = datetime.now().timestamp()
|
|
ts = str(ts).replace(".", "").replace(",", "")[-5:]
|
|
|
|
orig_file_name = "Export_csv_" + str(my_partner['recid']) + "_" + str(ts) + ".csv"
|
|
outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)
|
|
|
|
# Create a workbook and add a worksheet.
|
|
workbook = xlsxwriter.Workbook(outputFilename)
|
|
worksheet = workbook.add_worksheet()
|
|
row = 0
|
|
column = 0
|
|
|
|
# Ecrire l'entete
|
|
for header_item in tab_exported_fields:
|
|
worksheet.write(row, column, header_item)
|
|
column += 1
|
|
|
|
# Ecrire le reste des lignes
|
|
for answers_record in new_retval_data: # Here we are using 'cursor' as an iterator
|
|
column = 0
|
|
row = row + 1
|
|
|
|
worksheet.write(row, column, str(session_start_date))
|
|
column += 1
|
|
worksheet.write(row, column, str(session_end_date))
|
|
column += 1
|
|
worksheet.write(row, column, str(todays_date))
|
|
column += 1
|
|
|
|
|
|
flattened_record = {}
|
|
|
|
for local_fiels in tab_exported_fields:
|
|
answers_record_JSON = ast.literal_eval(str(answers_record))
|
|
|
|
if (str(local_fiels) in answers_record_JSON.keys()):
|
|
|
|
local_status, local_retval = mycommon.IsFloat(str(answers_record_JSON[str(local_fiels)]).strip())
|
|
if (local_status is True):
|
|
no_html = answers_record_JSON[str(local_fiels)]
|
|
else:
|
|
no_html = mycommon.cleanhtml(answers_record_JSON[str(local_fiels)])
|
|
|
|
else:
|
|
no_html = ""
|
|
|
|
worksheet.write(row, column, no_html)
|
|
column += 1
|
|
|
|
|
|
workbook.close()
|
|
|
|
if os.path.exists(outputFilename):
|
|
# print(" ### ok os.path.exists(outputFilename) "+str(outputFilename))
|
|
return True, send_file(outputFilename, as_attachment=True)
|
|
|
|
return False, "Impossible de générer l'export csv (2) "
|
|
|
|
except Exception as e:
|
|
exc_type, exc_obj, exc_tb = sys.exc_info()
|
|
mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
|
return False, " Impossible d'exporter les données "
|
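
# Note (assumption about the surrounding Flask app, for illustration only): both export
# functions return a flask.send_file response as their second element on success, so a
# route handler can forward it directly, e.g.
#
#   status, payload = TBD_RH_01_Export_Dashbord_To_Excel({'token': my_token,
#                                                         'user_dashbord_id': my_dashbord_id})
#   # payload is a send_file response on success, an error string otherwise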