"""
Ce fichier permet de traiter les tableaux de bord associées aux ressources humaines
"""
import ast
import dateutil
import pymongo
from dateutil.relativedelta import relativedelta
from flask import send_file
from pymongo import MongoClient
import json
from bson import ObjectId
import re
from datetime import datetime, timezone, date, timedelta
import prj_common as mycommon
import secrets
import inspect
import sys, os
import csv
import pandas as pd
from pymongo import ReturnDocument
import GlobalVariable as MYSY_GV
from math import isnan
from operator import itemgetter
"""
Recupération du tableau des ressources humaines en lien avec leur planification
"""
def Get_Humain_Ressource_With_Planning(diction):
    try:
        diction = mycommon.strip_dictionary(diction)

        """
        Check the accepted input fields
        """
        field_list = ['token', 'session_start_date', 'session_end_date', 'filter_value', 'tab_rh_id', 'event_type']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes"
"""
Verification des champs obligatoires
"""
field_list_obligatoire = ['token', 'session_start_date', 'session_end_date' ]
for val in field_list_obligatoire:
if val not in diction:
mycommon.myprint(
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
return False, " Les informations fournies sont incorrectes",
"""
Verification de l'identité et autorisation de l'entité qui
appelle cette API
"""
token = ""
if ("token" in diction.keys()):
if diction['token']:
token = diction['token']
local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
if (local_status is not True):
return local_status, my_partner
        # Check that the event type is valid
        event_type_filter = {}
        if ("event_type" in diction.keys() and diction['event_type']):
            if (diction['event_type'] not in MYSY_GV.AGENDA_EVENT_TYPE):
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " Le type d'évènement est invalide ")
                return False, " Le type d'évènement est invalide "

            event_type_filter = {'$eq': ["$event_type", str(diction['event_type'])]}

        print(" ### event_type_filter = ", event_type_filter)
"""
/!\ : si 'tab_rh_id' == 'all', cela veut dire qu'on prend touts les employés
si non, on va aller filtrer selon les contenus de la tab tab_rh_id
"""
        tab_rh_id = []
        tab_rh_object_id = []
        if (str(diction['tab_rh_id']) == "all"):
            for val in MYSY_GV.dbname['ressource_humaine'].find({'valide': '1',
                                                                 'locked': '0',
                                                                 'partner_recid': str(my_partner['recid'])}):
                tab_rh_object_id.append(val['_id'])

        else:   # Check that every human resource listed in tab_rh_id is valid
            tab_rh_id = str(diction['tab_rh_id']).split(',')
            for rh_id in tab_rh_id:
                if (MYSY_GV.dbname['ressource_humaine'].count_documents({'_id': ObjectId(str(rh_id)),
                                                                         'valide': '1',
                                                                         'locked': '0',
                                                                         'partner_recid': str(my_partner['recid'])}) != 1):
                    mycommon.myprint(
                        str(inspect.stack()[0][3]) + " La ressource humaine :" + str(rh_id) + " n'est pas valide ")
                    return False, " La ressource humaine :" + str(rh_id) + " n'est pas valide "

                tab_rh_object_id.append(ObjectId(str(rh_id)))
        filt_session_start_date = ""
        if ("session_start_date" in diction.keys() and diction['session_start_date']):
            filt_session_start_date = str(diction['session_start_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_session_start_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de debut de session' n'est pas au format jj/mm/aaaa."

        filt_session_end_date = ""
        if ("session_end_date" in diction.keys() and diction['session_end_date']):
            filt_session_end_date = str(diction['session_end_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_session_end_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de fin de session' n'est pas au format jj/mm/aaaa."
"""
Si la valeur de 'filter_value' est m0 ou m1, on va aller recuperer les date du mois correspondant.
On ecrase les valeur de filt_session_start_date et filt_session_end_date
"""
        if ('filter_value' in diction.keys()):
            if (str(diction['filter_value']) == "m0"):
                # Dates of the current month
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_session_start_date = start_current_month_date
                filt_session_end_date = end_current_month_date

            elif (str(diction['filter_value']) == "m1"):
                # Dates of the previous month
                local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
                if (local_status is False):
                    return local_status, start_current_month_date

                filt_session_start_date = start_current_month_date
                filt_session_end_date = end_current_month_date

        RetObject = []
        val_tmp = 1

        filt_session_start_date_ISODATE = datetime.strptime(str(filt_session_start_date), '%d/%m/%Y')
        filt_session_end_date_ISODATE = datetime.strptime(str(filt_session_end_date), '%d/%m/%Y')

        filt_session_start_date_ISODATE_work = filt_session_start_date_ISODATE
        filt_session_end_date_ISODATE_work = filt_session_end_date_ISODATE

        interval_date = []
        pipe_qry = [
            {"$match": {'_id': {'$in': tab_rh_object_id}}},
            {'$lookup': {
                'from': 'agenda',
                "let": {"rh_id": {'$toString': "$_id"}, 'rh_partner_owner_recid': '$partner_recid'},
                'pipeline': [
                    {'$match':
                        {'$expr':
                            {'$and':
                                [
                                    event_type_filter,
                                    {'$eq': ["$valide", "1"]},
                                    {'$eq': ["$related_collection_recid", '$$rh_id']},
                                    {'$eq': ["$partner_owner_recid", '$$rh_partner_owner_recid']},
                                    {'$gte': [{'$toDate': "$event_start"}, filt_session_start_date_ISODATE]},
                                    {'$lte': [{'$toDate': "$event_start"}, filt_session_end_date_ISODATE]}
                                ]
                            }
                        }
                    },
                ],
                'as': 'collection_agenda'
            }}
        ]

        print(" ### Get_Humain_Ressource_With_Planning ici pipe_qry = ", pipe_qry)
        new_retval_titles = []
        new_retval_data = []

        local_id = 0
        for retval in MYSY_GV.dbname['ressource_humaine'].aggregate(pipe_qry):
            # Retrieve the agenda events joined to this human resource
            if ("collection_agenda" in retval.keys() and len(retval['collection_agenda']) > 0):
                for agenda_retval in retval['collection_agenda']:
                    local_id = local_id + 1
                    new_node = {}
                    new_node['id'] = str(local_id)
                    new_node['_id'] = str(retval['_id'])

                    if ("civilite" in retval.keys()):
                        new_node['civilite'] = str(retval['civilite'])
                    else:
                        new_node['civilite'] = ""

                    if ("nom" in retval.keys()):
                        new_node['nom'] = str(retval['nom'])
                    else:
                        new_node['nom'] = ""

                    if ("prenom" in retval.keys()):
                        new_node['prenom'] = str(retval['prenom'])
                    else:
                        new_node['prenom'] = ""

                    if ("email" in retval.keys()):
                        new_node['email'] = str(retval['email'])
                    else:
                        new_node['email'] = ""

                    if ("ismanager" in retval.keys()):
                        new_node['ismanager'] = str(retval['ismanager'])
                    else:
                        new_node['ismanager'] = "0"

                    if ("fonction" in retval.keys()):
                        new_node['fonction'] = str(retval['fonction'])
                    else:
                        new_node['fonction'] = ""

                    if ("_id" in agenda_retval.keys()):
                        new_node['agenda_id'] = str(agenda_retval['_id'])
                    else:
                        new_node['agenda_id'] = ""

                    if ("event_title" in agenda_retval.keys()):
                        new_node['agenda_event_title'] = str(agenda_retval['event_title'])
                    else:
                        new_node['agenda_event_title'] = ""

                    if ("comment" in agenda_retval.keys()):
                        new_node['agenda_event_code_session'] = str(agenda_retval['comment'])
                    else:
                        new_node['agenda_event_code_session'] = ""

                    if ("sequence_session_id" in agenda_retval.keys()):
                        new_node['agenda_event_sequence_session_id'] = str(agenda_retval['sequence_session_id'])
                    else:
                        new_node['agenda_event_sequence_session_id'] = ""

                    if ("event_type" in agenda_retval.keys()):
                        new_node['agenda_event_type'] = str(agenda_retval['event_type'])
                    else:
                        new_node['agenda_event_type'] = ""

                    if ("event_start" in agenda_retval.keys()):
                        # Day and start time of the event (read from 'event_start', not 'event_end')
                        new_node['agenda_date_jour'] = str(datetime.strptime(str(agenda_retval['event_start'])[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y"))
                        new_node['agenda_event_start'] = str(datetime.strptime(str(agenda_retval['event_start'])[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M"))
                    else:
                        new_node['agenda_date_jour'] = ""
                        new_node['agenda_event_start'] = ""

                    if ("event_end" in agenda_retval.keys()):
                        new_node['agenda_event_end'] = str(datetime.strptime(str(agenda_retval['event_end'])[0:16], '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M"))
                    else:
                        new_node['agenda_event_end'] = ""
                    # Event duration = event_end - event_start
                    event_duration_second = datetime.strptime(str(agenda_retval['event_end'])[0:16], '%Y-%m-%dT%H:%M') - datetime.strptime(str(agenda_retval['event_start'])[0:16], '%Y-%m-%dT%H:%M')
                    event_duration_second = event_duration_second.total_seconds()
                    event_duration_hour = round(divmod(event_duration_second, 3600)[0] + divmod(event_duration_second, 3600)[1] / 3600, 2)

                    new_node['event_duration_second'] = str(event_duration_second)
                    new_node['event_duration_hour'] = str(event_duration_hour)
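                    # Worked example of the conversion above: a 09:00 -> 10:30 event gives
                    # event_duration_second = 5400.0, divmod(5400.0, 3600) = (1.0, 1800.0),
                    # hence event_duration_hour = round(1.0 + 1800.0 / 3600, 2) = 1.5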
                    # print(" ### new_node = ", new_node)
                    RetObject.append(mycommon.JSONEncoder().encode(new_node))

        return True, RetObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de récupérer les données "
"""
Cette fonction permet d'experter un dashbord en csv
"""
def TBD_RH_01_Export_Dashbord_To_Csv(diction):
    try:
        diction = mycommon.strip_dictionary(diction)

        """
        Check the accepted input fields
        """
        field_list = ['token', 'user_dashbord_id']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes"

        """
        Check the mandatory fields
        """
        field_list_obligatoire = ['token', 'user_dashbord_id']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes"

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner
        # Fetch the user_dashbord record
        my_user_dashbord = MYSY_GV.dbname['user_dashbord'].find_one({'_id': ObjectId(str(diction['user_dashbord_id'])),
                                                                     'valide': '1',
                                                                     'locked': '0',
                                                                     'partner_owner_recid': str(my_partner['recid'])})

        if (my_user_dashbord is None):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - L'identifiant du tableau de bord est invalide ")
            return False, " L'identifiant du tableau de bord est invalide"
        new_retval_data = {}

        my_new_diction = {}
        my_new_diction['token'] = diction['token']

        session_start_date = ""
        session_end_date = ""

        local_default_filter = ast.literal_eval(str(my_user_dashbord['default_filter']))
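        # 'default_filter' is stored as the string form of a dict and parsed with ast.literal_eval.
        # Two shapes are handled below (the values shown here are illustrative):
        #   "{'session_start_date': '01/01/2024', 'session_end_date': '31/01/2024'}"   -> explicit date range
        #   "{'periode': 'm0'}" or "{'periode': 'm1'}"                                  -> current / previous month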
if ("session_start_date" in local_default_filter.keys() and "session_end_date" in local_default_filter.keys()):
session_start_date = local_default_filter['session_start_date']
session_end_date = local_default_filter['session_end_date']
elif ("periode" in local_default_filter.keys()):
my_new_diction['filter_value'] = str(local_default_filter['periode'])
if (str(local_default_filter['periode']) == "m0"):
# On recupere les date du mois en cours
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Current_Month_Start_End_Date()
if (local_status is False):
return local_status, start_current_month_date
session_start_date = start_current_month_date
session_end_date = end_current_month_date
elif (str(local_default_filter['periode']) == "m1"):
# On recupere les date du mois en cours
local_status, start_current_month_date, end_current_month_date = mycommon.Get_Previous_Month_Start_End_Date()
if (local_status is False):
return local_status, start_current_month_date
session_start_date = start_current_month_date
session_end_date = end_current_month_date
        my_new_diction['session_start_date'] = session_start_date
        my_new_diction['session_end_date'] = session_end_date
        my_new_diction['tab_rh_id'] = "all"

        if (my_user_dashbord['dashbord_internal_code'] == "tbd_rh_01"):
            local_status, local_retval = Get_Humain_Ressource_With_Planning(my_new_diction)
            if (local_status is False):
                return local_status, local_retval
            new_retval_data = local_retval

        else:
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " Requête inconnue ")
            return False, " Requête inconnue "
        # Fetch the list of columns to export
        base_config_dashbord_data = MYSY_GV.dbname['base_config_dashbord'].find_one(
            {'dashbord_internal_code': str(my_user_dashbord['dashbord_internal_code'])})

        tab_exported_fields = []
        if ("exported_fields" in base_config_dashbord_data):
            tab_exported_fields = base_config_dashbord_data['exported_fields']

        # Move the filter dates back to the beginning of the column list
        if ("filtre_date_fin" in tab_exported_fields):
            tab_exported_fields.remove("filtre_date_fin")
            tab_exported_fields.insert(0, "filtre_date_fin")

        if ("filtre_date_debut" in tab_exported_fields):
            tab_exported_fields.remove("filtre_date_debut")
            tab_exported_fields.insert(0, "filtre_date_debut")

        tab_exported_fields.insert(0, "date_extraction")
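        # Worked example of the reordering above, starting from
        # exported_fields = ['nom', 'filtre_date_debut', 'filtre_date_fin'] (illustrative):
        #   after the two remove/insert pairs : ['filtre_date_debut', 'filtre_date_fin', 'nom']
        #   after inserting 'date_extraction' : ['date_extraction', 'filtre_date_debut', 'filtre_date_fin', 'nom']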
        todays_date = str(date.today().strftime("%d/%m/%Y"))
        ts = datetime.now().timestamp()
        ts = str(ts).replace(".", "").replace(",", "")[-5:]
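        # Illustrative result of the two lines above: datetime.now().timestamp() ≈ 1709557123.456789
        # gives ts = "56789" (the last 5 digits once the decimal point is stripped), hence a file
        # name such as "Export_csv_<partner_recid>_56789.csv" in MYSY_GV.TEMPORARY_DIRECTORY.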
        orig_file_name = "Export_csv_" + str(my_partner['recid']) + "_" + str(ts) + ".csv"
        outputFilename = str(MYSY_GV.TEMPORARY_DIRECTORY) + "/" + str(orig_file_name)

        with open(outputFilename, 'w', newline='') as outfile:
            fields = tab_exported_fields

            write = csv.DictWriter(outfile, fieldnames=fields)
            write.writeheader()

            for answers_record in new_retval_data:  # new_retval_data is the list of JSON-encoded nodes
                flattened_record = {}
                for local_fiels in tab_exported_fields:
                    answers_record_JSON = ast.literal_eval(str(answers_record))
                    if (str(local_fiels) in answers_record_JSON.keys()):
                        flattened_record[str(local_fiels)] = answers_record_JSON[str(local_fiels)]

                flattened_record['filtre_date_debut'] = str(session_start_date)
                flattened_record['filtre_date_fin'] = str(session_end_date)
                flattened_record['date_extraction'] = str(todays_date)

                write.writerow(flattened_record)

        if os.path.exists(outputFilename):
            # print(" ### ok os.path.exists(outputFilename) " + str(outputFilename))
            return True, send_file(outputFilename, as_attachment=True)

        return False, "Impossible de générer l'export csv (2) "
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible d'exporter les données "
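

# Minimal manual smoke test; a sketch only, assuming a reachable MongoDB instance and a valid
# partner token (the token below is a placeholder). TBD_RH_01_Export_Dashbord_To_Csv is not
# called here because send_file() needs an active Flask request context.
if __name__ == "__main__":
    demo_status, demo_rows = Get_Humain_Ressource_With_Planning({
        'token': 'REPLACE_WITH_A_VALID_TOKEN',
        'session_start_date': '01/01/2024',
        'session_end_date': '31/01/2024',
        'tab_rh_id': 'all',
    })
    print(" ### demo_status = ", demo_status, " - nb rows = ", len(demo_rows) if demo_status else 0)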