import ast
import hashlib
import _pickle as cPickle
import pickle
from PIL import Image
import requests
import bson
from pymongo import MongoClient
import pymongo
from difflib import SequenceMatcher
import textdistance
from datetime import datetime, date
import logging
import secrets
import base64
from bson import ObjectId
import inspect
from werkzeug.utils import secure_filename
import time
import os
import csv
import sys
import pandas as pd
from pymongo import ReturnDocument
from unidecode import unidecode
import GlobalVariable as MYSY_GV
from serpapi import GoogleSearch
import re
import email_mgt as email_mgt
import random
import json
import Ela_Spacy as ElaSpacy
from colorama import Fore
from colorama import Style
from flask import Flask, Response, render_template
from xhtml2pdf import pisa
import jinja2
import ftplib
import pysftp
import html
import mariadb
from datetime import timedelta
import Collection_Historique as Collection_Historique
import partner_client
import partners
import math
import module_editique as module_editique

class JSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)
        if isinstance(o, (datetime, date)):
            return o.isoformat()

        if isinstance(o, bytes):
            return base64.b64encode(o).decode()

        return json.JSONEncoder.default(self, o)

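# Usage sketch for the encoder above: pass it to json.dumps via the cls argument so
# Mongo ObjectId, datetime/date and bytes values serialise cleanly, e.g.
#   json.dumps({'_id': ObjectId(), 'created': datetime.now()}, cls=JSONEncoder)
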
def myprint(message=""):
    logging.info(str(datetime.now()) + " : " + str(message))
    print(Fore.RED + str(datetime.now()) + " : " + str(message) + Style.RESET_ALL)


def myprint_debug(message=""):
    if MYSY_GV.MYSY_DEBUG_LEVEL > 0:
        logging.info("DEBUG : " + str(datetime.now()) + " : " + str(message))
        print(Fore.RED + "DEBUG : " + str(datetime.now()) + " : " + str(message) + Style.RESET_ALL)


def create_order_id():
    return secrets.token_urlsafe(3)


def create_token_urlsafe():
    return secrets.token_urlsafe(MYSY_GV.TOKEN_SIZE)


def create_user_recid():
    return secrets.token_hex(MYSY_GV.TOKEN_SIZE)

'''
This function receives a file and saves it
in the "./Data/" folder.

It returns the name of the saved file.
'''
def Upload_Save_CSV_File(file=None, Folder=None):
    try:
        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that the file really is a csv file and that its name contains no extra "."
        '''
        if len(basename2) != 2:
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier est invalide")
            return False, "Le nom du fichier est invalide"

        if str(basename2[1]).lower() != "csv":
            myprint(str(inspect.stack()[0][3]) + " - : Ce n'est pas un fichier csv")
            return False, "Le fichier doit être de type '.csv'"

        new_basename2 = re.sub(r'[^a-zA-Z0-9]', '', str(basename2[0]))

        timestr = time.strftime("%Y%m%d%H%M%S")
        local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = str(Folder) + str(file.filename)

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de traiter le fichier."

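# Usage sketch, assuming a werkzeug FileStorage coming from a Flask request
# (the form field name 'File' is illustrative):
#   status, saved_path = Upload_Save_CSV_File(request.files['File'], "./Data/")
#   if status:
#       df = pd.read_csv(saved_path)
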
'''
This function receives a file and saves it
in the "./Data/" folder.

Only PDF and image files are accepted.
'''
def Upload_Save_PDF_IMG_File(file=None, Folder=None):
    try:
        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that the file really is an image ('jpg', 'jpeg', 'png', 'jpe', ...) or a pdf,
        and that its name contains no extra "."
        '''
        if len(basename2) != 2:
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier " + str(file.filename) + " est invalide")
            return False, "Le nom du fichier " + str(file.filename) + " est invalide "

        if str(basename2[1]).lower() not in MYSY_GV.IMG_FORMAT and str(basename2[1]).lower() != "pdf":
            myprint(str(inspect.stack()[0][3]) + " - : Le fichier " + str(file.filename) + ". L'extension '" + str(
                basename2[1]) + "' n'est pas un fichier image ou pdf. Les extensions autorisées sont :" + str(
                MYSY_GV.IMG_FORMAT) + " et ['pdf'] ")

            return False, " Le fichier " + str(file.filename) + ". L'extension '" + str(
                basename2[1]) + "' n'est pas un fichier image ou pdf. Les extensions autorisées sont :" + str(MYSY_GV.IMG_FORMAT) + " et ['pdf'] "

        new_basename2 = re.sub(r'[^a-zA-Z0-9]', '', str(basename2[0]))

        timestr = time.strftime("%Y%m%d%H%M%S")
        local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = str(Folder) + str(file.filename)

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de traiter le fichier."

'''
Image upload check.
Accepted formats:
 - 'jpg'
 - 'jpeg'
 - 'png'
'''
def Upload_Save_IMG_File(file=None, Folder=None):
    try:
        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that the file really is an image ('jpg', 'jpeg', 'png', 'jpe', ...),
        and that its name contains no extra "."
        '''
        if len(basename2) != 2:
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier " + str(file.filename) + " est invalide")
            return False, "Le nom du fichier " + str(file.filename) + " est invalide "

        if str(basename2[1]).lower() not in MYSY_GV.IMG_FORMAT:
            myprint(str(inspect.stack()[0][3]) + " - : Le fichier " + str(file.filename) + ". L'extension '" + str(basename2[1]) + "' n'est pas un fichier image. Les extensions autorisées sont :" + str(MYSY_GV.IMG_FORMAT))
            return False, " Le fichier " + str(file.filename) + ". L'extension '" + str(basename2[1]) + "' n'est pas un fichier image. Les extensions autorisées sont :" + str(MYSY_GV.IMG_FORMAT) + " "

        timestr = time.strftime("%Y%m%d%H%M%S")
        local_base_name = str(basename2[0]).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = str(Folder) + str(file.filename)

        print(" ### Global_file_name == ", Global_file_name)

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, None

def Upload_Save_IMG_File_No_Change_Origial_Name(file=None, Folder=None):
    try:
        print(" ### file.filename === ", file.filename)

        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that the file really is an image and that its name contains no extra "."
        '''
        if len(basename2) != 2:
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier " + str(file.filename) + " est invalide")
            return False, " Le nom du fichier " + str(file.filename) + " est invalide "

        if str(basename2[1]).lower() not in MYSY_GV.IMG_FORMAT:
            myprint(str(inspect.stack()[0][3]) + " - : le fichier " + str(file.filename) + ". L'extension '" + str(basename2[1]) + "' n'est pas un fichier image. Les extensions autorisées sont :" + str(MYSY_GV.IMG_FORMAT))
            return False, "Le fichier " + str(file.filename) + ". L'extension '" + str(basename2[1]) + "' n'est pas un fichier image. Les extensions autorisées sont :" + str(MYSY_GV.IMG_FORMAT)

        if len(str(basename2[0])) > 80:
            myprint(str(inspect.stack()[0][3]) + " : Le nom du fichier ne doit pas faire plus de 80 caractères")
            return False, "Le nom du fichier ne doit pas faire plus de 80 caractères "

        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = str(Folder) + secure_filename(file.filename)

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, None

"""
|
||
Controle de l'import des images UNIQUEMENT ET EXCLUSIVEMENT PNG
|
||
les formats acceptés sont : PNG
|
||
"""
|
||
def Upload_Save_IMG_PNG_File(file=None, Folder=None):
|
||
try:
|
||
basename = os.path.basename(file.filename)
|
||
basename2 = basename.split(".")
|
||
|
||
'''
|
||
Verification qu'il s'agit bien d'un fichier image (PNG), dont le nom ne comporte pas de "."
|
||
'''
|
||
if(len(basename2) != 2 ):
|
||
myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier "+str(file.filename)+" est invalide")
|
||
return False, " Le nom du fichier "+str(file.filename)+" est invalide "
|
||
|
||
if( str(basename2[1]).lower() not in MYSY_GV.IMG_PNG_FORMAT):
|
||
myprint(str(inspect.stack()[0][3]) + " - : le fichier "+str(file.filename)+ ". L'extention '"+str(basename2[1])+"' n'est pas un fichier image. Les extentions autorisées sont :"+str(MYSY_GV.IMG_FORMAT))
|
||
return False, "Le fichier "+str(file.filename)+ ". L'extention '"+str(basename2[1])+"' n'est pas un fichier image. Les extentions autorisées sont :"+str(MYSY_GV.IMG_FORMAT)
|
||
|
||
if (len(str(basename2[0])) > 80):
|
||
myprint(str(inspect.stack()[0][3]) + " : Le nom du fichier ne doit pas faire plus de 80 caractères")
|
||
return False, "Le nom du fichier ne doit pas faire plus de 80 caractères "
|
||
|
||
new_basename2 = re.sub(r'[^a-zA-Z0-9]', '', str(basename2[0]))
|
||
|
||
timestr = time.strftime("%Y%m%d%H%M%S")
|
||
local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')
|
||
new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
|
||
file.filename = new_file_name
|
||
file.save(os.path.join(str(Folder), secure_filename(file.filename)))
|
||
|
||
Global_file_name = str(Folder) + secure_filename(file.filename)
|
||
|
||
return True, Global_file_name
|
||
|
||
except Exception as e :
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, None
|
||
|
||
|
||
|
||
|
||
'''
This function takes an email and a token
and checks whether the (email, token, status) triple is valid.
'''
def check_token_validity(email="", token=""):
    try:
        coll_token = MYSY_GV.dbname['partner_token']
        #print(" check_token_validity token == "+str(token))
        tmp_count = coll_token.count_documents({'token': str(token), 'valide': '1'})
        if tmp_count <= 0:
            myprint(str(inspect.stack()[0][3]) + " La session de connexion n'est pas valide")
            return False

        return True

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

'''
This function takes an email and a token
and checks whether the user account is active.
'''
def check_user_validity(email="", token=""):
    try:
        message = {}
        coll_token = MYSY_GV.dbname['user_account']
        ret = True

        for retVal in coll_token.find({'token': str(token)}):
            user = retVal
            if user['valide'] == '0':
                print(" le compte avec le token : " + token + " n'est pas valide")
                message['valide'] = '0'
                ret = False

            if user['locked'] == '1':
                print(" le compte avec le token : " + token + " est verrouillé")
                message['locked'] = '1'
                ret = False

        return ret, message

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, message

'''
This function takes a token and checks whether the (email, token, status)
triple is valid, EXCLUSIVELY for PARTNER accounts,
i.e. against the partner_token collection.
'''
def check_partner_token_validity(email="", token=""):
    try:
        #print(" check_partner_token_validity : Token = "+token)
        coll_token = MYSY_GV.dbname['partner_token']
        tmp_count = coll_token.count_documents({'token': str(token), 'locked': '0', 'valide': '1'})

        #tmp_count = coll_token.find({ 'token': str(token), 'locked':'0', 'valide': '1'}).count()

        if tmp_count <= 0:
            myprint("La session de connexion n'est pas valide")
            return False
        return True

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

def check_partner_token_validity_v2(diction):
    try:
        token = ""
        if "token" in diction.keys():
            if diction['token']:
                token = diction['token']
        else:
            myprint("La session de connexion n'est pas valide")
            return False

        coll_token = MYSY_GV.dbname['partner_token']
        local_qry = {'locked': '0', 'valide': '1', 'token': str(token)}
        tmp_count = MYSY_GV.dbname['partner_token'].count_documents(local_qry)

        #tmp_count = coll_token.find({ 'token': str(token), 'locked':'0', 'valide': '1'}).count()

        if tmp_count <= 0:
            myprint("La session de connexion n'est pas valide")
            return False
        return True

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

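# Usage sketch for the token checks above (the token value is illustrative):
#   if not check_partner_token_validity_v2({'token': 'abcd1234'}):
#       # reject the request / return an "invalid session" response
#       ...
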
"""
|
||
Cette fonction retour le stripe_account_id
|
||
"""
|
||
def get_parnter_stripe_account_id_from_recid(recid = ""):
|
||
try:
|
||
if len(str(recid)) <= 0 :
|
||
myprint(" get_parnter_stripe_account_id_from_token : Le recid partner est vide")
|
||
return False
|
||
|
||
print(" #### recid = "+str(recid))
|
||
coll_partner = MYSY_GV.dbname['partnair_account']
|
||
tmp_val = coll_partner.find({'recid': str(recid), 'active': '1', 'locked':'0'})
|
||
|
||
if( "stripe_account_id" not in tmp_val[0].keys()):
|
||
myprint(" Pas de champ : stripe_account_id")
|
||
return False
|
||
|
||
stripe_account_id = tmp_val[0]['stripe_account_id']
|
||
return stripe_account_id
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction retour le stripe_paymentmethod_id
|
||
"""
|
||
def get_parnter_stripe_stripe_paymentmethod_id_from_recid(recid = ""):
|
||
try:
|
||
if len(str(recid)) <= 0 :
|
||
myprint(" get_parnter_stripe_account_id_from_token : Le recid partner est vide")
|
||
return False
|
||
|
||
print(" #### recid = "+str(recid))
|
||
coll_partner = MYSY_GV.dbname['partnair_account']
|
||
tmp_val = coll_partner.find({'recid': str(recid), 'active': '1', 'locked':'0'})
|
||
stripe_paymentmethod_id = tmp_val[0]['stripe_paymentmethod_id']
|
||
return stripe_paymentmethod_id
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
'''
Retrieves the user's recid.

/!\ 22/12/2024: this function is a bit odd.
A new function, get_user_account_id_from_token, was created; it returns
the row's _id instead of the recid.
'''
def get_user_recid_from_token(token=""):
    try:
        if len(str(token)) <= 0:
            myprint(" Le token est vide")
            return False

        coll_token = MYSY_GV.dbname['partner_token']
        tmp_val = coll_token.find({'token': str(token), 'valide': '1'})
        user_recid = tmp_val[0]['recid']
        return user_recid

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

"""
|
||
22/12/2024 :
|
||
Cette fonction retourne l'_id depuis (user_account) en partant du token
|
||
"""
|
||
def get_user_account_id_from_token(token = ""):
|
||
try :
|
||
if len(str(token)) <= 0 :
|
||
myprint(" Le token est vide")
|
||
return False
|
||
|
||
coll_token = MYSY_GV.dbname['user_account']
|
||
tmp_val = coll_token.find({'token': str(token), 'valide': '1', 'active':'1'})
|
||
user_recid = tmp_val[0]['recid']
|
||
return user_recid
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction retourne les données d'un user en partant du recid.
|
||
return false si le recid est faux ou que le compte utilisateur n'est pas actif
|
||
"""
|
||
def get_partner_data_from_recid(user_recid = ""):
|
||
try :
|
||
if len(str(user_recid)) <= 0 :
|
||
myprint(" Le partner_recid est vide")
|
||
return False, ""
|
||
|
||
coll_token = MYSY_GV.dbname['partner_token']
|
||
# print(" myquery pr demo_account = " + str(myquery))
|
||
tmp_count = coll_token.count_documents({'recid': str(user_recid), 'valide': '1', 'locked':'0'})
|
||
if (tmp_count <= 0):
|
||
return False, "Le token est invalide"
|
||
|
||
coll_token = MYSY_GV.dbname['partnair_account']
|
||
|
||
#print(" FFFF = ",{'recid': str(user_recid), 'active': '1', 'locked':'0'} )
|
||
|
||
tmp_val = coll_token.find_one({'recid': str(user_recid), 'active': '1', 'locked':'0'})
|
||
|
||
if( tmp_val is None ):
|
||
return False, "Impossible de trouver le partner"
|
||
return True, tmp_val
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
"""
|
||
Cette fonction retourne les données d'un user en partant du ObjectID (_id=.
|
||
"""
|
||
def get_partner_data_from_id(partner_id = ""):
|
||
try :
|
||
|
||
|
||
if len(str(partner_id)) <= 0 :
|
||
myprint(" Le partner_id est vide")
|
||
return False, " Le partner_id est vide "
|
||
|
||
coll_token = MYSY_GV.dbname['partnair_account']
|
||
|
||
tmp_val = coll_token.find_one({'_id': ObjectId(str(partner_id)), 'active': '1', 'locked':'0'})
|
||
|
||
if( tmp_val is None ):
|
||
return False, "Impossible de trouver le partner"
|
||
return True, tmp_val
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
"""
|
||
Recuperation des données d'une personne connecté à partir du token
|
||
"""
|
||
def get_connected_data_from_token(connected_token = ""):
|
||
try:
|
||
if len(str(connected_token)) <= 0:
|
||
myprint(" Le token cid est vide")
|
||
return False, ""
|
||
|
||
coll_token = MYSY_GV.dbname['partner_token']
|
||
# print(" myquery pr demo_account = " + str(myquery))
|
||
tmp_count = coll_token.count_documents({'token': str(connected_token), 'valide': '1', 'locked': '0'})
|
||
if (tmp_count <= 0):
|
||
return False, "Le token est invalide"
|
||
|
||
token_data = coll_token.find_one({'token': str(connected_token), 'valide': '1', 'locked': '0'})
|
||
|
||
"""
|
||
/!\ 21/12/2024 : en attendant de fusionner proprement les collection 'partnair_account' et 'user_account'
|
||
on utiliser le champs 'partner_token['type'] pour savoir si on à faire un partner ou un user.
|
||
|
||
|
||
Si c'est un user_account, on a recuperer son 'partner_owner_recid', puis aller chercher le données du partner associe
|
||
|
||
"""
|
||
tmp_val = None
|
||
|
||
if( token_data and "type" in token_data.keys() and str(token_data['type']) == "user"):
|
||
coll_token = MYSY_GV.dbname['user_account']
|
||
tmp_val = coll_token.find_one({'token': str(connected_token), 'active': '1', 'locked': '0'})
|
||
|
||
else:
|
||
coll_token = MYSY_GV.dbname['partnair_account']
|
||
tmp_val = coll_token.find_one({'token': str(connected_token), 'active': '1', 'locked': '0'})
|
||
|
||
|
||
return True, tmp_val
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "Impossible de récupérer les données de l'entité connectée"
|
||
|
||
|
||
"""
|
||
Recuperation des données du partner à partir du token
|
||
"""
|
||
def get_partner_data_from_token(token = ""):
|
||
try :
|
||
if len(str(token)) <= 0:
|
||
myprint(" Le token partner est vide")
|
||
return False, ""
|
||
|
||
coll_token = MYSY_GV.dbname['partner_token']
|
||
# print(" myquery pr demo_account = " + str(myquery))
|
||
tmp_count = coll_token.count_documents({'token': str(token), 'valide': '1', 'locked': '0'})
|
||
if (tmp_count <= 0):
|
||
return False, "Le token est invalide"
|
||
|
||
token_data = coll_token.find_one({'token': str(token), 'valide': '1', 'locked': '0'})
|
||
"""
|
||
/!\ 21/12/2024 : en attendant de fusionner proprement les collection 'partnair_account' et 'user_account'
|
||
on utiliser le champs 'partner_token['type'] pour savoir si on à faire un partner ou un user.
|
||
|
||
|
||
Si c'est un user_account, on a recuperer son 'partner_owner_recid', puis aller chercher le données du partner associe
|
||
|
||
"""
|
||
ret_val = None
|
||
|
||
if (token_data and "type" in token_data.keys() and str(token_data['type']) == "user"):
|
||
coll_token = MYSY_GV.dbname['user_account']
|
||
user_account_data = coll_token.find_one({'token': str(token), 'active': '1', 'locked': '0'},
|
||
{"_id": 1, "partner_owner_recid": 1})
|
||
|
||
if (user_account_data and "partner_owner_recid" in user_account_data.keys() and user_account_data[
|
||
'partner_owner_recid']):
|
||
coll_token = MYSY_GV.dbname['partnair_account']
|
||
ret_val = coll_token.find_one({'recid': str(user_account_data['partner_owner_recid']),
|
||
'is_partner_admin_account': '1', 'active': '1', 'locked': '0'})
|
||
|
||
if (ret_val is None or '_id' not in ret_val.keys()):
|
||
myprint(
|
||
"ERRR : Connexion d'un compte de type utilisateur, mais erreur sur la configuration du partner_owner_recid")
|
||
return False, "Impossible de récupérer les données de l'entité connectée"
|
||
else:
|
||
val_tmp = MYSY_GV.dbname['partnair_account'].count_documents(
|
||
{'token': str(token), 'active': '1', 'locked': '0'})
|
||
if (val_tmp <= 0):
|
||
return False, ""
|
||
|
||
ret_val = MYSY_GV.dbname['partnair_account'].find_one(
|
||
{'token': str(token), 'active': '1', 'locked': '0'})
|
||
|
||
return True, ret_val
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
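# Usage sketch (token value is illustrative): resolve a connected session to the
# owning partner account, whether the session belongs to a partner or to a user.
#   status, partner = get_partner_data_from_token('abcd1234')
#   if status:
#       partner_recid = partner['recid']
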
'''
Retrieves the user's email from the token.
'''
def get_user_email_from_token(token=""):
    try:
        if len(str(token)) <= 0:
            myprint(" Le token est vide")
            return False

        coll_token = MYSY_GV.dbname['partner_token']
        tmp_val = coll_token.find({'token': str(token), 'valide': '1'})
        user_email = tmp_val[0]['email']
        return user_email

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

'''
Retrieves the user's email from the recid.
'''
def get_user_email_from_recid(recid=""):
    try:
        if len(str(recid)) <= 0:
            myprint(" Le recid est vide")
            return False

        coll_token = MYSY_GV.dbname['user_account']
        tmp_val = coll_token.find({'recid': str(recid), 'active': '0'})

        if tmp_val and tmp_val[0] and tmp_val[0]['email']:
            user_email = tmp_val[0]['email']
        else:
            return False, "Impossible de recuperer l'email du compte utilisateur"

        return user_email

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de recuperer l'email du compte utilisateur"

'''
Retrieves the partner's recid from the token.
'''
def get_parnter_recid_from_token(token=""):
    if len(str(token)) <= 0:
        myprint(" Le token partner est vide")
        return False

    coll_token = MYSY_GV.dbname['partner_token']
    tmp_val = coll_token.find_one({'token': str(token), 'valide': '1', 'locked': '0'})

    if tmp_val is None:
        return False

    """
    /!\ 21/12/2024: until the 'partnair_account' and 'user_account' collections are properly merged,
    the partner_token['type'] field is used to know whether we are dealing with a partner or a user.

    If it is a user_account, we read its 'partner_owner_recid' and then fetch the associated partner's data.
    """
    user_recid = False

    if tmp_val and "type" in tmp_val.keys() and str(tmp_val['type']) == "user":
        # This is a USER connection: return the 'partner_owner_recid'
        coll_token = MYSY_GV.dbname['user_account']
        user_account_data = coll_token.find_one({'token': str(token), 'active': '1', 'locked': '0'},
                                                {"_id": 1, "partner_owner_recid": 1})

        if (user_account_data and "partner_owner_recid" in user_account_data.keys() and str(user_account_data['partner_owner_recid'])):
            user_recid = str(user_account_data['partner_owner_recid'])
    else:
        # This is a PARTNER connection: return the 'recid'
        if "recid" in tmp_val.keys() and str(tmp_val['recid']):
            user_recid = str(tmp_val['recid'])

    return user_recid

def get_user_recid_from_email(email = ""):
|
||
if len(str(email)) <= 0 :
|
||
myprint(" L'email est vide")
|
||
return False
|
||
|
||
|
||
coll_user = MYSY_GV.dbname['user_account']
|
||
tmp_val = coll_user.find({'email': str(email), 'active': '1'})
|
||
user_recid = tmp_val[0]['recid']
|
||
|
||
return user_recid
|
||
|
||
|
||
'''
This function creates the internal reference of a training course.
'''
def Create_internal_call_ref():
    retval = None
    now = datetime.now()

    # getting the timestamp
    ts = str(datetime.timestamp(now)).replace(".", "").replace(",", "")

    retval = "Mysy_" + str(ts)
    return str(retval)

def textdist():
    val = textdistance.mra("doe", "dough")
    #print(" mra = "+str(val))

    val2 = textdistance.editex("doe", "dough")
    #print(" editex = " + str(val2))


def similaire():
    mots = ["Durand est present", "Meyer", "Dupond", "Dopon", "DUPON", "Nguyen", "Toto"]
    ratio = 0.8

    for mot in mots:
        #print(" CMP de 'Dupont' et '"+mot+"'" )
        my_ratio = SequenceMatcher(None, "Dupont", mot).ratio()
        #print(" ## RATION = "+str(my_ratio))

    #resultat = [mot for mot in mots if SequenceMatcher(None, "Dupont", mot).ratio() >= ratio]
    #print(resultat)

    return

def levenshtein(mot1, mot2):
    try:
        # ligne_i is an array such that, throughout the algorithm,
        # ligne_i[k] holds the Levenshtein distance between the first k letters of mot1
        # and the first i letters of mot2.
        # Initially i = 0, and the distance between the first k letters of mot1 and the
        # empty string is of course k (k deletions are needed to go from the first k
        # letters of mot1 to the empty string).
        ligne_i = [k for k in range(len(mot1) + 1)]
        # i then goes from 1 to len(mot2)
        for i in range(1, len(mot2) + 1):
            # i has just been incremented; keep the previous row (row i-1) in ligne_prec
            ligne_prec = ligne_i
            # Build the new row. Its first element (index 0) must be the Levenshtein
            # distance between the empty string "" and the first i letters of mot2, i.e. i
            # (i insertions are needed to go from "" to the first i letters of mot2).
            ligne_i = [i] * (len(mot1) + 1)
            # Fill in the rest of row i, i.e. compute ligne_i[k] for k from 1 to len(mot1)
            for k in range(1, len(ligne_i)):
                # cout is 0 if the k-th letter of mot1 equals the i-th letter of mot2, 1 otherwise.
                # The k-th letter of mot1 is mot1[k-1], since indices start at 0.
                cout = int(mot1[k - 1] != mot2[i - 1])
                # The heart of the algorithm: compute ligne_i[k] from
                # ligne_prec[k-1], ligne_prec[k] and ligne_i[k-1]
                ligne_i[k] = min(ligne_i[k - 1] + 1, ligne_prec[k] + 1, ligne_prec[k - 1] + cout)
        # When leaving the loop, i equals len(mot2).
        # The value we want is the Levenshtein distance between the len(mot1) first letters of mot1
        # and the len(mot2) first letters of mot2, stored in ligne_i[len(mot1)]
        return ligne_i[len(mot1)]

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, None

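# Quick sanity checks for the implementation above:
#   levenshtein("kitten", "sitting")  # -> 3
#   levenshtein("Dupont", "Dupond")   # -> 1
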
'''
Check if a string is a float and convert it.
Returns 0 on error.
'''
def tryFloat(val):
    try:
        val = str(val).replace(",", ".")
        myfloat = float(val)
        return myfloat
    except ValueError:
        return 0

"""
Checks whether the value is a float.
Returns a status and the value.
"""
def IsFloat(val):
    try:
        val = str(val).replace(",", ".")
        myfloat = round(float(val), 2)
        return True, myfloat
    except ValueError:
        return False, 0


'''
Check if a string is an int and convert it.
Returns 0 on error.
'''
def tryInt(val):
    try:
        val = str(val).replace(",", ".")
        tab_val = val.split(".")
        myint = int(tab_val[0])
        return myint
    except ValueError:
        return 0

"""
Checks whether the value is an integer.
Returns a status and the value.
"""
def IsInt(val):
    try:
        val = str(val).replace(",", ".")
        tab_val = val.split(".")
        myint = int(tab_val[0])
        return True, myint
    except ValueError:
        return False, 0

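# Behaviour sketch for the numeric helpers above (French-style decimal commas are accepted):
#   tryFloat("12,5")  # -> 12.5
#   IsFloat("abc")    # -> (False, 0)
#   tryInt("7,9")     # -> 7  (integer part only)
#   IsInt("7.9")      # -> (True, 7)
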
"""
|
||
Cette fonction verifie si une valeur est bien un 'ObjectId' valide
|
||
"""
|
||
def IsObjectId(oid):
|
||
try:
|
||
ObjectId(oid)
|
||
print(" IsObjectId "+str(oid)+" est valde")
|
||
return True
|
||
except ValueError:
|
||
return False
|
||
|
||
|
||
'''
This function checks whether a string is a date in the form jj/mm/aaaa.
'''
def CheckisDate(mydate):
    # mydate must be in the format: jj/mm/aaaa
    try:
        #print(" analyse de "+mydate)
        if str(mydate).strip() == "":
            return False

        val = datetime.strptime(str(mydate).strip(), '%d/%m/%Y')

        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False


'''
This function checks whether a string is a datetime in the form jj/mm/aaaa hh:mm.
'''
def CheckisDate_Hours(mydate):
    # mydate must be in the format: %d/%m/%Y %H:%M
    try:
        #print(" analyse de "+mydate)
        val = datetime.strptime(str(mydate).strip(), '%d/%m/%Y %H:%M')

        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False


'''
This function checks whether a string is a datetime in the form jj/mm/aaaa hh:mm:ss.
'''
def CheckisDate_Hours_Second(mydate):
    # mydate must be in the format: jj/mm/aaaa hh:mm:ss
    try:
        #print(" analyse de "+mydate)
        val = datetime.strptime(str(mydate).strip(), '%d/%m/%Y %H:%M:%S')

        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

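# Behaviour sketch for the date checks above:
#   CheckisDate("25/12/2024")              # -> True
#   CheckisDate("2024-12-25")              # -> False (the parse error is logged)
#   CheckisDate_Hours("25/12/2024 09:30")  # -> True
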
'''
Checks, via the "word_not_stem" table, that the word
must not be stemmed.
'''
def Word_Not_Stemmize(word=None):
    try:
        coll_not_stem = MYSY_GV.dbname["word_not_stem"]

        #print(' ### Word_Not_Stemmize mot = ', word)
        val_tmp = coll_not_stem.count_documents({'mot': str(word)})

        if val_tmp > 0:
            return True
        else:
            return False

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de verifier Word_Not_Stemmize"

'''
This function checks whether a word is in the French dictionary (an internal table);
if not, the word is stored in a table for later processing.
Used when indexing a training course.
'''
def check_word_in_fr_dict(mot=None):
    try:
        print("#### analyse du mot " + str(mot))
        col_name = MYSY_GV.dbname["list_mots_fr"]
        col_name_not_fr = MYSY_GV.dbname["list_mots_not_fr"]
        mydata = {}
        val_tmp = col_name.count_documents({'mot': str(mot)})

        if val_tmp <= 0:
            myprint(" Le mot '" + mot + "' n'existe pas dans le dictionnaire")

            mydata['mot'] = mot
            mydata['treated'] = int("0")
            mydata['update_date'] = datetime.now()

            ret_val = col_name_not_fr.find_one_and_update(
                {'mot': str(mot)}, {"$set": mydata}, upsert=True,
                return_document=ReturnDocument.AFTER
            )

            if ret_val['_id'] is False:
                print(" Impossible d'enregistrer le '" + mot + "'")
                return False

            return False

        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

'''
This function checks whether a word is in the French dictionary,
in the context of a user search.
'''
def recherche_check_word_in_fr_dict(mot=None):
    try:
        print("#### analyse du mot " + str(mot))
        col_name = MYSY_GV.dbname["list_mots_fr"]
        val_tmp = col_name.count_documents({'mot': str(mot)})

        if val_tmp <= 0:
            myprint(" Le mot '" + mot + "' n'existe pas dans le dictionnaire")

            return False

        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

'''
Security: requester source IP.
This function checks whether the source IP address is authorised or not.
'''
def check_source_ipv4(source_ip=None):
    try:
        if source_ip in MYSY_GV.AUTORIZED_SOURCE_IPV4:
            myprint(" Security check : IP adresse '" + str(source_ip) + "' connected")
            return True
        else:
            myprint(" Security check : IP adresse '" + str(source_ip) + "' is not autorized")
            return False

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

'''
In the search flow, the expression typed by the user in the search bar
must be cleaned and processed before entering the pipeline.
The sentence is then returned in "unicode" (accent-free) form.
'''
def Parse_Clean_Search_Text(sentence=None):
    try:
        if len(str(sentence)) <= 0:
            return False, ""

        '''
        /!\ : remove all "special" characters and punctuation EXCEPT
        - the ":" which is needed to identify patterns, and
        - the ' " ' which is needed to identify patterns
        '''
        list_noises = ['...', '.', ';', ',', '!', '?', ')', '(', '[', ']', '\'', '’', '`', '©', '–',
                       '{', '}', '-', '=', '°', '#', '-', '/', '~', '&', '\\', '.', '^', '$', '*', '+', '\\n',
                       '?', '{', '}', '[', ']', '|', '(', ')', '-', '>', '<', '@', '®', '™', '«', '»']

        for noise in list_noises:
            # print(" suppression de : '"+str(noise)+"' ")
            sentence = sentence.replace(str(noise), " ")

        unicode_sentence = unidecode(sentence)

        return True, unicode_sentence

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, ""

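# Behaviour sketch for the cleaning step above:
#   status, cleaned = Parse_Clean_Search_Text("Formation (Excel) avancée !")
#   # cleaned -> 'Formation  Excel  avancee  ' : noise characters become spaces, accents are stripped
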
'''
This function runs the extended search using the "serpapi" API.
'''
def Get_Extended_Result(sentence=None):
    try:
        list_extended = []
        external_code_prefixe = str(datetime.now().timestamp()).replace(".", "")
        print("external_code_prefixe = " + str(external_code_prefixe))

        status = RunSearchAPI(sentence, external_code_prefixe)
        if status is False:
            return False, list_extended

        return True, external_code_prefixe
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, []

'''
This function calls the API and stores the result in the database.
'''
def RunSearchAPI(search_text=None, external_code_prefixe=None):
    try:
        if len(str(search_text).strip()) == 0:
            return True

        params = {
            "q": str(search_text),
            "hl": "fr",
            "gl": "fr",
            "num": "20",
            "safe": "active",
            "google_domain": "google.com",
            "api_key": "596cb9a468f8292fcefa6f297444db9c12478685d8734b52efdf8aa53c54fd55"
        }

        myprint("######## PARAM RunSearchAPI = " + str(params))

        '''
        Early "return False" to avoid burning the API credits.
        '''
        return False

        search = GoogleSearch(params)
        results = search.get_dict()
        organic_results = results['organic_results']

        my_collection = MYSY_GV.YTUBES_dbname['mysyserpapi']

        #myprint("resutlat 1 organic_results ")
        cmpt = 0
        for val in organic_results:
            cmpt = cmpt + 1
            mydata = {}
            '''
            print("External_code = " + str(val['title']))
            print("Title = "+str(val['title']))
            print("url = " + str(val['link']))
            print("description = " + str(val['snippet']))

            if ("snippet_highlighted_words" in val.keys()):
                if val['snippet_highlighted_words']:
                    print("mot_cles = " + str(val['snippet_highlighted_words']))
            '''

            mydata['external_code'] = external_code_prefixe + "_" + str(cmpt)
            mydata['title'] = str(val['title'])
            mydata['url'] = str(val['link'])

            if "snippet" in val.keys():
                if val['snippet']:
                    mydata['description'] = str(val['snippet'])

            if "snippet_highlighted_words" in val.keys():
                if val['snippet_highlighted_words']:
                    mydata['tags'] = str(val['snippet_highlighted_words'])

            mydata['update_date'] = str(datetime.now())

            if "position" in val.keys():
                if val['position']:
                    mydata['rang'] = str(val['position'])

            mydata['orign_search_text'] = str(search_text)
            mydata['valide'] = "1"
            mydata['treated'] = "0"

            ret_val = my_collection.find_one_and_update({'url': str(mydata['url'])},
                                                        {"$set": mydata},
                                                        upsert=True,
                                                        return_document=ReturnDocument.AFTER
                                                        )

            if ret_val and ret_val['_id']:
                myprint(" Le document de la recherche étendue a bien été ajouté = " + str(ret_val['_id']))

            else:
                myprint(" WARNING : Impossible d'ajouter le document de la recherche étendue " + str(mydata['url']))

        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False

'''
This function takes a word and returns its masculine / singular form.
It will be extended over time.

/!\ This function takes a single word, not a sentence.
'''
def GetMasculinSingulier(word=None):
    try:
        mot = str(word).lower()  # lower-case the word
        # Strip the plural / feminine endings:

        # words ending in "ees"
        patter2 = re.compile(r"\w+(ees)+$")

        # words ending in "s"
        patter3 = re.compile(r"\w+(s)+$")

        # words ending in "x"
        patter4 = re.compile(r"\w+(x)+$")

        if len(str(mot)) > 3:
            if re.match(patter2, str(mot)):
                neword = mot[:-2]
                return True, neword

            if re.match(patter3, str(mot)):
                neword = mot[:-1]
                return True, neword

            if re.match(patter4, str(mot)):
                neword = mot[:-1]
                return True, neword

        return True, str(mot)

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, ""

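# Behaviour sketch for the naive singularisation above:
#   GetMasculinSingulier("formations")  # -> (True, 'formation')
#   GetMasculinSingulier("donnees")     # -> (True, 'donne')
#   GetMasculinSingulier("travaux")     # -> (True, 'travau')  (plain suffix stripping, no dictionary)
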
'''
This function creates a unique external code based on the current timestamp.
'''
def CreateMyCode():
    try:
        mycode = str(datetime.now().timestamp()).replace(".", '').replace(',', '')
        return True, mycode

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, '-1'

'''
This function fetches all the messages to be sent from the
'user_message' collection and triggers the appropriate processing.
'''
def CronUSerMessage_Mail():
    try:
        nb_message = 0
        coll_message = MYSY_GV.dbname['user_message']
        my_today = datetime.today()

        for val in coll_message.find({'sent': '0', 'valide': '1', 'type': 'email'}):
            ismail_traited = '1'
            error_message = ""
            nb_message = nb_message + 1
            print(" traitement du message : " + str(val))

            mail_recever = ""
            if "recever_mail" in val.keys():
                if val['recever_mail']:
                    mail_recever = str(val['recever_mail']).strip()

            mail_object = ""
            if "object" in val.keys():
                if val['object']:
                    mail_object = str(val['object']).strip()

            message = ""
            if "message" in val.keys():
                if val['message']:
                    message = str(val['message']).strip()

            # email pattern
            patter_mail = re.compile(r"^[\w\.]+@([\w-]+\.)+[\w]{2,4}$")

            if re.match(patter_mail, str(mail_recever)):
                if len(mail_object) == 0 or len(message) == 0:
                    myprint(" WARNING : Impossible d'envoyer le mail au destinataire. l'objet ou le message sont vides : mail_object = "
                            + str(mail_object) + " ou message = " + str(message))
                    ismail_traited = 'error'
                    error_message = "l'objet ou le message sont vides"

                else:
                    # send the email
                    if email_mgt.SendGenericEmail(mail_recever, mail_object, message) is False:
                        ismail_traited = 'error'
                        error_message = " Erreur SMPT "

            else:
                myprint(" WARNING : Impossible d'envoyer le mail au destinataire. format incorrecte: " + str(mail_recever))
                ismail_traited = 'error'
                error_message = "format du mail receveur est incorrecte "

            '''
            Update the message status.
            '''
            ret_val = coll_message.find_one_and_update({'_id': ObjectId(val['_id']), },
                                                       {"$set": {'sent': str(ismail_traited),
                                                                 'error_message': str(error_message)}},
                                                       return_document=ReturnDocument.AFTER
                                                       )

            if ret_val and ret_val['_id']:
                nb_doc = str(ret_val['_id'])
                myprint(" le message = " + str(nb_doc) + " a bien été mis à jour")

            else:
                myprint(
                    str(inspect.stack()[0][3]) + " WARNING : Impossible de mettre à jour le user_message = " + str(
                        val['_id']))

        return True, str(my_today), str(nb_message) + " traite (s) : OK"
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, str(datetime.today()), " Impossible de traiter les demandes d'envoi d'email "

'''
This function takes an array and returns the same array in a random order.
/!\ this function is too slow; it cannot be used as is.
'''
def RendomizeTab(table):
    try:
        if table.count() <= 0:
            print(" impossible de rendomize")
            return False, []

        taille = table.count() - 1
        new_tab = []
        new_tab_cpt = 0
        i = 0

        while new_tab_cpt <= taille:
            cpt = random.randint(0, taille)
            #print("cpt = " + str(cpt) + " - len(table) = " + str(table.count()))
            #print(" ANALYSE DE " + str(table[cpt]['_id']))

            if table[cpt] not in new_tab:
                #print(" AJOUT DE "+str(table[cpt]['_id']))
                new_tab.append(table[cpt])
                new_tab_cpt = new_tab_cpt + 1

        return True, new_tab
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, []

def Reordertab():
    try:
        coll_name = MYSY_GV.dbname['myclass']
        table = coll_name.find({'trainer': 'PYRAMYD'},
                               {"external_code": 1, "title": 1, "_id": 1, "url": 1, })

        print(str(table.count()))
        taille = table.count() - 1
        new_tab = []
        new_tab_cpt = 0
        i = 0

        while new_tab_cpt <= taille:
            cpt = random.randint(0, taille)
            #print("cpt = " + str(cpt) + " - len(table) = " + str(table.count()))
            #print(" ANALYSE DE " + str(table[cpt]['_id']))

            if table[cpt] not in new_tab:
                #print(" AJOUT DE "+str(table[cpt]['_id']))
                new_tab.append(table[cpt])
                new_tab_cpt = new_tab_cpt + 1

        return True, new_tab
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, []

'''
To be able to display only 100 of the X training courses that exist in the
database, an "aLaUne" (featured) flag system is used.
A cron job periodically picks, at random, the training courses to feature on
the home page.
'''
def FormationAlaUne():
    try:
        coll_name = MYSY_GV.dbname['myclass']
        df = pd.DataFrame(list(coll_name.find({'valide': '1', "price": {"$gte": 0}, "duration": {"$gte": 0},
                                               "support": {"$exists": "true"},
                                               "type": {"$exists": "true"}}, {'_id': 1, 'external_code': 1})))
        df['isalaune'] = '0'

        taille2 = df.shape[0]
        affectation = 0
        while affectation < MYSY_GV.MAINPAGE_QUERY_LIMIT_ROW:
            affectation = affectation + 1
            cpt = random.randint(0, taille2)
            df.at[cpt, 'isalaune'] = '1'

        i = 0
        tab_id = []
        while i < taille2:
            if str(df.at[i, 'isalaune']) == "1":
                #print(str(df.at[i,'external_code']) +" = "+str(df.at[i,'isalaune']))
                tab_id.append(str(df.at[i, '_id']))
            i = i + 1

        '''
        Clear the previous "isalaune" flags.
        '''
        result = coll_name.update_many(
            {"isalaune": "1"},
            {
                "$set": {"isalaune": '0'}
            })
        print("raw:", result.raw_result)
        print("acknowledged:", result.acknowledged)
        print("matched_count:", result.matched_count)

        for val in tab_id:
            print(" val =" + val)
            ret_val = coll_name.find_one_and_update(
                {'_id': ObjectId(str(val)), 'valide': '1', 'locked': '0'},
                {"$set": {'isalaune': '1'}},
                return_document=ReturnDocument.AFTER
            )

        return True, "ok"
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "KO"

'''
This function replaces special characters and punctuation with spaces.
'''
def local_Remove_Ponct_Special_Caractere(sentence):
    try:
        text = sentence.lower()  # lower-case the words

        # Remove the special characters:
        text = re.sub(r"[,\!\?\%\(\)\/\"]", " ", text)
        text = re.sub(r"\&\S*\s", " ", text)
        text = re.sub(r"\-", " ", text)

        list_noises = ['...', '.', ';', ',', ':', '!', '?', ')', '(', '[', ']', '\'', '"', '’', '`', '©', '–',
                       '{', '}', '-', '=', '°', '#', '-', '/', '~', '&', '\\', '.', '^', '$', '*', '+', '\\n', '\n',
                       '?', '{', '}', '[', ']', '|', '(', ')', '-', '>', '<', '@', '®', '™', '«', '»']

        sentence = text
        for noise in list_noises:
            #print(" suppression de : '"+str(noise)+"' ")
            sentence = sentence.replace(str(noise), " ")

        #print(" AFTER REPLACE NOISES = "+str(sentence))
        return True, sentence

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible Ela_Remove_Ponct_Special_Caractere"

def Migration_internal_url():
    try:
        coll_name = MYSY_GV.dbname['myclass']
        for retVal in coll_name.find({}):
            user = retVal

            my_internal_url = str(user['title'])
            local_status, my_internal_url = local_Remove_Ponct_Special_Caractere(my_internal_url)
            my_internal_url = unidecode(my_internal_url.lower())
            my_internal_url = my_internal_url.replace(" ", "-")
            my_internal_url = my_internal_url.replace("/", "-")
            if my_internal_url.startswith('-'):
                my_internal_url = my_internal_url[1:]

            if my_internal_url.endswith('-'):
                my_internal_url = my_internal_url[:-1]

            suffix = hashlib.md5(my_internal_url.encode()).hexdigest()

            new_internal_url = str(my_internal_url) + "-" + str(suffix[-3:])

            new_internal_url = new_internal_url.replace("---", "-")
            new_internal_url = new_internal_url.replace("--", "-")

            print('new_internal_url = ' + new_internal_url)

            result = coll_name.update_many(
                {'_id': ObjectId(str(user['_id']))},
                {
                    "$set": {"internal_url": str(new_internal_url)}
                })
            '''print("raw:", result.raw_result)
            print("acknowledged:", result.acknowledged)
            print("matched_count:", result.matched_count)'''

        return True, "ok"

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "KO"

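# Shape of the slug generated above (title and suffix are illustrative; the real
# 3-character suffix comes from the md5 of the cleaned title):
#   "Formation Excel / Débutant"  ->  "formation-excel-debutant-a1f"
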
'''
This function takes a word and returns the top X indexed words matching it
(substring match).
/!\ it is also used in the case of empty searches.
'''
def GetMotFromElaIndex(diction):
    try:
        '''
        # Check that the fields received by the API are in the list of allowed fields.
        # This prevents a third party from adding unknown values to the API.
        # Every possible field (mandatory or not) of the collection must be listed in field_list.
        '''
        field_list = ['mot', 'domaine']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list:
                myprint(str(inspect.stack()[0][3]) + " - Le champ '" + val + "' n'existe pas, Creation formation annulée")
                return False, []

        '''
        Once every key sent to the API has been validated (previous step),
        check that the mandatory fields are present.
        '''
        field_list_obligatoire = ['mot']

        for val in field_list_obligatoire:
            if val not in diction:
                myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
                return False, []

        coll_name = MYSY_GV.dbname['elaindex']
        training_mots = []

        mot = ""
        if "mot" in diction.keys():
            if diction['mot']:
                mot = diction['mot']

        if len(mot) < 3:
            return True, []

        search = str(mot)
        search_expr = re.compile(f".*{search}.*", re.I)

        print(" #### mot recu " + mot + " search_expr = " + str(search_expr))
        for x in coll_name.find({'mots': {'$regex': search_expr}}).\
                sort([("occurence", pymongo.DESCENDING)]).\
                limit(MYSY_GV.HELP_WORD_QUERY_LIMIT):
            if x['mots'] not in training_mots:
                training_mots.append(x['mots'])

        return True, training_mots

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, []

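# Usage sketch (the word is illustrative):
#   status, suggestions = GetMotFromElaIndex({'mot': 'exce'})
#   # suggestions -> up to MYSY_GV.HELP_WORD_QUERY_LIMIT indexed words containing "exce",
#   # sorted by decreasing occurrence
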
'''
Fix for the wrong titles coming from mymooc.com.
'''
def Migration_mooc_title():
    try:
        coll_name = MYSY_GV.dbname['myclass']
        for retVal in coll_name.find({'owner': 'mymooc.com'}):

            new_title = str(retVal['title'])

            new_title = new_title.replace("-", " ")
            if new_title.startswith('-'):
                new_title = new_title[1:]

            if new_title.endswith('-'):
                new_title = new_title[:-1]

            print(" new_title = " + new_title)
            result = coll_name.update_many(
                {'_id': ObjectId(str(retVal['_id']))},
                {
                    "$set": {"title": str(new_title)}
                })

        return True, "ok"

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "KO"

'''
This function cleans the "elaindex" collection.
Because it is very heavy and data updates mean that some training courses are
no longer valid, every row of "elaindex" that refers to a training course
that no longer exists in the "myclass" collection must be deleted.
'''
def clean_ElaIndex():
    try:
        index_coll = MYSY_GV.dbname["elaindex"]
        class_coll = MYSY_GV.dbname["myclass"]
        list_to_delete = []
        i = 0

        for val in index_coll.find({}, {"id_formation": 1}):
            tmp_count = class_coll.count_documents({'external_code': str(val["id_formation"])})
            #print(" val = "+str(val))
            if tmp_count <= 0 and str(val["id_formation"]) not in list_to_delete:
                list_to_delete.append(str(val["id_formation"]))
                i = i + 1

        # Deletion step:
        #print(" lancement suppression de "+str(list_to_delete))
        nb_avant_delete = index_coll.count_documents({})
        #print(" AVANT SUPPRESSION ON A "+str(nb_avant_delete)+" LIGNE DANS la collection index")
        query = {"id_formation": {"$in": list_to_delete}}
        d = index_coll.delete_many(query)
        print(d.deleted_count, " documents deleted !!")

        return True, "clean index OK"

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "clean index KO"

"""
|
||
Cette fonction nettoye les mauvaises internal url
|
||
"""
|
||
def clean_internal_url():
|
||
try:
|
||
|
||
nb = 0
|
||
class_coll = MYSY_GV.dbname["myclass"]
|
||
search = str("%")
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
for x in class_coll.find({'internal_url': {'$regex': search_expr}}):
|
||
nb = nb+1
|
||
clean_url = str(x['internal_url']).replace("%","")
|
||
ret_val = class_coll.find_one_and_update({'_id': ObjectId(x['_id']), },
|
||
{"$set": {'internal_url': str(clean_url)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
search = str(":")
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
for x in class_coll.find({'internal_url': {'$regex': search_expr}}):
|
||
nb = nb + 1
|
||
clean_url = str(x['internal_url']).replace(":", "")
|
||
ret_val = class_coll.find_one_and_update({'_id': ObjectId(x['_id']), },
|
||
{"$set": {'internal_url': str(clean_url)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
search = str("--")
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
for x in class_coll.find({'internal_url': {'$regex': search_expr}}):
|
||
nb = nb + 1
|
||
clean_url = str(x['internal_url']).replace("--", "-")
|
||
ret_val = class_coll.find_one_and_update({'_id': ObjectId(x['_id']), },
|
||
{"$set": {'internal_url': str(clean_url)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
return True, "clean internal url OK de "+str(nb)+" url"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "clean internal url KO"
|
||
|
||
'''
|
||
Cette fonction supprime les emoji des text
|
||
'''
|
||
def clean_emoji(sentence=None):
|
||
try:
|
||
emoji_pattern = re.compile("["
|
||
u"\U0001F600-\U0001F64F" # emoticons
|
||
u"\U0001F300-\U0001F5FF" # symbols & pictographs
|
||
u"\U0001F680-\U0001F6FF" # transport & map symbols
|
||
u"\U0001F1E0-\U0001F1FF" # flags (iOS)
|
||
"]+", flags=re.UNICODE)
|
||
|
||
|
||
new_sentence = emoji_pattern.sub(r'', sentence)
|
||
|
||
return new_sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return ""
|
||
|
||
|
||
|
||
'''
|
||
Fonction d'enregistrement d'une image d'un profils
|
||
'''
|
||
def recordImage(file=None, Folder=None, diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type',]
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'type']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
myuserrecid = ""
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
|
||
mytype = ""
|
||
# recuperation des paramettre
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
|
||
# Recuperation du recid de la personne
|
||
if( mytype == "user"):
|
||
user_recid = get_user_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
elif( mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partner")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
myuserrecid = user_recid
|
||
|
||
|
||
status, saved_file = Upload_Save_IMG_File(file, Folder)
|
||
if (status == False):
|
||
return False, "Impossible d'inserer les formations en masse "
|
||
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file+". le token est :"+str(mytoken ))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
|
||
|
||
with open(saved_file, "rb") as imageFile:
|
||
|
||
#strimg = base64.b64encode(imageFile.read())
|
||
strimg = base64.b64encode(imageFile.read())
|
||
|
||
new_diction = {}
|
||
new_diction['img'] = strimg
|
||
new_diction['type'] = 'profil'
|
||
new_diction['recid'] = myuserrecid
|
||
new_diction['valide'] = '1'
|
||
new_diction['date_update'] = str(datetime.now())
|
||
new_diction['locked'] = "0"
|
||
|
||
ret_val = coll_name.find_one_and_update({"recid":myuserrecid, "valide":"1", "locked":"0"},
|
||
{"$set": {"img":strimg, "date_update":str(datetime.now()),
|
||
"type":"profil","recid":myuserrecid, "valide":"1",
|
||
"locked":"0" }},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
if ret_val and ret_val['_id']:
|
||
myprint(" L'image a bien ete enregistrée. ")
|
||
return True
|
||
else:
|
||
myprint(" IMPOSSIBLE D'ENREGISTRER L'IMAGE")
|
||
return False
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
'''
|
||
Fonction d'enregistrement d'une image d'un profils d'une formation
|
||
'''
|
||
def recordClassImage(file=None, Folder=None, diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type', 'class_external_code', 'related_collection', 'related_collection_recid']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'type', 'class_external_code', 'related_collection', 'related_collection_recid']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
# recuperation des paramettre
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data({'token':str(diction['token'])})
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
myuserrecid = str(my_partner['recid'])
|
||
|
||
status, saved_file = Upload_Save_IMG_File(file, Folder)
|
||
if (status == False):
|
||
return False, "Impossible d'inserer l'image de la formation "
|
||
|
||
# " Lecture du fichier "
|
||
print(" Lecture du fichier : " + saved_file + ". le token est :" + str(mytoken))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
|
||
with open(saved_file, "rb") as imageFile:
|
||
|
||
# strimg = base64.b64encode(imageFile.read())
|
||
strimg = base64.b64encode(imageFile.read())
|
||
|
||
new_diction = {}
|
||
new_diction['img'] = strimg
|
||
new_diction['type'] = 'profil_class'
|
||
new_diction['recid'] = myuserrecid
|
||
new_diction['class_external_code'] = myclassexternalcode
|
||
new_diction['valide'] = '1'
|
||
new_diction['date_update'] = str(datetime.now())
|
||
new_diction['locked'] = "0"
|
||
|
||
new_diction['partner_owner_recid'] = str(my_partner['recid'])
|
||
new_diction['update_by'] = str(my_partner['_id'])
|
||
new_diction['related_collection'] = str(diction['related_collection'])
|
||
new_diction['related_collection_recid'] = str(diction['related_collection_recid'])
|
||
|
||
|
||
ret_val = coll_name.find_one_and_update({"partner_owner_recid": str(my_partner['recid']), "valide": "1", "locked": "0", "related_collection":str(diction['related_collection']),
|
||
"related_collection_recid": str(diction['related_collection_recid'])},
|
||
{"$set": new_diction},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if ret_val and ret_val['_id']:
|
||
myprint(" L'image de la formation "+str(myclassexternalcode)+" a bien ete enregistrée. ")
|
||
return True, " L'image de la formation "+str(myclassexternalcode)+" a bien ete enregistrée. "
|
||
else:
|
||
myprint(" IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "+str(myclassexternalcode))
|
||
return False, "IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "+str(myclassexternalcode)
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,"IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "+str(myclassexternalcode)
|
||
|
||
|
||
"""
|
||
Recriture de la fonction de base pour pouvoir enregistrer une image
|
||
avec les paramettre suivant
|
||
- related_collection
|
||
- related_collection_recid
|
||
|
||
"""
|
||
def recordClassImage_v2(file=None, Folder=None, diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'related_collection', 'related_collection_recid', 'image_recid', 'type_img']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'related_collection', 'related_collection_recid', 'type_img']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
type_img = ""
|
||
if ("type_img" in diction.keys()):
|
||
if diction['type_img']:
|
||
type_img = diction['type_img']
|
||
|
||
if( type_img not in MYSY_GV.BDD_STOCKAGE_IMG):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + type_img + "' n'est pas autorisé. les valeurs acceptées sont "+str(MYSY_GV.BDD_STOCKAGE_IMG) )
|
||
return False, " Le type d'image n'est pas autorisé. les valeurs acceptées sont "+str(MYSY_GV.BDD_STOCKAGE_IMG)
|
||
|
||
related_collection = ""
|
||
if ("related_collection" in diction.keys()):
|
||
if diction['related_collection']:
|
||
related_collection = diction['related_collection']
|
||
|
||
|
||
related_collection_recid = ""
|
||
if ("related_collection_recid" in diction.keys()):
|
||
if diction['related_collection_recid']:
|
||
related_collection_recid = diction['related_collection_recid']
|
||
|
||
image_recid = ""
|
||
if ("image_recid" in diction.keys()):
|
||
if diction['image_recid']:
|
||
image_recid = diction['image_recid']
|
||
|
||
if(len(str(image_recid)) <= 0 ):
|
||
# cette image n'a jamais ete enregistrée
|
||
# Creation du RecId
|
||
image_recid = create_user_recid()
|
||
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
|
||
status, saved_file = Upload_Save_IMG_File(file, Folder)
|
||
if (status is False):
|
||
myprint(str(saved_file))
|
||
return False, str(saved_file)
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file + ". le token est :" + str(mytoken))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['mysy_images']
|
||
|
||
with open(saved_file, "rb") as imageFile:
|
||
|
||
# strimg = base64.b64encode(imageFile.read())
|
||
strimg = base64.b64encode(imageFile.read())
|
||
|
||
new_diction = {}
|
||
new_diction['img'] = strimg
|
||
new_diction['related_collection'] = related_collection
|
||
new_diction['related_collection_recid'] = related_collection_recid
|
||
new_diction['valide'] = '1'
|
||
new_diction['date_update'] = str(datetime.now())
|
||
new_diction['locked'] = "0"
|
||
new_diction['recid'] = image_recid
|
||
new_diction['type_img'] = type_img
|
||
|
||
|
||
|
||
ret_val = coll_name.find_one_and_update({"recid":image_recid, "related_collection_recid": related_collection_recid,
|
||
"valide": "1", "locked": "0", "related_collection":str(related_collection),
|
||
"type_img":str(type_img)},
|
||
{"$set": new_diction},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
if( ret_val is None or '_id' not in ret_val.keys()):
|
||
myprint(" IMPOSSIBLE D'ENREGISTRER L'IMAGE")
|
||
return False, "IMPOSSIBLE D'ENREGISTRER L'IMAGE "
|
||
|
||
return True, "L'image a été correctement enregistrée"
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,"IMPOSSIBLE D'ENREGISTRER L'IMAGE "
|
||
|
||
|
||
"""
|
||
Suppression d'une image
|
||
"""
|
||
def DeleteClassImage_v2(diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'image_recid', ]
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'image_recid']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
image_recid = ""
|
||
if ("image_recid" in diction.keys()):
|
||
if diction['image_recid']:
|
||
image_recid = diction['image_recid']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
if(len(str(image_recid)) <= 0 ):
|
||
# cette image n'a jamais ete enregistrée
|
||
# Creation du RecId
|
||
image_recid = create_user_recid()
|
||
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file + ". le token est :" + str(mytoken))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['mysy_images']
|
||
|
||
query_delete = {"recid":image_recid, 'related_collection_recid': str(my_partner['recid']) }
|
||
|
||
#print(" ### query_delete = ", query_delete)
|
||
ret_val = coll_name.delete_one({"recid":image_recid,
|
||
'related_collection_recid': str(my_partner['recid']) },)
|
||
|
||
#print(" ### detelete ret_val = ", ret_val)
|
||
|
||
|
||
#print(" ### recordClassImage_v2 :L'image a été correctement supprimée ")
|
||
return True, "L'image a été correctement supprimée"
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,"IMPOSSIBLE D'ENREGISTRER L'IMAGE "
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils
|
||
'''
|
||
def getRecodedImage(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token','type']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
myuserrecid = ""
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide ")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
'''
|
||
|
||
# Recuperation du recid de la personne
|
||
if( mytype == "user"):
|
||
user_recid = get_user_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
elif (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
myuserrecid = user_recid
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
for retVal in coll_name.find({'recid':myuserrecid, "type":"profil"} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils d'une formation
|
||
'''
|
||
def getRecodedClassImage(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type', 'class_external_code', 'related_collection', 'related_collection_recid']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token','type','class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
|
||
|
||
if (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
else:
|
||
myprint(str(inspect.stack()[0][3]) + " - l'API est incorrecte. le type n'est pas renseingé")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
myuserrecid = user_recid
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
qry = {'recid':myuserrecid, "type":"profil_class", "class_external_code":str(myclassexternalcode),
|
||
'related_collection_recid':str(diction['related_collection_recid']),
|
||
"related_collection":str(diction['related_collection'])}
|
||
|
||
print(" qry = ", qry)
|
||
|
||
for retVal in coll_name.find({'recid':myuserrecid, "type":"profil_class", "class_external_code":str(myclassexternalcode),
|
||
'related_collection_recid':str(diction['related_collection_recid']),
|
||
"related_collection":str(diction['related_collection'])} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils d'une formation sans que
|
||
l'utilisateur ne soit connecté.
|
||
|
||
par exemple lorsqu'une personne lambda affiche les formation
|
||
'''
|
||
def getRecodedClassImage_no_token(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = [ 'type', 'class_external_code', 'related_collection', 'related_collection_recid']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['type','class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
qry = {"type":"profil_class", "class_external_code":str(myclassexternalcode),
|
||
'related_collection_recid':str(diction['related_collection_recid']),
|
||
"related_collection":str(diction['related_collection'])}
|
||
|
||
print(" qry = ", qry)
|
||
|
||
for retVal in coll_name.find({ "type":"profil_class", "class_external_code":str(myclassexternalcode),
|
||
'related_collection_recid':str(diction['related_collection_recid']),
|
||
"related_collection":str(diction['related_collection'])} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
'''
|
||
Suppression d'une image de profils d'une formation
|
||
'''
|
||
def DeleteRecodedClassImage(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type', 'class_external_code', 'related_collection', 'related_collection_recid']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token','type','class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
|
||
if (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
else:
|
||
myprint(str(inspect.stack()[0][3]) + " - l'API est incorrecte. le type n'est pas renseingé")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
myuserrecid = user_recid
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
|
||
|
||
myquery = { "class_external_code":str(myclassexternalcode), 'related_collection':str(diction['related_collection']),
|
||
'related_collection_recid':str(diction['related_collection_recid'])}
|
||
|
||
delete_row = coll_name.delete_many(myquery)
|
||
myprint(str(inspect.stack()[0][3]) + "- "+str(delete_row.deleted_count)+" image logo supprimée(s) pour la formation class_external_code = "+str(myclassexternalcode))
|
||
|
||
return True, str(delete_row.deleted_count)+" supprimée(s)"
|
||
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils d'une formation pour le mode non connecté. c'est a dire depuis le front
|
||
'''
|
||
def getRecodedClassImage_from_front(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = [ 'class_external_code']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
for retVal in coll_name.find({"type":"profil_class", "class_external_code":str(myclassexternalcode)} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
|
||
'''
|
||
Cette fonction replace les caractères speciaux et ponctuation par des space
|
||
'''
|
||
def Ela_Remove_Ponct_Special_Caractere(sentence):
|
||
try:
|
||
text = sentence.lower() # mettre les mots en minuscule
|
||
|
||
# Retirons les caractères spéciaux :
|
||
text = re.sub(r"[,\!\?\%\(\)\/\"]", " ", text)
|
||
text = re.sub(r"\&\S*\s", " ", text)
|
||
text = re.sub(r"\-", " ", text)
|
||
|
||
list_noises = ['...', '.', ';', ',', ':', '!', '?', ')', '(', '[', ']', '\'', '"', '’', '`','©', '–',
|
||
'{', '}', '-', '=', '°', '#', '-', '/', '~', '&', '\\', '.', '^', '$', '*', '+','\\n','\n',
|
||
'?', '{', '}', '[', ']', '|', '(', ')', '-', '>', '<', '@','®', '™', '«', '»']
|
||
|
||
sentence = text
|
||
for noise in list_noises:
|
||
#print(" suppression de : '"+str(noise)+"' ")
|
||
sentence = sentence.replace(str(noise), " ")
|
||
|
||
#print(" AFTER REPLACE NOISES = "+str(sentence))
|
||
return True, sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e)+" - Line : "+ str(exc_tb.tb_lineno) )
|
||
return False, " Impossible Ela_Remove_Ponct_Special_Caractere"
|
||
|
||
|
||
|
||
|
||
'''
|
||
Cette fonction prend un titre (une phrase) et retrounre
|
||
l'internal url associé.
|
||
|
||
L'agoritheme de fonction prend un titre, supprime les caractère speciaux
|
||
la suffixe avec une variable aleatoire qui depend du "datetime.now()"
|
||
|
||
'''
|
||
def CreateInternalUrl(sentence=None):
|
||
try:
|
||
|
||
internal_url = ""
|
||
|
||
if( len(sentence) <= 0 ):
|
||
myprint(str(inspect.stack()[0][3]) + " - sentence est vide " )
|
||
return False, ""
|
||
|
||
my_internal_url = sentence
|
||
local_status, my_internal_url = Ela_Remove_Ponct_Special_Caractere(my_internal_url)
|
||
my_internal_url = unidecode(my_internal_url.lower())
|
||
my_internal_url = my_internal_url.replace("--", "-")
|
||
my_internal_url = my_internal_url.replace(" ", "-")
|
||
my_internal_url = my_internal_url.replace("/", "-")
|
||
|
||
if (my_internal_url.startswith('-')):
|
||
my_internal_url = my_internal_url[1:]
|
||
|
||
if (my_internal_url.endswith('-')):
|
||
my_internal_url = my_internal_url[:-1]
|
||
|
||
# Creation de la variable aleatoire en se basant sur le datetime now
|
||
suffix = hashlib.md5(str(datetime.now()).encode()).hexdigest()
|
||
|
||
final_internal_url = str(my_internal_url) + "-" + str(suffix[-3:])
|
||
final_internal_url = final_internal_url.replace("--", "-")
|
||
internal_url = final_internal_url
|
||
|
||
return True, internal_url
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
'''
|
||
Cette fonction va lire un fichier ou un collection tmp_class_metier(_id, metier) et va mettre à jour les metier
|
||
'''
|
||
def UpdateMetier():
|
||
try:
|
||
|
||
myclass_coll = MYSY_GV.dbname["myclass"]
|
||
myclass_tmp_metier_coll = MYSY_GV.dbname["tmp_class_metier"]
|
||
my_data = []
|
||
i = 0
|
||
for x in myclass_tmp_metier_coll.find():
|
||
|
||
mymetier = str(x['metier']).lower()
|
||
ret_val2 = myclass_coll.find_one_and_update({"_id": ObjectId(str(x['class_id']))},
|
||
{"$set": {"metier":mymetier }},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
if ( (ret_val2 is False) or (ret_val2['_id'] is False) ):
|
||
myprint("Impossible de mettre à jour la formation _id =" + str(x['class_id']) )
|
||
return False
|
||
i = i+1
|
||
|
||
print(str(i)+" Formation (metier) ont été mises à jour")
|
||
|
||
return True
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
"""
|
||
Cette fonction retroune les mots suggerés au format JSON
|
||
"""
|
||
def Get_Suggested_Word():
|
||
try:
|
||
|
||
nb_result = 0
|
||
insertObject = []
|
||
coll_name = MYSY_GV.dbname['search_suggestion_words']
|
||
|
||
for x in coll_name.find({}, {'_id':0}).sort([("display_rank", pymongo.DESCENDING), ("mot", pymongo.ASCENDING), ]):
|
||
nb_result = nb_result + 1
|
||
val_tmp = {}
|
||
val_tmp['id'] = str(nb_result)
|
||
val_tmp['name'] = str(x['mot']).lower()
|
||
insertObject.append(JSONEncoder().encode(val_tmp))
|
||
|
||
return True, insertObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Cette fonction retroune la liste des villes française en JSON
|
||
pour aider à la recherche.
|
||
Quand un utilisateur tape les 3 première lettre d'une ville, on l'aide...
|
||
"""
|
||
def Get_Suggested_Fr_Cities():
|
||
try:
|
||
|
||
nb_result = 0
|
||
insertObject = []
|
||
coll_name = MYSY_GV.dbname['ville_commune']
|
||
|
||
for x in coll_name.find({}, {'_id':0}).sort([ ("ville_commune", pymongo.ASCENDING),("population", pymongo.DESCENDING), ]):
|
||
nb_result = nb_result + 1
|
||
val_tmp = {}
|
||
val_tmp['id'] = str(nb_result)
|
||
val_tmp['ville'] = str(x['ville_commune']).lower()
|
||
insertObject.append(JSONEncoder().encode(val_tmp))
|
||
|
||
return True, insertObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
'''
|
||
Cette fonction prendre le recid d'un partenaire et retourne
|
||
- son pack
|
||
- le nombre de formation accordé
|
||
'''
|
||
def Partner_Get_pack_nbTraining(partner_recid):
|
||
try:
|
||
|
||
if( len(str(partner_recid)) <= 0):
|
||
return False, "", ""
|
||
|
||
partner_pack = ""
|
||
partner_pack_nb_formation = ""
|
||
coll_partner = MYSY_GV.dbname['partnair_account']
|
||
tmp = coll_partner.find({'recid':str(partner_recid)})
|
||
|
||
#print("tmp = " + str(tmp[0]))
|
||
|
||
if(tmp and tmp[0] and tmp[0]['pack_service'] and tmp[0]['nb_formation']):
|
||
partner_pack = tmp[0]['pack_service']
|
||
partner_pack_nb_formation = tmp[0]['nb_formation']
|
||
|
||
else:
|
||
return False, partner_pack, partner_pack_nb_formation
|
||
|
||
|
||
return True, partner_pack, partner_pack_nb_formation
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False, False
|
||
|
||
|
||
""""
|
||
Cette fonction prend un recid et retourne le nombre de formation deja actif.
|
||
"""
|
||
def Get_partner_nb_active_training(partner_recid):
|
||
try:
|
||
|
||
if( len(str(partner_recid)) <= 0):
|
||
return False, ""
|
||
|
||
partner_class_nb_formation = ""
|
||
coll_partner_class = MYSY_GV.dbname['myclass']
|
||
tmp = coll_partner_class.count_documents({'partner_owner_recid':str(partner_recid), 'valide':'1'})
|
||
|
||
return True, str(tmp)
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
""""
|
||
Cette fonction récupérer les mot du titre et de la description
|
||
pour remplir la collection des suggestions de mot
|
||
"""
|
||
|
||
def fillSuggestionCollection():
|
||
try:
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
for retVal in coll_name.find({'valide':'1'}).limit(10):
|
||
|
||
mytitle = str(retVal['title'])
|
||
mydesc = str(retVal['description'])
|
||
|
||
class_contact = str(mytitle)+". "+str(mydesc)
|
||
|
||
class_token = ElaSpacy.Ela_Tokenize(class_contact)
|
||
status, tab_tokens2 = ElaSpacy.Ela_remove_stop_words(class_token)
|
||
if (status is False):
|
||
break
|
||
|
||
status, tab_tokens3 = ElaSpacy.Ela_remove_pronoun(tab_tokens2)
|
||
if (status is False):
|
||
break
|
||
|
||
status, tab_tokens4 = ElaSpacy.Ela_stemmize_Class(tab_tokens3)
|
||
if (status is False):
|
||
break
|
||
|
||
print(" Pour "+str(retVal['title'])+" : On a "+str(tab_tokens4))
|
||
|
||
|
||
return True
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,
|
||
|
||
"""
|
||
Cette fonction attribut des notes
|
||
"""
|
||
def PutClassNote():
|
||
try:
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
i = 2
|
||
for retVal in coll_name.find({}):
|
||
user = retVal
|
||
if( i == 5 ):
|
||
i = 2
|
||
|
||
i = i+0.5
|
||
|
||
result = coll_name.update_many(
|
||
{'_id':ObjectId(str(user['_id']))},
|
||
{
|
||
"$set": {"note": str(i)}
|
||
})
|
||
'''print("raw:", result.raw_result)
|
||
print("acknowledged:", result.acknowledged)
|
||
print("matched_count:", result.matched_count)'''
|
||
|
||
return True, "ok"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "KO"
|
||
|
||
"""
|
||
Cette fonction essaye de convertir une chaine en date (yyyy-mm-dd)
|
||
"""
|
||
def TryToDateYYYMMDD(mydate):
|
||
try:
|
||
datetime.strptime(mydate, '%Y-%m-%d')
|
||
return True, datetime.strptime(mydate, '%Y-%m-%d')
|
||
|
||
except ValueError:
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Cette fonction prend une formation (myclass) et l'insert dans
|
||
la table de statistique
|
||
- diction_class : les données de la formation
|
||
- type_view : type d'affichage ("decouverte", "detail", etc)
|
||
- user_location : les coordonnées du demandeur
|
||
"""
|
||
def InsertStatistic(diction_class, type_view, user_location):
|
||
try:
|
||
|
||
mydata = {}
|
||
mydata['internal_url'] = diction_class['internal_url']
|
||
mydata['date_update'] = datetime.now()
|
||
mydata['type_view'] = str(type_view)
|
||
|
||
mydict_combined = {**diction_class, **mydata, **user_location}
|
||
mydict_combined['date_update'] = str(datetime.now())
|
||
mydict_combined['type_view'] = "summary"
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['user_recherche_result']
|
||
|
||
if("_id" in mydict_combined.keys() ):
|
||
del mydict_combined['_id']
|
||
|
||
ret_val = coll_name.insert_one(mydict_combined)
|
||
if (ret_val is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - WARNING : Impossbile d'inserer la formation " + str(diction_class['internal_url']) + " dans les statistiques")
|
||
|
||
|
||
return True
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
"""
|
||
Suppression du compte de LS
|
||
"""
|
||
def removeLSaccount():
|
||
try:
|
||
coll = MYSY_GV.dbname["partnair_account"]
|
||
myquery = {"email": "ls.lutmanmicca@gmail.com"}
|
||
x = coll.delete_many(myquery)
|
||
print(x.deleted_count, " documents deleted.")
|
||
return True, str(x.deleted_count) + " documents deleted."
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
"""
|
||
Suppression du compte de Nicole"""
|
||
def removeNBaccount():
|
||
try:
|
||
coll = MYSY_GV.dbname["partnair_account"]
|
||
myquery = {"email": "nicole.beauchesne@hotmail.fr"}
|
||
x = coll.delete_many(myquery)
|
||
print(x.deleted_count, " documents deleted.")
|
||
return True, str(x.deleted_count) + " documents deleted."
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction verifier si une url est une image.
|
||
elle permet si besoin de verifier la taille de l'image
|
||
Cette fonction sera dans un premier temps utilisée
|
||
dans la gestion des images associées à une formation.
|
||
"""
|
||
def TryUrlImage(url=None):
|
||
try:
|
||
img = Image.open(requests.get(url, stream=True).raw)
|
||
return True, img
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Suite à un bug dans la collection : user_recherche_result
|
||
Le champ "partner_owner_recid" n'existe pas . \
|
||
Cette fonction va corriger en cela en mettant la bonne valeur\
|
||
"""
|
||
def correction_collection():
|
||
try:
|
||
nb_line = 0
|
||
my_coll = MYSY_GV.dbname['user_recherche_result']
|
||
my_coll_class = MYSY_GV.dbname['myclass']
|
||
|
||
for val in my_coll.find():
|
||
|
||
if ("external_code" in val.keys()):
|
||
if val['external_code']:
|
||
training_external_code = val['external_code']
|
||
|
||
#print(" TRAITEMENT DE "+training_external_code)
|
||
val_tmp = my_coll_class.count_documents({'external_code':str(training_external_code)})
|
||
if( val_tmp > 0 ):
|
||
class_part_recid = my_coll_class.find({'external_code':str(training_external_code)})
|
||
if( class_part_recid and class_part_recid[0]) :
|
||
#print(" pour la formation : "+str(training_external_code)+" --- le partner_owner_recid = "+
|
||
# str(class_part_recid[0]))
|
||
|
||
x = class_part_recid[0]
|
||
|
||
if ("partner_owner_recid" in x.keys()):
|
||
if x['partner_owner_recid']:
|
||
|
||
result = my_coll.update_many(
|
||
{'external_code': str(training_external_code)},
|
||
{
|
||
"$set": {"partner_owner_recid": str(x['partner_owner_recid'])}
|
||
})
|
||
|
||
if (result.matched_count > 0):
|
||
nb_line = nb_line + 1
|
||
print(" OK "+str(training_external_code))
|
||
else:
|
||
print(' PBBBBBBBBBB pour external_code = '+str(training_external_code))
|
||
else:
|
||
result = my_coll.update_many(
|
||
{'external_code': str(training_external_code)},
|
||
{
|
||
"$set": {"partner_owner_recid": "todelete"}
|
||
})
|
||
|
||
if (result.matched_count > 0):
|
||
nb_line = nb_line + 1
|
||
print(" TO DELETE " + str(training_external_code))
|
||
return True, str(nb_line)+" ont été traitées "
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
"""
|
||
Cette fonction enleve les caractères non ascii
|
||
"""
|
||
def remove_non_ascii(string):
|
||
return ''.join(char for char in string if ord(char) < 128)
|
||
|
||
|
||
"""
|
||
Pour des test, et seulement pour les test
|
||
cette fonction permet de mettre les formations du user "ls"
|
||
en première page.
|
||
|
||
Cette fonction doit etre inactive en production
|
||
"""
|
||
def PutLSClassFirst():
|
||
try:
|
||
nb_line = 0
|
||
# Recuperation du partenaire recid
|
||
coll_account = MYSY_GV.dbname["partnair_account"]
|
||
myquery = {"email": "ls.lutmanmicca@gmail.com"}
|
||
|
||
for val_tmp in coll_account.find(myquery):
|
||
local_recid = val_tmp['recid']
|
||
coll_class = MYSY_GV.dbname["myclass"]
|
||
|
||
quere_reset_ranking = {{}, {"$set": {"display_rank": "20"}}}
|
||
result = coll_class.update_many(quere_reset_ranking)
|
||
|
||
quere_update = {{"partner_owner_recid": str(local_recid)}, {"$set": {"freeacces": "1", "display_rank": "70",
|
||
"isalaune": "1"}}}
|
||
result = coll_class.update_many(quere_update)
|
||
|
||
print("raw:", result.raw_result)
|
||
print("acknowledged:", result.acknowledged)
|
||
print("matched_count:", result.matched_count)
|
||
|
||
nb_line = result.matched_count
|
||
|
||
|
||
return True, str(nb_line) + " formations ont mise sur la page principale. "
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Validation de l'emargement d'un participant à une formation
|
||
"""
|
||
def UserEmargementValidation(diction):
|
||
try:
|
||
field_list_obligatoire = ['value', 'session']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Impossible de valider l'emargement"
|
||
|
||
"""
|
||
value = la valeur du "_id" de la ligne concernée
|
||
|
||
"""
|
||
coll_emargement = MYSY_GV.dbname['emargement']
|
||
now = datetime.now()
|
||
|
||
mydata = {}
|
||
mydata['date_update'] = str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
|
||
mydata['statut'] = "2"
|
||
mydata['is_present'] = True
|
||
mydata['date_emargement'] = str(datetime.now().strftime("%d/%m/%Y %H:%M:%S"))
|
||
mydata['update_by'] = ""
|
||
|
||
if( diction['session'] == "1" ):
|
||
|
||
ret_val = coll_emargement.find_one_and_update({'_id': ObjectId(str( diction['value']))},
|
||
{"$set":mydata },
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
print(ret_val)
|
||
|
||
|
||
|
||
|
||
return True, " Emargement ok "
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Cette fonction prends une session de formation,
|
||
Créer l'ID fonctionnel associé : session_id
|
||
|
||
la regles est :
|
||
Session_id = date_du+date_au+code_postal+adresse
|
||
/!\ : les dates sont au formation : jjmmaaaa.
|
||
exemple de session_id : 10022023150220230enligne => du 10/02/2023 au 15/02/2023, au CP : 0, à l'adresse : 'en ligne'
|
||
|
||
"""
|
||
def CreateTrainingSession_id(diction):
|
||
try:
|
||
|
||
field_list_obligatoire = ['date_du', 'date_au', 'code_postal', 'adresse']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Impossible de créer la session de formation - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Impossible de créer la session de formation"
|
||
|
||
if( CheckisDate(str(diction['date_du'])) is False or CheckisDate(str(diction['date_au'])) is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - Les dates de debut ou de fin sont incorrecte. Les formats attendus : jj/mm/aaaa. Verifiez les dates : "+ str(diction['date_du'])+" et "+str(diction['date_au']))
|
||
return False, " Impossible de créer la session de formation : Les dates de debut ou de fin sont incorrecte. Les formats attendus : jj/mm/aaaa.Verifiez les dates : "+ str(diction['date_du'])+" et "+str(diction['date_au'])
|
||
|
||
|
||
## Verification de la cohérence des dates. Date_du doit <= Date_au
|
||
if (datetime.strptime(str(diction['date_du']).strip(), '%d/%m/%Y') > datetime.strptime(str(diction['date_au']).strip(), '%d/%m/%Y')):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " - Impossible de créer la session de formation : La date debut "+str(str(diction['date_du'])[0:10])+" est postérieur à la date de fin "+str(diction['date_au'])[0:10]+" pour la formation ")
|
||
|
||
return False, " Impossible de créer la session de formation : La date debut "+str(diction['date_du'])[0:10]+" est postérieur à la date de fin "+str(diction['date_au'])[0:10]+" pour la formation "
|
||
|
||
session_id = str(
|
||
str(diction['date_du']).strip() +
|
||
str(diction['date_au']).strip() +
|
||
str(diction['code_postal']).strip() +
|
||
str(diction['adresse']).strip() ).replace(' ', '')
|
||
|
||
|
||
session_id = remove_non_ascii(session_id)
|
||
session_id = re.sub(r'[^a-zA-Z0-9]', '', session_id)
|
||
|
||
return True, session_id
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Cette fonction prend un email, et une session de formation
|
||
puis retourne toutes les informations relatif au couple (personne / session formation)
|
||
"""
|
||
def GetAttendeeDetail_perSession(diction):
|
||
try:
|
||
field_list_obligatoire = ['session_id', 'token', 'attendee_email', 'internal_url']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Impossible de récupérer les informations detaillées"
|
||
|
||
my_token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
my_token = diction['token']
|
||
|
||
# Verifier la validité du token
|
||
retval = check_partner_token_validity("", my_token)
|
||
if retval is False:
|
||
return "Err_Connexion", " La session de connexion n'est pas valide"
|
||
|
||
|
||
# Recuperation du recid du partenaire
|
||
partner_recid = get_parnter_recid_from_token(str(my_token))
|
||
if partner_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
|
||
return False, " Les informations d'identification sont invalides"
|
||
|
||
RetObject = []
|
||
|
||
qry_filter = {'session_id':str(diction['session_id']),'email':str(diction['attendee_email']),}
|
||
|
||
""""
|
||
Verification qu'il n'y a q'une seule inscription pour ce critère
|
||
"""
|
||
|
||
qry_filter_count = MYSY_GV.dbname['inscription'].count_documents(qry_filter)
|
||
if( qry_filter_count > 1 ):
|
||
myprint(str(inspect.stack()[0][3]) + " - Données d'inscription incohérentes. il y a plus d'une inscription pour les critère "+str(qry_filter))
|
||
return False, "Données d'inscription incohérentes"
|
||
|
||
|
||
pipe_qry = ([{'$match':qry_filter},
|
||
{'$lookup':
|
||
{
|
||
'from': 'session_formation',
|
||
'let': {'session_id': "$session_id", 'class_internal_url': '$class_internal_url'},
|
||
|
||
'pipeline': [
|
||
{'$match':
|
||
{'$expr':
|
||
{'$and':
|
||
[
|
||
{'$eq': ['$_id', { '$toObjectId': '$$session_id' }]},
|
||
{'$eq': ["$class_internal_url", "$$class_internal_url"]},
|
||
{'$eq': ["$valide", "1"]}
|
||
]
|
||
}
|
||
}
|
||
},
|
||
|
||
],
|
||
'as': 'inscription_collectoin'
|
||
}
|
||
},
|
||
{'$lookup': {'from': 'apprenant', 'let': {'apprenant_id': '$apprenant_id','partner_owner_recid': '$partner_owner_recid'},
|
||
'pipeline': [{'$match': {'$expr': {'$and': [{'$eq': ['$valide', '1']},
|
||
{'$eq': ['$_id', {'$convert': {
|
||
'input': '$$apprenant_id',
|
||
'to': 'objectId',
|
||
'onError': {'error': 'true'},
|
||
'onNull': {
|
||
'isnull': 'true'}}}]}]}}}],
|
||
'as': 'apprenant_collection'}}
|
||
])
|
||
|
||
|
||
#print(" ### GetAttendeeDetail_perSession ici pipe_qry = ",pipe_qry)
|
||
# Recuperation des infos de la formation
|
||
"""local_Insc_retval = MYSY_GV.dbname['inscription'].find_one({'session_id':str(diction['session_id']),
|
||
'email':str(diction['attendee_email']),
|
||
'class_internal_url':str(diction['internal_url']),})
|
||
"""
|
||
|
||
|
||
for local_Insc_retval in MYSY_GV.dbname['inscription'].aggregate(pipe_qry) :
|
||
|
||
#print(" ### local_Insc_retval laa== ", local_Insc_retval)
|
||
|
||
            if (local_Insc_retval is None):
                myprint(str(inspect.stack()[0][3]) + " - Impossible d'identifier l'inscription pour cette session (2)")
                return False, " Impossible d'identifier la session (2) "
|
||
my_retrun_dict = {}
|
||
|
||
if ('inscription_collectoin' in local_Insc_retval.keys() and len(local_Insc_retval['inscription_collectoin']) > 0):
|
||
|
||
if ("session_id" in local_Insc_retval.keys()):
|
||
my_retrun_dict['session_id'] = local_Insc_retval['session_id']
|
||
|
||
if ("apprenant_id" in local_Insc_retval.keys()):
|
||
my_retrun_dict['apprenant_id'] = local_Insc_retval['apprenant_id']
|
||
else:
|
||
my_retrun_dict['apprenant_id'] = ""
|
||
|
||
|
||
if ("invoiced" in local_Insc_retval.keys()):
|
||
my_retrun_dict['invoiced'] = str(local_Insc_retval['invoiced']).lower()
|
||
else:
|
||
my_retrun_dict['invoiced'] = "0"
|
||
|
||
|
||
if ("civilite" in local_Insc_retval.keys()):
|
||
my_retrun_dict['civilite'] = str(local_Insc_retval['civilite']).lower()
|
||
else:
|
||
my_retrun_dict['civilite'] = ""
|
||
|
||
if ("date_naissance" in local_Insc_retval.keys()):
|
||
my_retrun_dict['date_naissance'] = local_Insc_retval['date_naissance']
|
||
else:
|
||
my_retrun_dict['date_naissance'] = ""
|
||
|
||
|
||
if ("code_session" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||
my_retrun_dict['code_session'] = str(local_Insc_retval['inscription_collectoin'][0]['code_session'])
|
||
|
||
if ("date_debut" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||
my_retrun_dict['date_du'] = str(local_Insc_retval['inscription_collectoin'][0]['date_debut'])[0:10]
|
||
|
||
if ("date_fin" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||
my_retrun_dict['date_au'] = str(local_Insc_retval['inscription_collectoin'][0]['date_fin'])[0:10]
|
||
|
||
|
||
if ("tuteur1_civilite" not in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_civilite'] = ""
|
||
elif (local_Insc_retval['tuteur1_civilite'] not in MYSY_GV.CIVILITE):
|
||
# la civilité n'est pas une de celle autorisée, alors je renvoie vide
|
||
my_retrun_dict['tuteur1_civilite'] = ""
|
||
else:
|
||
my_retrun_dict['tuteur1_civilite'] = local_Insc_retval['tuteur1_civilite']
|
||
|
||
|
||
if ("tuteur2_civilite" not in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_civilite'] = ""
|
||
elif (local_Insc_retval['tuteur2_civilite'] not in MYSY_GV.CIVILITE):
|
||
# la civilité n'est pas une de celle autorisée, alors je renvoie vide
|
||
my_retrun_dict['tuteur2_civilite'] = ""
|
||
else:
|
||
my_retrun_dict['tuteur2_civilite'] = local_Insc_retval['tuteur2_civilite']
|
||
|
||
if ("ville" in local_Insc_retval.keys()):
|
||
my_retrun_dict['ville'] = local_Insc_retval['ville']
|
||
else:
|
||
my_retrun_dict['tuteur1_nom'] = ""
|
||
|
||
if ("code_postal" in local_Insc_retval.keys()):
|
||
my_retrun_dict['code_postal'] = local_Insc_retval['code_postal']
|
||
else:
|
||
my_retrun_dict['code_postal'] = ""
|
||
|
||
if ("pays" in local_Insc_retval.keys()):
|
||
my_retrun_dict['pays'] = local_Insc_retval['pays']
|
||
else:
|
||
my_retrun_dict['pays'] = ""
|
||
|
||
if ("type_apprenant" in local_Insc_retval.keys()):
|
||
my_retrun_dict['type_apprenant'] = local_Insc_retval['type_apprenant']
|
||
else:
|
||
my_retrun_dict['type_apprenant'] = ""
|
||
|
||
# Tuteurs
|
||
if ("tuteur1_nom" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_nom'] = local_Insc_retval['tuteur1_nom']
|
||
else:
|
||
my_retrun_dict['tuteur1_nom'] = ""
|
||
|
||
if ("tuteur1_prenom" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_prenom'] = local_Insc_retval['tuteur1_prenom']
|
||
else:
|
||
my_retrun_dict['tuteur1_prenom'] = ""
|
||
|
||
if ("tuteur1_email" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_email'] = local_Insc_retval['tuteur1_email']
|
||
else:
|
||
my_retrun_dict['tuteur1_email'] = ""
|
||
|
||
if ("tuteur1_telephone" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_telephone'] = local_Insc_retval['tuteur1_telephone']
|
||
else:
|
||
my_retrun_dict['tuteur1_telephone'] = ""
|
||
|
||
if ("tuteur1_adresse" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_adresse'] = local_Insc_retval['tuteur1_adresse']
|
||
else:
|
||
my_retrun_dict['tuteur1_adresse'] = ""
|
||
|
||
if ("tuteur1_cp" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_cp'] = local_Insc_retval['tuteur1_cp']
|
||
else:
|
||
my_retrun_dict['tuteur1_cp'] = ""
|
||
|
||
if ("tuteur1_ville" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_ville'] = local_Insc_retval['tuteur1_ville']
|
||
else:
|
||
my_retrun_dict['tuteur1_ville'] = ""
|
||
|
||
if ("tuteur1_pays" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_pays'] = local_Insc_retval['tuteur1_pays']
|
||
else:
|
||
my_retrun_dict['tuteur1_pays'] = ""
|
||
|
||
if ("tuteur1_include_com" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur1_include_com'] = local_Insc_retval['tuteur1_include_com']
|
||
else:
|
||
my_retrun_dict['tuteur1_include_com'] = ""
|
||
|
||
if ("tuteur2_nom" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_nom'] = local_Insc_retval['tuteur2_nom']
|
||
else:
|
||
my_retrun_dict['tuteur2_nom'] = ""
|
||
|
||
if ("tuteur2_prenom" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_prenom'] = local_Insc_retval['tuteur2_prenom']
|
||
else:
|
||
my_retrun_dict['tuteur2_prenom'] = ""
|
||
|
||
if ("tuteur2_email" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_email'] = local_Insc_retval['tuteur2_email']
|
||
else:
|
||
my_retrun_dict['tuteur2_email'] = ""
|
||
|
||
if ("tuteur2_telephone" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_telephone'] = local_Insc_retval['tuteur2_telephone']
|
||
else:
|
||
my_retrun_dict['tuteur2_telephone'] = ""
|
||
|
||
if ("tuteur2_adresse" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_adresse'] = local_Insc_retval['tuteur2_adresse']
|
||
else:
|
||
my_retrun_dict['tuteur2_adresse'] = ""
|
||
|
||
if ("tuteur2_cp" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_cp'] = local_Insc_retval['tuteur2_cp']
|
||
else:
|
||
my_retrun_dict['tuteur2_cp'] = ""
|
||
|
||
if ("tuteur2_ville" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_ville'] = local_Insc_retval['tuteur2_ville']
|
||
else:
|
||
my_retrun_dict['tuteur2_ville'] = ""
|
||
|
||
if ("tuteur2_pays" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_pays'] = local_Insc_retval['tuteur2_pays']
|
||
else:
|
||
my_retrun_dict['tuteur2_pays'] = ""
|
||
|
||
if ("tuteur2_include_com" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_include_com'] = local_Insc_retval['tuteur2_include_com']
|
||
else:
|
||
my_retrun_dict['tuteur2_include_com'] = ""
|
||
|
||
|
||
if ("certification_send_date" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['certification_send_date'])):
|
||
my_retrun_dict['certification_send_date'] = str(local_Insc_retval['certification_send_date'])[0:10]
|
||
|
||
local_adresse = ""
|
||
if ("adresse" in local_Insc_retval.keys()):
|
||
local_adresse = local_Insc_retval['adresse']
|
||
my_retrun_dict['adresse'] = local_adresse
|
||
|
||
local_nom = ""
|
||
if ("nom" in local_Insc_retval.keys()):
|
||
local_nom = local_Insc_retval['nom']
|
||
my_retrun_dict['nom'] = local_nom
|
||
|
||
local_prenom = ""
|
||
if ("prenom" in local_Insc_retval.keys()):
|
||
local_prenom = local_Insc_retval['prenom']
|
||
my_retrun_dict['prenom'] = local_prenom
|
||
|
||
if ("date_naissance" in local_Insc_retval.keys()):
|
||
my_retrun_dict['date_naissance'] = local_Insc_retval['date_naissance']
|
||
else:
|
||
my_retrun_dict['date_naissance'] = ""
|
||
|
||
if ("adresse" in local_Insc_retval.keys()):
|
||
my_retrun_dict['adresse'] = local_Insc_retval['adresse']
|
||
else:
|
||
my_retrun_dict['adresse'] = ""
|
||
|
||
if ("code_postal" in local_Insc_retval.keys()):
|
||
my_retrun_dict['code_postal'] = local_Insc_retval['code_postal']
|
||
else:
|
||
my_retrun_dict['code_postal'] = ""
|
||
|
||
if ("ville" in local_Insc_retval.keys()):
|
||
my_retrun_dict['ville'] = local_Insc_retval['ville']
|
||
else:
|
||
my_retrun_dict['ville'] = ""
|
||
|
||
if ("pays" in local_Insc_retval.keys()):
|
||
my_retrun_dict['pays'] = local_Insc_retval['pays']
|
||
else:
|
||
my_retrun_dict['pays'] = ""
|
||
|
||
|
||
local_employeur = ""
|
||
if ("employeur" in local_Insc_retval.keys()):
|
||
local_employeur = local_Insc_retval['employeur']
|
||
my_retrun_dict['employeur'] = local_employeur
|
||
|
||
local_telephone = ""
|
||
if ("telephone" in local_Insc_retval.keys()):
|
||
local_telephone = local_Insc_retval['telephone']
|
||
my_retrun_dict['telephone'] = local_telephone
|
||
|
||
local_email = ""
|
||
if ("email" in local_Insc_retval.keys()):
|
||
local_email = local_Insc_retval['email']
|
||
my_retrun_dict['email'] = local_email
|
||
|
||
local_modefinancement = ""
|
||
if ("modefinancement" in local_Insc_retval.keys()):
|
||
local_modefinancement = local_Insc_retval['modefinancement']
|
||
my_retrun_dict['modefinancement'] = local_modefinancement
|
||
|
||
local_opco = ""
|
||
if ("opco" in local_Insc_retval.keys()):
|
||
local_opco = local_Insc_retval['opco']
|
||
my_retrun_dict['opco'] = local_opco
|
||
|
||
# Recuperation des champs spécifiques se trouvant dans le dictionnaire. ils commencent tous par 'my_'
|
||
for val in local_Insc_retval.keys():
|
||
if (val.startswith('my_')):
|
||
my_retrun_dict[str(val)] = local_Insc_retval[str(val)]
|
||
|
||
local_class_internal_url = ""
|
||
if ("class_internal_url" in local_Insc_retval.keys()):
|
||
local_class_internal_url = local_Insc_retval['class_internal_url']
|
||
my_retrun_dict['class_internal_url'] = local_class_internal_url
|
||
|
||
local_status = ""
|
||
if ("status" in local_Insc_retval.keys()):
|
||
local_status = local_Insc_retval['status']
|
||
my_retrun_dict['status'] = local_status
|
||
|
||
local_price = ""
|
||
if ("price" in local_Insc_retval.keys()):
|
||
local_price = local_Insc_retval['price']
|
||
my_retrun_dict['price'] = local_price
|
||
|
||
local_inscription_validation_date = ""
|
||
if ("inscription_validation_date" in local_Insc_retval.keys()):
|
||
local_inscription_validation_date = local_Insc_retval['inscription_validation_date']
|
||
my_retrun_dict['inscription_validation_date'] = local_inscription_validation_date
|
||
|
||
if ("eval_eval" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_eval'])):
|
||
my_retrun_dict['eval_eval'] = local_Insc_retval['eval_eval']
|
||
|
||
if ("eval_note" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_note'])):
|
||
my_retrun_dict['eval_note'] = local_Insc_retval['eval_note']
|
||
|
||
if ("eval_pedagogie" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_pedagogie'])):
|
||
my_retrun_dict['eval_pedagogie'] = local_Insc_retval['eval_pedagogie']
|
||
|
||
if ("eval_date" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_date'])):
|
||
my_retrun_dict['eval_date'] = str(local_Insc_retval['eval_date'])[0:10]
|
||
|
||
client_rattachement_id = ""
|
||
client_rattachement_nom = ""
|
||
if ("client_rattachement_id" in local_Insc_retval.keys()):
|
||
if( local_Insc_retval['client_rattachement_id'] and str(local_Insc_retval['client_rattachement_id']) != 'undefined' ):
|
||
local_client_retval_data = MYSY_GV.dbname['partner_client'].find_one({'_id': ObjectId(str(local_Insc_retval['client_rattachement_id'])),
|
||
'valide': '1', 'locked': '0'}, {'_id':1, 'nom':1})
|
||
|
||
if( local_client_retval_data is not None):
|
||
client_rattachement_id = local_client_retval_data['_id']
|
||
client_rattachement_nom = local_client_retval_data['nom']
|
||
|
||
my_retrun_dict['client_rattachement_id'] = client_rattachement_id
|
||
my_retrun_dict['client_rattachement_nom'] = client_rattachement_nom
|
||
|
||
#----
|
||
financeur_rattachement_id = ""
|
||
financeur_rattachement_nom = ""
|
||
if ("financeur_rattachement_id" in local_Insc_retval.keys()):
|
||
if (local_Insc_retval['financeur_rattachement_id'] and str(
|
||
local_Insc_retval['financeur_rattachement_id']) != 'undefined'):
|
||
local_client_retval_data = MYSY_GV.dbname['partner_client'].find_one(
|
||
{'_id': ObjectId(str(local_Insc_retval['financeur_rattachement_id'])),
|
||
'valide': '1', 'locked': '0'}, {'_id': 1, 'nom': 1})
|
||
|
||
if (local_client_retval_data is not None):
|
||
financeur_rattachement_id = local_client_retval_data['_id']
|
||
financeur_rattachement_nom = local_client_retval_data['nom']
|
||
|
||
my_retrun_dict['financeur_rattachement_id'] = financeur_rattachement_id
|
||
my_retrun_dict['financeur_rattachement_nom'] = financeur_rattachement_nom
|
||
|
||
if ("facture_client_rattachement_id" in local_Insc_retval.keys()):
|
||
my_retrun_dict['facture_client_rattachement_id'] = local_Insc_retval['facture_client_rattachement_id']
|
||
else:
|
||
my_retrun_dict['facture_client_rattachement_id'] = ""
|
||
|
||
|
||
v = local_Insc_retval['_id'].generation_time
|
||
my_retrun_dict['created_date'] = str(v.strftime("%d/%m/%Y"))
|
||
|
||
# Recuperation des informations de la formation
|
||
local_formation = MYSY_GV.dbname['myclass'].find_one({'internal_url':str(local_Insc_retval['class_internal_url'])})
|
||
|
||
if local_formation is None or local_formation['_id'] is None:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer les informations de la formation ")
|
||
return False, " Impossible de récupérer les informations de la formation"
|
||
|
||
my_retrun_dict['class_id'] = local_formation['_id']
|
||
my_retrun_dict['class_title'] = local_formation['title']
|
||
|
||
|
||
|
||
"""
|
||
/!\ : 28/12/2023 - update
|
||
S'il y a de la data dans la collection 'apprenant_collection' cela veut dire qu'il y a un dossier apprenant,
|
||
alors on va plutot retourner les info qui sont dans cette collection
|
||
"""
|
||
"""
|
||
if ('apprenant_collection' in local_Insc_retval.keys() and len( local_Insc_retval['apprenant_collection']) > 0):
|
||
print(" ### apprenant_collection = ", str(local_Insc_retval['apprenant_collection'][0]) )
|
||
if ("civilite" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['civilite'] = str(local_Insc_retval['apprenant_collection'][0]['civilite']).lower()
|
||
else:
|
||
my_retrun_dict['civilite'] = ""
|
||
|
||
if ("date_naissance" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['date_naissance'] = str(local_Insc_retval['apprenant_collection'][0]['date_naissance'])
|
||
else:
|
||
my_retrun_dict['date_naissance'] = ""
|
||
|
||
local_nom = ""
|
||
if ("nom" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
local_nom = local_Insc_retval['apprenant_collection'][0]['nom']
|
||
my_retrun_dict['nom'] = local_nom
|
||
|
||
local_prenom = ""
|
||
if ("prenom" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
local_prenom = local_Insc_retval['apprenant_collection'][0]['prenom']
|
||
my_retrun_dict['prenom'] = local_prenom
|
||
|
||
if ("date_naissance" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['date_naissance'] = local_Insc_retval['apprenant_collection'][0]['date_naissance']
|
||
else:
|
||
my_retrun_dict['date_naissance'] = ""
|
||
|
||
if ("adresse" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['adresse'] = local_Insc_retval['apprenant_collection'][0]['adresse']
|
||
else:
|
||
my_retrun_dict['adresse'] = ""
|
||
|
||
if ("code_postal" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['code_postal'] = local_Insc_retval['apprenant_collection'][0]['code_postal']
|
||
else:
|
||
my_retrun_dict['code_postal'] = ""
|
||
|
||
if ("ville" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['ville'] = local_Insc_retval['apprenant_collection'][0]['ville']
|
||
else:
|
||
my_retrun_dict['ville'] = ""
|
||
|
||
if ("pays" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['pays'] = local_Insc_retval['apprenant_collection'][0]['pays']
|
||
else:
|
||
my_retrun_dict['pays'] = ""
|
||
|
||
|
||
local_employeur = ""
|
||
if ("employeur" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
local_employeur = local_Insc_retval['apprenant_collection'][0]['employeur']
|
||
my_retrun_dict['employeur'] = local_employeur
|
||
|
||
local_telephone = ""
|
||
if ("telephone" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
local_telephone = local_Insc_retval['apprenant_collection'][0]['telephone']
|
||
my_retrun_dict['telephone'] = local_telephone
|
||
|
||
local_email = ""
|
||
if ("email" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
local_email = local_Insc_retval['apprenant_collection'][0]['email']
|
||
my_retrun_dict['email'] = local_email
|
||
|
||
local_adresse = ""
|
||
if ("adresse" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
local_adresse = local_Insc_retval['apprenant_collection'][0]['adresse']
|
||
my_retrun_dict['adresse'] = local_adresse
|
||
|
||
if ("ville" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['ville'] = local_Insc_retval['apprenant_collection'][0]['ville']
|
||
else:
|
||
my_retrun_dict['ville'] = ""
|
||
|
||
if ("code_postal" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['code_postal'] = local_Insc_retval['apprenant_collection'][0]['code_postal']
|
||
else:
|
||
my_retrun_dict['code_postal'] = ""
|
||
|
||
if ("pays" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['pays'] = local_Insc_retval['apprenant_collection'][0]['pays']
|
||
else:
|
||
my_retrun_dict['pays'] = ""
|
||
|
||
|
||
|
||
# Tuteurs
|
||
if ("tuteur1_nom" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_nom'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_nom']
|
||
else:
|
||
my_retrun_dict['tuteur1_nom'] = ""
|
||
|
||
if ("tuteur1_prenom" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_prenom'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_prenom']
|
||
else:
|
||
my_retrun_dict['tuteur1_prenom'] = ""
|
||
|
||
if ("tuteur1_email" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_email'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_email']
|
||
else:
|
||
my_retrun_dict['tuteur1_email'] = ""
|
||
|
||
if ("tuteur1_telephone" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_telephone'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_telephone']
|
||
else:
|
||
my_retrun_dict['tuteur1_telephone'] = ""
|
||
|
||
if ("tuteur1_adresse" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_adresse'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_adresse']
|
||
else:
|
||
my_retrun_dict['tuteur1_adresse'] = ""
|
||
|
||
if ("tuteur1_cp" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_cp'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_cp']
|
||
else:
|
||
my_retrun_dict['tuteur1_cp'] = ""
|
||
|
||
if ("tuteur1_ville" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_ville'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_ville']
|
||
else:
|
||
my_retrun_dict['tuteur1_ville'] = ""
|
||
|
||
if ("tuteur1_pays" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_pays'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_pays']
|
||
else:
|
||
my_retrun_dict['tuteur1_pays'] = ""
|
||
|
||
if ("tuteur1_include_com" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur1_include_com'] = local_Insc_retval['apprenant_collection'][0]['tuteur1_include_com']
|
||
else:
|
||
my_retrun_dict['tuteur1_include_com'] = ""
|
||
|
||
if ("tuteur2_nom" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_nom'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_nom']
|
||
else:
|
||
my_retrun_dict['tuteur2_nom'] = ""
|
||
|
||
if ("tuteur2_prenom" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_prenom'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_prenom']
|
||
else:
|
||
my_retrun_dict['tuteur2_prenom'] = ""
|
||
|
||
|
||
if ("tuteur2_email" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_email'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_email']
|
||
else:
|
||
my_retrun_dict['tuteur2_email'] = ""
|
||
|
||
|
||
if ("tuteur2_telephone" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_telephone'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_telephone']
|
||
else:
|
||
my_retrun_dict['tuteur2_telephone'] = ""
|
||
|
||
|
||
if ("tuteur2_adresse" in local_Insc_retval.keys()):
|
||
my_retrun_dict['tuteur2_adresse'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_adresse']
|
||
else:
|
||
my_retrun_dict['tuteur2_adresse'] = ""
|
||
|
||
|
||
if ("tuteur2_cp" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_cp'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_cp']
|
||
else:
|
||
my_retrun_dict['tuteur2_cp'] = ""
|
||
|
||
|
||
if ("tuteur2_ville" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_ville'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_ville']
|
||
else:
|
||
my_retrun_dict['tuteur2_ville'] = ""
|
||
|
||
if ("tuteur2_pays" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_pays'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_pays']
|
||
else:
|
||
my_retrun_dict['tuteur2_pays'] = ""
|
||
|
||
|
||
if ("tuteur2_include_com" in local_Insc_retval['apprenant_collection'][0].keys()):
|
||
my_retrun_dict['tuteur2_include_com'] = local_Insc_retval['apprenant_collection'][0]['tuteur2_include_com']
|
||
else:
|
||
my_retrun_dict['tuteur2_include_com'] = ""
|
||
|
||
"""
|
||
RetObject.append(JSONEncoder().encode(my_retrun_dict))
|
||
|
||
return True, RetObject
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer les informations detaillées"
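
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Le pipeline utilisé ci-dessus joint la collection 'inscription' à 'session_formation'
# via un $lookup corrélé ($expr + $toObjectId) : le champ texte 'session_id' de
# l'inscription est converti en ObjectId pour être comparé au '_id' de la session.
# Voici une version minimale et autonome du même motif. Les paramètres
# 'exemple_session_id' et 'exemple_email' sont des valeurs hypothétiques.
def example_lookup_inscription_session(exemple_session_id, exemple_email):
    pipe = [
        {'$match': {'session_id': str(exemple_session_id), 'email': str(exemple_email)}},
        {'$lookup': {
            'from': 'session_formation',
            'let': {'session_id': '$session_id'},
            'pipeline': [
                {'$match': {'$expr': {'$and': [
                    {'$eq': ['$_id', {'$toObjectId': '$$session_id'}]},
                    {'$eq': ['$valide', '1']}
                ]}}}
            ],
            'as': 'session_data'
        }}
    ]
    # Retourne la liste des inscriptions enrichies de leur session
    return list(MYSY_GV.dbname['inscription'].aggregate(pipe))
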
"""
|
||
Cette fonction retourne la liste des domaines
|
||
de formation avec la version utilisant : "class_domaine"
|
||
"""
|
||
def get_List_domaine_formation_V2(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', ]
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['class_domaine']
|
||
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
myquery['partner_owner_recid'] = str(my_partner['recid'])
|
||
|
||
RetObject = []
|
||
val_tmp = 0
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery):
|
||
user = retval
|
||
user['id'] = val_tmp
|
||
val_tmp = val_tmp + 1
|
||
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste de domaines de formation"
|
||
|
||
|
||
|
||
|
||
|
||
""" Cette fonction retourne la liste des domaines
|
||
de formation
|
||
"""
|
||
def get_List_domaine_formation():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery).distinct('domaine'):
|
||
user = retval
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste de domaines de formation"
|
||
|
||
|
||
"""
|
||
Cette fonction retourne la liste des domaines
|
||
de formation en mode JSON
|
||
"""
|
||
def get_List_domaine_formation_JSON():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery).distinct('domaine'):
|
||
user = {}
|
||
user['domaine'] = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste de domaines de formation"
|
||
|
||
|
||
"""
|
||
Cette fonction retour la liste distinct des metiers
|
||
"""
|
||
def get_List_metier_formation():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery).distinct('metier'):
|
||
user = retval
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste des métiers de formation"
|
||
|
||
"""
|
||
Cette fonction retour la liste distinct des metiers en JSON
|
||
"""
|
||
def get_List_metier_formation_JSON():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery).distinct('metier'):
|
||
user = {}
|
||
user['metier'] = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste des métiers de formation"
|
||
|
||
|
||
"""
|
||
Cette fonction retourne le contenue de la collection :liste_domaine_metier
|
||
"""
|
||
def get_List_all_metier_formation():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find({'valide':'1', 'locked':'0'}):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste des métiers de formation"
|
||
|
||
|
||
"""
|
||
Cette fonction retourne un texte sans les tag html
|
||
"""
|
||
def cleanhtml(raw_html):
|
||
CLEANR = re.compile('<.*?>')
|
||
cleantext = re.sub(CLEANR, '', raw_html).replace(" ", "")
|
||
cleantext = html.unescape(str(cleantext))
|
||
cleantext = clean_emoji(str(cleantext))
|
||
|
||
return cleantext
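
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Principe de la suppression des balises utilisée par cleanhtml : une regex
# non-gourmande '<.*?>' retire chaque tag, puis html.unescape décode les entités.
# Exemple autonome (la chaîne d'entrée est hypothétique) :
def example_strip_tags(raw_html):
    texte = re.sub(re.compile('<.*?>'), '', raw_html)    # retire '<p>', '</b>', etc.
    return html.unescape(texte)                          # '&amp;' -> '&'

# example_strip_tags("<p>Python &amp; Data</p>")  -> "Python & Data"
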
"""
|
||
Cette fonction un texte et ajuste des tag, pour faciliter l'import et formatage des formations
|
||
<s> => souligner </s>
|
||
<g> => Gras <g>
|
||
<t> => tablulation
|
||
<l> => saut de ligne
|
||
<b> => bullet point
|
||
"""
|
||
def format_MySy_Text_Tag(sentence):
|
||
sentence = sentence.replace("<b>", "•")
|
||
sentence = sentence.replace("<l>", "<br/>")
|
||
sentence = sentence.replace("<t>", " ")
|
||
|
||
sentence = sentence.replace("<g>", "<b>")
|
||
sentence = sentence.replace("</g>", "</b>")
|
||
|
||
sentence = sentence.replace("<s>", "<u>")
|
||
sentence = sentence.replace("</s>", "</u>")
|
||
|
||
return sentence
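
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Démonstration de la conversion des tags "maison" MySy vers du HTML affichable,
# telle qu'effectuée par format_MySy_Text_Tag ci-dessus (chaîne d'entrée hypothétique) :
def example_format_MySy_Text_Tag():
    entree = "<g>Objectifs</g><l><b> Maîtriser Python"
    return format_MySy_Text_Tag(entree)  # -> "<b>Objectifs</b><br/>• Maîtriser Python"
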
"""
|
||
Cette fonction permet gerer le desabonnement d'une personne.
|
||
par defaut, l'adresse email recu est mis dans la collection 'mail_blacklist'
|
||
"""
|
||
def Sedesabonner(diction):
|
||
try:
|
||
|
||
field_list_obligatoire = ['email',]
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Impossible de finaliser le desabonnement."
|
||
|
||
|
||
mydata = {}
|
||
mydata['email'] = diction['email']
|
||
mytoday = datetime.today().strftime("%d/%m/%Y")[0:10]
|
||
mydata['date_blacklist'] = mytoday
|
||
|
||
coll_name = MYSY_GV.dbname['mail_blacklist']
|
||
|
||
ret_val = coll_name.find_one_and_update(
|
||
{'email': str(diction['email'])},
|
||
{"$set": mydata},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if (ret_val and ret_val['_id']):
|
||
nb_doc = str(ret_val['_id'])
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - l'adresse email =" + str(diction['email'])+" a été retirée" )
|
||
|
||
return True, " l'adresse email =" + str(diction['email'])+" a été retirée"
|
||
|
||
        else:
            myprint(str(inspect.stack()[0][3]) + " Impossible de désactiver l'adresse email : " + str(diction['email']))
            return False, " Impossible de désactiver l'adresse email : " + str(diction['email'])
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
        return False, " Impossible de désactiver l'adresse email "
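
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Sedesabonner repose sur le motif "upsert" de pymongo : find_one_and_update avec
# upsert=True crée le document s'il n'existe pas, et ReturnDocument.AFTER renvoie le
# document après mise à jour. Version minimale et autonome du même motif ; la collection
# 'mail_blacklist' est celle utilisée ci-dessus, la valeur d'email est hypothétique.
def example_blacklist_email(email):
    doc = MYSY_GV.dbname['mail_blacklist'].find_one_and_update(
        {'email': str(email)},
        {'$set': {'email': str(email), 'date_blacklist': datetime.today().strftime("%d/%m/%Y")}},
        upsert=True,
        return_document=ReturnDocument.AFTER
    )
    return doc is not None and '_id' in doc
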
"""
|
||
Cette fonction verifie qu'une commande existe est qu'elle est bien valide
|
||
"""
|
||
def CheckSalesOrder(diction):
|
||
|
||
try:
|
||
field_list_obligatoire = ['token', 'orderid']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Toutes les informations obligatoires n'ont pas été fornies"
|
||
|
||
my_orderid = ""
|
||
if ("orderid" in diction.keys()):
|
||
if diction['orderid']:
|
||
my_orderid = diction['orderid']
|
||
|
||
|
||
my_token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
my_token = diction['token']
|
||
|
||
# Recuperation du recid du partenaire
|
||
partner_recid = get_parnter_recid_from_token(str(my_token))
|
||
if partner_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
|
||
return False, " Les informations d'identification sont invalides"
|
||
|
||
insertObject = []
|
||
|
||
salesorder = MYSY_GV.dbname['sales_order'].find_one({'client_recid':partner_recid, 'order_id':my_orderid,
|
||
'valide':'1'}, {'order_id':1, 'order_date':1, 'stripe_pi':1,
|
||
})
|
||
|
||
if (salesorder is None or salesorder['order_id'] is None):
|
||
myprint(str(inspect.stack()[0][3]) + " La commande "+str(my_orderid)+" n'existe pas")
|
||
return False, " La commande "+str(my_orderid)+" n'existe pas"
|
||
|
||
insertObject.append(JSONEncoder().encode(salesorder))
|
||
|
||
return True, insertObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier l'existence de la commande "
|
||
|
||
|
||
"""
|
||
Cette fonction verifie s'un utilisateur existe vraiment dans le LMS
|
||
"""
|
||
def is_LMS_user_exist(diction):
|
||
try:
|
||
"""
|
||
Verification de la liste des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = [ 'login', 'partner_recid']
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - : La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, "Impossible d'envoyer les information de connexion à la plateforme LMS"
|
||
|
||
conn = mariadb.connect(
|
||
user=MYSY_GV.MYSY_MARIADB_USER,
|
||
password=MYSY_GV.MYSY_MARIADB_USER_PASS,
|
||
host=MYSY_GV.MYSY_MARIADB_HOST,
|
||
port=MYSY_GV.MYSY_MARIADB_PORT,
|
||
database=MYSY_GV.MYSY_LMS_BDD
|
||
)
|
||
cur = conn.cursor()
|
||
|
||
qry = "SELECT count(*) FROM user WHERE username = '"+str(diction['login'])+"' "
|
||
print(" #### qry = ", qry)
|
||
query_compte_exist_exec = cur.execute("SELECT count(*) FROM user WHERE username = '"+str(diction['login'])+"' ")
|
||
|
||
is_exist = False
|
||
|
||
query_compte_exist_exec = ""
|
||
query_compte_exist_exec_val = "0"
|
||
for query_compte_exist_exec in cur:
|
||
query_compte_exist_exec_val = query_compte_exist_exec[0]
|
||
if (int(tryInt(query_compte_exist_exec_val)) > 0):
|
||
is_exist = True
|
||
conn.commit()
|
||
|
||
|
||
return True, is_exist
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier l'existence du compte utilisateur dans le LMS "
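
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# La requête ci-dessus est construite par concaténation de chaînes, ce qui expose à
# l'injection SQL. Voici, à titre d'illustration uniquement, la même vérification avec
# une requête paramétrée (placeholders '?') du connecteur mariadb. Les paramètres de
# connexion réutilisent les variables MYSY_GV déjà présentes dans ce module.
def example_is_LMS_user_exist_parametre(login):
    conn = mariadb.connect(
        user=MYSY_GV.MYSY_MARIADB_USER,
        password=MYSY_GV.MYSY_MARIADB_USER_PASS,
        host=MYSY_GV.MYSY_MARIADB_HOST,
        port=MYSY_GV.MYSY_MARIADB_PORT,
        database=MYSY_GV.MYSY_LMS_BDD
    )
    try:
        cur = conn.cursor()
        cur.execute("SELECT count(*) FROM user WHERE username = ?", (str(login),))
        (nb,) = cur.fetchone()
        return int(nb) > 0
    finally:
        conn.close()
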
"""
|
||
# Remove all non-alphanumeric characters from string
|
||
Cette fonction supprime tous les caractères non alphanumeric d'une chaine
|
||
"""
|
||
def RemoveAllNonAlphaNumeric(sentence):
|
||
try:
|
||
new_sentence = re.sub(r'[\W_]', '', sentence)
|
||
return True, new_sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de RemoveAllNonAlphaNumeric "
|
||
|
||
|
||
"""
|
||
Cette fonction nettoie un dictionnaire python :
|
||
Pour chaque mot, il va supprimer les espaces en debut et en fin de mot.
|
||
supprime les mots "nan" et "undefined"
|
||
"""
|
||
def strip_dictionary(orig_diction):
|
||
try:
|
||
|
||
for key in orig_diction:
|
||
if type(orig_diction[key]) == str:
|
||
if( orig_diction[key].strip() == "nan" or orig_diction[key].strip() == "undefined"):
|
||
orig_diction[key] = ""
|
||
else :
|
||
orig_diction[key] = orig_diction[key].strip()
|
||
|
||
|
||
|
||
return orig_diction
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
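
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Démonstration du comportement attendu de strip_dictionary (valeurs hypothétiques) :
def example_strip_dictionary():
    brut = {'nom': '  Dupont ', 'ville': 'nan', 'email': 'undefined', 'age': 3}
    return strip_dictionary(brut)  # -> {'nom': 'Dupont', 'ville': '', 'email': '', 'age': 3}
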
"""
|
||
Cette fonction verifie si une chaine de caractère est un email valide
|
||
"""
|
||
def isEmailValide(email):
|
||
try:
|
||
        pat = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,7}\b'
|
||
if re.match(pat, email):
|
||
return True
|
||
|
||
return False
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
"""
|
||
Cette fonction verifie la validé d'un token et retour :
|
||
- Une erreur de connexion ou
|
||
- Si tout est ok, le partner associé à ce token
|
||
|
||
on retourne les données du user connecté au lieu du partenaire,
|
||
|
||
"""
|
||
def Check_Connexion_And_Return_Partner_Data(diction):
|
||
try:
|
||
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
# Verifier la validité du token
|
||
retval = check_partner_token_validity("", token)
|
||
|
||
if retval is False:
|
||
return "Err_Connexion", " La session de connexion n'est pas valide"
|
||
|
||
"""
|
||
partner_recid = get_parnter_recid_from_token(token)
|
||
if (partner_recid is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - partner_recid est KO. Les données de connexion sont incorrectes ")
|
||
return False, " Vous n'etes pas autorisé à utiliser cette API "
|
||
|
||
print(" ### le partner_recid = ", partner_recid)
|
||
# Recuperation des données du partenaire
|
||
local_status, my_partner = get_partner_data_from_recid(partner_recid)
|
||
if (local_status is False):
|
||
myprint(str(inspect.stack()[0][3]) + " - impossible de récupérer les données du partenaire")
|
||
return False, str(inspect.stack()[0][3]) + " - impossible de récupérer les données du partenaire. "
|
||
"""
|
||
|
||
"""
|
||
/!\ Update du 26/11/2023 :
|
||
On a plutot besoin de recuperer les données du user connecté au lieu du partenaire,
|
||
|
||
"""
|
||
local_status, my_connected_user = get_connected_data_from_token(token)
|
||
if (local_status is False):
|
||
myprint(str(inspect.stack()[0][3]) + " - impossible de récupérer les données du partenaire")
|
||
return False, str(inspect.stack()[0][3]) + " - impossible de récupérer les données du partenaire. "
|
||
|
||
return True, my_connected_user
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier la connexion"
|
||
|
||
"""
|
||
Cette fonction retourne les données du partenair rattaché au user connecté
|
||
"""
|
||
def Get_Connected_User_Partner_Data_From_RecID(user_recid):
|
||
try:
|
||
|
||
mydata = MYSY_GV.dbname['partnair_account'].find_one({'recid':str(user_recid), 'is_partner_admin_account':'1', 'active':'1', 'locked':'0'})
|
||
if( mydata is None):
|
||
return False, " Aucun compte partenaire actif pour cet utilisateur"
|
||
|
||
return True,mydata
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer les données du partenaire"
|
||
|
||
|
||
"""
|
||
Cette fonction fait un escape et clean les string avant injection SQL
|
||
"""
|
||
def Clean_For_SQL(sentence):
|
||
try:
|
||
h = html.parser
|
||
sentence = h.unescape(sentence)
|
||
sentence = sentence.replace("'", "\\'").replace('"', ' ').replace("’", "\’")
|
||
|
||
return sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return ""
|
||
|
||
|
||
"""
|
||
Pour controler les acces utilisateur par module,
|
||
Cette fonction prends l'
|
||
- user_id (id de la collection ressource_humaine
|
||
- module_name
|
||
- action (read ou write)
|
||
|
||
et retourne True ou False selon que le user a droit de faire l'action.
|
||
|
||
La collection de travail est : user_access_right
|
||
|
||
"""
|
||
def Is_User_Has_Right_To_Action(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', 'module_name', 'action']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', 'module_name', 'action']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
token = ""
|
||
if ("action" in diction.keys()):
|
||
if ( str(diction['action']).lower().strip() not in ['read', 'write'] ):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - L'action '" + str(diction['action']).lower().strip() + "' n'est pas valide ")
|
||
return False, " Droit d'accès incorrect",
|
||
|
||
qry_access_right = {}
|
||
|
||
if( str(diction['action']).lower().strip() == "read" ):
|
||
qry_access_right = {'partner_owner_recid':str(my_partner['recid']), 'module':str(diction['module_name']),
|
||
'user_id':str(my_partner['ressource_humaine_id']), 'read':True}
|
||
|
||
elif (str(diction['action']).lower().strip() == "write"):
|
||
qry_access_right = {'partner_owner_recid': str(my_partner['recid']), 'module': str(diction['module_name']),
|
||
'user_id': str(my_partner['ressource_humaine_id']), 'write': True}
|
||
|
||
print(" ##### qry_access_right = ", qry_access_right)
|
||
is_acces_right_ok = MYSY_GV.dbname['user_access_right'].count_documents(qry_access_right)
|
||
|
||
if( is_acces_right_ok != 1):
|
||
return False, " Droits d'acces insuffisants"
|
||
|
||
|
||
return True, " OK"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier les droits d'acces de l'utilisateur "
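
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Motif d'appel typique avant une opération d'écriture : le nom de module
# 'session_formation' et le token sont hypothétiques, seule la forme de l'appel
# et du retour est illustrée.
def example_check_write_access(token):
    ok, message = Is_User_Has_Right_To_Action({'token': str(token),
                                               'module_name': 'session_formation',
                                               'action': 'write'})
    if ok is not True:
        # Accès refusé : on remonte le message d'erreur tel quel
        return False, message
    return True, "Accès en écriture autorisé"
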
def daterange(start_date, end_date):
|
||
"""
|
||
07/04/2024 :
|
||
# Pour la fonction 'range' de base fait sur while (x < y).
|
||
# Ici j'ai besoin de faire du while(x <= y).
|
||
# alors on va faire end_date = end_date + 1 days
|
||
"""
|
||
new_end_date = end_date + timedelta(days=1)
|
||
for n in range(int((new_end_date - start_date).days)):
|
||
yield start_date + timedelta(n)
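
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# daterange est inclusif sur la date de fin (d'où le + 1 jour ci-dessus).
# Démonstration autonome avec des dates hypothétiques :
def example_daterange():
    debut = datetime.strptime("01/09/2024", "%d/%m/%Y")
    fin = datetime.strptime("03/09/2024", "%d/%m/%Y")
    return [d.strftime("%d/%m/%Y") for d in daterange(debut, fin)]
    # -> ['01/09/2024', '02/09/2024', '03/09/2024']
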
"""
|
||
Cette fonction retourne un document paramétré dans le collection 'courrier_template'
|
||
pour un partenaire donné.
|
||
Si aucun, alors le système renvoie le document par defaut de MySy
|
||
"""
|
||
def Get_Personnalized_Document_From_courrier_template(diction):
|
||
try:
|
||
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['document_ref_intere', 'document_type', 'partner_owner_recid']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
document_ref_intere = diction["document_ref_intere"]
|
||
document_type = diction["document_type"]
|
||
partner_owner_recid = diction["partner_owner_recid"]
|
||
|
||
partner_document_qry = {'partner_owner_recid': str(partner_owner_recid),
|
||
'valide': '1', 'locked': '0', 'ref_interne': str(document_ref_intere),
|
||
'type_doc': str(document_type)}
|
||
|
||
partner_document_data = MYSY_GV.dbname['courrier_template'].find_one(partner_document_qry)
|
||
|
||
if (partner_document_data is None):
|
||
# Il n'existe pas de personnalisation document pour ce partenaire, on va aller récupérer un eventuel modele par default de MySy
|
||
partner_document_qry_mysy = {'partner_owner_recid': 'default',
|
||
'valide': '1', 'locked': '0', 'ref_interne': str(document_ref_intere),
|
||
'type_doc': str(document_type)}
|
||
|
||
partner_document_data = MYSY_GV.dbname['courrier_template'].find_one(partner_document_qry_mysy)
|
||
|
||
if (partner_document_data is None):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " Aucun document " + str(document_ref_intere) + " de Type " + str(
|
||
document_type) + " n'est paramétré dans le système ")
|
||
return False, " Aucun document " + str(document_ref_intere) + " de Type " + str(
|
||
document_type) + " n'est paramétré dans le système "
|
||
|
||
if ("contenu_doc" not in partner_document_data or len(
|
||
str(partner_document_data['contenu_doc'])) <= 0):
|
||
myprint(str(inspect.stack()[0][
|
||
3]) + " Le paramétrage du document " + str(
|
||
document_ref_intere) + " de Type " + str(document_type) + " est invalide ")
|
||
return False, " Le paramétrage du document " + str(document_ref_intere) + " de Type " + str(
|
||
document_type) + " est invalide "
|
||
|
||
|
||
return True, partner_document_data
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récuperer le document personnalisé "
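
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Le motif ci-dessus est un "fallback" : on cherche d'abord le modèle propre au
# partenaire, et à défaut le modèle 'default' de MySy. Version condensée du même
# motif (les valeurs de ref_interne / type_doc sont hypothétiques) :
def example_get_template_with_fallback(partner_owner_recid, ref_interne, type_doc):
    base = {'valide': '1', 'locked': '0', 'ref_interne': str(ref_interne), 'type_doc': str(type_doc)}
    doc = MYSY_GV.dbname['courrier_template'].find_one(dict(base, partner_owner_recid=str(partner_owner_recid)))
    if doc is None:
        doc = MYSY_GV.dbname['courrier_template'].find_one(dict(base, partner_owner_recid='default'))
    return doc
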
"""
|
||
Get current month date : Cette fontion retoune
|
||
start_date = 01/m/y
|
||
end_date = date_jour (jj/mm/aaaa)
|
||
"""
|
||
def Get_Current_Month_Start_End_Date():
|
||
try:
|
||
|
||
todays_date = date.today()
|
||
start_date = "01/"+str(todays_date.month)+"/"+str(todays_date.year)
|
||
end_date = str(date.today().strftime("%d/%m/%Y"))
|
||
|
||
# Verifier que les date sont bien valide
|
||
local_status = CheckisDate(start_date)
|
||
if (local_status is False):
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " la date "+str(start_date)+" n'est pas au format jj/mm/aaaa.")
|
||
return False, " la date "+str(start_date)+" n'est pas au format jj/mm/aaaa.", False
|
||
|
||
local_status = CheckisDate(end_date)
|
||
if (local_status is False):
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " la date " + str(end_date) + " n'est pas au format jj/mm/aaaa.")
|
||
return False, " la date " + str(end_date) + " n'est pas au format jj/mm/aaaa.", False
|
||
|
||
|
||
return True, start_date, end_date
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récuperer les dates début et fin du mois en cours ", False
|
||
|
||
|
||
"""
|
||
Get previous month date : Cette fontion retourne le debut et la fin du mois precedent
|
||
|
||
"""
|
||
def Get_Previous_Month_Start_End_Date():
|
||
try:
|
||
|
||
|
||
this_first = date.today().replace(day=1)
|
||
prev_last = this_first - timedelta(days=1)
|
||
prev_first = prev_last.replace(day=1)
|
||
|
||
start_date = str(prev_first.strftime("%d/%m/%Y"))
|
||
end_date = str(prev_last.strftime("%d/%m/%Y"))
|
||
|
||
|
||
# Verifier que les date sont bien valide
|
||
local_status = CheckisDate(start_date)
|
||
if (local_status is False):
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " la date "+str(start_date)+" n'est pas au format jj/mm/aaaa.")
|
||
return False, " la date "+str(start_date)+" n'est pas au format jj/mm/aaaa.", False
|
||
|
||
local_status = CheckisDate(end_date)
|
||
if (local_status is False):
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " la date " + str(end_date) + " n'est pas au format jj/mm/aaaa.")
|
||
return False, " la date " + str(end_date) + " n'est pas au format jj/mm/aaaa.", False
|
||
|
||
|
||
return True, start_date, end_date
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récuperer les dates début et fin du mois en cours ", False
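
# --- Exemple illustratif (sketch, non utilisé par l'application) ---
# Le calcul ci-dessus repose sur une astuce simple : le 1er du mois courant moins un jour
# donne le dernier jour du mois précédent. Démonstration autonome ('jour' est une date hypothétique) :
def example_previous_month_bounds(jour):
    premier_du_mois = jour.replace(day=1)
    dernier_mois_prec = premier_du_mois - timedelta(days=1)
    premier_mois_prec = dernier_mois_prec.replace(day=1)
    return premier_mois_prec.strftime("%d/%m/%Y"), dernier_mois_prec.strftime("%d/%m/%Y")
    # ex : date(2024, 3, 15) -> ('01/02/2024', '29/02/2024')
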
"""
|
||
Clean session date : cette fontion transforme les formats de date en jj/mm/aaaa, en supprimant les h:m:s
|
||
"""
|
||
def clean_session_dates():
|
||
try:
|
||
|
||
count = MYSY_GV.dbname['session_formation'].count_documents({})
|
||
print(" COUT = ", count)
|
||
|
||
for val in MYSY_GV.dbname['session_formation'].find({}):
|
||
print(" Traitement de : ", str(val))
|
||
my_data = {}
|
||
my_data['date_debut'] = str(val['date_debut'])[0:10]
|
||
my_data['date_fin'] = str(val['date_fin'])[0:10]
|
||
my_data['date_debut_inscription'] = str(val['date_debut_inscription'])[0:10]
|
||
my_data['date_fin_inscription'] = str(val['date_fin_inscription'])[0:10]
|
||
|
||
ret_val = MYSY_GV.dbname['session_formation'].find_one_and_update(
|
||
{'_id': ObjectId(str(val['_id'])) },
|
||
{"$set": my_data }, upsert=False,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
return True, " OKK "
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " impossible de faire clean_date"
|
||
|
||
|
||
"""
|
||
Cette fonction retourne les champs a utiliser pour les template de document/
|
||
|
||
/!\ : Les tables en paramettre contiennent des valeurs du type ObjectId("idddd")
|
||
Cette fonction retour un json de ce type :
|
||
|
||
convention_dictionnary_data =
|
||
{
|
||
{'connected_user_data':{'email':'toto@.fr', 'contact_nom':'balde'}}
|
||
{'company_data':{'num_nda':'1234', 'website':'www.ssds.fr', 'logo':'mylogo', 'cachet':'my_cache'}
|
||
{'class_data':{....}}
|
||
{'list_session_data':{....}}
|
||
{'list_stagiaire_data':[{...}]}
|
||
{'list_apprenant_data':[{...}]}
|
||
{'list_client_data':{'raison_sociale':'xxx', 'nom':'yyy', list_contact_communication[{'civilite':'aa', 'nom':'bbb', ....'telephone_mobile':'01222'}, {'civilite':'aa', 'nom':'bbb', ....'telephone_mobile':'01222'} .... ]}
|
||
{'systeme':{'date_jour':'11/02/2023'}}
|
||
|
||
}
|
||
Elle prend en argument : token, stagiaire_id, session_id, class_id
|
||
|
||
"""
|
||
def Get_Dictionnary_data_For_Template(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', 'list_stagiaire_id', 'list_session_id', 'list_class_id', 'list_client_id',
|
||
'list_apprenant_id', 'list_sequence_session_id', 'list_jury_soutenance_id']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', 'list_stagiaire_id', 'list_session_id', 'list_class_id', 'list_client_id']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
dictionnary_data = {}
|
||
connected_user_data = {}
|
||
|
||
"""
|
||
Traitement des données du : connected_user_data
|
||
"""
|
||
connected_user_liste_champ = ['email', 'contact_nom', 'contact_prenom', 'contact_tel', 'contact_mail']
|
||
for champ in connected_user_liste_champ:
|
||
if (champ in my_partner):
|
||
new_champ_name = "connected_user_" + str(champ)
|
||
new_field = {new_champ_name: str(my_partner[champ])}
|
||
connected_user_data.update(new_field)
|
||
else:
|
||
new_champ_name = "connected_user_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
connected_user_data.update(new_field)
|
||
|
||
dictionnary_data['connected_user_data'] = connected_user_data
|
||
|
||
"""
|
||
Recuperartion des champs de la société à laquelle est rattaché l'utilisateur
|
||
"""
|
||
# Recuperation des données du partenaire associé à l'utilisateur connecté
|
||
local_status, local_retval = Get_Connected_User_Partner_Data_From_RecID(my_partner['recid'])
|
||
if (local_status is False):
|
||
return local_status, local_retval
|
||
|
||
local_company_data = local_retval
|
||
company_data = {}
|
||
|
||
company_liste_champ = ['nom', 'email', 'telephone', 'num_nda', 'website', 'adr_street', 'adr_zip', 'adr_city',
|
||
'adr_country', 'siret', 'contact_nom', 'contact_prenom', 'contact_tel', 'contact_mail']
|
||
for champ in company_liste_champ:
|
||
if (champ in local_company_data):
|
||
new_champ_name = "societe_" + str(champ)
|
||
new_field = {new_champ_name: str(local_company_data[champ])}
|
||
company_data.update(new_field)
|
||
else:
|
||
new_champ_name = "societe_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
company_data.update(new_field)
|
||
|
||
# Recuperation du logo et du cachet de la société si il y en a
|
||
local_part_status, local_part_imgs = partners.getRecodedParnterImage_from_front({'token':str(diction['token'])})
|
||
if(local_part_status is False ):
|
||
myprint(str(inspect.stack()[0][3]) + " WARNING : Impossible de récuperer le logo et le cachet du partenaire ")
|
||
new_field_logo = {'societe_logo':''}
|
||
new_field_cachet = {'societe_cachet': ''}
|
||
company_data.update(new_field_logo)
|
||
company_data.update(new_field_cachet)
|
||
else:
|
||
|
||
local_JSON = ast.literal_eval(local_part_imgs[0])
|
||
|
||
new_field_logo = {'societe_logo': "data:image/png;base64,"+local_JSON['logo_img']}
|
||
new_field_cachet = {'societe_cachet': "data:image/png;base64,"+local_JSON['cachet_img']}
|
||
company_data.update(new_field_logo)
|
||
company_data.update(new_field_cachet)
|
||
|
||
|
||
dictionnary_data['company_data'] = company_data
|
||
|
||
"""
|
||
Recuperartion des champs des sessions
|
||
"""
|
||
session_liste_champ = ['code_session', 'class_internal_url', 'date_debut', 'session_etape', 'date_fin',
|
||
'distantiel', 'presentiel', 'session_ondemande', 'nb_participant',
|
||
'prix_session', 'titre', 'location_type', 'is_bpf', 'formateur_id', 'adresse',
|
||
'code_postal', 'ville', 'pays', 'date_debut_inscription', 'date_fin_inscription',
|
||
'formateur_nom_prenom', 'session_formation_titre', 'session_formation_objectif', 'session_formation_description']
|
||
list_session_data = []
|
||
tab_session_id = diction['list_session_id']
|
||
|
||
|
||
for session_data in MYSY_GV.dbname['session_formation'].find( {'_id': {'$in': tab_session_id, } ,
|
||
'valide': '1',
|
||
'partner_owner_recid': str(my_partner['recid'])
|
||
}):
|
||
|
||
# Recuperation des données de la formation associée
|
||
session_formation_titre = ""
|
||
session_formation_objectif = ""
|
||
session_formation_description = ""
|
||
if ("class_internal_url" in session_data.keys() and session_data['class_internal_url']):
|
||
class_data = MYSY_GV.dbname['myclass'].find_one(
|
||
{'internal_url': str(session_data['class_internal_url']),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'partner_owner_recid': str(my_partner['recid'])
|
||
}, {'objectif':1, 'title':1, 'description':1})
|
||
|
||
|
||
if (class_data and "title" in class_data.keys() ) :
|
||
session_formation_titre = str(class_data['title'])
|
||
|
||
if (class_data and "objectif" in class_data.keys() ) :
|
||
session_formation_objectif = str(class_data['objectif'])
|
||
|
||
if (class_data and "description" in class_data.keys() ) :
|
||
session_formation_description = str(class_data['description'])
|
||
|
||
session_data['session_formation_titre'] = session_formation_titre
|
||
session_data['session_formation_objectif'] = session_formation_objectif
|
||
session_data['session_formation_description'] = session_formation_description
|
||
|
||
|
||
            # Recuperation des données du formateur
            formateur_nom_prenom = ""
            # S'il y a un code formateur_id, alors on va recuperer les nom et prenom du formateur
|
||
if ("formateur_id" in session_data.keys() and session_data['formateur_id']):
|
||
|
||
formateur_data = MYSY_GV.dbname['ressource_humaine'].find_one(
|
||
{'_id': ObjectId(str(session_data['formateur_id'])),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'partner_recid': str(my_partner['recid'])
|
||
})
|
||
|
||
if (formateur_data and "nom" in formateur_data.keys() and "prenom" in formateur_data.keys()):
|
||
formateur_nom_prenom = str(formateur_data['nom']) + " " + str(formateur_data['prenom'])
|
||
|
||
session_data['formateur_nom_prenom'] = formateur_nom_prenom
|
||
|
||
new_session_data = {}
|
||
for champ in session_liste_champ:
|
||
if (champ in session_data):
|
||
new_champ_name = "session_" + str(champ)
|
||
new_field = {new_champ_name: str(session_data[champ])}
|
||
new_session_data.update(new_field)
|
||
else:
|
||
new_champ_name = "session_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
new_session_data.update(new_field)
|
||
|
||
list_session_data.append(new_session_data)
|
||
|
||
dictionnary_data['list_session_data'] = list_session_data
|
||
|
||
"""
|
||
Recuperartion des champs des apprenants
|
||
"""
|
||
list_apprenant_data = []
|
||
|
||
if( "list_apprenant_id" in diction.keys()):
|
||
tab_apprenant_id = diction['list_apprenant_id']
|
||
apprenant_liste_champ = ['prenom', 'nom', 'email', 'civilite', 'telephone', 'employeur', 'adresse', 'code_postal', 'ville', 'pays', 'tuteur1_nom', 'tuteur1_prenom', 'tuteur1_email', 'tuteur1_telephone',
|
||
'tuteur2_nom', 'tuteur2_prenom', 'tuteur2_email', 'tuteur2_telephone', 'opco', 'tuteur1_adresse', 'tuteur1_cp', 'tuteur1_ville', 'tuteur1_pays', 'tuteur1_include_com', 'tuteur2_adresse',
|
||
'tuteur2_cp', 'tuteur2_ville', 'tuteur2_pays', 'tuteur2_include_com', 'client_rattachement_id', 'photo_profil']
|
||
|
||
qry_apprenant = {'_id': {'$in': tab_apprenant_id, },
|
||
'valide': '1',
|
||
'locked':'0',
|
||
'partner_owner_recid': str(my_partner['recid'])
|
||
}
|
||
|
||
#print(" #### qry_apprenant = ", qry_apprenant)
|
||
|
||
for apprenant_data in MYSY_GV.dbname['apprenant'].find({'_id': {'$in': tab_apprenant_id, },
|
||
'valide': '1',
|
||
'locked':'0',
|
||
'partner_owner_recid': str(my_partner['recid'])
|
||
}):
|
||
# Récuperation de l'eventuelle photo de l'apprenant
|
||
apprenant_img = ""
|
||
qry_img = {'related_collection':'apprenant',
|
||
'related_collection_recid':str(apprenant_data['_id']),
|
||
'valide':'1',
|
||
'locked':'0'}
|
||
|
||
#print(" ### qry_img = ", qry_img)
|
||
apprenant_photo_data = MYSY_GV.dbname['mysy_images'].find_one({'related_collection':'apprenant',
|
||
'related_collection_recid':str(apprenant_data['_id']),
|
||
'valide':'1',
|
||
'locked':'0'}, {'img':1})
|
||
|
||
if( apprenant_photo_data and "img" in apprenant_photo_data.keys() and apprenant_photo_data['img']):
|
||
apprenant_img = "data:image/png;base64,"+apprenant_photo_data['img'].decode()
|
||
|
||
apprenant_data['photo_profil'] = apprenant_img
|
||
|
||
new_apprenant_data = {}
|
||
for champ in apprenant_liste_champ:
|
||
#print(" ## traitement du champ : ", champ)
|
||
if (champ in apprenant_data):
|
||
                        # Si on a affaire au champ 'client_rattachement_id' avec une valeur, on va aller chercher le nom du client
                        if( champ == "client_rattachement_id"):
                            client_rattachement_nom = ""
|
||
if( "client_rattachement_id" in apprenant_data.keys() and apprenant_data['client_rattachement_id'] ) :
|
||
client_rattachement_id_data = MYSY_GV.dbname['partner_client'].find_one({'_id':ObjectId(str(apprenant_data['client_rattachement_id'])),
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'partner_recid':str(my_partner['recid'])},
|
||
{'nom':1})
|
||
|
||
if(client_rattachement_id_data and "nom" in client_rattachement_id_data.keys() ):
|
||
client_rattachement_nom = str(client_rattachement_id_data["nom"])
|
||
|
||
new_champ_name = "apprenant_client_rattachement_nom"
|
||
new_field = {new_champ_name: str(client_rattachement_nom)}
|
||
new_apprenant_data.update(new_field)
|
||
|
||
else :
|
||
new_champ_name = "apprenant_" + str(champ)
|
||
new_field = {new_champ_name: str(apprenant_data[champ])}
|
||
new_apprenant_data.update(new_field)
|
||
|
||
# ----
|
||
if (champ == "financeur_rattachement_id"):
|
||
financeur_rattachement_nom = "";
|
||
if ("financeur_rattachement_id" in apprenant_data.keys() and apprenant_data[
|
||
'financeur_rattachement_id']):
|
||
client_rattachement_id_data = MYSY_GV.dbname['partner_client'].find_one(
|
||
{'_id': ObjectId(str(apprenant_data['financeur_rattachement_id'])),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'partner_recid': str(my_partner['recid'])},
|
||
{'nom': 1})
|
||
|
||
if (client_rattachement_id_data and "nom" in client_rattachement_id_data.keys()):
|
||
financeur_rattachement_nom = str(client_rattachement_id_data["nom"])
|
||
|
||
new_champ_name = "apprenant_financeur_rattachement_nom"
|
||
new_field = {new_champ_name: str(financeur_rattachement_nom)}
|
||
new_apprenant_data.update(new_field)
|
||
|
||
else:
|
||
new_champ_name = "apprenant_" + str(champ)
|
||
new_field = {new_champ_name: str(apprenant_data[champ])}
|
||
new_apprenant_data.update(new_field)
|
||
|
||
else:
|
||
new_champ_name = "apprenant_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
new_apprenant_data.update(new_field)
|
||
|
||
|
||
list_apprenant_data.append(new_apprenant_data)
|
||
#print(" ### APPEND new_apprenant_data = ", new_apprenant_data)
|
||
|
||
|
||
dictionnary_data['list_apprenant_data'] = list_apprenant_data
|
||
|
||
|
||
|
||
"""
|
||
Recuperartion des champs des stagiaires
|
||
"""
|
||
list_stagiaire_data = []
|
||
tab_stagiaire_id = diction['list_stagiaire_id']
|
||
|
||
|
||
stagiaire_liste_champ = ['email', 'nom', 'prenom', 'telephone', 'client_rattachement_id', 'employeur',
|
||
'modefinancement', 'tuteur1_adresse', 'tuteur1_cp', 'tuteur1_email', 'tuteur1_nom',
|
||
'tuteur1_pays', 'tuteur1_telephone', 'tuteur1_ville', 'tuteur1_prenom',
|
||
'tuteur2_adresse', 'tuteur2_cp', 'tuteur2_email', 'tuteur2_nom', 'tuteur2_pays',
|
||
'tuteur2_prenom','civilite',
|
||
'tuteur2_telephone', 'tuteur2_ville','adresse', 'code_postal', 'ville', 'pays']
|
||
|
||
|
||
for stagiaire_data in MYSY_GV.dbname['inscription'].find({'_id': {'$in': tab_stagiaire_id, },
|
||
'status': '1',
|
||
'partner_owner_recid': str(my_partner['recid'])
|
||
}):
|
||
|
||
new_stagiaire_data = {}
|
||
for champ in stagiaire_liste_champ:
|
||
if (champ in stagiaire_data):
|
||
new_champ_name = "stagiaire_" + str(champ)
|
||
new_field = {new_champ_name: str(stagiaire_data[champ])}
|
||
new_stagiaire_data.update(new_field)
|
||
else:
|
||
new_champ_name = "stagiaire_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
new_stagiaire_data.update(new_field)
|
||
|
||
list_stagiaire_data.append(new_stagiaire_data)
|
||
|
||
dictionnary_data['list_stagiaire_data'] = list_stagiaire_data
|
||
|
||
|
||
"""
|
||
Recuperartion des champs des formations
|
||
"""
|
||
list_myclass_data = []
|
||
tab_myclass_id = diction['list_class_id']
|
||
|
||
myclass_liste_champ = ['certif', 'cpf', 'metier', 'objectif', 'title', 'duration', 'duration_unit']
|
||
|
||
for myclass_data in MYSY_GV.dbname['myclass'].find({'_id': {'$in': tab_myclass_id, },
|
||
'valide': '1',
|
||
'partner_owner_recid': str(my_partner['recid'])
|
||
}):
|
||
|
||
new_myclass_data = {}
|
||
for champ in myclass_liste_champ:
|
||
if (champ in myclass_data):
|
||
new_champ_name = "formation_" + str(champ)
|
||
new_field = {new_champ_name: str(myclass_data[champ])}
|
||
new_myclass_data.update(new_field)
|
||
else:
|
||
new_champ_name = "formation_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
new_myclass_data.update(new_field)
|
||
|
||
list_myclass_data.append(new_myclass_data)
|
||
|
||
|
||
dictionnary_data['list_class_data'] = list_myclass_data
|
||
|
||
"""
|
||
Recuperartion des champs des clients
|
||
"""
|
||
list_client_data = []
|
||
tab_client_id = diction['list_client_id']
|
||
|
||
        client_liste_champ = ['raison_sociale', 'nom', 'email', 'adr_adresse', 'adr_code_postal', 'adr_ville', 'adr_pays',
                              'invoice_email', 'invoice_nom', 'invoice_siret', 'invoice_adresse', 'invoice_ville', 'invoice_code_postal',
                              'invoice_pays']
|
||
|
||
|
||
|
||
for client_data in MYSY_GV.dbname['partner_client'].find({'_id': {'$in': tab_client_id, },
|
||
'valide': '1',
|
||
'partner_recid': str(my_partner['recid'])
|
||
}):
|
||
|
||
new_client_data = {}
|
||
for champ in client_liste_champ:
|
||
if (champ in client_data):
|
||
new_champ_name = "client_" + str(champ)
|
||
new_field = {new_champ_name: str(client_data[champ])}
|
||
new_client_data.update(new_field)
|
||
else:
|
||
new_champ_name = "client_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
new_client_data.update(new_field)
|
||
|
||
|
||
# Recuperation des contacts de communication du client
|
||
local_diction = {}
|
||
local_diction['token'] = diction['token']
|
||
local_diction['_id'] = str(client_data['_id'])
|
||
|
||
local_status, partner_client_contact_communication = partner_client.Get_Partner_Client_Communication_Contact(
|
||
local_diction)
|
||
|
||
list_contact_communication = []
|
||
if(local_status is True):
|
||
#print(" partner_client_contact_communication = ", partner_client_contact_communication)
|
||
for contact_communication_str in partner_client_contact_communication :
|
||
#print(" contact_communication_str = ", contact_communication_str)
|
||
|
||
contact_communication = ast.literal_eval(contact_communication_str)
|
||
#print(" my_json = ", contact_communication)
|
||
|
||
contact_communication_node = {}
|
||
contact_communication_node['civilite'] = str(contact_communication['civilite']).lower()
|
||
contact_communication_node['nom'] = contact_communication['nom']
|
||
contact_communication_node['prenom'] = contact_communication['prenom']
|
||
contact_communication_node['fonction'] = contact_communication['fonction']
|
||
contact_communication_node['adr_adresse'] = contact_communication['adr_adresse']
|
||
contact_communication_node['adr_code_postal'] = contact_communication['adr_code_postal']
|
||
contact_communication_node['adr_ville'] = contact_communication['adr_ville']
|
||
contact_communication_node['adr_pays'] = contact_communication['adr_pays']
|
||
contact_communication_node['email'] = contact_communication['email']
|
||
contact_communication_node['telephone'] = contact_communication['telephone']
|
||
contact_communication_node['telephone_mobile'] = contact_communication['telephone_mobile']
|
||
|
||
list_contact_communication.append(contact_communication_node)
|
||
|
||
new_client_data["list_contact_communication"] = list_contact_communication
|
||
|
||
|
||
|
||
list_client_data.append(new_client_data)
|
||
|
||
|
||
dictionnary_data['list_client_data'] = list_client_data
|
||
|
||
"""
|
||
Recuperartion des champs des sequences des sessions
|
||
"""
|
||
sequence_session_liste_champ = [ 'sequence_title', 'sequence_start', 'sequence_end', 'agenda', 'objectif', 'commentaire' ]
|
||
list_sequence_session_data = []
|
||
tab_sequence_session = []
|
||
if( "list_sequence_session_id" in diction.keys() ):
|
||
tab_sequence_session = diction['list_sequence_session_id']
|
||
|
||
qery_match = {'_id': {'$in': tab_sequence_session, },'valide': '1', 'partner_owner_recid': str(my_partner['recid']) }
|
||
pipe_qry = ([
|
||
{"$addFields": {
|
||
"mysy_sequence_start": {
|
||
'$dateFromString': {
|
||
'dateString': '$sequence_start',
|
||
'format': '%d/%m/%Y %H:%M'
|
||
}
|
||
}
|
||
}
|
||
},
|
||
|
||
{'$match': qery_match},
|
||
{
|
||
'$sort': {'mysy_sequence_start': 1}
|
||
},
|
||
|
||
])
|
||
|
||
#print( " ### pipe_qry select sequence = ", pipe_qry)
|
||
for sequence_session_data in MYSY_GV.dbname['session_formation_sequence'].aggregate(pipe_qry):
|
||
|
||
|
||
new_session_data = {}
|
||
for champ in sequence_session_liste_champ:
|
||
if (champ in sequence_session_data.keys() ):
|
||
new_champ_name = "sequence_session_" + str(champ)
|
||
new_field = {new_champ_name: str(sequence_session_data[champ])}
|
||
new_session_data.update(new_field)
|
||
else:
|
||
new_champ_name = "sequence_session_" + str(champ)
|
||
new_field = {new_champ_name: ""}
|
||
new_session_data.update(new_field)
|
||
|
||
list_sequence_session_data.append(new_session_data)
|
||
|
||
dictionnary_data['list_sequence_session_data'] = list_sequence_session_data
|
||
|
||
|
||
|
||
"""
|
||
Recuperartion des champs de la soutenance de jury
|
||
"""
|
||
list_jury_soutenance_data = []
|
||
if( "list_jury_soutenance_id" in diction.keys() and diction['list_jury_soutenance_id'] ):
|
||
tab_jury_soutenance_id = diction['list_jury_soutenance_id']
|
||
|
||
|
||
|
||
query_soutenance_match = {'_id': {'$in': tab_jury_soutenance_id, }, 'valide': '1', 'locked':'0',
|
||
'partner_owner_recid': str(my_partner['recid'])}
|
||
|
||
query_soutenance = [{'$match': query_soutenance_match},
|
||
{'$lookup': {
|
||
'from': 'jury',
|
||
"let": {'jury_id': "$jury_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||
'pipeline': [
|
||
{'$match':
|
||
{'$expr':
|
||
{'$and':
|
||
[
|
||
|
||
{'$eq': ["$_id", {'$convert': {
|
||
'input': "$$jury_id",
|
||
'to': "objectId",
|
||
'onError': {'error': 'true'},
|
||
'onNull': {'isnull': 'true'}
|
||
}}]},
|
||
|
||
{'$eq': ["$valide", "1"]},
|
||
{'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
|
||
|
||
]
|
||
}
|
||
}
|
||
},
|
||
|
||
],
|
||
'as': 'jury_collection'
|
||
}
|
||
},
|
||
{'$lookup': {
|
||
'from': 'agenda',
|
||
"let": {'agenda_id': "$agenda_id", 'partner_owner_recid': '$partner_owner_recid'},
|
||
'pipeline': [
|
||
{'$match':
|
||
{'$expr':
|
||
{'$and':
|
||
[
|
||
|
||
{'$eq': ["$_id", {'$convert': {
|
||
'input': "$$agenda_id",
|
||
'to': "objectId",
|
||
'onError': {'error': 'true'},
|
||
'onNull': {'isnull': 'true'}
|
||
}}]},
|
||
|
||
{'$eq': ["$valide", "1"]},
|
||
{'$eq': ["$partner_owner_recid", '$$partner_owner_recid']}
|
||
|
||
]
|
||
}
|
||
}
|
||
},
|
||
|
||
],
|
||
'as': 'agenda_collection'
|
||
}
|
||
},
|
||
]
|
||
|
||
|
||
for jury_soutenance_data in MYSY_GV.dbname['jury_soutenance'].aggregate(query_soutenance):
|
||
|
||
|
||
new_jury_soutenance_data = {}
|
||
|
||
if( "_id" in jury_soutenance_data.keys() ):
|
||
new_jury_soutenance_data['jury_soutenance_id'] = str(jury_soutenance_data['_id'])
|
||
|
||
if ("sujet" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_sujet'] = jury_soutenance_data['sujet']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_sujet'] = ""
|
||
|
||
if ("observation" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['observation'] = jury_soutenance_data['observation']
|
||
else:
|
||
new_jury_soutenance_data['observation'] = ""
|
||
|
||
if ("note" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_note'] = jury_soutenance_data['note']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_note'] = ""
|
||
|
||
|
||
if ("jury_soutenance_adresse" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_adresse'] = jury_soutenance_data['jury_soutenance_adresse']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_adresse'] = ""
|
||
|
||
if ("jury_soutenance_code_postal" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_code_postal'] = jury_soutenance_data['jury_soutenance_code_postal']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_code_postal'] = ""
|
||
|
||
if ("jury_soutenance_pays" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_pays'] = jury_soutenance_data['jury_soutenance_pays']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_pays'] = ""
|
||
|
||
if ("jury_soutenance_salle" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_salle'] = jury_soutenance_data['jury_soutenance_salle']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_salle'] = ""
|
||
|
||
if ("jury_soutenance_ville" in jury_soutenance_data.keys()):
|
||
new_jury_soutenance_data['jury_soutenance_ville'] = jury_soutenance_data['jury_soutenance_ville']
|
||
else:
|
||
new_jury_soutenance_data['jury_soutenance_ville'] = ""
|
||
|
||
|
||
|
||
if( "jury_collection" in jury_soutenance_data.keys() ):
|
||
if( "code" in jury_soutenance_data["jury_collection"][0].keys() ):
|
||
new_jury_soutenance_data['jury_code'] = jury_soutenance_data["jury_collection"][0]['code']
|
||
else:
|
||
new_jury_soutenance_data['jury_code'] = ""
|
||
|
||
if ("description" in jury_soutenance_data["jury_collection"][0].keys()):
|
||
new_jury_soutenance_data['jury_description'] = jury_soutenance_data["jury_collection"][0]['description']
|
||
else:
|
||
new_jury_soutenance_data['jury_description'] = ""
|
||
|
||
if ("agenda_collection" in jury_soutenance_data.keys()):
|
||
if ("event_start" in jury_soutenance_data["agenda_collection"][0].keys()):
|
||
if( jury_soutenance_data["agenda_collection"][0]['event_start'] ):
|
||
local_date = str( jury_soutenance_data["agenda_collection"][0]['event_start'])[0:16]
|
||
new_date = datetime.strptime(str(local_date ), '%Y-%m-%dT%H:%M').strftime( "%d/%m/%Y %H:%M")
|
||
new_jury_soutenance_data['event_start'] = str(new_date)
|
||
else:
|
||
new_jury_soutenance_data['event_start'] = ""
|
||
|
||
else:
|
||
new_jury_soutenance_data['event_start'] = ""
|
||
|
||
if ("event_end" in jury_soutenance_data["agenda_collection"][0].keys()):
|
||
if (jury_soutenance_data["agenda_collection"][0]['event_end']):
|
||
local_date = str(jury_soutenance_data["agenda_collection"][0]['event_end'])[0:16]
|
||
new_date = datetime.strptime(str(local_date), '%Y-%m-%dT%H:%M').strftime("%d/%m/%Y %H:%M")
|
||
new_jury_soutenance_data['event_end'] = str(new_date)
|
||
else:
|
||
new_jury_soutenance_data['event_end'] = ""
|
||
else:
|
||
new_jury_soutenance_data['event_end'] = ""
|
||
|
||
|
||
|
||
list_jury_soutenance_data.append(new_jury_soutenance_data)
|
||
|
||
|
||
dictionnary_data['list_jury_soutenance_data'] = list_jury_soutenance_data
|
||
|
||
|
||
|
||
# Recuperation des données système
|
||
# Ajout des champs système comme la date du jour
|
||
systeme_data = {}
|
||
|
||
ct = datetime.now()
|
||
ts = ct.timestamp()
|
||
date_jour = ct.strftime("%d/%m/%Y")
|
||
new_field = {"date_jour": str(date_jour)}
|
||
systeme_data.update(new_field)
|
||
dictionnary_data['systeme_data'] = systeme_data
|
||
|
||
json_formatted_str = json.dumps(dictionnary_data, indent=2)
|
||
|
||
#print(" ### AFFICHAGE dU data dictionnary ")
|
||
#print(json_formatted_str)
|
||
|
||
|
||
return True, dictionnary_data
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer le dictionnaire pour les template"
|
||
|
||
|
||
"""
|
||
Recuperation du nombre d'heure par jour depuis la confif du partner
|
||
"""
|
||
def Get_Partner_Hour_Per_Day(partner_owner_recid):
|
||
try:
|
||
|
||
hour_per_day_value = "0"
|
||
|
||
hour_per_day = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"partner_jour_heure",
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'partner_owner_recid':str(partner_owner_recid)})
|
||
|
||
if(hour_per_day is None ):
|
||
            myprint(str(
                inspect.stack()[0][3]) + " WARNING : Le partner (partner_owner_recid = "+str(partner_owner_recid)+" ) n'a pas de configuration pour la conversion jour / heure."
                " Utilisation de la conversion par défaut ")
|
||
|
||
|
||
hour_per_day = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "partner_jour_heure",
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'partner_owner_recid': "default"})
|
||
if (hour_per_day is None):
|
||
                myprint(str(
                    inspect.stack()[0][3]) + " WARNING : Aucune configuration par défaut pour la conversion jour / heure."
                    " Fixation unilatérale à 1 jour = 7 h ")
|
||
|
||
hour_per_day_value = "7"
|
||
|
||
else:
|
||
hour_per_day_value = str(hour_per_day['config_value'])
|
||
else:
|
||
hour_per_day_value = str(hour_per_day['config_value'])
|
||
|
||
|
||
return hour_per_day_value
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
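
# Esquisse d'utilisation (exemple indicatif, 'partner_recid_xxx' est une valeur fictive) :
#   nb_heures_par_jour = Get_Partner_Hour_Per_Day("partner_recid_xxx")
#   # Renvoie la valeur de configuration sous forme de chaîne (ex : "7"),
#   # ou False si une exception est levée pendant la lecture de la configuration.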
|
||
|
||
|
||
"""
|
||
Recuperation de la devise du partner dans jour depuis la confif du partner
|
||
"""
|
||
def Get_Partner_Currency(partner_owner_recid):
|
||
try:
|
||
|
||
currency_value = "euro"
|
||
|
||
currency = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name':"partner_devise",
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'partner_owner_recid':str(partner_owner_recid)})
|
||
|
||
if(currency is None ):
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " WARNING : Le partner (partner_owner_recid = "+str(partner_owner_recid)+" ) n'a pas de configuration de devise."
|
||
" Utilisation de la devise par defaut du systeme")
|
||
|
||
|
||
currency = MYSY_GV.dbname['base_partner_setup'].find_one({'config_name': "partner_devise",
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'partner_owner_recid': "default"})
|
||
if (currency is None):
|
||
                myprint(str(
                    inspect.stack()[0][3]) + " WARNING : Aucune configuration par défaut pour la devise du système."
                    " Fixation unilatérale à l'euro ")
|
||
|
||
currency_value = "euro"
|
||
|
||
else:
|
||
currency_value = str(currency['config_value'])
|
||
else:
|
||
currency_value = str(currency['config_value'])
|
||
|
||
|
||
return currency_value
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
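
# Esquisse d'utilisation (exemple indicatif, 'partner_recid_xxx' est une valeur fictive) :
#   devise = Get_Partner_Currency("partner_recid_xxx")
#   # Renvoie la devise configurée sous forme de chaîne (ex : "euro"),
#   # ou False si une exception est levée pendant la lecture de la configuration.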
|
||
|
||
|
||
"""
|
||
12/03/2024 :
|
||
** initialiser / initialisation / **
|
||
Cette fonction permette d'initialiser les configuration par defaut d'un nouveau partenaire de MySy.
|
||
|
||
C'est une fonction qui doit etre utilisé à l'activation du compte partenaire. Cela ne l'empechera pas de
|
||
ensuite de faire la mise à jour manuellement.
|
||
|
||
Mais ca permet d'envoir un systeme pleinement operationnel des l'activation du compte.
|
||
Liste des points de configuration à ajoouter
|
||
- Les documents : Il faut créer à minima les documents suivants :
|
||
-> Demande d'inscription
|
||
-> Confirmation Inscription
|
||
-> Convention individuelle
|
||
-> Convention d'entreprise
|
||
-> Convocation
|
||
-> Attestation
|
||
-> Devis / Commande
|
||
-> Factue
|
||
|
||
|
||
- Un modele de journée de travail
|
||
|
||
- Les étapes d'un session :
|
||
-> Plannification
|
||
-> En cours
|
||
-> Terminée
|
||
-> Archivée
|
||
|
||
- Un site par defaut :
|
||
-> Site Principal
|
||
|
||
- CRM Etap Opportunité:
|
||
-> Nouveau
|
||
-> Qualification
|
||
-> Proposition
|
||
-> Gagne
|
||
-> Perdu
|
||
-> Archivé
|
||
|
||
- Conditions de paiement
|
||
-> 0JF
|
||
-> 30JF
|
||
-> 15JFDM
|
||
|
||
- initialiser aussi les document envoyé automatiquement (en mode no automatic, pour que le client l'active lui meme)
|
||
- Configuration des types de client
|
||
- Configuration des domaines de formation
|
||
- Configuration des catégorie de formation
|
||
- Configuration des métiers de formation
|
||
- Configuration des formulaire (positionnement, à chaud, froid, enseignant, etc
|
||
|
||
"""
|
||
def Configure_Partner_Init_Setup_No_Token(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['partner_id']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['partner_id']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
|
||
"""
|
||
Recuperer les données du partenaire.
|
||
/!\ : Ceci ne s'applique qu'a un compte de type admin -- test ko à refaire
|
||
"""
|
||
my_partner_data = MYSY_GV.dbname['partnair_account'].find_one({'_id':ObjectId(str(diction['partner_id'])),
|
||
})
|
||
if (my_partner_data is None ):
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " L'identifiant '" + str(diction['partner_id']) + "' n'est pas valide ")
|
||
return False, " L'identifiant '" + str(diction['partner_id']) + "' n'est pas valide",
|
||
|
||
|
||
""""
|
||
1 - Configuration des documents
|
||
"""
|
||
liste_document_ref_interne = ['QUALIOPI_AUTOMATIC_DOCUMENT_SENDING', 'LMS_USER_ACCES_NOTIF', 'INTRANET_CLIENT_LOGIN_NOTIF',
|
||
'CRM_OPPORTUNITE_CHANGE_NOTIF', 'EVALUATION_CONVOCATION', 'BPF_MODEL',
|
||
'EVALUATION_MODEL_PDF', 'EVAL_RESSOURCE_HUMAINE', 'EVAL_AUTRE', 'PART_INVOICE', 'PART_ORDER',
|
||
'QUOTATION_REMINDER_LEVEL_1', 'CLASS_INFO_REQUEST', 'PARTICIPANT_LIST_UPDATE',
|
||
'QUESTION_POSITIONNEMENT', 'E_DOCUMENT_SIGNATURE_CODE', 'E_DOCUMENT_SIGNED',
|
||
'E_DOCUMENT_SIGNATURE_REQUEST', 'ATTESTATION_FORMATION', 'CONVENTION_STAGIAIRE',
|
||
'FACTURATION_SESSION', 'EMPLOYEE_CREDENTIALS_FOR_LMS', 'CONVOCATION_STAGIAIRE',
|
||
'EMARGEMENT', 'EMPLOYEE_CREDENTIALS', 'TABLEAU EMARGEMENT', 'EMARGEMENT_FORMATION',
|
||
'EVAL_FORMATION', 'ANNULE_INSCRIPTION', 'REFUS_INSCRIPTION', 'CONF_INSCRIPTION',
|
||
'CONF_PRE_INSCRIPTION', ]
|
||
|
||
"""
|
||
Supprimer les eventuel ancienne information de ce type pour ce partenaire
|
||
"""
|
||
MYSY_GV.dbname['courrier_template'].delete_many({'partner_owner_recid':str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des nouveaux courriers
|
||
"""
|
||
cpt = 0
|
||
for my_courrier_data in MYSY_GV.dbname['courrier_template'].find({"ref_interne": {"$in": liste_document_ref_interne},
|
||
'partner_owner_recid':'default', 'valide':'1',
|
||
'locked':'0'}, ):
|
||
new_courrier = my_courrier_data
|
||
new_courrier['date_update'] = str(datetime.now())
|
||
new_courrier['update_by'] = str(my_partner_data['_id'])
|
||
new_courrier['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
new_courrier['original_courrier_template_id'] = str(my_courrier_data['_id'])
|
||
if( "_id" in new_courrier.keys() ):
|
||
del new_courrier['_id']
|
||
|
||
MYSY_GV.dbname['courrier_template'].insert_one(new_courrier)
|
||
cpt = cpt +1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE "+str(cpt)+" DOCUMENT(S) ")
|
||
|
||
|
||
"""
|
||
2 - Configuration du modele de journée de travail
|
||
"""
|
||
"""
|
||
Supprimer les eventuel ancienne information de ce type pour ce partenaire
|
||
"""
|
||
MYSY_GV.dbname['base_config_modele_journee'].delete_many({'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
|
||
cpt = 0
|
||
my_modele_journee_data = MYSY_GV.dbname['base_config_modele_journee'].find_one({'partner_owner_recid':'default',
|
||
'valide':'1',
|
||
'locked':'0'}, {'_id': 0})
|
||
if( my_modele_journee_data ):
|
||
new_my_modele_journee_data = my_modele_journee_data
|
||
new_my_modele_journee_data['date_update'] = str(datetime.now())
|
||
new_my_modele_journee_data['update_by'] = str(my_partner_data['_id'])
|
||
new_my_modele_journee_data['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['base_config_modele_journee'].insert_one(new_my_modele_journee_data)
|
||
|
||
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " MODELE DE JOURNEE (S) ")
|
||
|
||
|
||
|
||
"""
|
||
3 - Configuration des étapes d'un session de formation
|
||
"""
|
||
|
||
# Suppression des étapes qui existe pour ce partenaire
|
||
MYSY_GV.dbname['base_partner_session_step'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des nouvelles étapes par defaut
|
||
"""
|
||
cpt = 0
|
||
for my_step_session in MYSY_GV.dbname['base_partner_session_step'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_session = my_step_session
|
||
new_step_session['date_update'] = str(datetime.now())
|
||
new_step_session['update_by'] = str(my_partner_data['_id'])
|
||
new_step_session['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
|
||
|
||
MYSY_GV.dbname['base_partner_session_step'].insert_one(new_step_session)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " ETAPE(S) DE SESSION DE FORMATION ")
|
||
|
||
|
||
"""
|
||
4 - Configuration des CRM Etap Opportunité
|
||
"""
|
||
# Suppression des étapes qui existe pour ce partenaire
|
||
MYSY_GV.dbname['base_partner_opportunite_step'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
|
||
"""
|
||
Insertion des nouvelles étapes des CRM opportunité par defaut
|
||
"""
|
||
cpt = 0
|
||
for my_step_CRM_Opp in MYSY_GV.dbname['base_partner_opportunite_step'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_CRM_Opp = my_step_CRM_Opp
|
||
new_step_CRM_Opp['date_update'] = str(datetime.now())
|
||
new_step_CRM_Opp['update_by'] = str(my_partner_data['_id'])
|
||
new_step_CRM_Opp['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['base_partner_opportunite_step'].insert_one(new_step_CRM_Opp)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " ETAPE(S) DE CRM OPPORTUNITE ")
|
||
|
||
|
||
|
||
"""
|
||
5 - Configuration des Conditions de paiement
|
||
"""
|
||
# Suppression des étapes qui existe pour ce partenaire
|
||
MYSY_GV.dbname['base_partner_paiement_condition'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
cpt = 0
|
||
for my_paiement_cdtion in MYSY_GV.dbname['base_partner_paiement_condition'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_paiement_cdtion = my_paiement_cdtion
|
||
new_paiement_cdtion['date_update'] = str(datetime.now())
|
||
new_paiement_cdtion['update_by'] = str(my_partner_data['_id'])
|
||
new_paiement_cdtion['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['base_partner_paiement_condition'].insert_one(new_paiement_cdtion)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " CONDITION(S) PAIEMENT ")
|
||
|
||
|
||
"""
|
||
6 - Creation d'un site par default
|
||
"""
|
||
# Suppression des site qui existe pour ce partenaire
|
||
MYSY_GV.dbname['site_formation'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
cpt = 0
|
||
new_site_formation_default = {}
|
||
new_site_formation_default['date_update'] = str(datetime.now())
|
||
new_site_formation_default['update_by'] = str(my_partner_data['_id'])
|
||
new_site_formation_default['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
new_site_formation_default['code_site'] = "site_01"
|
||
new_site_formation_default['nom_site'] = "Site Principale de Formation"
|
||
new_site_formation_default['valide'] = "1"
|
||
new_site_formation_default['locked'] = "0"
|
||
|
||
site_ville = ""
|
||
if( "adr_city" in my_partner_data.keys() ):
|
||
site_ville = my_partner_data['adr_city']
|
||
new_site_formation_default['site_ville'] = site_ville
|
||
|
||
site_pays = ""
|
||
if ("adr_country" in my_partner_data.keys()):
|
||
site_pays = my_partner_data['adr_country']
|
||
new_site_formation_default['site_pays'] = site_pays
|
||
|
||
site_adr = ""
|
||
if ("adr_street" in my_partner_data.keys()):
|
||
site_adr = my_partner_data['adr_street']
|
||
new_site_formation_default['site_adr'] = site_adr
|
||
|
||
site_cp = ""
|
||
if ("adr_zip" in my_partner_data.keys()):
|
||
site_cp = my_partner_data['adr_zip']
|
||
new_site_formation_default['site_cp'] = site_cp
|
||
|
||
MYSY_GV.dbname['site_formation'].insert_one(new_site_formation_default)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " SITE DE FORMATION ")
|
||
|
||
"""
|
||
7 - Creation du paramettrage pour l'automatisation de l'envoi de certains courrier
|
||
(ex : activer et decider d'envoyer les convention X jour avant le début de la session
|
||
"""
|
||
cpt = 0
|
||
for my_courrier_data in MYSY_GV.dbname['base_document_automatic_setup'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, ):
|
||
new_courrier_for_automation = my_courrier_data
|
||
new_courrier_for_automation['actif'] = "0"
|
||
new_courrier_for_automation['date_created'] = str(datetime.now())
|
||
new_courrier_for_automation['update_by'] = str(my_partner_data['_id'])
|
||
new_courrier_for_automation['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
if ("_id" in new_courrier_for_automation.keys()):
|
||
del new_courrier_for_automation['_id']
|
||
|
||
key_data = {}
|
||
key_data['partner_owner_recid'] = str(my_courrier_data['partner_owner_recid'])
|
||
key_data['courrier_template_type_document_ref_interne'] = str( my_courrier_data['courrier_template_type_document_ref_interne'])
|
||
|
||
new_retval_data = MYSY_GV.dbname['base_document_automatic_setup'].find_one_and_update(key_data,
|
||
{"$set": new_courrier_for_automation},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if new_retval_data is None or "_id" not in new_retval_data.keys():
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " Impossible d'initialiser le point de configuration technique de l'envoi automatique des documents" +
|
||
str(my_courrier_data['courrier_template_type_document_ref_interne']) + ". ")
|
||
|
||
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " AUTOMATISATION DOCUMENT(S) ")
|
||
|
||
"""
|
||
8 - Configuration des types de client
|
||
"""
|
||
|
||
# Suppression des types de client qui existe pour ce partenaire
|
||
MYSY_GV.dbname['partner_client_type'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des types de client par defaut
|
||
"""
|
||
cpt = 0
|
||
for my_step_session in MYSY_GV.dbname['partner_client_type'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_session = my_step_session
|
||
new_step_session['date_update'] = str(datetime.now())
|
||
new_step_session['update_by'] = str(my_partner_data['_id'])
|
||
new_step_session['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['partner_client_type'].insert_one(new_step_session)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " TYPE(S) DE CLIENT ")
|
||
|
||
"""
|
||
9 - Configuration des domaines de formation
|
||
"""
|
||
|
||
        # Suppression des domaines de formation qui existent pour ce partenaire
|
||
MYSY_GV.dbname['class_domaine'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des domaines de formation
|
||
"""
|
||
cpt = 0
|
||
for my_step_session in MYSY_GV.dbname['class_domaine'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_session = my_step_session
|
||
new_step_session['date_update'] = str(datetime.now())
|
||
new_step_session['update_by'] = str(my_partner_data['_id'])
|
||
new_step_session['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['class_domaine'].insert_one(new_step_session)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " DOMAINE(S) DE FORMATION ")
|
||
|
||
"""
|
||
10 - Configuration des catégorie de formation
|
||
"""
|
||
|
||
        # Suppression des catégories de formation qui existent pour ce partenaire
|
||
MYSY_GV.dbname['class_categorie'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des catégories de formation
|
||
"""
|
||
cpt = 0
|
||
for my_step_session in MYSY_GV.dbname['class_categorie'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_session = my_step_session
|
||
new_step_session['date_update'] = str(datetime.now())
|
||
new_step_session['update_by'] = str(my_partner_data['_id'])
|
||
new_step_session['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['class_categorie'].insert_one(new_step_session)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " CATEGORIE(S) DE FORMATION ")
|
||
|
||
"""
|
||
11 - Configuration des métiers de formation
|
||
"""
|
||
|
||
# Suppression des métiers de formation qui existe pour ce partenaire
|
||
MYSY_GV.dbname['class_metier'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des métiers de formation
|
||
"""
|
||
cpt = 0
|
||
for my_step_session in MYSY_GV.dbname['class_metier'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_session = my_step_session
|
||
new_step_session['date_update'] = str(datetime.now())
|
||
new_step_session['update_by'] = str(my_partner_data['_id'])
|
||
new_step_session['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['class_metier'].insert_one(new_step_session)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " METIER(S) DE FORMATION ")
|
||
|
||
"""
|
||
12 - Configuration des formulaire
|
||
"""
|
||
|
||
# Suppression des formulaire qui existe pour ce partenaire
|
||
MYSY_GV.dbname['formulaire'].delete_many(
|
||
{'partner_owner_recid': str(my_partner_data['recid'])})
|
||
|
||
"""
|
||
Insertion des métiers de formation
|
||
"""
|
||
cpt = 0
|
||
for my_step_session in MYSY_GV.dbname['formulaire'].find(
|
||
{'partner_owner_recid': 'default', 'valide': '1',
|
||
'locked': '0'}, {'_id': 0}):
|
||
new_step_session = my_step_session
|
||
new_step_session['date_update'] = str(datetime.now())
|
||
new_step_session['update_by'] = str(my_partner_data['_id'])
|
||
new_step_session['partner_owner_recid'] = str(my_partner_data['recid'])
|
||
|
||
MYSY_GV.dbname['formulaire'].insert_one(new_step_session)
|
||
cpt = cpt + 1
|
||
|
||
myprint(" #### FIN CONFIGURATION DE " + str(cpt) + " FORMULAIRE(S) ")
|
||
|
||
|
||
return True, "La configuration fonctionnelle a été correctement faite"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de Configurer le partenaire "
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction permet de savoir si un partenaire dispose de la signature electronique
|
||
"""
|
||
def Is_Partnair_Has_Digital_Signature(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
"""
|
||
Clés de mise à jour
|
||
"""
|
||
data_cle = {}
|
||
data_cle['partner_owner_recid'] = str(my_partner['recid'])
|
||
data_cle['config_name'] = "signature_digital"
|
||
data_cle['locked'] = "0"
|
||
data_cle['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
print(" ### data_cle = ", data_cle)
|
||
|
||
for retval in MYSY_GV.dbname['base_partner_setup'].find(data_cle):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
return True, RetObject
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier si le partenaire dispose de la signature digitale "
|
||
|
||
|
||
"""
|
||
Cette fonction verifie si le paramettre de notification automatique des inscriptions
|
||
est actif pour ce partenaire
|
||
"""
|
||
def Is_Partnair_Inscription_Notification(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
"""
|
||
Clés de mise à jour
|
||
"""
|
||
data_cle = {}
|
||
data_cle['partner_owner_recid'] = str(my_partner['recid'])
|
||
data_cle['config_name'] = "inscription_notification_email"
|
||
data_cle['locked'] = "0"
|
||
data_cle['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
retval_value = ""
|
||
for retval in MYSY_GV.dbname['base_partner_setup'].find(data_cle):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
if ("config_value" in retval.keys()):
|
||
retval_value = retval['config_value']
|
||
|
||
return True, str(retval_value)
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier si le partenaire dispose de la signature digitale "
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction verifie si le paramettre de notification automatique des inscriptions
|
||
est actif pour ce partenaire
|
||
"""
|
||
def Is_Partnair_Prenscription_Notification(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
"""
|
||
Clés de mise à jour
|
||
"""
|
||
data_cle = {}
|
||
data_cle['partner_owner_recid'] = str(my_partner['recid'])
|
||
data_cle['config_name'] = "preinscription_notification_email"
|
||
data_cle['locked'] = "0"
|
||
data_cle['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
retval_value = "0"
|
||
|
||
for retval in MYSY_GV.dbname['base_partner_setup'].find(data_cle):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
if( "config_value" in retval.keys()):
|
||
retval_value = retval['config_value']
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
return True, str(retval_value)
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier si le partenaire dispose de la signature digitale "
|
||
|
||
"""
|
||
Recuperation des differents niveaux de compétence
|
||
"""
|
||
def Get_Competence_Level():
|
||
try:
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for val in MYSY_GV.dbname['base_competence_level'].find({'partner_owner_recid':"default", 'valide':'1', 'locked':'0'}):
|
||
user = val
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
|
||
return True,RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer les niveaux de compétence"
|
||
|
||
"""
|
||
Recuperation des coordonnées SMTP
|
||
"""
|
||
def Get_Partner_SMTP_Param(partner_recid):
|
||
try:
|
||
"""
|
||
Recuperation des parametre SMTP du partner si le client a decidé d'utiliser son propre smpt
|
||
"""
|
||
partner_own_smtp_value = "0"
|
||
partner_SMTP_COUNT_smtpsrv = ""
|
||
partner_SMTP_COUNT_password = ""
|
||
partner_SMTP_COUNT_user = ""
|
||
partner_SMTP_COUNT_From_User = ""
|
||
partner_SMTP_COUNT_port = ""
|
||
|
||
|
||
|
||
|
||
partner_own_smtp = MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(partner_recid),
|
||
'config_name': 'partner_smtp',
|
||
'valide': '1',
|
||
'locked': '0'})
|
||
|
||
if (partner_own_smtp and "config_value" in partner_own_smtp.keys()):
|
||
partner_own_smtp_value = partner_own_smtp['config_value']
|
||
|
||
if (str(partner_own_smtp_value) == "1"):
|
||
partner_SMTP_COUNT_password = str(MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(partner_recid),
|
||
'config_name': 'smtp_user_pwd',
|
||
'valide': '1',
|
||
'locked': '0'}, {'config_value': 1})['config_value'])
|
||
|
||
partner_SMTP_COUNT_smtpsrv = str(MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(partner_recid),
|
||
'config_name': 'smtp_server',
|
||
'valide': '1',
|
||
'locked': '0'}, {'config_value': 1})['config_value'])
|
||
|
||
partner_SMTP_COUNT_user = str(MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(partner_recid),
|
||
'config_name': 'smtp_user',
|
||
'valide': '1',
|
||
'locked': '0'}, {'config_value': 1})['config_value'])
|
||
|
||
partner_SMTP_COUNT_From_User = str(MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(partner_recid),
|
||
'config_name': 'smtp_count_from_name',
|
||
'valide': '1',
|
||
'locked': '0'}, {'config_value': 1})['config_value'])
|
||
|
||
partner_SMTP_COUNT_port = str(MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(partner_recid),
|
||
'config_name': 'smtp_count_port',
|
||
'valide': '1',
|
||
'locked': '0'}, {'config_value': 1})['config_value'])
|
||
|
||
else:
|
||
            # On récupère les paramètres de MySy qui sont stockés dans le fichier : MYSY_GV
|
||
partner_SMTP_COUNT_smtpsrv = MYSY_GV.O365_SMTP_COUNT_smtpsrv
|
||
partner_SMTP_COUNT_password = MYSY_GV.O365_SMTP_COUNT_password
|
||
partner_SMTP_COUNT_user = MYSY_GV.O365_SMTP_COUNT_user
|
||
partner_SMTP_COUNT_From_User = MYSY_GV.O365_SMTP_COUNT_From_User
|
||
partner_SMTP_COUNT_port = MYSY_GV.O365_SMTP_COUNT_port
|
||
|
||
|
||
return True, partner_SMTP_COUNT_smtpsrv, partner_own_smtp_value, partner_SMTP_COUNT_password, partner_SMTP_COUNT_user, partner_SMTP_COUNT_From_User, partner_SMTP_COUNT_port
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer les paramettres smtp du partenaire ", False, False, False, False
|
||
|
||
|
||
"""
|
||
Cette fonction permet de recuperer les données d'un modele de courrier
|
||
"""
|
||
def Get_Partner_Courrier_Model(diction):
|
||
try:
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list = [ "courrier_template_id", "courrier_template_ref_interne", "partner_recid"]
|
||
|
||
filt_courrier_template_id = {}
|
||
filt_courrier_template_ref_interne = {}
|
||
|
||
is_one_filter = 0
|
||
|
||
if("courrier_template_id" in diction.keys() and diction['courrier_template_id']):
|
||
filt_courrier_template_id = {'_id':ObjectId(str(diction['courrier_template_id']))}
|
||
is_one_filter = 1
|
||
|
||
if ("courrier_template_ref_interne" in diction.keys() and diction['courrier_template_ref_interne']):
|
||
filt_courrier_template_ref_interne = {'ref_interne': str(diction['courrier_template_ref_interne'])}
|
||
is_one_filter = 1
|
||
|
||
        if( is_one_filter == 0 ):
            myprint(
                str(inspect.stack()[0][3]) + " Vous devez fournir au moins un filtre pour identifier le modèle de courrier ")
            return False, " Vous devez fournir au moins un filtre pour identifier le modèle de courrier "
|
||
|
||
query_filter_partner_owner_recid = {'$and': [
|
||
{'valide': '1',
|
||
'locked': '0',
|
||
'partner_owner_recid': str(diction['partner_recid'])},
|
||
filt_courrier_template_id,
|
||
filt_courrier_template_ref_interne,]
|
||
}
|
||
|
||
query_filter_default = {'$and': [
|
||
{'valide': '1',
|
||
'locked': '0',
|
||
'partner_owner_recid': "default"},
|
||
filt_courrier_template_id,
|
||
filt_courrier_template_ref_interne, ]
|
||
}
|
||
|
||
is_courrier_found = 0
|
||
|
||
is_courrier_template_id_valide = MYSY_GV.dbname['courrier_template'].count_documents(query_filter_partner_owner_recid)
|
||
|
||
|
||
|
||
if (is_courrier_template_id_valide == 1):
|
||
courrier_template_id_data = MYSY_GV.dbname['courrier_template'].find_one(query_filter_partner_owner_recid)
|
||
is_courrier_found = 1
|
||
|
||
|
||
else:
|
||
is_courrier_template_id_valide = MYSY_GV.dbname['courrier_template'].count_documents(
|
||
query_filter_default)
|
||
|
||
if (is_courrier_template_id_valide == 1):
|
||
courrier_template_id_data = MYSY_GV.dbname['courrier_template'].find_one(
|
||
query_filter_default)
|
||
is_courrier_found = 1
|
||
|
||
|
||
|
||
if( is_courrier_found != 1 ):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " Aucun modèle de courrier n'a été trouvé ")
|
||
return False, " Aucun modèle de courrier n'a été trouvé "
|
||
|
||
|
||
|
||
return True, courrier_template_id_data
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer le modèle de courrier "
|
||
|
||
|
||
"""
|
||
Cette fonction verifie si une ressource (humaine ou materielle) est disponibe sur une creneau donnée
|
||
"""
|
||
def Is_Ressource_Available(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', 'related_collection', 'related_collection_recid', 'event_start', 'event_end' ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes", "0"
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', 'related_collection', 'related_collection_recid', 'event_start', 'event_end' ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes", "0"
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner, "0"
|
||
|
||
local_status = CheckisDate_Hours(diction['event_start'])
|
||
if (local_status is False):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " La date de debut n'est pas au format 'jj/mm/aaaa hh:mm' ")
|
||
return False, " La date de debut n'est pas au format 'jj/mm/aaaa hh:mm'", "0"
|
||
|
||
local_status = CheckisDate_Hours(diction['event_end'])
|
||
if (local_status is False):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " La date de fin n'est pas au format 'jj/mm/aaaa hh:mm' ")
|
||
return False, " La date de fin n'est pas au format 'jj/mm/aaaa hh:mm'", "0"
|
||
|
||
|
||
local_event_date_debut = datetime.strptime(str(diction['event_start']), '%d/%m/%Y %H:%M')
|
||
local_event_date_fin = datetime.strptime(str(diction['event_end']), '%d/%m/%Y %H:%M')
|
||
|
||
|
||
event_start_ISODATE = local_event_date_debut.strftime("%Y-%m-%dT%H:%M")
|
||
event_end_ISODATE = local_event_date_fin.strftime("%Y-%m-%dT%H:%M")
|
||
|
||
|
||
|
||
        # Vérification de l'existence et de l'acceptation d'un événement pour le 'related_collection'
|
||
if (diction['related_collection'] not in MYSY_GV.ALLOWED_AGENDA_RELATED_COLLECTION):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - L'objet metier " + str(
|
||
diction['related_collection']) + " n'accepte pas de planning")
|
||
return False, " L'objet metier " + str(diction['related_collection']) + " n'accepte pas de planning ", "0"
|
||
|
||
|
||
# Verifier que 'related_collection_recid' existe et est valide dans la collection 'related_collection'
|
||
is_existe_valide_related_collection_recid = MYSY_GV.dbname[
|
||
str(diction['related_collection'])].count_documents(
|
||
{'_id': ObjectId(str(diction['related_collection_recid'])),
|
||
'partner_recid': str(my_partner['recid']),
|
||
'valide': '1',
|
||
'locked': '0'})
|
||
|
||
if (is_existe_valide_related_collection_recid <= 0):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - L'identifiant du 'related_collection_recid' n'est pas valide ")
|
||
return False, " L'identifiant du 'related_collection_recid' n'est pas valide ", "0"
|
||
|
||
if (event_start_ISODATE >= event_end_ISODATE ):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " La date de fin " + str(
|
||
diction['event_end']) + " doit être postérieure à la date de début " + str(
|
||
diction['event_start']) + " ")
|
||
return False, " La date de fin " + str(
|
||
diction['event_end']) + " doit être postérieure à la date de début " + str(
|
||
diction['event_start']) + " ", "0"
|
||
|
||
filt_periode_start_date_ISODATE = datetime.strptime(str(diction['event_start']), '%d/%m/%Y %H:%M')
|
||
filt_periode_end_date_ISODATE = datetime.strptime(str(diction['event_end']), '%d/%m/%Y %H:%M')
|
||
|
||
|
||
qery_match = {
|
||
"$match":{
|
||
"$and":[
|
||
{
|
||
"$or":[
|
||
{
|
||
"mysy_session_end_date":{
|
||
"$lte":filt_periode_end_date_ISODATE,
|
||
"$gte":filt_periode_start_date_ISODATE
|
||
}
|
||
},
|
||
{
|
||
"mysy_session_start_date": {
|
||
"$lte": filt_periode_end_date_ISODATE,
|
||
"$gte": filt_periode_start_date_ISODATE
|
||
}
|
||
},
|
||
|
||
]
|
||
},
|
||
{
|
||
"valide":"1",
|
||
"partner_owner_recid":str(my_partner['recid']),
|
||
"related_collection_recid":str(diction['related_collection_recid'])
|
||
}
|
||
]
|
||
}
|
||
}
|
||
|
||
|
||
|
||
pipe_qry = ([
|
||
{"$addFields": {
|
||
"mysy_session_end_date": {
|
||
'$dateFromString': {
|
||
'dateString': { '$substr': [ "$event_end", 0, 16 ] },
|
||
'format': "%Y-%m-%dT%H:%M"
|
||
}
|
||
}
|
||
}
|
||
},
|
||
{"$addFields": {
|
||
"mysy_session_start_date": {
|
||
'$dateFromString': {
|
||
'dateString': {'$substr': ["$event_start", 0, 16]},
|
||
'format': "%Y-%m-%dT%H:%M"
|
||
}
|
||
}
|
||
}
|
||
},
|
||
qery_match,
|
||
{
|
||
"$count": "nb_existing_event"
|
||
}
|
||
|
||
])
|
||
|
||
#print(" ### Is_Ressource_Available ici pipe_qry = ", pipe_qry)
|
||
|
||
existing_event = 0
|
||
for retval in MYSY_GV.dbname['agenda'].aggregate(pipe_qry):
|
||
#print( " ### retval = ", retval)
|
||
existing_event = tryInt(retval['nb_existing_event'])
|
||
|
||
if( existing_event > 0 ):
|
||
return True, "Cette ressource n'est pas disponible entre le "+str(diction['event_start'])+" et le "+str(diction['event_end'])+" ", "0"
|
||
|
||
|
||
return True, "Ressource disponible ", "1"
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier la disponibilité de la ressource ", "0"
|
||
|
||
|
||
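
# Illustrative usage sketch for Is_Ressource_Available (defined but never called here).
# The token and the ObjectId below are placeholders, and 'ressource_humaine' is only
# assumed to be one of the MYSY_GV.ALLOWED_AGENDA_RELATED_COLLECTION values.
def _example_is_ressource_available():
    demo_diction = {
        'token': 'PLACEHOLDER_TOKEN',
        'related_collection': 'ressource_humaine',
        'related_collection_recid': '64a1b2c3d4e5f60718293a4b',
        'event_start': '02/09/2024 09:00',
        'event_end': '02/09/2024 12:00',
    }
    status, message, is_available = Is_Ressource_Available(demo_diction)
    # is_available == "1" -> the resource is free on the requested slot
    # is_available == "0" -> the resource already has an overlapping agenda event (or the check failed)
    return status, message, is_available
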
"""
|
||
30/05/2024 - Important :
|
||
Une fois une inscription validée, certaines données comme (nom, prenom, email, etc (les données personnelles))
|
||
sont basculer vers la collection 'apprenant', et l'information 'apprenant_id' est ajouté à l'insciption.
|
||
|
||
L'objectif de cette fonction est de :
|
||
- prendre en entrée l'inscription_id et
|
||
- retourne un json de ce type :
|
||
return[incrit_data] = données de l'inscription
|
||
return[apprenant_data] = données de l'apprenant
|
||
|
||
"""
|
||
def Get_Inscrit_And_Apprenant_Data(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', 'inscrit_id']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', 'inscrit_id']
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
|
||
is_inscription_valide_count = MYSY_GV.dbname['inscription'].count_documents({'_id': ObjectId(str(diction['inscrit_id'])),
|
||
'partner_owner_recid': str(
|
||
my_partner['recid']),
|
||
'valide': '1',
|
||
'locked': '0'})
|
||
|
||
if( is_inscription_valide_count != 1):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " L'identifiant de l'inscrit "+str(diction['inscrit_id'])+ " n'est pas valide ")
|
||
return False, " L'identifiant de l'inscrit "+str(diction['inscrit_id'])+ " n'est pas valide ",
|
||
|
||
RetObject = {}
|
||
val_tmp = 1
|
||
for New_retVal in MYSY_GV.dbname['inscription'].find({'_id': ObjectId(str(diction['inscrit_id'])),
|
||
'partner_owner_recid': str(
|
||
my_partner['recid']),
|
||
'valide': '1',
|
||
'locked': '0'}):
|
||
|
||
RetObject['inscrit_data'] = New_retVal
|
||
|
||
|
||
if( "apprenant_id" in New_retVal.keys() and New_retVal['apprenant_id']):
|
||
|
||
|
||
|
||
|
||
apprenant_data = MYSY_GV.dbname['apprenant'].find_one({'_id': ObjectId(str(New_retVal['apprenant_id'])),
|
||
'partner_owner_recid': str(my_partner['recid']),
|
||
'valide': '1',
|
||
'locked': '0'})
|
||
|
||
if( apprenant_data and '_id' in apprenant_data.keys() ):
|
||
RetObject['apprenant_data'] = apprenant_data
|
||
else:
|
||
RetObject['apprenant_data'] = {}
|
||
|
||
|
||
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer les données de l'inscrit "
|
||
|
||
|
||
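
# Illustrative usage sketch for Get_Inscrit_And_Apprenant_Data (defined but never called here).
# The token and the inscription id below are placeholders.
def _example_get_inscrit_and_apprenant_data():
    status, retval = Get_Inscrit_And_Apprenant_Data({
        'token': 'PLACEHOLDER_TOKEN',
        'inscrit_id': '64a1b2c3d4e5f60718293a4c',
    })
    if status is True:
        inscrit_data = retval['inscrit_data']                # the raw 'inscription' document
        apprenant_data = retval.get('apprenant_data', {})    # the linked 'apprenant' document; {} or absent if none
        return inscrit_data, apprenant_data
    return None, None
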
"""
|
||
Cette fonction prend 2 dates, un montant (prix total)
|
||
et retourne le prix mensuel
|
||
"""
|
||
def Compute_Monthly_Price_From_Dates(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['date_from', 'date_to', 'total_price']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['date_from', 'date_to', 'total_price']
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
local_status = CheckisDate(diction['date_from'])
|
||
if (local_status is False):
|
||
myprint(str(inspect.stack()[0][
|
||
3]) + " La date de début "+str(diction['date_from'])+" n'est pas au format jj/mm/aaaa.")
|
||
return False, " La date de début "+str(diction['date_from'])+" n'est pas au format jj/mm/aaaa."
|
||
|
||
|
||
local_status = CheckisDate(diction['date_to'])
|
||
if (local_status is False):
|
||
myprint(str(inspect.stack()[0][
|
||
3]) + " La date de fin " + str(diction['date_to']) + " n'est pas au format jj/mm/aaaa.")
|
||
return False, " La date de fin " + str(diction['date_from']) + " n'est pas au format jj/mm/aaaa."
|
||
|
||
local_isfloat_status, local_isfloat_retval = IsFloat(str(diction['total_price']))
|
||
if (local_isfloat_status is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + str(diction['total_price']) + "' n'est pas valide ")
|
||
return False, " La valeur '" + str(diction['total_price']) + "' n'est pas valide "
|
||
|
||
|
||
local_date_from = datetime.strptime(str(diction['date_from']), '%d/%m/%Y')
|
||
local_date_to = datetime.strptime(str(diction['date_to']), '%d/%m/%Y')
|
||
|
||
if( local_date_from >= local_date_to ):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La date de début '" + str(local_date_from) + "' est postérieure à la date de fin "+ str(diction['date_to'])+" " )
|
||
return False, " La date de début '" + str(local_date_from) + "' est postérieure à la date de fin "+ str(diction['date_to'])+" "
|
||
|
||
nb_months = math.ceil((local_date_to - local_date_from).days / 30)
|
||
|
||
monthly_price = tryFloat(diction['total_price']) / nb_months
|
||
|
||
|
||
|
||
RetObject = {}
|
||
RetObject['nb_month'] = str(nb_months)
|
||
RetObject['monthly_price'] = str(monthly_price)
|
||
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de calculer le prix mensuel "
|
||
|
||
|
||
|
||
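
# Worked example for Compute_Monthly_Price_From_Dates (defined but never called here), with placeholder values:
# between 01/01/2024 and 01/07/2024 there are 182 days, so nb_month = ceil(182 / 30) = 7
# and monthly_price = 1800 / 7, roughly 257.14.
def _example_compute_monthly_price_from_dates():
    status, retval = Compute_Monthly_Price_From_Dates({
        'date_from': '01/01/2024',
        'date_to': '01/07/2024',
        'total_price': '1800',
    })
    # Expected: status is True, retval['nb_month'] == '7', retval['monthly_price'] starts with '257.14'
    return status, retval
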
"""
|
||
Cette fonction prend une session_id et verifie si les parametre
|
||
d'alerte de la session sont actifs pr le partenaire
|
||
|
||
et si c'est le cas, la fonction verifie si on a depassé le seuil d'alert
|
||
et qu'il reste des action à mener
|
||
|
||
use case :
|
||
si le partenaire souhaite etre alerté 5 jours avant le debut de chaque session, s'il manque des actions
|
||
(convocation, convention, etc)
|
||
|
||
"""
|
||
def Check_Partner_Session_Alert(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', 'session_id',]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes ", False
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', 'session_id',]
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes ", False
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner, False
|
||
|
||
|
||
"""
|
||
Verifier la validité de la session
|
||
"""
|
||
is_session_valide_cout = MYSY_GV.dbname["session_formation"].count_documents({'_id':ObjectId(str(diction['session_id'])),
|
||
'valide':'1',
|
||
'partner_owner_recid':str(my_partner['recid'])})
|
||
|
||
if( is_session_valide_cout != 1):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " L'identifiant de session est invalide ")
|
||
return False, " L'identifiant de session est invalide ", False
|
||
|
||
is_session_valide_data = MYSY_GV.dbname["session_formation"].find_one(
|
||
{'_id': ObjectId(str(diction['session_id'])),
|
||
'valide': '1',
|
||
'partner_owner_recid': str(my_partner['recid'])})
|
||
|
||
mytoday = datetime.today().strftime("%d/%m/%Y")
|
||
|
||
delta_day = datetime.strptime(str(is_session_valide_data['date_debut'])[0:10], '%d/%m/%Y') - datetime.strptime(str(mytoday).strip(), '%d/%m/%Y')
|
||
|
||
delta_day_INT = delta_day.days
|
||
|
||
"""
|
||
Recuperer les paramettres de controle du partenaire
|
||
"""
|
||
session_warning = ""
|
||
tmp_val = MYSY_GV.dbname["base_partner_setup"].find_one({'partner_owner_recid': str(my_partner['recid']),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'config_name': 'session_warning'
|
||
})
|
||
if (tmp_val and "config_value" in tmp_val.keys()):
|
||
session_warning = str(tmp_val['config_value'])
|
||
|
||
|
||
session_warning_lead_time = ""
|
||
session_warning_lead_time_INT = 0
|
||
tmp_val = MYSY_GV.dbname["base_partner_setup"].find_one({'partner_owner_recid':str(my_partner['recid']),
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'config_name':'session_warning_lead_time'
|
||
})
|
||
if( tmp_val and "config_value" in tmp_val.keys()):
|
||
session_warning_lead_time = str(tmp_val['config_value'])
|
||
session_warning_lead_time_INT = tryInt(session_warning_lead_time)
|
||
|
||
|
||
make_ctl = 0
|
||
if(session_warning == "1" and delta_day_INT > 0 and session_warning_lead_time_INT >= delta_day_INT ):
|
||
make_ctl = 1
|
||
|
||
if( make_ctl == 0 ):
|
||
return True, "OK", "0"
|
||
|
||
|
||
#print(" ## session_warning_lead_time_INT = ", session_warning_lead_time_INT)
|
||
|
||
"""
|
||
Pour la liste des étapes à controler, on prent : 'courrier_template_ref_interne'
|
||
|
||
s'il n'y pas d'inscription avec client, alors on ne gere pas les conventions d'entrerpise....
|
||
|
||
"""
|
||
list_etape_a_controler = ['CONF_INSCRIPTION', 'QUESTION_POSITIONNEMENT', 'CONVOCATION_STAGIAIRE']
|
||
|
||
"""
|
||
Regarder si cette inscriptiona des inscriptions (validées) d'entreprise
|
||
"""
|
||
nb_convention_entreprise = MYSY_GV.dbname['inscription'].count_documents({'partner_owner_recid': str(my_partner['recid']),
|
||
'valide': '1',
|
||
'status': '1',
|
||
'session_id': str(diction['session_id']),
|
||
'client_rattachement_id': {'$exists': True,
|
||
'$ne': ""}})
|
||
|
||
|
||
if( nb_convention_entreprise > 0 ):
|
||
list_etape_a_controler.append("CONVENTION_STAGIAIRE_ENTREPRISE")
|
||
|
||
"""
|
||
Regarder si cette inscriptiona des inscriptions (validées) d'individuelle
|
||
"""
|
||
nb_convention_individuelle = MYSY_GV.dbname['inscription'].count_documents({'$or': [
|
||
{'partner_owner_recid': str(my_partner['recid']),
|
||
'valide': '1',
|
||
'status': '1',
|
||
'session_id': str(diction['session_id']),
|
||
'client_rattachement_id': {'$exists': False}}
|
||
,
|
||
{'partner_owner_recid': str(my_partner['recid']),
|
||
'valide': '1',
|
||
'status': '1',
|
||
'session_id': str(diction['session_id']),
|
||
'client_rattachement_id': ""}
|
||
|
||
]}
|
||
)
|
||
|
||
if (nb_convention_individuelle > 0):
|
||
list_etape_a_controler.append("CONVENTION_STAGIAIRE_INDIVIDUELLE")
|
||
|
||
|
||
|
||
|
||
is_warning = 0
|
||
warning_message = ""
|
||
"""
|
||
La liste des action à controler :
|
||
- Au moins une inscription valide
|
||
- S'il a des preinscription, il faut qu'il les traite
|
||
- Si une conventon n'a pas été envoyée
|
||
- Si une convocation n'a pas été envoyée
|
||
- si les questionnaire de positionnement n'ont pas été envoyés
|
||
"""
|
||
|
||
for tmp_val in MYSY_GV.dbname['courrier_template_type_document'].find({'valide':'1', 'locked':'0',
|
||
'partner_owner_recid':'default'}):
|
||
diction_list_tracked_doc = {}
|
||
diction_list_tracked_doc['token'] = str(diction['token'])
|
||
diction_list_tracked_doc['courrier_template_type_document_ref_interne'] = str(tmp_val['ref_interne'])
|
||
diction_list_tracked_doc['session_id'] = str(diction['session_id'])
|
||
|
||
local_status, local_retval = module_editique.Get_Given_Session_Action_Status_By_Document(
|
||
diction_list_tracked_doc)
|
||
if (local_status is False):
|
||
return local_status, local_retval, False
|
||
|
||
local_data = local_retval[0]
|
||
|
||
local_courrier_template_ref_interne = local_data['courrier_template_type_document_ref_interne']
|
||
|
||
if (local_courrier_template_ref_interne in list_etape_a_controler):
|
||
if (str(local_data['global_status']) == "partiel"):
|
||
is_warning = 1
|
||
warning_message = warning_message + "<br/> " + str(local_data['courrier_template_type_document_ref_interne']) + " Partiel"
|
||
|
||
if (str(local_data['global_status']) == "aucun"):
|
||
is_warning = 1
|
||
warning_message = warning_message + "<br/> " + str(local_data['courrier_template_type_document_ref_interne']) + " Non Commencé"
|
||
|
||
# print(" is_warning = ", is_warning)
|
||
# print(" warning_message = ", warning_message)
|
||
|
||
|
||
#json_formatted_str = json.dumps(local_retval, indent=2)
|
||
|
||
#print(" ### AFFICHAGE dU data Get_Given_Session_Action_Status_By_Document ")
|
||
#print(json_formatted_str)
|
||
|
||
|
||
if( is_warning == 1 ):
|
||
return True, warning_message, str(is_warning)
|
||
|
||
return True, "OK", str(is_warning)
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier si la session est ok ", False
|
||
|
||
|
||
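
# Illustrative usage sketch for Check_Partner_Session_Alert (defined but never called here).
# The token and the session id below are placeholders.
def _example_check_partner_session_alert():
    status, message, has_warning = Check_Partner_Session_Alert({
        'token': 'PLACEHOLDER_TOKEN',
        'session_id': '64a1b2c3d4e5f60718293a4d',
    })
    # has_warning == "0" -> the alert is disabled, the lead time is not reached, or nothing is missing
    # has_warning == "1" -> 'message' lists the missing steps, e.g. "<br/> CONVOCATION_STAGIAIRE Non Commencé"
    return status, message, has_warning
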
"""
|
||
Cette fonction permet de recuper les données du modele de courrier en prenant
|
||
en compte le modèle standard
|
||
c'est a dire, si on ne trouve pas de courrier pour le client, on va cherche le courrier modele de base du système
|
||
"""
|
||
def Get_Courrier_Template_Include_Default_Data(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['partner_owner_recid', 'ref_interne', 'type_doc', 'default', 'default_version']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['partner_owner_recid', 'ref_interne', 'type_doc']
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
Qry_courrier_template = {'partner_owner_recid':diction['partner_owner_recid'],
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'type_doc':str(diction['type_doc']),
|
||
'ref_interne':str(diction['ref_interne'])}
|
||
|
||
if( "default_version" in diction.keys() and diction['default_version'] == "1"):
|
||
Qry_courrier_template = {'partner_owner_recid': diction['partner_owner_recid'],
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'type_doc': str(diction['type_doc']).lower(),
|
||
'ref_interne': str(diction['ref_interne']),
|
||
'default_version':'1'}
|
||
|
||
|
||
courrier_template_data_count = MYSY_GV.dbname['courrier_template'].count_documents(Qry_courrier_template)
|
||
|
||
if( courrier_template_data_count > 0 ):
|
||
courrier_template_data_data = MYSY_GV.dbname['courrier_template'].find_one(Qry_courrier_template)
|
||
|
||
return True, courrier_template_data_data
|
||
else:
|
||
|
||
Qry_courrier_template = {'partner_owner_recid': 'default',
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'type_doc': str(diction['type_doc']),
|
||
'ref_interne': str(diction['ref_interne'])}
|
||
|
||
if ("default_version" in diction.keys() and diction['default_version'] == "1"):
|
||
Qry_courrier_template = {'partner_owner_recid': 'default',
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'type_doc': str(diction['type_doc']),
|
||
'ref_interne': str(diction['ref_interne']),
|
||
'default_version': '1'}
|
||
|
||
courrier_template_data_count = MYSY_GV.dbname['courrier_template'].count_documents(Qry_courrier_template)
|
||
|
||
if (courrier_template_data_count > 0):
|
||
courrier_template_data_data = MYSY_GV.dbname['courrier_template'].find_one(Qry_courrier_template)
|
||
|
||
return True, courrier_template_data_data
|
||
else:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Il n'y a aucun modèle de document avec la reférence : "+str(diction['ref_interne'])+" de Type : "+str(diction['type_doc']))
|
||
return False, " Il n'y a aucun modèle de document avec la reférence : "+str(diction['ref_interne'])+" de Type : "+str(diction['type_doc'])
|
||
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récuperer les données du modèle de courrier "
|
||
|
||
|
||
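
# Illustrative usage sketch for Get_Courrier_Template_Include_Default_Data (defined but never called here).
# The recid is a placeholder, and 'CONVOCATION_STAGIAIRE' / 'pdf' are only assumed to exist
# as ref_interne / type_doc values in the 'courrier_template' collection.
def _example_get_courrier_template_include_default_data():
    status, template = Get_Courrier_Template_Include_Default_Data({
        'partner_owner_recid': 'PLACEHOLDER_RECID',
        'ref_interne': 'CONVOCATION_STAGIAIRE',
        'type_doc': 'pdf',
    })
    # If the partner has no template for this reference, the 'default' system template is returned instead.
    return status, template
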
"""
|
||
Cette fonction permet de recuper les données du modele de courrier SANS PRENDRE EN COMPTE le modèle standard (default)
|
||
c'est a dire, si on ne trouve pas de courrier pour le client, ON NE VA PAS chercher le courrier modele de base du système
|
||
"""
|
||
def Get_Courrier_Template_Exclude_Default_Data(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['partner_owner_recid', 'ref_interne', 'type_doc']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['partner_owner_recid', 'ref_interne', 'type_doc']
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
courrier_template_data_count = MYSY_GV.dbname['courrier_template'].count_documents({'partner_owner_recid':diction['partner_owner_recid'],
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'type_doc':str(diction['type_doc']),
|
||
'ref_interne':str(diction['ref_interne'])})
|
||
if( courrier_template_data_count > 0 ):
|
||
courrier_template_data_data = MYSY_GV.dbname['courrier_template'].find_one({'partner_owner_recid':diction['partner_owner_recid'],
|
||
'valide':'1',
|
||
'locked':'0',
|
||
'type_doc':str(diction['type_doc']),
|
||
'ref_interne':str(diction['ref_interne'])})
|
||
|
||
return True, courrier_template_data_data
|
||
else:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Il n'y a aucun modèle de document avec la reférence : " + str(
|
||
diction['ref_interne']) + " de Type : " + str(diction['type_doc']))
|
||
return False, " Il n'y a aucun modèle de document avec la reférence : " + str(
|
||
diction['ref_interne']) + " de Type : " + str(diction['type_doc'])
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récuperer les données du modèle de courrier "
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction prend en subdomain et retourne les
|
||
données du partenaire concernée.
|
||
|
||
pour info, le subdomain = partnair_account.nom
|
||
|
||
|
||
"""
|
||
def Get_Partner_Data_From_Subdomain(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['subdomain',]
|
||
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['subdomain', ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
|
||
|
||
|
||
partnair_data_count = MYSY_GV.dbname['partnair_account'].count_documents({'subdomaine_catalog_pub':diction['subdomain'],
|
||
'active':'1',
|
||
'locked':'0',
|
||
'firstconnexion':'0'
|
||
})
|
||
if( partnair_data_count != 1 ):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Sous Domaine invalide ")
|
||
return False, " Sous Domaine invalide"
|
||
|
||
RetObject = []
|
||
val_tmp = 0
|
||
|
||
# print(" ### data_cle = ", data_cle)
|
||
for retval in MYSY_GV.dbname['partnair_account'].find({'subdomaine_catalog_pub': diction['subdomain'],
|
||
'active': '1',
|
||
'locked': '0',
|
||
'firstconnexion': '0'
|
||
}, {'_id':1, 'recid':1, 'nom':1}):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
"""
|
||
Recuperer les elements graphique : logo, bannière, etc
|
||
"""
|
||
config_partner_catalog_data = MYSY_GV.dbname['config_partner_catalog'].find_one({'valide':'1', 'locked':'0',
|
||
'partner_owner_recid':str(retval['recid'])})
|
||
|
||
user['catalog_pub_cgv_pdf'] = ""
|
||
if (config_partner_catalog_data and "cgv_pdf" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_cgv_pdf'] = config_partner_catalog_data['cgv_pdf']
|
||
|
||
user['catalog_pub_cgu_pdf'] = ""
|
||
if (config_partner_catalog_data and "cgu_pdf" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_cgu_pdf'] = config_partner_catalog_data['cgu_pdf']
|
||
|
||
user['catalog_pub_mention_legale_pdf'] = ""
|
||
if (config_partner_catalog_data and "mention_legale_pdf" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_mention_legale_pdf'] = config_partner_catalog_data['mention_legale_pdf']
|
||
|
||
|
||
user['catalog_pub_logo'] = ""
|
||
if( config_partner_catalog_data and "logo" in config_partner_catalog_data.keys() ):
|
||
user['catalog_pub_logo'] = config_partner_catalog_data['logo']
|
||
|
||
user['catalog_pub_message_1_txt'] = ""
|
||
if (config_partner_catalog_data and "message_1_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_1_txt'] = config_partner_catalog_data['message_1_txt']
|
||
|
||
user['catalog_pub_message_1_taille'] = ""
|
||
if (config_partner_catalog_data and "message_1_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_1_taille'] = config_partner_catalog_data['message_1_taille']
|
||
|
||
user['catalog_pub_message_1_color'] = ""
|
||
if (config_partner_catalog_data and "message_1_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_1_color'] = config_partner_catalog_data['message_1_color']
|
||
|
||
user['catalog_pub_message_1_gras'] = ""
|
||
if (config_partner_catalog_data and "message_1_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_1_gras'] = config_partner_catalog_data['message_1_gras']
|
||
|
||
|
||
|
||
|
||
user['catalog_pub_message_2_txt'] = ""
|
||
if (config_partner_catalog_data and "message_2_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_2_txt'] = config_partner_catalog_data['message_2_txt']
|
||
|
||
user['catalog_pub_message_2_taille'] = ""
|
||
if (config_partner_catalog_data and "message_2_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_2_taille'] = config_partner_catalog_data['message_2_taille']
|
||
|
||
user['catalog_pub_message_2_color'] = ""
|
||
if (config_partner_catalog_data and "message_2_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_2_color'] = config_partner_catalog_data['message_2_color']
|
||
|
||
user['catalog_pub_message_2_gras'] = ""
|
||
if (config_partner_catalog_data and "message_2_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_2_gras'] = config_partner_catalog_data['message_2_gras']
|
||
|
||
|
||
user['catalog_pub_message_3_txt'] = ""
|
||
if (config_partner_catalog_data and "message_3_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_3_txt'] = config_partner_catalog_data['message_3_txt']
|
||
|
||
user['catalog_pub_message_3_taille'] = ""
|
||
if (config_partner_catalog_data and "message_3_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_3_taille'] = config_partner_catalog_data['message_3_taille']
|
||
|
||
user['catalog_pub_message_3_color'] = ""
|
||
if (config_partner_catalog_data and "message_3_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_3_color'] = config_partner_catalog_data['message_3_color']
|
||
|
||
user['catalog_pub_message_3_gras'] = ""
|
||
if (config_partner_catalog_data and "message_3_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_message_3_gras'] = config_partner_catalog_data['message_3_gras']
|
||
|
||
|
||
#---------- les indicateurs
|
||
user['catalog_pub_indicateur_1_txt'] = ""
|
||
if (config_partner_catalog_data and "indicateur_1_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_1_txt'] = config_partner_catalog_data['indicateur_1_txt']
|
||
|
||
user['catalog_pub_indicateur_1_color'] = ""
|
||
if (config_partner_catalog_data and "indicateur_1_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_1_color'] = config_partner_catalog_data['indicateur_1_color']
|
||
|
||
user['catalog_pub_indicateur_1_gras'] = ""
|
||
if (config_partner_catalog_data and "indicateur_1_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_1_gras'] = config_partner_catalog_data['indicateur_1_gras']
|
||
|
||
user['catalog_pub_indicateur_1_taille'] = ""
|
||
if (config_partner_catalog_data and "indicateur_1_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_1_taille'] = config_partner_catalog_data['indicateur_1_taille']
|
||
|
||
# indicateur 2
|
||
user['catalog_pub_indicateur_2_txt'] = ""
|
||
if (config_partner_catalog_data and "indicateur_2_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_2_txt'] = config_partner_catalog_data['indicateur_2_txt']
|
||
|
||
user['catalog_pub_indicateur_2_color'] = ""
|
||
if (config_partner_catalog_data and "indicateur_2_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_2_color'] = config_partner_catalog_data['indicateur_2_color']
|
||
|
||
user['catalog_pub_indicateur_2_gras'] = ""
|
||
if (config_partner_catalog_data and "indicateur_2_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_2_gras'] = config_partner_catalog_data['indicateur_2_gras']
|
||
|
||
user['catalog_pub_indicateur_2_taille'] = ""
|
||
if (config_partner_catalog_data and "indicateur_2_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_2_taille'] = config_partner_catalog_data['indicateur_2_taille']
|
||
|
||
# indicateur 3
|
||
user['catalog_pub_indicateur_3_txt'] = ""
|
||
if (config_partner_catalog_data and "indicateur_3_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_3_txt'] = config_partner_catalog_data['indicateur_3_txt']
|
||
|
||
user['catalog_pub_indicateur_3_color'] = ""
|
||
if (config_partner_catalog_data and "indicateur_3_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_3_color'] = config_partner_catalog_data['indicateur_3_color']
|
||
|
||
user['catalog_pub_indicateur_3_gras'] = ""
|
||
if (config_partner_catalog_data and "indicateur_3_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_3_gras'] = config_partner_catalog_data['indicateur_3_gras']
|
||
|
||
user['catalog_pub_indicateur_3_taille'] = ""
|
||
if (config_partner_catalog_data and "indicateur_3_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_3_taille'] = config_partner_catalog_data['indicateur_3_taille']
|
||
|
||
# indicateur 4
|
||
user['catalog_pub_indicateur_4_txt'] = ""
|
||
if (config_partner_catalog_data and "indicateur_4_txt" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_4_txt'] = config_partner_catalog_data['indicateur_4_txt']
|
||
|
||
user['catalog_pub_indicateur_4_color'] = ""
|
||
if (config_partner_catalog_data and "indicateur_4_color" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_4_color'] = config_partner_catalog_data['indicateur_4_color']
|
||
|
||
user['catalog_pub_indicateur_4_gras'] = ""
|
||
if (config_partner_catalog_data and "indicateur_4_gras" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_4_gras'] = config_partner_catalog_data['indicateur_4_gras']
|
||
|
||
user['catalog_pub_indicateur_4_taille'] = ""
|
||
if (config_partner_catalog_data and "indicateur_4_taille" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_indicateur_4_taille'] = config_partner_catalog_data['indicateur_4_taille']
|
||
|
||
# les contacts publics
|
||
user['catalog_pub_contact_public_email'] = ""
|
||
if (config_partner_catalog_data and "contact_public_email" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_contact_public_email'] = config_partner_catalog_data['contact_public_email']
|
||
|
||
user['catalog_pub_contact_public_telephone'] = ""
|
||
if (config_partner_catalog_data and "contact_public_telephone" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_contact_public_telephone'] = config_partner_catalog_data['contact_public_telephone']
|
||
|
||
|
||
|
||
#-----------
|
||
|
||
user['catalog_pub_banniere_img'] = ""
|
||
if (config_partner_catalog_data and "banniere_img" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_banniere_img'] = config_partner_catalog_data['banniere_img']
|
||
|
||
user['catalog_pub_theme_id'] = ""
|
||
if (config_partner_catalog_data and "theme_id" in config_partner_catalog_data.keys()):
|
||
user['catalog_pub_banniere_theme_id'] = config_partner_catalog_data['theme_id']
|
||
|
||
"""
|
||
Recuperer les données (preview) du
|
||
config_partner_catalog_theme
|
||
"""
|
||
user['theme_code'] = ""
|
||
|
||
config_partner_catalog_theme_data = MYSY_GV.dbname['config_partner_catalog_theme'].find_one(
|
||
{'_id': ObjectId(str(config_partner_catalog_data['theme_id'])),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'partner_owner_recid': "default"})
|
||
|
||
if (config_partner_catalog_theme_data and "code" in config_partner_catalog_theme_data.keys()):
|
||
user['theme_code'] = config_partner_catalog_theme_data['code']
|
||
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
#print(" ### Get_Partner_Data_From_Subdomain RetObject = ", RetObject)
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récuperer les données liées au sous domaine "
|
||
|
||
|
||
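
# Illustrative usage sketch for Get_Partner_Data_From_Subdomain (defined but never called here).
# 'demo_subdomain' is a placeholder value.
def _example_get_partner_data_from_subdomain():
    status, retval = Get_Partner_Data_From_Subdomain({'subdomain': 'demo_subdomain'})
    if status is True and retval:
        # Each entry is a JSON string produced by JSONEncoder; decode it to read the fields
        partner = json.loads(retval[0])
        return partner['recid'], partner.get('catalog_pub_logo', ''), partner.get('theme_code', '')
    return None, None, None
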
"""
|
||
Recuperation de la liste de TYPE dispositifs de financement de la formation professionnelle
|
||
"""
|
||
def Get_List_Type_Organisme_Financement(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
"""
|
||
Clés de mise à jour
|
||
"""
|
||
data_cle = {}
|
||
data_cle['partner_owner_recid'] = "default"
|
||
data_cle['valide'] = "1"
|
||
data_cle['locked'] = "0"
|
||
|
||
|
||
RetObject = []
|
||
val_tmp = 0
|
||
|
||
for retval in MYSY_GV.dbname['type_organisme_financement'].find(data_cle).sort([("_id", pymongo.DESCENDING), ]):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
return True, RetObject
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste des categories de formation "
|
||
|
||
|
||
|
||
"""
|
||
Recuperation de la liste des type de pouvoir public (des instances européennes,
|
||
de l’Etat,
|
||
des conseils régionaux,
|
||
du Pôle Emploi et
|
||
des collectivités territoriales,
|
||
etc)
|
||
"""
|
||
def Get_List_Type_Pouvoir_Public(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', ]
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
"""
|
||
Clés de mise à jour
|
||
"""
|
||
data_cle = {}
|
||
data_cle['partner_owner_recid'] = "default"
|
||
data_cle['valide'] = "1"
|
||
data_cle['locked'] = "0"
|
||
|
||
|
||
RetObject = []
|
||
val_tmp = 0
|
||
|
||
for retval in MYSY_GV.dbname['type_pouvoir_public'].find(data_cle).sort([("_id", pymongo.DESCENDING), ]):
|
||
user = retval
|
||
user['id'] = str(val_tmp)
|
||
val_tmp = val_tmp + 1
|
||
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
return True, RetObject
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste des categories de formation "
|
||
|
||
"""
|
||
Cette fonction retourne la conversion en heures de
|
||
- jour, semaine, mois, année
|
||
"""
|
||
def Get_Config_Conversion_Heures(locl_partner_owner_recid):
|
||
try:
|
||
local_nb_heure_par_jour = 0
|
||
tmp_nb_heure_par_jour = MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(locl_partner_owner_recid),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'config_name': 'nb_heure_par_jour'})
|
||
|
||
if( tmp_nb_heure_par_jour and "config_value" in tmp_nb_heure_par_jour.keys() and tmp_nb_heure_par_jour['config_value']):
|
||
local_nb_heure_par_jour = tryFloat(str(tmp_nb_heure_par_jour['config_value']))
|
||
|
||
local_nb_heure_par_semaine = 0
|
||
tmp_nb_heure_par_sem = MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(locl_partner_owner_recid),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'config_name': 'nb_heure_par_semaine'})
|
||
|
||
if (tmp_nb_heure_par_sem and "config_value" in tmp_nb_heure_par_sem.keys() and tmp_nb_heure_par_sem[
|
||
'config_value']):
|
||
local_nb_heure_par_semaine = tryFloat(str(tmp_nb_heure_par_sem['config_value']))
|
||
|
||
local_nb_heure_par_mois = 0
|
||
tmp_nb_heure_par_mois = MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(locl_partner_owner_recid),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'config_name': 'nb_heure_par_mois'})
|
||
|
||
if (tmp_nb_heure_par_mois and "config_value" in tmp_nb_heure_par_mois.keys() and tmp_nb_heure_par_mois[
|
||
'config_value']):
|
||
local_nb_heure_par_mois = tryFloat(str(tmp_nb_heure_par_mois['config_value']))
|
||
|
||
local_nb_heure_par_annee = 0
|
||
tmp_nb_heure_par_annee = MYSY_GV.dbname['base_partner_setup'].find_one(
|
||
{'partner_owner_recid': str(locl_partner_owner_recid),
|
||
'valide': '1',
|
||
'locked': '0',
|
||
'config_name': 'nb_heure_par_annee'})
|
||
|
||
if (tmp_nb_heure_par_annee and "config_value" in tmp_nb_heure_par_annee.keys() and tmp_nb_heure_par_annee[
|
||
'config_value']):
|
||
local_nb_heure_par_annee = tryFloat(str(tmp_nb_heure_par_mois['config_value']))
|
||
|
||
|
||
|
||
|
||
return True, local_nb_heure_par_jour, local_nb_heure_par_semaine, local_nb_heure_par_mois, local_nb_heure_par_annee
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la conversion des heure "
|
||
|
||
|
||
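
# Illustrative usage sketch for Get_Config_Conversion_Heures (defined but never called here).
# 'PLACEHOLDER_RECID' stands for a real partner recid; the numbers are placeholder examples.
def _example_get_config_conversion_heures():
    retval = Get_Config_Conversion_Heures('PLACEHOLDER_RECID')
    if retval[0] is True:
        # e.g. (True, 7.0, 35.0, 151.67, 1607.0) when the partner configured the four values
        status, heures_jour, heures_semaine, heures_mois, heures_annee = retval
        return heures_jour, heures_semaine, heures_mois, heures_annee
    # On failure only (False, error_message) is returned
    return None
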
"""
|
||
Newsletter ajout email
|
||
Cette fonction permet d'ajouter une adresse email dans liste des newsletter
|
||
"""
|
||
def Add_Update_Email_To_Newsletter(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['email', ]
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list and val.startswith('my_') is False:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['email', ]
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans la liste des arguments ")
|
||
return False, " Les informations fournies sont incorrectes"
|
||
|
||
|
||
if (isEmailValide(str(diction['email'])) is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " L'adresse email " + str(diction['email']) + " n'est pas valide")
|
||
return False, " L'adresse email " + str(diction['email']) + " n'est pas valide "
|
||
|
||
|
||
new_data = {}
|
||
new_data['email'] = str(diction['email']).strip()
|
||
new_data['date_update'] = str(datetime.now())
|
||
new_data['date_created'] = str(datetime.now())
|
||
new_data['valide'] = "1"
|
||
new_data['locked'] = "0"
|
||
|
||
new_data['update_by'] = ""
|
||
new_data['created_by'] = ""
|
||
|
||
ret_val = MYSY_GV.dbname['newsletter_email'].find_one_and_update(
|
||
{'email': str(diction['email']).strip(), },
|
||
{"$set": new_data},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if (ret_val is None or ret_val['_id'] is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " : Impossible de mettre à jour (ispending':'1') le compte partenaire")
|
||
return False, "Impossible d'ajouter le mode de payement"
|
||
|
||
|
||
return True, "Merci, votre adresse email a bien été ajoutée"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible d'ajouter l'adresse email à la newsletter"
|
||
|
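

# Illustrative usage sketch for Add_Update_Email_To_Newsletter (defined but never called here).
# The address below is a placeholder.
def _example_add_update_email_to_newsletter():
    status, message = Add_Update_Email_To_Newsletter({'email': 'jane.doe@example.com'})
    # status is True when the address was inserted or refreshed (upsert on the 'newsletter_email' collection)
    return status, message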