import hashlib
import _pickle as cPickle
import pickle
from PIL import Image
import requests
import bson
from pymongo import MongoClient
import pymongo
from difflib import SequenceMatcher
import textdistance
from datetime import datetime
import logging
import secrets
import base64
from bson import ObjectId
import inspect
from werkzeug.utils import secure_filename
import time
import os
import csv
import sys
import pandas as pd
from pymongo import ReturnDocument
from unidecode import unidecode
import GlobalVariable as MYSY_GV
from serpapi import GoogleSearch
import re
import email_mgt as email_mgt
import random
import json
import Ela_Spacy as ElaSpacy
from colorama import Fore
from colorama import Style
from flask import Flask, Response, render_template
from xhtml2pdf import pisa
import jinja2
import ftplib
import pysftp
import html
import mariadb

class JSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)
        return json.JSONEncoder.default(self, o)

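
'''
Usage sketch (illustrative, added for documentation; not part of the original code):
the encoder above lets json.dumps serialize MongoDB documents whose _id is an
ObjectId, e.g.

    json.dumps({'_id': ObjectId(), 'title': 'exemple'}, cls=JSONEncoder)

Without cls=JSONEncoder, json.dumps raises TypeError on the ObjectId value.
'''
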
def myprint(message=""):
    logging.info(str(datetime.now()) + " : " + str(message))
    print(Fore.RED + str(datetime.now()) + " : " + str(message) + Style.RESET_ALL)


def create_order_id():
    return secrets.token_urlsafe(3)


def create_token_urlsafe():
    return secrets.token_urlsafe(MYSY_GV.TOKEN_SIZE)


def create_user_recid():
    return secrets.token_hex(MYSY_GV.TOKEN_SIZE)

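
'''
Note with example (added for documentation, not in the original code):
secrets.token_urlsafe(n) returns roughly 1.3 * n URL-safe characters, so
create_order_id() yields a short id such as 'p2Rv', while create_token_urlsafe()
and create_user_recid() depend on the MYSY_GV.TOKEN_SIZE constant defined in
GlobalVariable (its value is not shown in this module); token_hex(n) returns
2 * n hexadecimal characters.
'''
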
'''
This function receives an uploaded file and saves it
into the "./Data/" folder.

It returns the name of the saved file.
'''
def Upload_Save_CSV_File(file=None, Folder=None):
    try:
        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that this really is a csv file and that its name does not contain any extra "."
        '''
        if (len(basename2) != 2):
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier est invalide")
            return False, "Le nom du fichier est invalide"

        if (str(basename2[1]).lower() != "csv"):
            myprint(str(inspect.stack()[0][3]) + " - : Ce n'est pas un fichier csv")
            return False, "Le fichier doit être de type '.csv'"

        new_basename2 = re.sub(r'[^a-zA-Z0-9]', '', str(basename2[0]))

        timestr = time.strftime("%Y%m%d%H%M%S")
        local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = "./Data/" + file.filename

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Impossible de traiter le fichier."

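
'''
Usage sketch (illustrative, added for documentation): in a Flask route the file
typically comes from request.files and the destination folder from a module
constant; the names "File" and MYSY_GV.upload_folder below are assumptions, they
are not defined in this module.

    from flask import request
    status, stored_name = Upload_Save_CSV_File(request.files.get("File"),
                                               MYSY_GV.upload_folder)
    if status is False:
        # stored_name then contains the error message to return to the caller
        ...
'''
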
'''
Controls image uploads.
Accepted formats are:
    - 'jpg'
    - 'jpeg'
    - 'png'
'''
def Upload_Save_IMG_File(file=None, Folder=None):
    try:
        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that this really is an image file ('jpg', 'jpeg', 'png', ...) and that its name does not contain any extra "."
        '''
        if (len(basename2) != 2):
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier "+str(file.filename)+" est invalide")
            return False, "Le nom du fichier "+str(file.filename)+" est invalide "

        if (str(basename2[1]).lower() not in MYSY_GV.IMG_FORMAT):
            myprint(str(inspect.stack()[0][3]) + " - : Le fichier "+str(file.filename) + ". L'extention '"+str(basename2[1])+"' n'est pas un fichier image. Les extentions autorisées sont :"+str(MYSY_GV.IMG_FORMAT))
            return False, " Le fichier "+str(file.filename) + ". L'extention '"+str(basename2[1])+"' n'est pas un fichier image. Les extentions autorisées sont :"+str(MYSY_GV.IMG_FORMAT)+" "

        timestr = time.strftime("%Y%m%d%H%M%S")
        local_base_name = str(basename2[0]).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = "./Data/"+file.filename

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, None


"""
Controls image uploads restricted to PNG ONLY.
Accepted format: PNG
"""
def Upload_Save_IMG_PNG_File(file=None, Folder=None):
    try:
        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        '''
        Check that this really is a PNG image file and that its name does not contain any extra "."
        '''
        if (len(basename2) != 2):
            myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier "+str(file.filename)+" est invalide")
            return False, " Le nom du fichier "+str(file.filename)+" est invalide "

        if (str(basename2[1]).lower() not in MYSY_GV.IMG_PNG_FORMAT):
            myprint(str(inspect.stack()[0][3]) + " - : le fichier "+str(file.filename) + ". L'extention '"+str(basename2[1])+"' n'est pas un fichier image. Les extentions autorisées sont :"+str(MYSY_GV.IMG_PNG_FORMAT))
            return False, "Le fichier "+str(file.filename) + ". L'extention '"+str(basename2[1])+"' n'est pas un fichier image. Les extentions autorisées sont :"+str(MYSY_GV.IMG_PNG_FORMAT)

        new_basename2 = re.sub(r'[^a-zA-Z0-9]', '', str(basename2[0]))

        timestr = time.strftime("%Y%m%d%H%M%S")
        local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(Folder), secure_filename(file.filename)))

        Global_file_name = "./Data/"+file.filename

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, None

'''
This function takes an email and a token and checks
whether the triple (email, token, status) is valid.
'''
def check_token_validity(email="", token=""):
    try:
        coll_token = MYSY_GV.dbname['user_token']
        print(" check_token_validity token == "+str(token))
        tmp_count = coll_token.count_documents({'token': str(token), 'valide': '1'})
        if (tmp_count <= 0):
            myprint(str(inspect.stack()[0][3]) + " La session de connexion n'est pas valide")
            return False

        return True

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

'''
This function takes an email and a token and checks
whether the user account is active.
'''
def check_user_validity(email="", token=""):
    try:
        coll_token = MYSY_GV.dbname['user_account']
        message = {}
        ret = True

        for retVal in coll_token.find({'token': str(token)}):
            user = retVal
            if (user['valide'] == '0'):
                print(" le compte avec le token : "+token+" n'est pas valide")
                message['valide'] = '0'
                ret = False

            if (user['locked'] == '1'):
                print(" le compte avec le token : " + token + " est verrouillé")
                message['locked'] = '1'
                ret = False

        return ret, message

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, message

'''
This function takes a token and checks whether the triple
(email, token, status) is valid, EXCLUSIVELY FOR PARTNERS,
i.e. against the partner_token collection.
'''
def check_partner_token_validity(email="", token=""):
    try:
        coll_token = MYSY_GV.dbname['partner_token']
        tmp_count = coll_token.count_documents({'token': str(token), 'locked': '0', 'valide': '1'})

        if (tmp_count <= 0):
            myprint("La session de connexion n'est pas valide")
            return False
        return True

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

def check_partner_token_validity_v2(diction):
    try:
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']
            else:
                myprint("La session de connexion n'est pas valide")
                return False

        coll_token = MYSY_GV.dbname['partner_token']
        local_qry = {'locked': '0', 'valide': '1', 'token': str(token)}
        tmp_count = MYSY_GV.dbname['partner_token'].count_documents(local_qry)

        if (tmp_count <= 0):
            myprint("La session de connexion n'est pas valide")
            return False
        return True

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

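
'''
Usage sketch (illustrative, added for documentation): API endpoints typically call
one of the checks above before doing any work, e.g.

    if check_partner_token_validity_v2({'token': my_token}) is not True:
        return False, "La session de connexion n'est pas valide"

check_partner_token_validity_v2 only needs the 'token' key of the incoming
dictionary; the other checks take the token as a plain string argument.
'''
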
"""
|
||
Cette fonction retour le stripe_account_id
|
||
"""
|
||
def get_parnter_stripe_account_id_from_recid(recid = ""):
|
||
try:
|
||
if len(str(recid)) <= 0 :
|
||
myprint(" get_parnter_stripe_account_id_from_token : Le recid partner est vide")
|
||
return False
|
||
|
||
print(" #### recid = "+str(recid))
|
||
coll_partner = MYSY_GV.dbname['partnair_account']
|
||
tmp_val = coll_partner.find({'recid': str(recid), 'active': '1', 'locked':'0'})
|
||
|
||
if( "stripe_account_id" not in tmp_val[0].keys()):
|
||
myprint(" Pas de champ : stripe_account_id")
|
||
return False
|
||
|
||
stripe_account_id = tmp_val[0]['stripe_account_id']
|
||
return stripe_account_id
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction retour le stripe_paymentmethod_id
|
||
"""
|
||
def get_parnter_stripe_stripe_paymentmethod_id_from_recid(recid = ""):
|
||
try:
|
||
if len(str(recid)) <= 0 :
|
||
myprint(" get_parnter_stripe_account_id_from_token : Le recid partner est vide")
|
||
return False
|
||
|
||
print(" #### recid = "+str(recid))
|
||
coll_partner = MYSY_GV.dbname['partnair_account']
|
||
tmp_val = coll_partner.find({'recid': str(recid), 'active': '1', 'locked':'0'})
|
||
stripe_paymentmethod_id = tmp_val[0]['stripe_paymentmethod_id']
|
||
return stripe_paymentmethod_id
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
'''
|
||
recuperation du recid du user
|
||
'''
|
||
def get_user_recid_from_token(token = ""):
|
||
try :
|
||
if len(str(token)) <= 0 :
|
||
myprint(" Le token est vide")
|
||
return False
|
||
|
||
coll_token = MYSY_GV.dbname['user_token']
|
||
tmp_val = coll_token.find({'token': str(token), 'valide': '1'})
|
||
user_recid = tmp_val[0]['recid']
|
||
return user_recid
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|

"""
This function returns a partner's data from its recid.
Returns False if the recid is wrong or if the account is not active.
"""
def get_partner_data_from_recid(user_recid=""):
    try:
        if len(str(user_recid)) <= 0:
            myprint(" Le partner_recid est vide")
            return False, ""

        coll_token = MYSY_GV.dbname['partner_token']
        tmp_count = coll_token.count_documents({'recid': str(user_recid), 'valide': '1', 'locked': '0'})
        if (tmp_count <= 0):
            return False, ""

        coll_token = MYSY_GV.dbname['partnair_account']
        tmp_val = coll_token.find_one({'recid': str(user_recid), 'active': '1', 'locked': '0'})

        return True, tmp_val

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, ""


"""
Retrieves the partner's data from its token.
"""
def get_partner_data_from_token(token=""):
    try:
        if len(str(token)) <= 0:
            myprint(" Le token partner est vide")
            return False, ""

        val_tmp = MYSY_GV.dbname['partnair_account'].count_documents({'token': str(token), 'active': '1', 'locked': '0'})
        if (val_tmp <= 0):
            return False, ""

        ret_val = MYSY_GV.dbname['partnair_account'].find_one(
            {'token': str(token), 'active': '1', 'locked': '0'})

        return True, ret_val

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, ""

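
'''
Usage sketch (illustrative, added for documentation): both functions above return a
(status, data) pair, so callers unpack them before use, e.g.

    local_status, my_partner = get_partner_data_from_token(str(token))
    if local_status is not True:
        return False, "Les informations d'identification sont incorrectes"
    partner_recid = my_partner['recid']

The field names ('recid', etc.) come from the partnair_account collection.
'''
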
'''
|
||
recuperation d'email du user from token
|
||
'''
|
||
def get_user_email_from_token(token = ""):
|
||
try:
|
||
if len(str(token)) <= 0 :
|
||
myprint(" Le token est vide")
|
||
return False
|
||
|
||
coll_token = MYSY_GV.dbname['user_token']
|
||
tmp_val = coll_token.find({'token': str(token), 'valide': '1'})
|
||
user_email = tmp_val[0]['email']
|
||
return user_email
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
'''
|
||
recuperation d'email du user from recid
|
||
'''
|
||
def get_user_email_from_recid(recid = ""):
|
||
|
||
try:
|
||
if len(str(recid)) <= 0 :
|
||
myprint(" Le recid est vide")
|
||
return False
|
||
|
||
coll_token = MYSY_GV.dbname['user_account']
|
||
tmp_val = coll_token.find({'recid': str(recid), 'active': '0'})
|
||
|
||
if( tmp_val and tmp_val[0] and tmp_val[0]['email']):
|
||
user_email = tmp_val[0]['email']
|
||
else:
|
||
return False, "Impossible de supprimer le compte utilisateur"
|
||
|
||
return user_email
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "Impossible de supprimer le compte utilisateur"
|
||
|
||
'''
|
||
recuperation du recid du partner
|
||
'''
|
||
def get_parnter_recid_from_token(token = ""):
|
||
if len(str(token)) <= 0 :
|
||
myprint(" Le token partner est vide")
|
||
return False
|
||
|
||
coll_token = MYSY_GV.dbname['partner_token']
|
||
tmp_val = coll_token.find_one({'token': str(token), 'valide': '1', 'locked':'0'})
|
||
|
||
if( tmp_val is None ):
|
||
return False
|
||
|
||
if ("recid" not in tmp_val.keys()):
|
||
return False
|
||
|
||
if( not tmp_val['recid'] ):
|
||
return False
|
||
|
||
user_recid = tmp_val['recid']
|
||
return user_recid
|
||
|
||
|
||
|
||
|
||
def get_user_recid_from_email(email = ""):
|
||
if len(str(email)) <= 0 :
|
||
myprint(" L'email est vide")
|
||
return False
|
||
|
||
|
||
coll_user = MYSY_GV.dbname['user_account']
|
||
tmp_val = coll_user.find({'email': str(email), 'active': '1'})
|
||
user_recid = tmp_val[0]['recid']
|
||
|
||
return user_recid
|
||
|
||
|
||
'''
|
||
Cette fonction créer la reference interne d'une formation
|
||
'''
|
||
def Create_internal_call_ref():
|
||
retval = None
|
||
now = datetime.now()
|
||
|
||
# getting the timestamp
|
||
ts = str(datetime.timestamp(now)).replace(".", "").replace(",", "")
|
||
|
||
retval = "Mysy_"+str(ts)
|
||
return str(retval)
|
||
|
||
def textdist():
|
||
val = textdistance.mra("doe", "dough")
|
||
#print(" mra = "+str(val))
|
||
|
||
val2 = textdistance.editex("doe", "dough")
|
||
#print(" editex = " + str(val2))
|
||
|
||
|
||
def similaire():
|
||
mots = ["Durand est present", "Meyer", "Dupond", "Dopon", "DUPON", "Nguyen", "Toto"]
|
||
ratio = 0.8
|
||
|
||
for mot in mots:
|
||
#print(" CMP de 'Dupont' et '"+mot+"'" )
|
||
my_ratio = SequenceMatcher(None, "Dupont", mot).ratio()
|
||
#print(" ## RATION = "+str(my_ratio))
|
||
|
||
|
||
#resultat = [mot for mot in mots if SequenceMatcher(None, "Dupont", mot).ratio() >= ratio]
|
||
#print(resultat)
|
||
|
||
return
|
||
|
||
|
||
|
||
def levenshtein(mot1, mot2):
    try:
        # ligne_i is an array such that, throughout the algorithm, ligne_i[k] holds the
        # Levenshtein distance between the first k letters of mot1 and the first i letters of mot2.
        # Initially i = 0, and the distance between the first k letters of mot1 and the empty
        # string is simply k (k deletions are needed to go from those k letters to "").
        ligne_i = [k for k in range(len(mot1) + 1)]
        # i then goes from 1 to len(mot2)
        for i in range(1, len(mot2) + 1):
            # i has just been incremented; keep the values of row i-1 in ligne_prec
            ligne_prec = ligne_i
            # Create the new row. Its first element (index 0) must be the distance between
            # "" and the first i letters of mot2, i.e. i (i insertions are needed).
            ligne_i = [i] * (len(mot1) + 1)
            # Fill in the rest of row i, i.e. compute ligne_i[k] for k from 1 to len(mot1)
            for k in range(1, len(ligne_i)):
                # cout is 0 if the k-th letter of mot1 equals the i-th letter of mot2, 1 otherwise
                # (the k-th letter of mot1 is mot1[k-1], since indices start at 0)
                cout = int(mot1[k - 1] != mot2[i - 1])
                # Core of the algorithm: ligne_i[k] is computed from
                # ligne_prec[k-1], ligne_prec[k] and ligne_i[k-1]
                ligne_i[k] = min(ligne_i[k - 1] + 1, ligne_prec[k] + 1, ligne_prec[k - 1] + cout)
        # When the loop ends, i == len(mot2); the distance between mot1 and mot2 is ligne_i[len(mot1)]
        return ligne_i[len(mot1)]

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, None

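
'''
Worked example (added for documentation): levenshtein("dupont", "dupond") returns 1
(one substitution) and levenshtein("chat", "chats") returns 1 (one insertion).
Note that on an internal error the function returns the tuple (False, None) instead
of an integer, so callers should check the type of the result.
'''
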
'''
Checks whether a string is a float and converts it.
Returns 0 on error.
'''
def tryFloat(val):
    try:
        val = str(val).replace(",", ".")
        myfloat = float(val)
        return myfloat
    except ValueError:
        return 0


"""
Checks whether the value is a float.
Returns a status and the value (rounded to 2 decimals).
"""
def IsFloat(val):
    try:
        val = str(val).replace(",", ".")
        myfloat = round(float(val), 2)
        return True, myfloat
    except ValueError:
        return False, 0


'''
Checks whether a string is an int and converts it.
Returns 0 on error.
'''
def tryInt(val):
    try:
        val = str(val).replace(",", ".")
        tab_val = val.split(".")
        myint = int(tab_val[0])
        return myint
    except ValueError:
        return 0


"""
Checks whether the value is an integer.
Returns a status and the value.
"""
def IsInt(val):
    try:
        val = str(val).replace(",", ".")
        tab_val = val.split(".")
        myint = int(tab_val[0])
        return True, myint
    except ValueError:
        return False, 0

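
'''
Worked examples (added for documentation): these helpers accept the French comma
as a decimal separator. IsFloat("12,5") returns (True, 12.5), tryInt("12,9")
returns 12 (the decimal part is truncated, not rounded) and tryFloat("abc")
returns 0.
'''
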
"""
|
||
Cette fonction verifie si une valeur est bien un 'ObjectId' valide
|
||
"""
|
||
def IsObjectId(oid):
|
||
try:
|
||
ObjectId(oid)
|
||
print(" IsObjectId "+str(oid)+" est valde")
|
||
return True
|
||
except ValueError:
|
||
return False
|
||
|
||
|
||
'''
This function checks whether a string is a date in the dd/mm/yyyy format.
'''
def CheckisDate(mydate):
    # mydate must be in the format: dd/mm/yyyy
    try:
        val = datetime.strptime(str(mydate).strip(), '%d/%m/%Y')
        return True
    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False

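
'''
Worked example (added for documentation): CheckisDate("31/12/2024") returns True,
while CheckisDate("2024-12-31") returns False (and logs the parsing error via
myprint), because only the dd/mm/yyyy format is accepted.
'''
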
'''
|
||
Verification que le mot n'est pas
|
||
stemisable à traver la tabla "word_not_stem
|
||
'''
|
||
def Word_Not_Stemmize(word = None):
|
||
try:
|
||
coll_not_stem = MYSY_GV.dbname["word_not_stem"]
|
||
|
||
#print(' ### Word_Not_Stemmize mot = ', word)
|
||
val_tmp = coll_not_stem.count_documents({'mot': str(word)})
|
||
|
||
|
||
if (val_tmp > 0):
|
||
return True
|
||
else:
|
||
return False
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier Word_Not_Stemmize"
|
||
|
||
|
||
'''
|
||
Cette fonction verifie si un mot est dans le dictionnaire français - une table interne
|
||
si non, le mot est enregistré dans une table pour traitement utérieur.
|
||
Utilisé dans le cas de l'indexation d'une formation
|
||
'''
|
||
def check_word_in_fr_dict(mot=None):
|
||
try:
|
||
print("#### analyse du mot "+str(mot))
|
||
col_name = MYSY_GV.dbname["list_mots_fr"]
|
||
col_name_not_fr = MYSY_GV.dbname["list_mots_not_fr"]
|
||
mydata = {}
|
||
val_tmp = col_name.count_documents({'mot': str(mot)})
|
||
|
||
if (val_tmp <= 0):
|
||
myprint(" Le mot '" + mot + "' n'existe pas dans le dictionnaire")
|
||
|
||
mydata['mot'] = mot
|
||
mydata['treated'] = int("0")
|
||
mydata['update_date'] = datetime.now()
|
||
|
||
|
||
ret_val = col_name_not_fr.find_one_and_update(
|
||
{'mot': str(mot) }, {"$set": mydata},upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if ( ret_val['_id'] is False):
|
||
print(" Impossible d'enregistrer le '" + mot + "'")
|
||
return False
|
||
|
||
return False
|
||
|
||
return True
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
'''
|
||
Cette fonction recherche à savoir si un mot est dans le dictionnaire dans le cadre de la re
|
||
recherche d'un utilisateur '''
|
||
def recherche_check_word_in_fr_dict(mot=None):
|
||
try:
|
||
print("#### analyse du mot "+str(mot))
|
||
col_name = MYSY_GV.dbname["list_mots_fr"]
|
||
val_tmp = col_name.count_documents({'mot': str(mot)})
|
||
|
||
if (val_tmp <= 0):
|
||
myprint(" Le mot '" + mot + "' n'existe pas dans le dictionnaire")
|
||
|
||
return False
|
||
|
||
return True
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
'''
|
||
Securité : Ip source requester
|
||
Cette fonction verifie si l'adresse IP de la source
|
||
est autorisé ou pas.
|
||
'''
|
||
def check_source_ipv4(source_ip=None):
|
||
try:
|
||
if source_ip in MYSY_GV.AUTORIZED_SOURCE_IPV4:
|
||
myprint(" Security check : IP adresse '"+str(source_ip)+"' connected")
|
||
return True
|
||
else:
|
||
myprint(" Security check : IP adresse '" + str(source_ip) + "' is not autorized")
|
||
return False
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
'''
For the search feature, the expression typed by the user in the
search bar must be cleaned and processed before entering the pipeline.

The sentence is then returned in "unicode" (accent-free) form.
'''
def Parse_Clean_Search_Text(sentence=None):
    try:
        if (len(str(sentence)) <= 0):
            return False, ""

        '''
        /!\ : All "special" characters and punctuation are removed EXCEPT
        - the ":" which is needed to identify patterns, and
        - the ' " ' which is needed to identify patterns
        '''
        list_noises = ['...', '.', ';', ',', '!', '?', ')', '(', '[', ']', '\'', '’', '`', '©', '–',
                       '{', '}', '-', '=', '°', '#', '-', '/', '~', '&', '\\', '.', '^', '$', '*', '+', '\\n',
                       '?', '{', '}', '[', ']', '|', '(', ')', '-', '>', '<', '@', '®', '™', '«', '»']

        for noise in list_noises:
            sentence = sentence.replace(str(noise), " ")

        unicode_sentence = unidecode(sentence)

        return True, unicode_sentence

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, ""

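
'''
Worked example (added for documentation):

    status, cleaned = Parse_Clean_Search_Text('Formation: "développeur" (web)!')

returns status True and a cleaned string in which the parentheses and the "!" have
been replaced by spaces and the accents removed by unidecode ("developpeur"),
while the ":" and the double quotes are kept for pattern detection.
'''
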
'''
|
||
Cette fonction va aller cherche la recherche edendue en
|
||
utilisant l'api de "serpapi"
|
||
'''
|
||
def Get_Extended_Result(sentence=None):
|
||
try:
|
||
list_extended = []
|
||
external_code_prefixe = str(datetime.now().timestamp()).replace(".", "")
|
||
print("external_code_prefixe = " + str(external_code_prefixe))
|
||
|
||
|
||
status = RunSearchAPI(sentence, external_code_prefixe)
|
||
if( status is False):
|
||
return False, list_extended
|
||
|
||
return True, external_code_prefixe
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, []
|
||
|
||
|
||
'''
|
||
Cette fonction lance l'API et enregistre le resultat en base
|
||
'''
|
||
def RunSearchAPI(search_text=None, external_code_prefixe=None):
|
||
try:
|
||
if( len(str(search_text).strip()) < 0 ):
|
||
return True
|
||
|
||
params = {
|
||
"q": str(search_text),
|
||
"hl": "fr",
|
||
"gl": "fr",
|
||
"num":"20",
|
||
"safe": "active",
|
||
"google_domain": "google.com",
|
||
"api_key": "596cb9a468f8292fcefa6f297444db9c12478685d8734b52efdf8aa53c54fd55"
|
||
}
|
||
|
||
myprint("######## PARAM RunSearchAPI = "+str(params))
|
||
|
||
'''
|
||
return false prematuré pour eviter de consommer les credits de l'API
|
||
'''
|
||
return False
|
||
|
||
|
||
search = GoogleSearch(params)
|
||
results = search.get_dict()
|
||
organic_results = results['organic_results']
|
||
|
||
my_collection = MYSY_GV.YTUBES_dbname['mysyserpapi']
|
||
|
||
#myprint("resutlat 1 organic_results ")
|
||
cmpt = 0
|
||
for val in organic_results:
|
||
cmpt = cmpt +1
|
||
mydata = {}
|
||
'''
|
||
print("External_code = " + str(val['title']))
|
||
print("Title = "+str(val['title']))
|
||
print("url = " + str(val['link']))
|
||
print("description = " + str(val['snippet']))
|
||
|
||
if ("snippet_highlighted_words" in val.keys()):
|
||
if val['snippet_highlighted_words']:
|
||
print("mot_cles = " + str(val['snippet_highlighted_words']))
|
||
'''
|
||
|
||
mydata['external_code'] = external_code_prefixe+"_"+str(cmpt)
|
||
mydata['title'] = str(val['title'])
|
||
mydata['url'] = str(val['link'])
|
||
|
||
if ("snippet" in val.keys()):
|
||
if val['snippet']:
|
||
mydata['description'] = str(val['snippet'])
|
||
|
||
if ("snippet_highlighted_words" in val.keys()):
|
||
if val['snippet_highlighted_words']:
|
||
mydata['tags'] = str(val['snippet_highlighted_words'])
|
||
|
||
mydata['update_date'] = str(datetime.now())
|
||
|
||
if ("position" in val.keys()):
|
||
if val['position']:
|
||
mydata['rang'] = str(val['position'])
|
||
|
||
mydata['orign_search_text'] = str(search_text)
|
||
mydata['valide'] = "1"
|
||
mydata['treated'] = "0"
|
||
|
||
ret_val = my_collection.find_one_and_update({'url': str(mydata['url'])},
|
||
{"$set": mydata},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if ret_val and ret_val['_id']:
|
||
myprint(" Le document de la recherche étendu a bien été ajouté = " + str(ret_val['_id']))
|
||
|
||
else:
|
||
myprint(" WARNING : Impossible d'ajouter le document de la recherche étentue " + str(mydata['url']))
|
||
|
||
return True
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
'''
This function takes a word and returns its masculine / singular form.
It will be enriched over time.

/!\ This function takes a single word, not a sentence.
'''
def GetMasculinSingulier(word=None):
    try:
        mot = str(word).lower()  # lowercase the word

        # words ending in "ees"
        patter2 = re.compile(r"\w+(ees)+$")

        # words ending in "s"
        patter3 = re.compile(r"\w+(s)+$")

        # words ending in "x"
        patter4 = re.compile(r"\w+(x)+$")

        if (len(str(mot)) > 3):
            if (re.match(patter2, str(mot))):
                neword = mot[:-2]
                return True, neword

            if (re.match(patter3, str(mot))):
                neword = mot[:-1]
                return True, neword

            if (re.match(patter4, str(mot))):
                neword = mot[:-1]
                return True, neword

        return True, str(mot)

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        myprint(str(e) + " - Line : " + str(exc_tb.tb_lineno))
        return False, ""

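
'''
Worked example (added for documentation): GetMasculinSingulier("Formations")
returns (True, "formation") and GetMasculinSingulier("chevaux") returns
(True, "chevau"): the rules are purely suffix-based ("ees", "s", "x"), so the
result is a stem for indexing purposes rather than a real dictionary form.
'''
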
'''
|
||
cette fonction créer un code_externe unique basé sur le timestemp actuel
|
||
'''
|
||
def CreateMyCode():
|
||
try:
|
||
mycode = str(datetime.now().timestamp()).replace(".", '').replace(',', '')
|
||
return True, mycode
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, '-1'
|
||
|
||
|
||
|
||
'''
|
||
Cette fonction va aller cherche tous les messages à envoyer
|
||
dans la collection 'user_message' et declencher le traitement
|
||
adequat
|
||
'''
|
||
def CronUSerMessage_Mail():
|
||
try:
|
||
nb_message = 0
|
||
coll_message = MYSY_GV.dbname['user_message']
|
||
my_today = datetime.today()
|
||
|
||
|
||
for val in coll_message.find({'sent':'0', 'valide':'1', 'type':'email'}):
|
||
ismail_traited = '1'
|
||
error_message = ""
|
||
nb_message = nb_message + 1
|
||
print(" traitement du message : "+str(val))
|
||
|
||
|
||
mail_recever = ""
|
||
if ("recever_mail" in val.keys()):
|
||
if val['recever_mail']:
|
||
mail_recever = str(val['recever_mail']).strip()
|
||
|
||
mail_object = ""
|
||
if ("object" in val.keys()):
|
||
if val['object']:
|
||
mail_object = str(val['object']).strip()
|
||
|
||
message = ""
|
||
if ("message" in val.keys()):
|
||
if val['message']:
|
||
message = str(val['message']).strip()
|
||
|
||
# pattern des email
|
||
patter_mail = re.compile(r"^[\w\.]+@([\w-]+\.)+[\w]{2,4}$")
|
||
|
||
if (re.match(patter_mail, str(mail_recever))):
|
||
if( len(mail_object) == 0 or len(message) == 0 ):
|
||
myprint(" WARNING : Impossible d'envoyer le mail au destinataire. l'objet ou le message sont vides : mail_object = "
|
||
+ str(mail_object)+ " ou message = "+str(message))
|
||
ismail_traited = 'error'
|
||
error_message = "l'objet ou le message sont vides"
|
||
|
||
else:
|
||
if( email_mgt.SendGenericEmail(mail_recever, mail_object, message) is False ):
|
||
ismail_traited = 'error'
|
||
error_message = " Erreur SMPT "
|
||
|
||
# envoyer le mail
|
||
else:
|
||
myprint(" WARNING : Impossible d'envoyer le mail au destinataire. format incorrecte: "+str(mail_recever))
|
||
ismail_traited = 'error'
|
||
error_message = "format du mail receveur est incorrecte "
|
||
|
||
|
||
'''
|
||
Mise à jour du statut du message
|
||
'''
|
||
ret_val = coll_message.find_one_and_update({'_id': ObjectId(val['_id']),},
|
||
{"$set": {'sent':str(ismail_traited),
|
||
'error_message':str(error_message)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if (ret_val and ret_val['_id']):
|
||
nb_doc = str(ret_val['_id'])
|
||
myprint(" le message = " + str(nb_doc)+" a bien été mise à jour")
|
||
|
||
else:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " WARNING : Impossible de mettre à jour le user_message = " + str(
|
||
nb_doc))
|
||
|
||
|
||
return True, str(my_today), str(nb_message) + " traite (s) : OK"
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,str(datetime.today()), " Impossible de traiter les demandes d'envoi d'email "
|
||
|
||
|
||
|
||
'''
|
||
Cette fonction prend un tableau
|
||
et retourn le meme tableau mais dans un order aléatoire
|
||
/!\ cette fonction est trop lente, je ne peux l'utiliser
|
||
'''
|
||
|
||
def RendomizeTab(table):
|
||
try:
|
||
|
||
if( table.count() <= 0 ):
|
||
print(" impossible de rendomize")
|
||
return False, []
|
||
|
||
taille = table.count()-1
|
||
new_tab = []
|
||
new_tab_cpt = 0
|
||
i = 0
|
||
|
||
while( new_tab_cpt <= taille ):
|
||
cpt = random.randint(0, taille)
|
||
#print("cpt = " + str(cpt) + " - len(table) = " + str(table.count()))
|
||
#print(" ANALYSE DE " + str(table[cpt]['_id']))
|
||
|
||
if( table[cpt] not in new_tab ):
|
||
#print(" AJOUT DE "+str(table[cpt]['_id']))
|
||
new_tab.append(table[cpt])
|
||
new_tab_cpt = new_tab_cpt +1
|
||
|
||
return True, new_tab
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, []
|
||
|
||
|
||
|
||
def Reordertab():
|
||
try:
|
||
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
table = coll_name.find({'trainer':'PYRAMYD'},
|
||
{"external_code": 1, "title": 1, "_id": 1, "url": 1, })
|
||
|
||
print(str(table.count()))
|
||
taille = table.count()-1
|
||
new_tab = []
|
||
new_tab_cpt = 0
|
||
i = 0
|
||
|
||
while( new_tab_cpt <= taille ):
|
||
cpt = random.randint(0, taille)
|
||
#print("cpt = " + str(cpt) + " - len(table) = " + str(table.count()))
|
||
#print(" ANALYSE DE " + str(table[cpt]['_id']))
|
||
|
||
if( table[cpt] not in new_tab ):
|
||
#print(" AJOUT DE "+str(table[cpt]['_id']))
|
||
new_tab.append(table[cpt])
|
||
new_tab_cpt = new_tab_cpt +1
|
||
|
||
return True, new_tab
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, []
|
||
|
||
|
||
'''
|
||
Pour etre en mesure d'afficher que 100 de X formations qui existent
|
||
en base, un systeme de "aLaUne" va etre mis en place
|
||
Un cron va a periodicité données choisir de manière aleatoire
|
||
les formations à afficher à la une.'''
|
||
def FormationAlaUne():
|
||
try:
|
||
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
df = pd.DataFrame(list(coll_name.find({'valide':'1', "price": {"$gte": 0}, "duration": {"$gte": 0},
|
||
"support" : { "$exists" : "true" },
|
||
"type" : { "$exists" : "true" } },{'_id':1, 'external_code':1 })))
|
||
df['isalaune'] = '0'
|
||
|
||
|
||
taille2 = df.shape[0]
|
||
affectation = 0
|
||
while( affectation < MYSY_GV.MAINPAGE_QUERY_LIMIT_ROW ):
|
||
affectation = affectation + 1
|
||
cpt = random.randint(0, taille2)
|
||
df.at[cpt, 'isalaune'] = '1'
|
||
|
||
i = 0
|
||
tab_id = []
|
||
while( i < taille2):
|
||
if( str(df.at[i,'isalaune']) == "1"):
|
||
#print(str(df.at[i,'external_code']) +" = "+str(df.at[i,'isalaune']))
|
||
tab_id.append(str(df.at[i,'_id']))
|
||
i = i+1
|
||
|
||
'''
|
||
Suppression des ancien "is a la une"
|
||
'''
|
||
# update many with "num" field greater than 100
|
||
result = coll_name.update_many(
|
||
{"isalaune": "1"},
|
||
{
|
||
"$set": {"isalaune": '0'}
|
||
})
|
||
print("raw:", result.raw_result)
|
||
print("acknowledged:", result.acknowledged)
|
||
print("matched_count:", result.matched_count)
|
||
|
||
|
||
|
||
for val in tab_id:
|
||
print(" val ="+val)
|
||
ret_val = coll_name.find_one_and_update(
|
||
{'_id': ObjectId(str(val)), 'valide':'1', 'locked':'0'},
|
||
{"$set": {'isalaune':'1'}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
return True, "ok"
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "KO"
|
||
|
||
|
||
|
||
|
||
'''
|
||
Cette fonction replace les caractères speciaux et ponctuation par des space
|
||
'''
|
||
def local_Remove_Ponct_Special_Caractere(sentence):
|
||
try:
|
||
text = sentence.lower() # mettre les mots en minuscule
|
||
|
||
# Retirons les caractères spéciaux :
|
||
text = re.sub(r"[,\!\?\%\(\)\/\"]", " ", text)
|
||
text = re.sub(r"\&\S*\s", " ", text)
|
||
text = re.sub(r"\-", " ", text)
|
||
|
||
list_noises = ['...', '.', ';', ',', ':', '!', '?', ')', '(', '[', ']', '\'', '"', '’', '`','©', '–',
|
||
'{', '}', '-', '=', '°', '#', '-', '/', '~', '&', '\\', '.', '^', '$', '*', '+','\\n','\n',
|
||
'?', '{', '}', '[', ']', '|', '(', ')', '-', '>', '<', '@','®', '™', '«', '»']
|
||
|
||
sentence = text
|
||
for noise in list_noises:
|
||
#print(" suppression de : '"+str(noise)+"' ")
|
||
sentence = sentence.replace(str(noise), " ")
|
||
|
||
#print(" AFTER REPLACE NOISES = "+str(sentence))
|
||
return True, sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e)+" - Line : "+ str(exc_tb.tb_lineno) )
|
||
return False, " Impossible Ela_Remove_Ponct_Special_Caractere"
|
||
|
||
def Migration_internal_url():
|
||
try:
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
for retVal in coll_name.find({}):
|
||
user = retVal
|
||
|
||
my_internal_url = str(user['title'])
|
||
local_status, my_internal_url = local_Remove_Ponct_Special_Caractere(my_internal_url)
|
||
my_internal_url = unidecode(my_internal_url.lower())
|
||
my_internal_url = my_internal_url.replace(" ", "-")
|
||
my_internal_url = my_internal_url.replace("/", "-")
|
||
if (my_internal_url.startswith('-')):
|
||
my_internal_url = my_internal_url[1:]
|
||
|
||
if (my_internal_url.endswith('-')):
|
||
my_internal_url = my_internal_url[:-1]
|
||
|
||
|
||
suffix = hashlib.md5(my_internal_url.encode()).hexdigest()
|
||
|
||
new_internal_url = str(my_internal_url) + "-" + str(suffix[-3:])
|
||
|
||
new_internal_url = new_internal_url.replace("---", "-")
|
||
new_internal_url = new_internal_url.replace("--", "-")
|
||
|
||
print('new_internal_url = '+new_internal_url)
|
||
|
||
|
||
result = coll_name.update_many(
|
||
{'_id':ObjectId(str(user['_id']))},
|
||
{
|
||
"$set": {"internal_url": str(new_internal_url)}
|
||
})
|
||
'''print("raw:", result.raw_result)
|
||
print("acknowledged:", result.acknowledged)
|
||
print("matched_count:", result.matched_count)'''
|
||
|
||
return True, "ok"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "KO"
|
||
|
||
|
||
'''
|
||
Cette fonction prends un mot de X digit
|
||
et retourne le top X des mot ayant le meme digit
|
||
/!\ cette fonction est glement utilisee dans le car des
|
||
des recherches vides
|
||
'''
|
||
def GetMotFromElaIndex(diction):
|
||
try :
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
field_list = ['mot', 'domaine']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list:
|
||
myprint(str(inspect.stack()[0][
|
||
3]) + " - Le champ '" + val + "' n'existe pas, Creation formation annulée")
|
||
return False, []
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['mot']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, []
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['elaindex']
|
||
training_mots = []
|
||
|
||
mot = ""
|
||
if ("mot" in diction.keys()):
|
||
if diction['mot']:
|
||
mot = diction['mot']
|
||
|
||
if( len(mot) < 3 ):
|
||
return True, []
|
||
|
||
search = str(mot)
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
|
||
print(" #### mot recu "+mot+" search_expr = "+str(search_expr))
|
||
for x in coll_name.find({'mots': {'$regex': search_expr}}).\
|
||
sort([("occurence", pymongo.DESCENDING)]).\
|
||
limit(MYSY_GV.HELP_WORD_QUERY_LIMIT):
|
||
if( x['mots'] not in training_mots ):
|
||
training_mots.append(x['mots'])
|
||
|
||
return True, training_mots
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, []
|
||
|
||
|
||
|
||
'''
|
||
correction erreur titre mymooc.com
|
||
'''
|
||
def Migration_mooc_title():
|
||
try:
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
for retVal in coll_name.find({'owner':'mymooc.com'}):
|
||
|
||
new_title = str(retVal['title'])
|
||
|
||
new_title = new_title.replace("-", " ")
|
||
if (new_title.startswith('-')):
|
||
new_title = new_title[1:]
|
||
|
||
if (new_title.endswith('-')):
|
||
new_title = new_title[:-1]
|
||
|
||
|
||
print(" new_title = "+new_title)
|
||
result = coll_name.update_many(
|
||
{'_id': ObjectId(str(retVal['_id']))},
|
||
{
|
||
"$set": {"title": str(new_title)}
|
||
})
|
||
|
||
return True, "ok"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "KO"
|
||
|
||
|
||
''' Cette fonction clean la collection "elaindex"
|
||
comme elle est tres lourd que les mise à jour des data
|
||
font que certains formations ne sont plus valident, alors on doit supprimer
|
||
dans la collection "elaindex" toutes les lignes correspondant à des formations
|
||
inexistante dans la collection "myclass"
|
||
'''
|
||
|
||
def clean_ElaIndex():
|
||
try:
|
||
|
||
index_coll = MYSY_GV.dbname["elaindex"]
|
||
class_coll = MYSY_GV.dbname["myclass"]
|
||
list_to_delete = []
|
||
i = 0
|
||
|
||
for val in index_coll.find({}, {"id_formation":1}):
|
||
tmp_count = class_coll.count_documents({'external_code': str(val["id_formation"])})
|
||
#print(" val = "+str(val))
|
||
if (tmp_count <= 0 and str(val["id_formation"]) not in list_to_delete):
|
||
list_to_delete.append(str(val["id_formation"]))
|
||
i = i +1
|
||
|
||
# Operation de suppression :
|
||
#print(" lancement suppression de "+str(list_to_delete))
|
||
nb_avant_delete = index_coll.count_documents({})
|
||
#print(" AVANT SUPPRESSION ON A "+str(nb_avant_delete)+" LIGNE DANS la collection index")
|
||
query = {"id_formation": {"$in": list_to_delete}}
|
||
d = index_coll.delete_many(query)
|
||
print(d.deleted_count, " documents deleted !!")
|
||
|
||
|
||
return True, "clean index OK"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "clean index KO"
|
||
|
||
|
||
"""
|
||
Cette fonction nettoye les mauvaises internal url
|
||
"""
|
||
def clean_internal_url():
|
||
try:
|
||
|
||
nb = 0
|
||
class_coll = MYSY_GV.dbname["myclass"]
|
||
search = str("%")
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
for x in class_coll.find({'internal_url': {'$regex': search_expr}}):
|
||
nb = nb+1
|
||
clean_url = str(x['internal_url']).replace("%","")
|
||
ret_val = class_coll.find_one_and_update({'_id': ObjectId(x['_id']), },
|
||
{"$set": {'internal_url': str(clean_url)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
search = str(":")
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
for x in class_coll.find({'internal_url': {'$regex': search_expr}}):
|
||
nb = nb + 1
|
||
clean_url = str(x['internal_url']).replace(":", "")
|
||
ret_val = class_coll.find_one_and_update({'_id': ObjectId(x['_id']), },
|
||
{"$set": {'internal_url': str(clean_url)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
search = str("--")
|
||
search_expr = re.compile(f".*{search}.*", re.I)
|
||
for x in class_coll.find({'internal_url': {'$regex': search_expr}}):
|
||
nb = nb + 1
|
||
clean_url = str(x['internal_url']).replace("--", "-")
|
||
ret_val = class_coll.find_one_and_update({'_id': ObjectId(x['_id']), },
|
||
{"$set": {'internal_url': str(clean_url)}},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
return True, "clean internal url OK de "+str(nb)+" url"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "clean internal url KO"
|
||
|
||
'''
This function removes emojis from text.
'''
def clean_emoji(sentence=None):
    try:
        emoji_pattern = re.compile("["
                                   u"\U0001F600-\U0001F64F"  # emoticons
                                   u"\U0001F300-\U0001F5FF"  # symbols & pictographs
                                   u"\U0001F680-\U0001F6FF"  # transport & map symbols
                                   u"\U0001F1E0-\U0001F1FF"  # flags (iOS)
                                   "]+", flags=re.UNICODE)

        new_sentence = emoji_pattern.sub(r'', sentence)

        return new_sentence

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return ""

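
'''
Worked example (added for documentation): clean_emoji("Bonjour 😀") returns
"Bonjour " (the emoticon is stripped, the surrounding whitespace is kept). Only
the four Unicode ranges listed above are covered; other symbols pass through
unchanged.
'''
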
'''
|
||
Fonction d'enregistrement d'une image d'un profils
|
||
'''
|
||
def recordImage(file=None, Folder=None, diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'type']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
myuserrecid = ""
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
|
||
mytype = ""
|
||
# recuperation des paramettre
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
|
||
# Recuperation du recid de la personne
|
||
if( mytype == "user"):
|
||
user_recid = get_user_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
elif( mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partner")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
myuserrecid = user_recid
|
||
|
||
|
||
status, saved_file = Upload_Save_IMG_File(file, Folder)
|
||
if (status == False):
|
||
return False, "Impossible d'inserer les formations en masse "
|
||
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file+". le token est :"+str(mytoken ))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
|
||
|
||
with open(saved_file, "rb") as imageFile:
|
||
|
||
#strimg = base64.b64encode(imageFile.read())
|
||
strimg = base64.b64encode(imageFile.read())
|
||
|
||
new_diction = {}
|
||
new_diction['img'] = strimg
|
||
new_diction['type'] = 'profil'
|
||
new_diction['recid'] = myuserrecid
|
||
new_diction['valide'] = '1'
|
||
new_diction['date_update'] = str(datetime.now())
|
||
new_diction['locked'] = "0"
|
||
|
||
ret_val = coll_name.find_one_and_update({"recid":myuserrecid, "valide":"1", "locked":"0"},
|
||
{"$set": {"img":strimg, "date_update":str(datetime.now()),
|
||
"type":"profil","recid":myuserrecid, "valide":"1",
|
||
"locked":"0" }},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
if ret_val and ret_val['_id']:
|
||
myprint(" L'image a bien ete enregistrée. ")
|
||
return True
|
||
else:
|
||
myprint(" IMPOSSIBLE D'ENREGISTRER L'IMAGE")
|
||
return False
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
'''
|
||
Fonction d'enregistrement d'une image d'un profils d'une formation
|
||
'''
|
||
def recordClassImage(file=None, Folder=None, diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type', 'class_external_code']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'type', 'class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
# recuperation des paramettre
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
|
||
|
||
|
||
if (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partner")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
else:
|
||
myprint(str(inspect.stack()[0][3]) + " - l'API est incorrecte. le type n'est pas renseingé")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
|
||
myuserrecid = user_recid
|
||
|
||
status, saved_file = Upload_Save_IMG_File(file, Folder)
|
||
if (status == False):
|
||
return False, "Impossible d'inserer l'image de la formation "
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file + ". le token est :" + str(mytoken))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
|
||
with open(saved_file, "rb") as imageFile:
|
||
|
||
# strimg = base64.b64encode(imageFile.read())
|
||
strimg = base64.b64encode(imageFile.read())
|
||
|
||
new_diction = {}
|
||
new_diction['img'] = strimg
|
||
new_diction['type'] = 'profil_class'
|
||
new_diction['recid'] = myuserrecid
|
||
new_diction['class_external_code'] = myclassexternalcode
|
||
new_diction['valide'] = '1'
|
||
new_diction['date_update'] = str(datetime.now())
|
||
new_diction['locked'] = "0"
|
||
|
||
ret_val = coll_name.find_one_and_update({"recid": myuserrecid, "valide": "1", "locked": "0", "class_external_code":str(myclassexternalcode)},
|
||
{"$set": {"img": strimg, "date_update": str(datetime.now()),
|
||
"type": "profil_class", "recid": myuserrecid, "valide": "1",
|
||
"locked": "0",
|
||
"class_external_code":str(myclassexternalcode)}},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if ret_val and ret_val['_id']:
|
||
myprint(" L'image de la formation "+str(myclassexternalcode)+" a bien ete enregistrée. ")
|
||
return True, " L'image de la formation "+str(myclassexternalcode)+" a bien ete enregistrée. "
|
||
else:
|
||
myprint(" IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "+str(myclassexternalcode))
|
||
return False, "IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "+str(myclassexternalcode)
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,"IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "+str(myclassexternalcode)
|
||
|
||
|
||
"""
|
||
Recriture de la fonction de base pour pouvoir enregistrer une image
|
||
avec les paramettre suivant
|
||
- related_collection
|
||
- related_collection_recid
|
||
|
||
"""
|
||
def recordClassImage_v2(file=None, Folder=None, diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'related_collection', 'related_collection_recid', 'image_recid', 'type_img']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'related_collection', 'related_collection_recid', 'type_img']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
type_img = ""
|
||
if ("type_img" in diction.keys()):
|
||
if diction['type_img']:
|
||
type_img = diction['type_img']
|
||
|
||
if( type_img not in MYSY_GV.BDD_STOCKAGE_IMG):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + type_img + "' n'est pas autorisé. les valeurs acceptées sont "+str(MYSY_GV.BDD_STOCKAGE_IMG) )
|
||
return False, " Le type d'image n'est pas autorisé. les valeurs acceptées sont "+str(MYSY_GV.BDD_STOCKAGE_IMG)
|
||
|
||
related_collection = ""
|
||
if ("related_collection" in diction.keys()):
|
||
if diction['related_collection']:
|
||
related_collection = diction['related_collection']
|
||
|
||
|
||
related_collection_recid = ""
|
||
if ("related_collection_recid" in diction.keys()):
|
||
if diction['related_collection_recid']:
|
||
related_collection_recid = diction['related_collection_recid']
|
||
|
||
image_recid = ""
|
||
if ("image_recid" in diction.keys()):
|
||
if diction['image_recid']:
|
||
image_recid = diction['image_recid']
|
||
|
||
if(len(str(image_recid)) <= 0 ):
|
||
# cette image n'a jamais ete enregistrée
|
||
# Creation du RecId
|
||
image_recid = create_user_recid()
|
||
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
|
||
status, saved_file = Upload_Save_IMG_File(file, Folder)
|
||
if (status is False):
|
||
myprint(str(saved_file))
|
||
return False, str(saved_file)
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file + ". le token est :" + str(mytoken))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['mysy_images']
|
||
|
||
with open(saved_file, "rb") as imageFile:
|
||
|
||
# strimg = base64.b64encode(imageFile.read())
|
||
strimg = base64.b64encode(imageFile.read())
|
||
|
||
new_diction = {}
|
||
new_diction['img'] = strimg
|
||
new_diction['related_collection'] = related_collection
|
||
new_diction['related_collection_recid'] = related_collection_recid
|
||
new_diction['valide'] = '1'
|
||
new_diction['date_update'] = str(datetime.now())
|
||
new_diction['locked'] = "0"
|
||
new_diction['recid'] = image_recid
|
||
new_diction['type_img'] = type_img
|
||
|
||
|
||
|
||
ret_val = coll_name.find_one_and_update({"recid":image_recid, "related_collection_recid": related_collection_recid,
|
||
"valide": "1", "locked": "0", "related_collection":str(related_collection),
|
||
"type_img":str(type_img)},
|
||
{"$set": new_diction},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
if( ret_val is None or '_id' not in ret_val.keys()):
|
||
myprint(" IMPOSSIBLE D'ENREGISTRER L'IMAGE")
|
||
return False, "IMPOSSIBLE D'ENREGISTRER L'IMAGE de la formation "
|
||
|
||
print(" ### recordClassImage_v2 :L'image a été correctement enregistrée ")
|
||
return True, "L'image a été correctement enregistrée"
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,"IMPOSSIBLE D'ENREGISTRER L'IMAGE "
|
||
|
||
|
||
"""
|
||
Suppression d'une image
|
||
"""
|
||
def DeleteClassImage_v2(diction=None):
|
||
try:
|
||
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'image_recid', ]
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token', 'image_recid']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
image_recid = ""
|
||
if ("image_recid" in diction.keys()):
|
||
if diction['image_recid']:
|
||
image_recid = diction['image_recid']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
if(len(str(image_recid)) <= 0 ):
|
||
# cette image n'a jamais ete enregistrée
|
||
# Creation du RecId
|
||
image_recid = create_user_recid()
|
||
|
||
|
||
# " Lecture du fichier "
|
||
#print(" Lecture du fichier : " + saved_file + ". le token est :" + str(mytoken))
|
||
nb_line = 0
|
||
coll_name = MYSY_GV.dbname['mysy_images']
|
||
|
||
query_delete = {"recid":image_recid, 'related_collection_recid': str(my_partner['recid']) }
|
||
|
||
print(" ### query_delete = ", query_delete)
|
||
ret_val = coll_name.delete_one({"recid":image_recid,
|
||
'related_collection_recid': str(my_partner['recid']) },)
|
||
|
||
print(" ### detelete ret_val = ", ret_val)
|
||
|
||
|
||
print(" ### recordClassImage_v2 :L'image a été correctement supprimée ")
|
||
return True, "L'image a été correctement supprimée"
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,"IMPOSSIBLE D'ENREGISTRER L'IMAGE "
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils
|
||
'''
|
||
def getRecodedImage(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token','type']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
myuserrecid = ""
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide ")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
'''
|
||
|
||
# Recuperation du recid de la personne
|
||
if( mytype == "user"):
|
||
user_recid = get_user_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
elif (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
myuserrecid = user_recid
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
for retVal in coll_name.find({'recid':myuserrecid, "type":"profil"} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils d'une formation
|
||
'''
|
||
def getRecodedClassImage(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type', 'class_external_code']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token','type','class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "L'email ou le token ne sont pas valident"
|
||
|
||
|
||
if (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
else:
|
||
myprint(str(inspect.stack()[0][3]) + " - l'API est incorrecte. le type n'est pas renseingé")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
myuserrecid = user_recid
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
for retVal in coll_name.find({'recid':myuserrecid, "type":"profil_class", "class_external_code":str(myclassexternalcode)} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
'''
|
||
Suppression d'une image de profils d'une formation
|
||
'''
|
||
def DeleteRecodedClassImage(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = ['token', 'type', 'class_external_code']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['token','type','class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
mydata = {}
|
||
mytoken = ""
|
||
|
||
# recuperation des paramettre
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
mytoken = diction['token']
|
||
|
||
mytype = ""
|
||
if ("type" in diction.keys()):
|
||
if diction['type']:
|
||
mytype = diction['type']
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
'''
|
||
Verification de la validité du token et recuperation du recid du user
|
||
'''
|
||
retval = check_user_validity("", str(mytoken))
|
||
|
||
if retval is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - La session de connexion n'est pas valide")
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
|
||
if (mytype == "partner"):
|
||
user_recid = get_parnter_recid_from_token(str(mytoken))
|
||
if user_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du user")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
else:
|
||
myprint(str(inspect.stack()[0][3]) + " - l'API est incorrecte. le type n'est pas renseingé")
|
||
return False, " Les informations d'identification sont incorrectes"
|
||
|
||
myuserrecid = user_recid
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
|
||
|
||
myquery = {'recid':myuserrecid, "type":"profil_class", "class_external_code":str(myclassexternalcode)}
|
||
delete_row = coll_name.delete_many(myquery)
|
||
myprint(str(inspect.stack()[0][3]) + "- "+str(delete_row.deleted_count)+" image logo supprimée(s) pour la formation class_external_code = "+str(myclassexternalcode))
|
||
|
||
return True, str(delete_row.deleted_count)+" supprimée(s)"
|
||
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "Impossible de supprimer le logo de la formation"
|
||
|
||
|
||
|
||
'''
|
||
Recuperation d'une image de profils d'une formation pour le mode non connecté. c'est a dire depuis le front
|
||
'''
|
||
def getRecodedClassImage_from_front(diction=None):
|
||
try:
|
||
# Dictionnaire des champs utilisables
|
||
field_list = [ 'class_external_code']
|
||
incom_keys = diction.keys()
|
||
|
||
'''
|
||
# Verification que les champs reçus dans l'API sont bien dans la liste des champs autorisés
|
||
# Cela evite le cas ou une entité tierce ajouter les valeurs inconnu dans l'API
|
||
# Ici on doit mettre tous les champs possible (obligatoire ou non) de la BDD dans la liste
|
||
# field_list.
|
||
'''
|
||
for val in incom_keys:
|
||
if str(val).lower() not in str(field_list).lower():
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas accepté dans cette API")
|
||
return False, " Impossible de se connecter"
|
||
|
||
'''
|
||
Une fois qu'on a controlé que toutes les clés mise dans l'API sont correcte. etape precedente,
|
||
On controle que les champs obligatoires sont presents dans la liste
|
||
'''
|
||
field_list_obligatoire = ['class_external_code']
|
||
for val in field_list_obligatoire:
|
||
if str(val).lower() not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " La valeur '" + val + "' n'est pas presente dans liste des champs")
|
||
return False, "Impossible de se connecter"
|
||
|
||
|
||
myclassexternalcode = ""
|
||
if ("class_external_code" in diction.keys()):
|
||
if diction['class_external_code']:
|
||
myclassexternalcode = diction['class_external_code']
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['image_ch']
|
||
RetObject = []
|
||
|
||
for retVal in coll_name.find({"type":"profil_class", "class_external_code":str(myclassexternalcode)} ):
|
||
user2={}
|
||
user2['date_update'] = retVal['date_update']
|
||
decode = retVal['img'].decode()
|
||
user2['img'] = decode
|
||
data1 = json.loads(json.dumps(user2))
|
||
|
||
return True, data1
|
||
|
||
return False, ""
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
|
||
'''
This function replaces special characters and punctuation with spaces
'''
|
||
def Ela_Remove_Ponct_Special_Caractere(sentence):
|
||
try:
|
||
text = sentence.lower() # mettre les mots en minuscule
|
||
|
||
# Retirons les caractères spéciaux :
|
||
text = re.sub(r"[,\!\?\%\(\)\/\"]", " ", text)
|
||
text = re.sub(r"\&\S*\s", " ", text)
|
||
text = re.sub(r"\-", " ", text)
|
||
|
||
list_noises = ['...', '.', ';', ',', ':', '!', '?', ')', '(', '[', ']', '\'', '"', '’', '`','©', '–',
|
||
'{', '}', '-', '=', '°', '#', '-', '/', '~', '&', '\\', '.', '^', '$', '*', '+','\\n','\n',
|
||
'?', '{', '}', '[', ']', '|', '(', ')', '-', '>', '<', '@','®', '™', '«', '»']
|
||
|
||
sentence = text
|
||
for noise in list_noises:
|
||
#print(" suppression de : '"+str(noise)+"' ")
|
||
sentence = sentence.replace(str(noise), " ")
|
||
|
||
#print(" AFTER REPLACE NOISES = "+str(sentence))
|
||
return True, sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e)+" - Line : "+ str(exc_tb.tb_lineno) )
|
||
return False, " Impossible Ela_Remove_Ponct_Special_Caractere"
|
||
|
||
|
||
|
||
|
||
'''
This function takes a title (a sentence) and returns
the associated internal URL (slug)
'''
|
||
def CreateInternalUrl(sentence=None):
|
||
try:
|
||
|
||
internal_url = ""
|
||
|
||
if( len(sentence) <= 0 ):
|
||
myprint(str(inspect.stack()[0][3]) + " - sentence est vide " )
|
||
return False, ""
|
||
|
||
my_internal_url = sentence
|
||
local_status, my_internal_url = Ela_Remove_Ponct_Special_Caractere(my_internal_url)
|
||
my_internal_url = unidecode(my_internal_url.lower())
|
||
my_internal_url = my_internal_url.replace("--", "-")
|
||
my_internal_url = my_internal_url.replace(" ", "-")
|
||
my_internal_url = my_internal_url.replace("/", "-")
|
||
|
||
if (my_internal_url.startswith('-')):
|
||
my_internal_url = my_internal_url[1:]
|
||
|
||
if (my_internal_url.endswith('-')):
|
||
my_internal_url = my_internal_url[:-1]
|
||
|
||
|
||
suffix = hashlib.md5(str(datetime.now()).encode()).hexdigest()
|
||
|
||
final_internal_url = str(my_internal_url) + "-" + str(suffix[-3:])
|
||
final_internal_url = final_internal_url.replace("--", "-")
|
||
internal_url = final_internal_url
|
||
|
||
return True, internal_url
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, ""
|
||
|
||
|
||
|
||
'''
This function reads a file or the collection tmp_class_metier(_id, metier) and updates the 'metier' field of the trainings
'''
|
||
def UpdateMetier():
|
||
try:
|
||
|
||
myclass_coll = MYSY_GV.dbname["myclass"]
|
||
myclass_tmp_metier_coll = MYSY_GV.dbname["tmp_class_metier"]
|
||
my_data = []
|
||
i = 0
|
||
for x in myclass_tmp_metier_coll.find():
|
||
|
||
mymetier = str(x['metier']).lower()
|
||
ret_val2 = myclass_coll.find_one_and_update({"_id": ObjectId(str(x['class_id']))},
|
||
{"$set": {"metier":mymetier }},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
|
||
if ( (ret_val2 is False) or (ret_val2['_id'] is False) ):
|
||
myprint("Impossible de mettre à jour la formation _id =" + str(x['class_id']) )
|
||
return False
|
||
i = i+1
|
||
|
||
print(str(i)+" Formation (metier) ont été mises à jour")
|
||
|
||
return True
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
"""
|
||
Cette fonction retroune les mots suggerés au format JSON
|
||
"""
|
||
def Get_Suggested_Word():
|
||
try:
|
||
|
||
nb_result = 0
|
||
insertObject = []
|
||
coll_name = MYSY_GV.dbname['search_suggestion_words']
|
||
|
||
for x in coll_name.find({}, {'_id':0}).sort([("display_rank", pymongo.DESCENDING), ("mot", pymongo.ASCENDING), ]):
|
||
nb_result = nb_result + 1
|
||
val_tmp = {}
|
||
val_tmp['id'] = str(nb_result)
|
||
val_tmp['name'] = str(x['mot']).lower()
|
||
insertObject.append(JSONEncoder().encode(val_tmp))
|
||
|
||
return True, insertObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
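# Illustrative shape of the list returned by Get_Suggested_Word (values hypothetical):
# each element is a JSON-encoded string built with JSONEncoder, e.g.
#   ['{"id": "1", "name": "python"}', '{"id": "2", "name": "management"}']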
|
||
|
||
|
||
"""
|
||
Cette fonction retroune la liste des villes française en JSON
|
||
pour aider à la recherche.
|
||
Quand un utilisateur tape les 3 première lettre d'une ville, on l'aide...
|
||
"""
|
||
def Get_Suggested_Fr_Cities():
|
||
try:
|
||
|
||
nb_result = 0
|
||
insertObject = []
|
||
coll_name = MYSY_GV.dbname['ville_commune']
|
||
|
||
for x in coll_name.find({}, {'_id':0}).sort([ ("ville_commune", pymongo.ASCENDING),("population", pymongo.DESCENDING), ]):
|
||
nb_result = nb_result + 1
|
||
val_tmp = {}
|
||
val_tmp['id'] = str(nb_result)
|
||
val_tmp['ville'] = str(x['ville_commune']).lower()
|
||
insertObject.append(JSONEncoder().encode(val_tmp))
|
||
|
||
return True, insertObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
'''
This function takes a partner recid and returns
- its service pack
- the number of trainings granted
'''
|
||
def Partner_Get_pack_nbTraining(partner_recid):
|
||
try:
|
||
|
||
if( len(str(partner_recid)) <= 0):
|
||
return False, "", ""
|
||
|
||
partner_pack = ""
|
||
partner_pack_nb_formation = ""
|
||
coll_partner = MYSY_GV.dbname['partnair_account']
|
||
tmp = coll_partner.find({'recid':str(partner_recid)})
|
||
|
||
#print("tmp = " + str(tmp[0]))
|
||
|
||
if(tmp and tmp[0] and tmp[0]['pack_service'] and tmp[0]['nb_formation']):
|
||
partner_pack = tmp[0]['pack_service']
|
||
partner_pack_nb_formation = tmp[0]['nb_formation']
|
||
|
||
else:
|
||
return False, partner_pack, partner_pack_nb_formation
|
||
|
||
|
||
return True, partner_pack, partner_pack_nb_formation
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False, False
|
||
|
||
|
||
""""
|
||
Cette fonction prend un recid et retourne le nombre de formation deja actif.
|
||
"""
|
||
def Get_partner_nb_active_training(partner_recid):
|
||
try:
|
||
|
||
if( len(str(partner_recid)) <= 0):
|
||
return False, ""
|
||
|
||
partner_class_nb_formation = ""
|
||
coll_partner_class = MYSY_GV.dbname['myclass']
|
||
tmp = coll_partner_class.count_documents({'partner_owner_recid':str(partner_recid), 'valide':'1'})
|
||
|
||
return True, str(tmp)
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
""""
|
||
Cette fonction récupérer les mot du titre et de la description
|
||
pour remplir la collection des suggestions de mot
|
||
"""
|
||
|
||
def fillSuggestionCollection():
|
||
try:
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
for retVal in coll_name.find({'valide':'1'}).limit(10):
|
||
|
||
mytitle = str(retVal['title'])
|
||
mydesc = str(retVal['description'])
|
||
|
||
class_contact = str(mytitle)+". "+str(mydesc)
|
||
|
||
class_token = ElaSpacy.Ela_Tokenize(class_contact)
|
||
status, tab_tokens2 = ElaSpacy.Ela_remove_stop_words(class_token)
|
||
if (status is False):
|
||
break
|
||
|
||
status, tab_tokens3 = ElaSpacy.Ela_remove_pronoun(tab_tokens2)
|
||
if (status is False):
|
||
break
|
||
|
||
status, tab_tokens4 = ElaSpacy.Ela_stemmize_Class(tab_tokens3)
|
||
if (status is False):
|
||
break
|
||
|
||
print(" Pour "+str(retVal['title'])+" : On a "+str(tab_tokens4))
|
||
|
||
|
||
return True
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False,
|
||
|
||
"""
|
||
Cette fonction attribut des notes
|
||
"""
|
||
def PutClassNote():
|
||
try:
|
||
coll_name = MYSY_GV.dbname['myclass']
|
||
i = 2
|
||
for retVal in coll_name.find({}):
|
||
user = retVal
|
||
if( i == 5 ):
|
||
i = 2
|
||
|
||
i = i+0.5
|
||
|
||
result = coll_name.update_many(
|
||
{'_id':ObjectId(str(user['_id']))},
|
||
{
|
||
"$set": {"note": str(i)}
|
||
})
|
||
'''print("raw:", result.raw_result)
|
||
print("acknowledged:", result.acknowledged)
|
||
print("matched_count:", result.matched_count)'''
|
||
|
||
return True, "ok"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "KO"
|
||
|
||
"""
|
||
Cette fonction essaye de convertir une chaine en date (yyyy-mm-dd)
|
||
"""
|
||
def TryToDateYYYMMDD(mydate):
|
||
try:
|
||
datetime.strptime(mydate, '%Y-%m-%d')
|
||
return True, datetime.strptime(mydate, '%Y-%m-%d')
|
||
|
||
except ValueError:
|
||
return False, False
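# Illustrative usage of TryToDateYYYMMDD:
#   >>> TryToDateYYYMMDD("2023-02-10")
#   (True, datetime.datetime(2023, 2, 10, 0, 0))
#   >>> TryToDateYYYMMDD("10/02/2023")
#   (False, False)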
|
||
|
||
|
||
"""
|
||
Cette fonction prend une formation (myclass) et l'insert dans
|
||
la table de statistique
|
||
- diction_class : les données de la formation
|
||
- type_view : type d'affichage ("decouverte", "detail", etc)
|
||
- user_location : les coordonnées du demandeur
|
||
"""
|
||
def InsertStatistic(diction_class, type_view, user_location):
|
||
try:
|
||
|
||
mydata = {}
|
||
mydata['internal_url'] = diction_class['internal_url']
|
||
mydata['date_update'] = datetime.now()
|
||
mydata['type_view'] = str(type_view)
|
||
|
||
mydict_combined = {**diction_class, **mydata, **user_location}
|
||
mydict_combined['date_update'] = str(datetime.now())
|
||
mydict_combined['type_view'] = "summary"
|
||
|
||
|
||
coll_name = MYSY_GV.dbname['user_recherche_result']
|
||
ret_val = coll_name.insert_one(mydict_combined)
|
||
if (ret_val is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - WARNING : Impossbile d'inserer la formation " + str(diction_class['internal_url']) + " dans les statistiques")
|
||
|
||
|
||
return True
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
"""
|
||
Suppression du compte de LS
|
||
"""
|
||
def removeLSaccount():
|
||
try:
|
||
coll = MYSY_GV.dbname["partnair_account"]
|
||
myquery = {"email": "ls.lutmanmicca@gmail.com"}
|
||
x = coll.delete_many(myquery)
|
||
print(x.deleted_count, " documents deleted.")
|
||
return True, str(x.deleted_count) + " documents deleted."
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
"""
|
||
Suppression du compte de Nicole"""
|
||
def removeNBaccount():
|
||
try:
|
||
coll = MYSY_GV.dbname["partnair_account"]
|
||
myquery = {"email": "nicole.beauchesne@hotmail.fr"}
|
||
x = coll.delete_many(myquery)
|
||
print(x.deleted_count, " documents deleted.")
|
||
return True, str(x.deleted_count) + " documents deleted."
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction verifier si une url est une image.
|
||
elle permet si besoin de verifier la taille de l'image
|
||
Cette fonction sera dans un premier temps utilisée
|
||
dans la gestion des images associées à une formation.
|
||
"""
|
||
def TryUrlImage(url=None):
|
||
try:
|
||
img = Image.open(requests.get(url, stream=True).raw)
|
||
return True, img
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Suite à un bug dans la collection : user_recherche_result
|
||
Le champ "partner_owner_recid" n'existe pas . \
|
||
Cette fonction va corriger en cela en mettant la bonne valeur\
|
||
"""
|
||
def correction_collection():
|
||
try:
|
||
nb_line = 0
|
||
my_coll = MYSY_GV.dbname['user_recherche_result']
|
||
my_coll_class = MYSY_GV.dbname['myclass']
|
||
|
||
for val in my_coll.find():
|
||
|
||
if ("external_code" in val.keys()):
|
||
if val['external_code']:
|
||
training_external_code = val['external_code']
|
||
|
||
#print(" TRAITEMENT DE "+training_external_code)
|
||
val_tmp = my_coll_class.count_documents({'external_code':str(training_external_code)})
|
||
if( val_tmp > 0 ):
|
||
class_part_recid = my_coll_class.find({'external_code':str(training_external_code)})
|
||
if( class_part_recid and class_part_recid[0]) :
|
||
#print(" pour la formation : "+str(training_external_code)+" --- le partner_owner_recid = "+
|
||
# str(class_part_recid[0]))
|
||
|
||
x = class_part_recid[0]
|
||
|
||
if ("partner_owner_recid" in x.keys()):
|
||
if x['partner_owner_recid']:
|
||
|
||
result = my_coll.update_many(
|
||
{'external_code': str(training_external_code)},
|
||
{
|
||
"$set": {"partner_owner_recid": str(x['partner_owner_recid'])}
|
||
})
|
||
|
||
if (result.matched_count > 0):
|
||
nb_line = nb_line + 1
|
||
print(" OK "+str(training_external_code))
|
||
else:
|
||
print(' PBBBBBBBBBB pour external_code = '+str(training_external_code))
|
||
else:
|
||
result = my_coll.update_many(
|
||
{'external_code': str(training_external_code)},
|
||
{
|
||
"$set": {"partner_owner_recid": "todelete"}
|
||
})
|
||
|
||
if (result.matched_count > 0):
|
||
nb_line = nb_line + 1
|
||
print(" TO DELETE " + str(training_external_code))
|
||
return True, str(nb_line)+" ont été traitées "
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
"""
|
||
Cette fonction enleve les caractères non ascii
|
||
"""
|
||
def remove_non_ascii(string):
|
||
return ''.join(char for char in string if ord(char) < 128)
|
||
|
||
|
||
"""
|
||
Pour des test, et seulement pour les test
|
||
cette fonction permet de mettre les formations du user "ls"
|
||
en première page.
|
||
|
||
Cette fonction doit etre inactive en production
|
||
"""
|
||
def PutLSClassFirst():
|
||
try:
|
||
nb_line = 0
|
||
# Recuperation du partenaire recid
|
||
coll_account = MYSY_GV.dbname["partnair_account"]
|
||
myquery = {"email": "ls.lutmanmicca@gmail.com"}
|
||
|
||
for val_tmp in coll_account.find(myquery):
|
||
local_recid = val_tmp['recid']
|
||
coll_class = MYSY_GV.dbname["myclass"]
|
||
|
||
quere_reset_ranking = {{}, {"$set": {"display_rank": "20"}}}
|
||
result = coll_class.update_many(quere_reset_ranking)
|
||
|
||
quere_update = {{"partner_owner_recid": str(local_recid)}, {"$set": {"freeacces": "1", "display_rank": "70",
|
||
"isalaune": "1"}}}
|
||
result = coll_class.update_many(quere_update)
|
||
|
||
print("raw:", result.raw_result)
|
||
print("acknowledged:", result.acknowledged)
|
||
print("matched_count:", result.matched_count)
|
||
|
||
nb_line = result.matched_count
|
||
|
||
|
||
return True, str(nb_line) + " formations ont mise sur la page principale. "
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
|
||
|
||
|
||
"""
|
||
Validation de l'emargement d'un participant à une formation
|
||
"""
|
||
def UserEmargementValidation(diction):
|
||
try:
|
||
field_list_obligatoire = ['value', 'session']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, " Impossible de valider l'emargement"
|
||
|
||
"""
|
||
Si session == 1 => alors c'est l'emargement de la session du matin
|
||
Si session == 2 => alors c'est l'emargement de la session du apres midi
|
||
|
||
value = la valeur du "_id" de la ligne concernée
|
||
|
||
"""
|
||
coll_emargement = MYSY_GV.dbname['emargement']
|
||
now = datetime.now()
|
||
if( diction['session'] == "1" ):
|
||
print(" ### emargement le matin")
|
||
ret_val = coll_emargement.find_one_and_update({'_id': ObjectId(str( diction['value']))},
|
||
{"$set":
|
||
{"date_update": str(now),
|
||
'matin': True,
|
||
}
|
||
},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
print(ret_val)
|
||
|
||
|
||
elif (diction['session'] == "2"):
|
||
print(" ### emargement l'apres midi")
|
||
ret_val = coll_emargement.find_one_and_update({'_id': ObjectId(str(diction['value']))},
|
||
{"$set":
|
||
{"date_update": str(now),
|
||
'apresmidi': True,
|
||
}
|
||
},
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
print(ret_val)
|
||
|
||
return True, " Emargement ok "
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
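# Illustrative call of UserEmargementValidation (the '_id' value is hypothetical):
#   diction = {'value': '64a1b2c3d4e5f6a7b8c9d0e1',   # _id of the 'emargement' row
#              'session': '1'}                         # '1' = morning, '2' = afternoon
#   status, message = UserEmargementValidation(diction)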
|
||
|
||
|
||
"""
|
||
Cette fonction prends une session de formation,
|
||
Créer l'ID fonctionnel associé : session_id
|
||
|
||
la regles est :
|
||
Session_id = date_du+date_au+code_postal+adresse
|
||
/!\ : les dates sont au formation : jjmmaaaa.
|
||
exemple de session_id : 10022023150220230enligne => du 10/02/2023 au 15/02/2023, au CP : 0, à l'adresse : 'en ligne'
|
||
|
||
"""
|
||
def CreateTrainingSession_id(diction):
|
||
try:
|
||
|
||
field_list_obligatoire = ['date_du', 'date_au', 'code_postal', 'adresse']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " Impossible de créer la session de formation - La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, " Impossible de créer la session de formation"
|
||
|
||
if( CheckisDate(str(diction['date_du'])) is False or CheckisDate(str(diction['date_au'])) is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - Les dates de debut ou de fin sont incorrecte. Les formats attendus : jj/mm/aaaa. Verifiez les dates : "+ str(diction['date_du'])+" et "+str(diction['date_au']))
|
||
return False, " Impossible de créer la session de formation : Les dates de debut ou de fin sont incorrecte. Les formats attendus : jj/mm/aaaa.Verifiez les dates : "+ str(diction['date_du'])+" et "+str(diction['date_au'])
|
||
|
||
|
||
## Verification de la cohérence des dates. Date_du doit <= Date_au
|
||
if (datetime.strptime(str(diction['date_du']).strip(), '%d/%m/%Y') > datetime.strptime(str(diction['date_au']).strip(), '%d/%m/%Y')):
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " - Impossible de créer la session de formation : La date debut "+str(str(diction['date_du'])[0:10])+" est postérieur à la date de fin "+str(diction['date_au'])[0:10]+" pour la formation ")
|
||
|
||
return False, " Impossible de créer la session de formation : La date debut "+str(diction['date_du'])[0:10]+" est postérieur à la date de fin "+str(diction['date_au'])[0:10]+" pour la formation "
|
||
|
||
session_id = str(
|
||
str(diction['date_du']).strip() +
|
||
str(diction['date_au']).strip() +
|
||
str(diction['code_postal']).strip() +
|
||
str(diction['adresse']).strip() ).replace(' ', '')
|
||
|
||
|
||
session_id = remove_non_ascii(session_id)
|
||
session_id = re.sub(r'[^a-zA-Z0-9]', '', session_id)
|
||
|
||
return True, session_id
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, False
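# Illustrative usage of CreateTrainingSession_id, following the rule described above
# (values hypothetical; slashes and spaces are stripped by the non-alphanumeric filter):
#   diction = {'date_du': '10/02/2023', 'date_au': '15/02/2023',
#              'code_postal': '0', 'adresse': 'en ligne'}
#   status, session_id = CreateTrainingSession_id(diction)
#   # -> (True, '10022023150220230enligne')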
|
||
|
||
|
||
"""
|
||
Cette fonction prend un email, et une session de formation
|
||
puis retourne toutes les informations relatif au couple (personne / session formation)
|
||
"""
|
||
def GetAttendeeDetail_perSession(diction):
|
||
try:
|
||
field_list_obligatoire = ['session_id', 'token', 'attendee_email', 'internal_url']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][
|
||
3]) + " La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, " Impossible de récupérer les informations detaillées"
|
||
|
||
my_token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
my_token = diction['token']
|
||
|
||
# Verifier la validité du token
|
||
retval = check_partner_token_validity("", my_token)
|
||
if retval is False:
|
||
return "Err_Connexion", " La session de connexion n'est pas valide"
|
||
|
||
|
||
# Recuperation du recid du partenaire
|
||
partner_recid = get_parnter_recid_from_token(str(my_token))
|
||
if partner_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
|
||
return False, " Les informations d'identification sont invalides"
|
||
|
||
RetObject = []
|
||
|
||
qry_filter = {'session_id':str(diction['session_id']),'email':str(diction['attendee_email']),}
|
||
|
||
""""
|
||
Verification qu'il n'y a q'une seule inscription pour ce critère
|
||
"""
|
||
|
||
qry_filter_count = MYSY_GV.dbname['inscription'].count_documents(qry_filter)
|
||
if( qry_filter_count > 1 ):
|
||
myprint(str(inspect.stack()[0][3]) + " - Données d'inscription incohérentes. il y a plus d'une inscription pour les critère "+str(qry_filter))
|
||
return False, "Données d'inscription incohérentes"
|
||
|
||
|
||
pipe_qry = ([{'$match':qry_filter},
|
||
{'$lookup':
|
||
{
|
||
'from': 'session_formation',
|
||
'let': {'session_id': "$session_id", 'class_internal_url': '$class_internal_url'},
|
||
|
||
'pipeline': [
|
||
{'$match':
|
||
{'$expr':
|
||
{'$and':
|
||
[
|
||
{'$eq': ['$_id', { '$toObjectId': '$$session_id' }]},
|
||
{'$eq': ["$class_internal_url", "$$class_internal_url"]},
|
||
{'$eq': ["$valide", "1"]}
|
||
]
|
||
}
|
||
}
|
||
},
|
||
|
||
],
|
||
'as': 'inscription_collectoin'
|
||
}
|
||
},
|
||
|
||
])
|
||
|
||
|
||
print(" ### GetAttendeeDetail_perSession ici pipe_qry = ",pipe_qry)
|
||
# Recuperation des infos de la formation
|
||
"""local_Insc_retval = MYSY_GV.dbname['inscription'].find_one({'session_id':str(diction['session_id']),
|
||
'email':str(diction['attendee_email']),
|
||
'class_internal_url':str(diction['internal_url']),})
|
||
"""
|
||
|
||
|
||
for local_Insc_retval in MYSY_GV.dbname['inscription'].aggregate(pipe_qry) :
|
||
|
||
#print(" ### local_Insc_retval laa== ", local_Insc_retval)
|
||
|
||
if( local_Insc_retval is None):
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire (2)")
|
||
return False, " Les informations d'identifier la session (2) "
|
||
|
||
if ('inscription_collectoin' in local_Insc_retval.keys() and len(local_Insc_retval['inscription_collectoin']) > 0):
|
||
my_retrun_dict = {}
|
||
|
||
if ("session_id" in local_Insc_retval.keys()):
|
||
my_retrun_dict['session_id'] = local_Insc_retval['session_id']
|
||
|
||
|
||
if ("code_session" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||
my_retrun_dict['code_session'] = str(local_Insc_retval['inscription_collectoin'][0]['code_session'])
|
||
|
||
if ("date_debut" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||
my_retrun_dict['date_du'] = str(local_Insc_retval['inscription_collectoin'][0]['date_debut'])[0:10]
|
||
|
||
if ("date_fin" in local_Insc_retval['inscription_collectoin'][0].keys()):
|
||
my_retrun_dict['date_au'] = str(local_Insc_retval['inscription_collectoin'][0]['date_fin'])[0:10]
|
||
|
||
if ("ville" in local_Insc_retval.keys()):
|
||
my_retrun_dict['ville'] = local_Insc_retval['ville']
|
||
|
||
if ("code_postal" in local_Insc_retval.keys()):
|
||
my_retrun_dict['code_postal'] = local_Insc_retval['code_postal']
|
||
|
||
if ("pays" in local_Insc_retval.keys()):
|
||
my_retrun_dict['pays'] = local_Insc_retval['pays']
|
||
|
||
if ("certification_send_date" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['certification_send_date'])):
|
||
my_retrun_dict['certification_send_date'] = str(local_Insc_retval['certification_send_date'])[0:10]
|
||
|
||
local_adresse = ""
|
||
if ("adresse" in local_Insc_retval.keys()):
|
||
local_adresse = local_Insc_retval['adresse']
|
||
my_retrun_dict['adresse'] = local_adresse
|
||
|
||
local_nom = ""
|
||
if ("nom" in local_Insc_retval.keys()):
|
||
local_nom = local_Insc_retval['nom']
|
||
my_retrun_dict['nom'] = local_nom
|
||
|
||
local_prenom = ""
|
||
if ("prenom" in local_Insc_retval.keys()):
|
||
local_prenom = local_Insc_retval['prenom']
|
||
my_retrun_dict['prenom'] = local_prenom
|
||
|
||
local_employeur = ""
|
||
if ("employeur" in local_Insc_retval.keys()):
|
||
local_employeur = local_Insc_retval['employeur']
|
||
my_retrun_dict['employeur'] = local_employeur
|
||
|
||
local_telephone = ""
|
||
if ("telephone" in local_Insc_retval.keys()):
|
||
local_telephone = local_Insc_retval['telephone']
|
||
my_retrun_dict['telephone'] = local_telephone
|
||
|
||
local_email = ""
|
||
if ("email" in local_Insc_retval.keys()):
|
||
local_email = local_Insc_retval['email']
|
||
my_retrun_dict['email'] = local_email
|
||
|
||
local_modefinancement = ""
|
||
if ("modefinancement" in local_Insc_retval.keys()):
|
||
local_modefinancement = local_Insc_retval['modefinancement']
|
||
my_retrun_dict['modefinancement'] = local_modefinancement
|
||
|
||
local_opco = ""
|
||
if ("opco" in local_Insc_retval.keys()):
|
||
local_opco = local_Insc_retval['opco']
|
||
my_retrun_dict['opco'] = local_opco
|
||
|
||
local_class_internal_url = ""
|
||
if ("class_internal_url" in local_Insc_retval.keys()):
|
||
local_class_internal_url = local_Insc_retval['class_internal_url']
|
||
my_retrun_dict['class_internal_url'] = local_class_internal_url
|
||
|
||
local_status = ""
|
||
if ("status" in local_Insc_retval.keys()):
|
||
local_status = local_Insc_retval['status']
|
||
my_retrun_dict['status'] = local_status
|
||
|
||
local_price = ""
|
||
if ("price" in local_Insc_retval.keys()):
|
||
local_price = local_Insc_retval['price']
|
||
my_retrun_dict['price'] = local_price
|
||
|
||
local_inscription_validation_date = ""
|
||
if ("inscription_validation_date" in local_Insc_retval.keys()):
|
||
local_inscription_validation_date = local_Insc_retval['inscription_validation_date']
|
||
my_retrun_dict['inscription_validation_date'] = local_inscription_validation_date
|
||
|
||
if ("eval_eval" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_eval'])):
|
||
my_retrun_dict['eval_eval'] = local_Insc_retval['eval_eval']
|
||
|
||
if ("eval_note" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_note'])):
|
||
my_retrun_dict['eval_note'] = local_Insc_retval['eval_note']
|
||
|
||
if ("eval_pedagogie" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_pedagogie'])):
|
||
my_retrun_dict['eval_pedagogie'] = local_Insc_retval['eval_pedagogie']
|
||
|
||
if ("eval_date" in local_Insc_retval.keys()):
|
||
if (str(local_Insc_retval['eval_date'])):
|
||
my_retrun_dict['eval_date'] = str(local_Insc_retval['eval_date'])[0:10]
|
||
|
||
client_rattachement_id = ""
|
||
client_rattachement_nom = ""
|
||
if ("client_rattachement_id" in local_Insc_retval.keys()):
|
||
if( local_Insc_retval['client_rattachement_id'] and str(local_Insc_retval['client_rattachement_id']) != 'undefined' ):
|
||
local_client_retval_data = MYSY_GV.dbname['partner_client'].find_one({'_id': ObjectId(str(local_Insc_retval['client_rattachement_id'])),
|
||
'valide': '1', 'locked': '0'}, {'_id':1, 'nom':1})
|
||
|
||
if( local_client_retval_data is not None):
|
||
client_rattachement_id = local_client_retval_data['_id']
|
||
client_rattachement_nom = local_client_retval_data['nom']
|
||
|
||
my_retrun_dict['client_rattachement_id'] = client_rattachement_id
|
||
my_retrun_dict['client_rattachement_nom'] = client_rattachement_nom
|
||
|
||
|
||
v = local_Insc_retval['_id'].generation_time
|
||
my_retrun_dict['created_date'] = str(v.strftime("%d/%m/%Y"))
|
||
|
||
# Recuperation des informations de la formation
|
||
local_formation = MYSY_GV.dbname['myclass'].find_one({'internal_url':str(local_Insc_retval['class_internal_url'])})
|
||
|
||
if local_formation is None or local_formation['_id'] is None:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer les informations de la formation ")
|
||
return False, " Impossible de récupérer les informations de la formation"
|
||
|
||
|
||
|
||
my_retrun_dict['class_title'] = local_formation['title']
|
||
|
||
RetObject.append(JSONEncoder().encode(my_retrun_dict))
|
||
|
||
|
||
return True, RetObject
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer les informations detaillées"
|
||
|
||
|
||
""" Cette fonction retourne la liste des domaines
|
||
de formation
|
||
"""
|
||
def get_List_domaine_formation():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery).distinct('domaine'):
|
||
user = retval
|
||
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste de domaines de formation"
|
||
|
||
"""
|
||
Cette fonction retour la liste distinct des metiers
|
||
"""
|
||
def get_List_metier_formation():
|
||
try:
|
||
coll_liste_domaine_metier = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
for retval in coll_liste_domaine_metier.find(myquery).distinct('metier'):
|
||
user = retval
|
||
|
||
RetObject.append(user)
|
||
|
||
return True, RetObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de récupérer la liste de domaines de formation"
|
||
|
||
|
||
|
||
"""
|
||
Fonction de recuperation de liste des domaines & metiers
|
||
"""
|
||
def get_List_domaine_metier():
|
||
try:
|
||
## Recuperation de toutes les stagiaire rattaché à cette session
|
||
coll_opco = MYSY_GV.dbname['liste_domaine_metier']
|
||
myquery = {}
|
||
myquery['valide'] = "1"
|
||
myquery['locked'] = "0"
|
||
|
||
RetObject = []
|
||
val_tmp = 1
|
||
|
||
# print(" #### myquery = "+str(myquery))
|
||
|
||
for retval in coll_opco.find(myquery, {'valide':0, 'locked':0, '_id':0}).\
|
||
sort([("priorite",pymongo.ASCENDING), ]) :
|
||
user = retval
|
||
"""
|
||
user['id'] = str(val_tmp)
|
||
local_val = str(retval['metier'])
|
||
user['label'] = local_val
|
||
user['value'] = local_val
|
||
val_tmp = val_tmp + 1
|
||
"""
|
||
RetObject.append(JSONEncoder().encode(user))
|
||
|
||
return True, RetObject
|
||
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
print(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, "Impossible de récupérer la liste des domaines et metiers"
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction retourne un texte sans les tag html
|
||
"""
|
||
def cleanhtml(raw_html):
|
||
CLEANR = re.compile('<.*?>')
|
||
cleantext = re.sub(CLEANR, '', raw_html).replace(" ", "")
|
||
cleantext = html.unescape(str(cleantext))
|
||
cleantext = clean_emoji(str(cleantext))
|
||
|
||
return cleantext
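# Illustrative usage of cleanhtml (assumes clean_emoji, defined elsewhere in this
# module, leaves plain text unchanged):
#   >>> cleanhtml("<p><b>Python</b>&amp;Data</p>")
#   'Python&Data'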
|
||
|
||
"""
|
||
Cette fonction un texte et ajuste des tag, pour faciliter l'import et formatage des formations
|
||
<s> => souligner </s>
|
||
<g> => Gras <g>
|
||
<t> => tablulation
|
||
<l> => saut de ligne
|
||
<b> => bullet point
|
||
"""
|
||
def format_MySy_Text_Tag(sentence):
|
||
sentence = sentence.replace("<b>", "•")
|
||
sentence = sentence.replace("<l>", "<br/>")
|
||
sentence = sentence.replace("<t>", " ")
|
||
|
||
sentence = sentence.replace("<g>", "<b>")
|
||
sentence = sentence.replace("</g>", "</b>")
|
||
|
||
sentence = sentence.replace("<s>", "<u>")
|
||
sentence = sentence.replace("</s>", "</u>")
|
||
|
||
return sentence
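# Illustrative usage of format_MySy_Text_Tag (hypothetical input):
#   >>> format_MySy_Text_Tag("<g>Objectifs</g><l><b> Maitriser Python")
#   '<b>Objectifs</b><br/>• Maitriser Python'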
|
||
|
||
|
||
"""
|
||
Cette fonction permet gerer le desabonnement d'une personne.
|
||
par defaut, l'adresse email recu est mis dans la collection 'mail_blacklist'
|
||
"""
|
||
def Sedesabonner(diction):
|
||
try:
|
||
|
||
field_list_obligatoire = ['email',]
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, " Impossible de finaliser le desabonnement."
|
||
|
||
|
||
mydata = {}
|
||
mydata['email'] = diction['email']
|
||
mytoday = datetime.today().strftime("%d/%m/%Y")[0:10]
|
||
mydata['date_blacklist'] = mytoday
|
||
|
||
coll_name = MYSY_GV.dbname['mail_blacklist']
|
||
|
||
ret_val = coll_name.find_one_and_update(
|
||
{'email': str(diction['email'])},
|
||
{"$set": mydata},
|
||
upsert=True,
|
||
return_document=ReturnDocument.AFTER
|
||
)
|
||
|
||
if (ret_val and ret_val['_id']):
|
||
nb_doc = str(ret_val['_id'])
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - l'adresse email =" + str(diction['email'])+" a été retirée" )
|
||
|
||
return True, " l'adresse email =" + str(diction['email'])+" a été retirée"
|
||
|
||
else:
|
||
myprint(str(inspect.stack()[0][3]) + " Impossible de desactivier l'adresse email : " +str(diction['email']))
|
||
return False, " Impossible de desactivier l'adresse email : " +str(diction['email'])
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de desactivier l'adresse email "
|
||
|
||
|
||
|
||
"""
|
||
Cette fonction verifie qu'une commande existe est qu'elle est bien valide
|
||
"""
|
||
def CheckSalesOrder(diction):
|
||
|
||
try:
|
||
field_list_obligatoire = ['token', 'orderid']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, " Toutes les informations obligatoires n'ont pas été fornies"
|
||
|
||
my_orderid = ""
|
||
if ("orderid" in diction.keys()):
|
||
if diction['orderid']:
|
||
my_orderid = diction['orderid']
|
||
|
||
|
||
my_token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
my_token = diction['token']
|
||
|
||
# Recuperation du recid du partenaire
|
||
partner_recid = get_parnter_recid_from_token(str(my_token))
|
||
if partner_recid is False:
|
||
myprint(str(inspect.stack()[0][3]) + " - Impossible de récupérer le recid du partenaire")
|
||
return False, " Les informations d'identification sont invalides"
|
||
|
||
insertObject = []
|
||
|
||
salesorder = MYSY_GV.dbname['sales_order'].find_one({'client_recid':partner_recid, 'order_id':my_orderid,
|
||
'valide':'1'}, {'order_id':1, 'order_date':1, 'stripe_pi':1,
|
||
})
|
||
|
||
if (salesorder is None or salesorder['order_id'] is None):
|
||
myprint(str(inspect.stack()[0][3]) + " La commande "+str(my_orderid)+" n'existe pas")
|
||
return False, " La commande "+str(my_orderid)+" n'existe pas"
|
||
|
||
insertObject.append(JSONEncoder().encode(salesorder))
|
||
|
||
return True, insertObject
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier l'existence de la commande "
|
||
|
||
|
||
"""
|
||
Cette fonction verifie s'un utilisateur existe vraiment dans le LMS
|
||
"""
|
||
def is_LMS_user_exist(diction):
|
||
try:
|
||
"""
|
||
Verification de la liste des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = [ 'login', 'partner_recid']
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - : La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, "Impossible d'envoyer les information de connexion à la plateforme LMS"
|
||
|
||
conn = mariadb.connect(
|
||
user=MYSY_GV.MYSY_MARIADB_USER,
|
||
password=MYSY_GV.MYSY_MARIADB_USER_PASS,
|
||
host=MYSY_GV.MYSY_MARIADB_HOST,
|
||
port=MYSY_GV.MYSY_MARIADB_PORT,
|
||
database=MYSY_GV.MYSY_LMS_BDD
|
||
)
|
||
cur = conn.cursor()
|
||
|
||
qry = "SELECT count(*) FROM user WHERE username = '"+str(diction['login'])+"' "
|
||
print(" #### qry = ", qry)
|
||
query_compte_exist_exec = cur.execute("SELECT count(*) FROM user WHERE username = '"+str(diction['login'])+"' ")
|
||
|
||
is_exist = False
|
||
|
||
query_compte_exist_exec = ""
|
||
query_compte_exist_exec_val = "0"
|
||
for query_compte_exist_exec in cur:
|
||
query_compte_exist_exec_val = query_compte_exist_exec[0]
|
||
if (int(tryInt(query_compte_exist_exec_val)) > 0):
|
||
is_exist = True
|
||
conn.commit()
|
||
|
||
|
||
return True, is_exist
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier l'existence du compte utilisateur dans le LMS "
|
||
|
||
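# Usage sketch (illustrative only; 'jdoe' and the partner recid are placeholder values):
#
#   local_status, local_exist = is_LMS_user_exist({'login': 'jdoe', 'partner_recid': 'abcd1234'})
#   if local_status and local_exist:
#       ...  # the login is already taken in the LMS
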
"""
|
||
# Remove all non-alphanumeric characters from string
|
||
Cette fonction supprime tous les caractères non alphanumeric d'une chaine
|
||
"""
|
||
def RemoveAllNonAlphaNumeric(sentence):
|
||
try:
|
||
new_sentence = re.sub(r'[\W_]', '', sentence)
|
||
return True, new_sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de RemoveAllNonAlphaNumeric "
|
||
|
||
|
||
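# Example (illustrative): RemoveAllNonAlphaNumeric("Ab-12_c!") returns (True, "Ab12c").
# The underscore is stripped as well, since the pattern is r'[\W_]'.
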
"""
|
||
Cette fonction nettoie un dictionnaire python :
|
||
Pour chaque mot, il va supprimer les espaces en debut et en fin de mot.
|
||
supprime les mots "nan" et "undefined"
|
||
"""
|
||
def strip_dictionary(orig_diction):
|
||
try:
|
||
|
||
for key in orig_diction:
|
||
if type(orig_diction[key]) == str:
|
||
if( orig_diction[key].strip() == "nan" or orig_diction[key].strip() == "undefined"):
|
||
orig_diction[key] = ""
|
||
else :
|
||
orig_diction[key] = orig_diction[key].strip()
|
||
|
||
return orig_diction
|
||
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
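# Example (illustrative):
#   strip_dictionary({'name': '  john ', 'city': 'nan', 'zip': 'undefined'})
#   returns {'name': 'john', 'city': '', 'zip': ''}
# The dictionary is modified in place and also returned; on error the function
# returns False instead of a dictionary, so callers should check the type.
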
"""
|
||
Cette fonction verifie si une chaine de caractère est un email valide
|
||
"""
|
||
def isEmailValide(email):
|
||
try:
|
||
pat = r'\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Z|a-z]{2,7}\b'
|
||
if re.match(pat, email):
|
||
return True
|
||
|
||
return False
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False
|
||
|
||
|
||
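# Examples (illustrative):
#   isEmailValide("user@example.com")   -> True
#   isEmailValide("not-an-email")       -> False
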
"""
|
||
Cette fonction verifie la validé d'un token et retour :
|
||
- Une erreur de connexion ou
|
||
- Si tout est ok, le partner associé à ce token
|
||
|
||
"""
|
||
def Check_Connexion_And_Return_Partner_Data(diction):
|
||
try:
|
||
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
# Verifier la validité du token
|
||
retval = check_partner_token_validity("", token)
|
||
|
||
if retval is False:
|
||
return "Err_Connexion", " La session de connexion n'est pas valide"
|
||
|
||
partner_recid = get_parnter_recid_from_token(token)
|
||
if (partner_recid is False):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - partner_recid est KO. Les données de connexion sont incorrectes ")
|
||
return False, " Vous n'etes pas autorisé à utiliser cette API "
|
||
|
||
# Recuperation des données du partenaire
|
||
local_status, my_partner = get_partner_data_from_recid(partner_recid)
|
||
if (local_status is False):
|
||
myprint(str(inspect.stack()[0][3]) + " - impossible de récupérer les données du partenaire")
|
||
return False, str(inspect.stack()[0][3]) + " - impossible de récupérer les données du partenaire. "
|
||
|
||
return True, my_partner
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier la connexion"
|
||
|
||
|
||
|
||
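# Usage sketch (illustrative; 'diction' is the request payload already received by
# the caller). The first returned element is True, False or the string
# "Err_Connexion", so callers should test it with 'is not True':
#
#   local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
#   if (local_status is not True):
#       return local_status, my_partner
#   partner_recid = my_partner['recid']
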
"""
|
||
Cette fonction fait un escape et clean les string avant injection SQL
|
||
"""
|
||
def Clean_For_SQL(sentence):
|
||
try:
|
||
h = html.parser
|
||
sentence = h.unescape(sentence)
|
||
sentence = sentence.replace("'", "\\'").replace('"', ' ').replace("’", "\’")
|
||
|
||
return sentence
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
|
||
return ""
|
||
|
||
|
||
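# Note (sketch): Clean_For_SQL is only a best-effort escape. Where the mariadb
# connector is used (see is_LMS_user_exist above), passing values as query
# parameters is safer than escaping and concatenating them, e.g.:
#
#   cur.execute("SELECT count(*) FROM user WHERE username = ?", (login,))
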
"""
|
||
Pour controler les acces utilisateur par module,
|
||
Cette fonction prends l'
|
||
- user_id (id de la collection ressource_humaine
|
||
- module_name
|
||
- action (read ou write)
|
||
|
||
et retourne True ou False selon que le user a droit de faire l'action.
|
||
|
||
La collection de travail est : user_access_right
|
||
|
||
"""
|
||
def Is_User_Has_Right_To_Action(diction):
|
||
try:
|
||
diction = strip_dictionary(diction)
|
||
|
||
"""
|
||
Verification des input acceptés
|
||
"""
|
||
field_list = ['token', 'module_name', 'action']
|
||
|
||
incom_keys = diction.keys()
|
||
for val in incom_keys:
|
||
if val not in field_list:
|
||
myprint(str(
|
||
inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification des champs obligatoires
|
||
"""
|
||
field_list_obligatoire = ['token', 'module_name', 'action']
|
||
|
||
for val in field_list_obligatoire:
|
||
if val not in diction:
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
|
||
return False, " Les informations fournies sont incorrectes",
|
||
|
||
"""
|
||
Verification de l'identité et autorisation de l'entité qui
|
||
appelle cette API
|
||
"""
|
||
token = ""
|
||
if ("token" in diction.keys()):
|
||
if diction['token']:
|
||
token = diction['token']
|
||
|
||
local_status, my_partner = Check_Connexion_And_Return_Partner_Data(diction)
|
||
if (local_status is not True):
|
||
return local_status, my_partner
|
||
|
||
token = ""
|
||
if ("action" in diction.keys()):
|
||
if ( str(diction['action']).lower().strip() not in ['read', 'write'] ):
|
||
myprint(
|
||
str(inspect.stack()[0][3]) + " - L'action '" + str(diction['action']).lower().strip() + "' n'est pas valide ")
|
||
return False, " Droit d'accès incorrect",
|
||
|
||
qry_access_right = {}
|
||
|
||
if( str(diction['action']).lower().strip() == "read" ):
|
||
qry_access_right = {'partner_owner_recid':str(my_partner['recid']), 'module':str(diction['module_name']),
|
||
'user_id':str(my_partner['ressource_humaine_id']), 'read':True}
|
||
|
||
elif (str(diction['action']).lower().strip() == "write"):
|
||
qry_access_right = {'partner_owner_recid': str(my_partner['recid']), 'module': str(diction['module_name']),
|
||
'user_id': str(my_partner['ressource_humaine_id']), 'write': True}
|
||
|
||
print(" ##### qry_access_right = ", qry_access_right)
|
||
is_acces_right_ok = MYSY_GV.dbname['user_access_right'].count_documents(qry_access_right)
|
||
|
||
if( is_acces_right_ok != 1):
|
||
return False, " Droits d'acces insuffisants"
|
||
|
||
|
||
return True, " OK"
|
||
|
||
except Exception as e:
|
||
exc_type, exc_obj, exc_tb = sys.exc_info()
|
||
myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - Line : " + str(exc_tb.tb_lineno))
|
||
return False, " Impossible de verifier les droits d'acces de l'utilisateur "
|
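# Usage sketch (illustrative; 'module_xyz' is a placeholder module name, not a
# value defined in this file):
#
#   local_status, local_retval = Is_User_Has_Right_To_Action({'token': my_token,
#                                                             'module_name': 'module_xyz',
#                                                             'action': 'write'})
#   if (local_status is not True):
#       return False, local_retval
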