'''
This module manages attachments (pièces jointes):
it gathers all the functions needed to handle attached files.
'''

import hashlib
import _pickle as cPickle
import pickle
from PIL import Image
import requests
import bson
from pymongo import MongoClient
import pymongo
from difflib import SequenceMatcher
import textdistance
from datetime import datetime
import logging
import secrets
import base64
from bson import ObjectId
import inspect
from werkzeug.utils import secure_filename
import time
import os
import csv
import sys
import pandas as pd
from pymongo import ReturnDocument
from unidecode import unidecode
import GlobalVariable as MYSY_GV
from serpapi import GoogleSearch
import re
import random
import json
from colorama import Fore
from colorama import Style
from flask import Flask, Response, render_template, request, abort, jsonify, send_from_directory
from xhtml2pdf import pisa
import jinja2
import ftplib
import pysftp
import html
import mariadb
from flask import send_file
import prj_common as mycommon
from pathlib import Path


"""
|
|
Cette fonction recuperer un fichier,
|
|
effectue les controle de taille et de securité,
|
|
le stock dans une emplacement et
|
|
retourne le path complet.
|
|
|
|
/!\ : les metadata sont enregistrés dans une collection.
|
|
metadata :
|
|
- file_business_object : ici on definit a quoi correspond le fichier (ex : diplome 1, pièce identité, etc).
|
|
/!\ : ce champ est la clé. si un utilisateur envoie 2 fois un fichier pr le meme ob, une mise à jour sera faite dans la collection, en ecrasant l'ancien.
|
|
- file_name
|
|
- full_path
|
|
- file_extention
|
|
- date_downeload
|
|
- object_owner_collection : Ce champ definit la collection a laquelle est rattachée la PJ. par exemple : "candidat", "employe"
|
|
- object_owner_id : id du proprietaire dans la collection (ex : le champ '_id' dans la collection "employee"
|
|
(les 2 champs 'object_owner' sont utilisé pour gerer à qui / quoi est rattacher à la pièce jointe).
|
|
- status :
|
|
0 : downloaded
|
|
1 : accepted
|
|
-1 : rejected
|
|
|
|
/!\ : 24/12/2024
|
|
- type_document : le champs 'file_business_object' semble problematique, nous allons donc introduire un champs : 'type_document' qui permettra de savori si c'est un devis, une convention, etc
|
|
(ce champs n'est pas obligatoire)
|
|
|
|
- metadata_field : est un champs de texte qui prends au maximum 500 caractère pour stocker des metadata pour effectuer des recherches
|
|
"""
|
|
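# Illustrative sketch only (never used by the code): what a document stored in the
# 'download_files' collection typically looks like after Store_User_Downloaded_File runs.
# The field names come from the docstring above; every value below is a made-up example.
_EXAMPLE_DOWNLOAD_FILES_DOCUMENT = {
    'file_business_object': 'piece_identite',
    'file_name': 'cni_20241224_103000_partnerrecid.pdf',
    'full_path': '/example/upload/folder/cni_20241224_103000_partnerrecid.pdf',
    'file_extention': 'pdf',
    'type_document': 'autre',
    'metadata_field': 'free text used for searches (500 characters max)',
    'object_owner_collection': 'candidat',
    'object_owner_id': '64a1b2c3d4e5f6a7b8c9d0e1',
    'partner_owner_recid': 'example_partner_recid',
    'document_display_order': '1',
    'status': '0',   # 0 = downloaded, 1 = accepted, -1 = rejected
    'valide': '1',
    'locked': '0',
}
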
def Store_User_Downloaded_File(file=None, Folder=None, diction=None):
    try:
        full_file_path = ""

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'file_business_object', 'file_name', 'status', 'object_owner_collection',
                      'object_owner_id', 'type_document', 'metadata_field']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        """
        Check of the mandatory fields
        """
        field_list_obligatoire = ['token', 'file_business_object', 'object_owner_collection', 'object_owner_id']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes",

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        basename = os.path.basename(file.filename)
        basename2 = basename.split(".")

        if (len(basename2) != 2):
            mycommon.myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier est incorrect")
            return False, "Le nom du fichier est incorrect"

        if (str(basename2[1]).lower() not in MYSY_GV.ALLOWED_EXTENSIONS):
            mycommon.myprint(str(inspect.stack()[0][3]) + " - : le format de fichier '" + str(basename2[1]) + "' n'est pas autorisé. Les extentions autorisées sont : " + str(MYSY_GV.ALLOWED_EXTENSIONS))
            return False, "le format de fichier '" + str(basename2[1]) + "' n'est pas autorisé. Les extentions autorisées sont : " + str(MYSY_GV.ALLOWED_EXTENSIONS)

        file_extention = str(basename2[1]).lower().strip()

        new_basename2 = re.sub(r'[^a-zA-Z0-9]', '', str(basename2[0]))

        timestr = time.strftime("%Y%m%d_%H%M%S")
        local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')
        new_file_name = str(local_base_name) + "_" + str(timestr) + "_" + str(my_partner['recid']) + "." + str(basename2[1])
        file.filename = new_file_name
        file.save(os.path.join(str(MYSY_GV.upload_folder), secure_filename(file.filename)))

        Global_file_name = MYSY_GV.upload_folder + file.filename

        file_business_object = ""
        if ("file_business_object" in diction.keys()):
            if diction['file_business_object']:
                file_business_object = diction['file_business_object']

        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        object_owner_collection = ""
        if ("object_owner_collection" in diction.keys()):
            if diction['object_owner_collection']:
                object_owner_collection = diction['object_owner_collection']

        type_document = ""
        if ("type_document" in diction.keys()):
            if diction['type_document']:
                type_document = str(diction['type_document']).lower()

        metadata_field = ""
        if ("metadata_field" in diction.keys()):
            if (diction['metadata_field'] and len(str(diction['metadata_field'])) > 500):
                metadata_field = diction['metadata_field'][0:500]
            elif (diction['metadata_field'] and len(str(diction['metadata_field'])) <= 500):
                metadata_field = diction['metadata_field']

        object_owner_id = ""
        if ("object_owner_id" in diction.keys()):
            if diction['object_owner_id']:
                object_owner_id = diction['object_owner_id']

        document_display_order = ""
        if ("document_display_order" in diction.keys()):
            if diction['document_display_order']:
                document_display_order = diction['document_display_order']

        if (len(str(file.filename)) > MYSY_GV.FILE_NAME_MAX_SIZE):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide, trop long = " + str(
                    file.filename) + ". le nom doit faire moins de 100 caractère")

            return False, " - Nom de fichier trop long. Il doit faire moins de 100 caractères"

        mydata = {}
        mydata['file_business_object'] = file_business_object
        mydata['date_update'] = str(datetime.now())
        mydata['file_name'] = file.filename
        mydata['type_document'] = type_document
        mydata['metadata_field'] = metadata_field
        mydata['full_path'] = Global_file_name
        mydata['file_extention'] = file_extention
        mydata['object_owner_collection'] = object_owner_collection
        mydata['object_owner_id'] = object_owner_id
        mydata['document_display_order'] = document_display_order
        mydata['partner_owner_recid'] = str(my_partner['recid'])
        mydata['valide'] = "1"
        mydata['status'] = "0"
        mydata['locked'] = "0"
        mydata['created_date'] = str(datetime.now())
        mydata['created_by'] = str(my_partner['_id'])

        ret_val = MYSY_GV.dbname['download_files'].find_one_and_update(
            {'file_business_object': str(file_business_object),
             'object_owner_collection': str(object_owner_collection),
             'object_owner_id': str(object_owner_id),
             'partner_owner_recid': str(my_partner['recid']),
             'valide': '1'},
            {"$set": mydata},
            upsert=True,
            return_document=ReturnDocument.AFTER
        )

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Error "


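# Minimal usage sketch (assumption): how a Flask endpoint would typically hand an uploaded
# file to Store_User_Downloaded_File. The form keys and business values below are
# hypothetical illustration values, not part of this module's API.
def _example_store_attachment_route():
    uploaded_file = request.files.get('File')
    payload = {
        'token': request.form.get('token'),
        'file_business_object': 'piece_identite',
        'object_owner_collection': 'candidat',
        'object_owner_id': request.form.get('object_owner_id'),
        'type_document': 'autre',
    }
    status, result = Store_User_Downloaded_File(file=uploaded_file,
                                                Folder=MYSY_GV.upload_folder,
                                                diction=payload)
    # On success 'result' is the full path of the stored file, otherwise an error message.
    return jsonify({'status': status, 'message': str(result)})

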
"""
|
|
Cette fonction est la copie de la première a l'exception qu'il s'agit d'une utilisation interne
|
|
/!\ : 24/12/2024
|
|
- type_document : le champs 'file_business_object' semble problematique, nous allons donc introduire un champs : 'type_document' qui permettra de savori si c'est un devis, une convention, etc
|
|
(ce champs n'est pas obligatoire)
|
|
|
|
- metadata_field : est un champs de texte qui prends au maximum 500 caractère pour stocker des metadata pour effectuer des recherches
|
|
|
|
"""
|
|
def Internal_Usage_Store_User_Downloaded_File(Folder=None, diction=None):
    try:
        full_file_path = ""

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'file_business_object', 'file_name', 'status', 'object_owner_collection', 'object_owner_id',
                      'file_name_to_store', 'type_document', 'metadata_field']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        """
        Check of the mandatory fields
        """
        field_list_obligatoire = ['token', 'file_business_object', 'object_owner_collection', 'object_owner_id', 'file_name_to_store']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes",

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        basename = os.path.basename(str(diction['file_name_to_store']))
        basename2 = basename.split(".")

        if (len(basename2) != 2):
            mycommon.myprint(str(inspect.stack()[0][3]) + " - : Le nom du fichier est incorrect")
            return False, "Le nom du fichier est incorrect"

        if (str(basename2[1]).lower() not in MYSY_GV.ALLOWED_EXTENSIONS):
            mycommon.myprint(str(inspect.stack()[0][3]) + " - : le format de fichier '" + str(basename2[1]) + "' n'est pas autorisé. Les extentions autorisées sont : " + str(MYSY_GV.ALLOWED_EXTENSIONS))
            return False, "le format de fichier '" + str(basename2[1]) + "' n'est pas autorisé. Les extentions autorisées sont : " + str(MYSY_GV.ALLOWED_EXTENSIONS)

        file_extention = str(basename2[1]).lower().strip()

        new_basename2 = str(basename2[0])

        timestr = time.strftime("%Y%m%d_%H%M%S")
        local_base_name = str(new_basename2).replace('(', '').replace(')', '').replace(' ', '')

        partial_file_name_without_folder = str(local_base_name) + "_" + str(timestr) + "_" + str(my_partner['recid']) + "." + str(basename2[1])

        new_file_name = MYSY_GV.upload_folder + str(local_base_name) + "_" + str(timestr) + "_" + str(my_partner['recid']) + "." + str(basename2[1])

        os.rename(str(diction['file_name_to_store']), new_file_name)

        Global_file_name = new_file_name

        file_business_object = ""
        if ("file_business_object" in diction.keys()):
            if diction['file_business_object']:
                file_business_object = diction['file_business_object']

        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        object_owner_collection = ""
        if ("object_owner_collection" in diction.keys()):
            if diction['object_owner_collection']:
                object_owner_collection = diction['object_owner_collection']

        object_owner_id = ""
        if ("object_owner_id" in diction.keys()):
            if diction['object_owner_id']:
                object_owner_id = diction['object_owner_id']

        type_document = ""
        if ("type_document" in diction.keys()):
            if diction['type_document']:
                type_document = str(diction['type_document']).lower()

        metadata_field = ""
        if ("metadata_field" in diction.keys()):
            if (diction['metadata_field'] and len(str(diction['metadata_field'])) > 500):
                metadata_field = diction['metadata_field'][0:500]
            elif (diction['metadata_field'] and len(str(diction['metadata_field'])) <= 500):
                metadata_field = diction['metadata_field']

        document_display_order = ""
        if ("document_display_order" in diction.keys()):
            if diction['document_display_order']:
                document_display_order = diction['document_display_order']

        mydata = {}
        mydata['file_business_object'] = file_business_object
        mydata['date_update'] = str(datetime.now())
        mydata['file_name'] = partial_file_name_without_folder
        mydata['full_path'] = Global_file_name
        mydata['type_document'] = type_document
        mydata['metadata_field'] = metadata_field
        mydata['file_extention'] = file_extention
        mydata['object_owner_collection'] = object_owner_collection
        mydata['object_owner_id'] = object_owner_id
        mydata['document_display_order'] = document_display_order
        mydata['partner_owner_recid'] = str(my_partner['recid'])
        mydata['valide'] = "1"
        mydata['status'] = "0"
        mydata['locked'] = "0"
        mydata['created_date'] = str(datetime.now())
        mydata['created_by'] = str(my_partner['_id'])

        ret_val = MYSY_GV.dbname['download_files'].find_one_and_update(
            {'file_business_object': str(file_business_object),
             'object_owner_collection': str(object_owner_collection),
             'object_owner_id': str(object_owner_id),
             'partner_owner_recid': str(my_partner['recid']),
             'file_name': str(partial_file_name_without_folder),
             'valide': '1'},
            {"$set": mydata},
            upsert=True,
            return_document=ReturnDocument.AFTER
        )

        return True, Global_file_name

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, "Error "


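# Minimal internal-usage sketch (assumption): unlike Store_User_Downloaded_File, the file
# already exists on the server; 'file_name_to_store' points to it and the function renames
# it into MYSY_GV.upload_folder. The path and business values passed here are illustrative only.
def _example_internal_store_generated_pdf(token, generated_pdf_path, session_id):
    payload = {
        'token': token,
        'file_business_object': 'convention',
        'object_owner_collection': 'session_formation',
        'object_owner_id': session_id,
        'file_name_to_store': generated_pdf_path,
        'type_document': 'convention',
    }
    return Internal_Usage_Store_User_Downloaded_File(Folder=MYSY_GV.upload_folder, diction=payload)

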
"""
|
|
Fonction de recuperation des pièces jointes d'objet en utilisant le 'file_name'
|
|
"""
|
|
|
|
def Get_Stored_Downloaded_File(diction):
    try:
        full_file_path = ""

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'file_name']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
                return False, " Les informations fournies sont incorrectes",

        """
        Check of the mandatory fields
        """
        field_list_obligatoire = ['token', 'file_name']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        file_name = ""
        if ("file_name" in diction.keys()):
            if diction['file_name']:
                file_name = diction['file_name']

        if (len(str(file_name)) > MYSY_GV.FILE_NAME_MAX_SIZE):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide, trop long = " + str(file_name) + ". le nom doit faire moins de 100 caractère")

            return False, " - Nom de fichier trop long. Il doit faire moins de 100 caractères"

        if (not file_name):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide = " + str(file_name))

            return False, " - nom du fichier invalide "

        full_path = MYSY_GV.upload_folder + str(file_name)

        """
        Look the file up in the collection
        """
        tmp_count = MYSY_GV.dbname['download_files'].count_documents({'full_path': str(full_path), 'valide': '1',
                                                                      'partner_owner_recid': my_partner['recid']})
        if (tmp_count <= 0):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide = " + str(full_path))

            return False, " - Fichier invalide "

        if os.path.exists(full_path):
            print(" ### full_path = ", full_path)
            return True, send_file(full_path, as_attachment=True)

        else:
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide (2)= " + str(full_path))

            return False, " - Fichier invalide (2) "

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, False


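# Minimal download-endpoint sketch (assumption): the function already returns a Flask
# send_file() response as its second element, so a route only has to unwrap the tuple.
# The form keys used here are hypothetical illustration values.
def _example_download_attachment_route():
    payload = {
        'token': request.form.get('token'),
        'file_name': request.form.get('file_name'),
    }
    status, result = Get_Stored_Downloaded_File(payload)
    if status is True:
        return result  # Flask response built by send_file(..., as_attachment=True)
    return jsonify({'status': False, 'message': str(result)})

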
"""
|
|
Fonction de recuperation des pièces jointes d'objet en utilisant le '_id'
|
|
"""
|
|
|
|
def Get_Stored_Downloaded_File_From_Id(diction):
    try:
        full_file_path = ""

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', '_id']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
                return False, " Les informations fournies sont incorrectes",

        """
        Check of the mandatory fields
        """
        field_list_obligatoire = ['token', '_id']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        file_name = ""
        if ("file_name" in diction.keys()):
            if diction['file_name']:
                file_name = diction['file_name']

        """
        Check that the identifier is valid
        """
        is_valide_download_files = MYSY_GV.dbname["download_files"].count_documents({'_id': ObjectId(str(diction['_id'])),
                                                                                     'valide': '1',
                                                                                     'partner_owner_recid': my_partner['recid']})
        if (is_valide_download_files != 1):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " L'identifiant du fichier est invalide ")
            return False, " L'identifiant du fichier est invalide ",

        is_valide_download_files_data = MYSY_GV.dbname["download_files"].find_one(
            {'_id': ObjectId(str(diction['_id'])),
             'valide': '1',
             'partner_owner_recid': my_partner['recid']})

        file_name = ""
        if ("file_name" in is_valide_download_files_data.keys()):
            file_name = is_valide_download_files_data['full_path']

        if (not file_name):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide = " + str(file_name))

            return False, " Fichier invalide "

        full_path = str(file_name)

        if os.path.exists(full_path):
            print(" ### full_path = ", full_path)
            return True, send_file(full_path, as_attachment=True)

        else:
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " Fichier invalide (2)= " + str(full_path))

            return False, " Fichier invalide (2) "

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, False


"""
|
|
Fonction qui test la recuperation
|
|
d'un fichier stocké sur le serveur
|
|
"""
|
|
|
|
UPLOAD_DIRECTORY = "./user_download_files"


def get_file():
    """Download a file."""
    path2 = "test_file.pdf"
    print(" ### PATH2 = ", path2)

    if os.path.exists(UPLOAD_DIRECTORY + "/" + str(path2)):
        path = UPLOAD_DIRECTORY + "/" + str(path2)

        print(" ### PATH = ", path)
        return True, send_file(path, as_attachment=True)
    else:
        return False, False


def test_get_stored_file():
    try:

        filename = "./user_download_files/"
        return True, "true"

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, False


"""
|
|
Recuperation de la liste des fichiers stocké par :
|
|
- object_owner_collection
|
|
- object_owner_id
|
|
"""
|
|
def Get_List_object_owner_collection_Stored_Files(diction):
    try:

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'object_owner_collection', 'object_owner_id']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        object_owner_collection = ""
        if ("object_owner_collection" in diction.keys()):
            if diction['object_owner_collection']:
                object_owner_collection = diction['object_owner_collection']

        object_owner_id = ""
        if ("object_owner_id" in diction.keys()):
            if diction['object_owner_id']:
                object_owner_id = diction['object_owner_id']

        """
        Check of the mandatory fields
        24/12/2024 :
        The two values ('object_owner_collection', 'object_owner_id') are no longer mandatory,
        but at least one of them must be provided
        """
        if (str(object_owner_collection).strip() == "" and str(object_owner_id).strip() == ""):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " Il manque des informations obligatoires ")
            return False, " Les informations fournies sont incorrectes",

        my_query = {}
        if (object_owner_collection):
            my_query['object_owner_collection'] = object_owner_collection

        if (object_owner_id):
            my_query['object_owner_id'] = object_owner_id

        my_query['valide'] = "1"
        my_query['partner_owner_recid'] = str(my_partner['recid'])

        print(" ##### myquery = " + str(my_query))
        RetObject = []
        nb_val = 0
        for retval in MYSY_GV.dbname['download_files'].find(my_query).sort([("_id", pymongo.DESCENDING), ("document_display_order", pymongo.DESCENDING)]):
            ret_file = {}

            ret_file['id'] = str(nb_val)
            ret_file['_id'] = str(retval['_id'])

            ret_file['file_business_object'] = retval['file_business_object']

            if ("created_date" in retval.keys()):
                ret_file['created_date'] = retval['created_date']
            else:
                ret_file['created_date'] = "9999-99-99"

            if ("file_business_object" in retval.keys()):
                ret_file['file_business_object'] = retval['file_business_object']

            if ("object_owner_collection" in retval.keys()):
                ret_file['object_owner_collection'] = retval['object_owner_collection']

            if ("object_owner_id" in retval.keys()):
                ret_file['object_owner_id'] = retval['object_owner_id']

            if ("file_name" in retval.keys()):
                ret_file['file_name'] = retval['file_name']

            if ("status" in retval.keys()):
                ret_file['status'] = retval['status']

            if ("document_display_order" in retval.keys()):
                ret_file['document_display_order'] = retval['document_display_order']

            if ("full_path" in retval.keys()):
                ret_file['full_path'] = retval['full_path']

            if ("file_cononical_name" in retval.keys()):
                ret_file['file_cononical_name'] = retval['file_cononical_name']
            else:
                ret_file['file_cononical_name'] = "Anonyme"

            if ("type_document" in retval.keys()):
                ret_file['type_document'] = retval['type_document']
            else:
                ret_file['type_document'] = "Autre"

            if ("file_extention" in retval.keys()):
                ret_file['file_extention'] = retval['file_extention']
            else:
                ret_file['file_extention'] = "file_extention"

            """
            Retrieve the client name
            """
            client_raison_sociale = ""
            client_nom = ""

            if ("object_owner_id" in retval.keys() and retval["object_owner_id"]):
                partner_client_data = MYSY_GV.dbname['partner_client'].find_one({'_id': ObjectId(str(retval["object_owner_id"]))}, {'raison_sociale': 1, 'nom': 1})

                if (partner_client_data and "raison_sociale" in partner_client_data.keys()):
                    client_raison_sociale = partner_client_data['raison_sociale']

                if (partner_client_data and "nom" in partner_client_data.keys()):
                    client_nom = partner_client_data['nom']

            ret_file['client_raison_sociale'] = client_raison_sociale
            ret_file['client_nom'] = client_nom

            RetObject.append(mycommon.JSONEncoder().encode(ret_file))
            nb_val = nb_val + 1

        #print(" ### Get_List_object_owner_collection_Stored_Files = ", RetObject)
        return True, RetObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, False


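# Consumption sketch (assumption): the function returns a list of JSON-encoded strings
# (one per attachment), so callers usually decode each entry with json.loads before use.
# 'candidat' is only one example of an owning collection, as mentioned in the module docstring.
def _example_list_attachments_of_candidate(token, candidate_id):
    status, encoded_rows = Get_List_object_owner_collection_Stored_Files({
        'token': token,
        'object_owner_collection': 'candidat',
        'object_owner_id': candidate_id,
    })
    if status is not True:
        return []
    return [json.loads(row) for row in encoded_rows]

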
"""
|
|
Suppression d'un fichier stocké
|
|
"""
|
|
def Delete_Stored_Downloaded_File(diction):
    try:
        full_file_path = ""

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'file_name']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
                return False, " Les informations fournies sont incorrectes",

        """
        Check of the mandatory fields
        """
        field_list_obligatoire = ['token', 'file_name']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        file_name = ""
        if ("file_name" in diction.keys()):
            if diction['file_name']:
                file_name = diction['file_name']

        if (len(str(file_name)) > MYSY_GV.FILE_NAME_MAX_SIZE):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide, trop long = " + str(file_name) + ". le nom doit faire moins de 100 caractère")

            return False, " - Nom de fichier trop long. Il doit faire moins de 100 caractères"

        if (not file_name):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide = " + str(file_name))

            return False, " - nom du fichier invalide "

        full_path = MYSY_GV.upload_folder + str(file_name)

        """
        Look the file up in the collection
        """
        tmp_count = MYSY_GV.dbname['download_files'].count_documents({'full_path': str(full_path), 'valide': '1'})
        if (tmp_count <= 0):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide = " + str(full_path))

            return False, " - Fichier invalide "

        delete_doc = MYSY_GV.dbname['download_files'].delete_one({'full_path': str(full_path), 'valide': '1'})

        if os.path.exists(full_path):
            print(" ### full_path = ", full_path)
            os.remove(full_path)
            return True, "Le fichier a été supprimé"

        else:
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " - Fichier invalide (2)= " + str(full_path))

            return False, " - Fichier invalide (2) "

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de supprimer le fichier"


"""
|
|
Suppression de toutes les pièces jointes d'une entité données (object_owner_id)
|
|
par exemple lorqu'on supprime un apprenant, il faudrait supprimer toutes
|
|
les pièces jointes associé à cette personne
|
|
"""
|
|
def Delete_Entity_Stored_Downloaded_File(diction):
    try:
        full_file_path = ""

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'object_owner_id', 'object_owner_collection']
        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'est pas autorisé")
                return False, " Les informations fournies sont incorrectes",

        """
        Check of the mandatory fields
        """
        field_list_obligatoire = ['token', 'object_owner_id', 'object_owner_collection']
        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - La valeur '" + val + "' n'est pas presente dans liste ")
                return False, " Les informations fournies sont incorrectes",

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        # Check that the entity exists (the 'valide' and 'locked' flags are NOT checked)
        is_entity_valide = MYSY_GV.dbname[str(diction['object_owner_collection'])].count_documents({'_id': ObjectId(str(diction['object_owner_id']))})
        if (is_entity_valide <= 0):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " L'identifiant de l'entité est invalide ")
            return False, " L'identifiant de l'entité est invalide "

        for val in MYSY_GV.dbname['download_files'].find({'object_owner_collection': str(diction['object_owner_collection']),
                                                          'object_owner_id': str(diction['object_owner_id'])}):
            if ("full_path" in val.keys()):
                file_name_to_remove = val['full_path']
                try:
                    os.unlink(file_name_to_remove)
                except:
                    mycommon.myprint(
                        str(inspect.stack()[0][3]) + " WARNING : Impossible de supprimer physiquement le fichier " + str(file_name_to_remove) + " ")

        """
        Delete the records from the collection
        """
        delete_row = MYSY_GV.dbname['download_files'].delete_many({'object_owner_collection': str(diction['object_owner_collection']),
                                                                   'object_owner_id': str(diction['object_owner_id'])})

        return True, " (" + str(delete_row.deleted_count) + " document(s) supprimé(s) "

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible de supprimer les fichier"


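# Clean-up sketch (assumption): as described above, when an entity (e.g. a learner) is removed,
# all of its attachments can be purged in one call. The owning collection is passed by the
# caller; the values used here are illustrative only.
def _example_purge_attachments_of_entity(token, owner_collection, owner_id):
    payload = {
        'token': token,
        'object_owner_collection': owner_collection,  # e.g. 'candidat'
        'object_owner_id': owner_id,
    }
    return Delete_Entity_Stored_Downloaded_File(payload)

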
"""
|
|
Cette fonction permet de recuperer une liste de documents
|
|
avec plusieurs filtre
|
|
"""
|
|
|
|
|
|
def Get_List_object_owner_collection_Stored_Files_With_Filter(diction):
    try:

        diction = mycommon.strip_dictionary(diction)

        """
        Check of the accepted input fields
        """
        field_list = ['token', 'filter_object_owner_collection', 'filter_object_owner_id',
                      'filter_file_business_object', 'date_create_start_date', 'date_create_end_date', 'filter_type_document',
                      'filter_nom_client', 'filter_formation_external_code', 'filter_session_code',
                      'filter_class_external_code']

        incom_keys = diction.keys()
        for val in incom_keys:
            if val not in field_list and val.startswith('my_') is False:
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le champ '" + val + "' n'existe pas")
                return False, " Les informations fournies sont incorrectes",

        '''
        Once every key sent to the API has been validated (previous step),
        check that the mandatory fields are present in the list
        '''
        field_list_obligatoire = ['token', ]

        for val in field_list_obligatoire:
            if val not in diction:
                mycommon.myprint(
                    str(inspect.stack()[0][3]) + " - : La valeur '" + val + "' n'est pas presente dans liste ")
                return False, "Tous les champs obligatoires ne sont pas fournis"

        """
        Check the identity and authorisation of the entity
        calling this API
        """
        token = ""
        if ("token" in diction.keys()):
            if diction['token']:
                token = diction['token']

        local_status, my_partner = mycommon.Check_Connexion_And_Return_Partner_Data(diction)
        if (local_status is not True):
            return local_status, my_partner

        # Filter on the document name
        filt_file_business_object = {}
        if ("filter_file_business_object" in diction.keys() and diction['filter_file_business_object']):
            filt_file_business_object = {
                'file_business_object': {'$regex': str(diction['filter_file_business_object']), "$options": 'i'}}

        filt_object_owner_collection = {}
        if ("filter_object_owner_collection" in diction.keys() and diction['filter_object_owner_collection']):
            filt_object_owner_collection = {'object_owner_collection': str(diction['filter_object_owner_collection'])}

        filt_filter_object_owner_id = {}
        if ("filter_object_owner_id" in diction.keys() and diction['filter_object_owner_id']):
            filt_filter_object_owner_id = {'object_owner_id': str(diction['filter_object_owner_id'])}

        filt_type_document = {}
        if ("filter_type_document" in diction.keys() and diction['filter_type_document']):

            if (str(diction['filter_type_document']).strip().lower() == "alltypedocument"):
                # Search over every document type: this amounts to leaving the filter empty, i.e. filtering nothing
                filt_type_document = {}

            elif (str(diction['filter_type_document']).strip().lower() == "autre" or str(diction['filter_type_document']).strip().lower() == ""):
                # If the document type is 'autre' or empty, filter on documents whose 'type_document' value is ''
                filt_type_document = {'type_document': ''}
            else:
                filt_type_document = {'type_document': {'$regex': "^" + str(diction['filter_type_document']) + "$", "$options": 'i'}}

        filt_date_create_start_date = ""
        if ("date_create_start_date" in diction.keys() and diction['date_create_start_date']):
            filt_date_create_start_date = str(diction['date_create_start_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_date_create_start_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de debut' n'est pas au format jj/mm/aaaa."

        filt_date_create_end_date = ""
        if ("date_create_end_date" in diction.keys() and diction['date_create_end_date']):
            filt_date_create_end_date = str(diction['date_create_end_date'])[0:10]
            local_status = mycommon.CheckisDate(filt_date_create_end_date)
            if (local_status is False):
                mycommon.myprint(str(
                    inspect.stack()[0][3]) + " Le filtre : 'date de fin' n'est pas au format jj/mm/aaaa.")
                return False, " Le filtre : 'date de fin' n'est pas au format jj/mm/aaaa."

        # Date consistency check
        if ("date_create_start_date" in diction.keys() and "date_create_end_date" in diction.keys() and
                datetime.strptime(str(diction['date_create_start_date'])[0:10], '%d/%m/%Y') > datetime.strptime(
                str(diction['date_create_end_date'])[0:10], '%d/%m/%Y')):
            mycommon.myprint(
                str(inspect.stack()[0][3]) + " Filtre de recherche : La date debut " + str(
                    diction['date_create_start_date'])[0:10] +
                " est postérieure à la date de fin " + str(diction['date_create_end_date'])[0:10])

            return False, " Filtre de recherche : La date de début " + str(diction['date_create_start_date'])[0:10] + \
                   " est postérieure à la date de fin " + str(diction['date_create_end_date'])[0:10] + " "

        """
        Handling of the session-code filter.
        If this field is provided, proceed as follows:

        1 - fetch the '_id' of the matching session(s) (==> session_id)
        2 - in the 'download_files' collection, filter as follows:
            download_files.object_owner_id = session_id
        """
        filt_session_code = {}
        Lists_session_id = []
        if ("filter_session_code" in diction.keys() and diction['filter_session_code']):
            sub_filt_session_code = {'code_session': {'$regex': str(diction['filter_session_code']), "$options": "i"},
                                     'partner_owner_recid': str(my_partner['recid']), 'valide': '1'}
            # Collect the '_id' of the sessions whose code matches the regexp
            for List_session_Data in MYSY_GV.dbname['session_formation'].find(sub_filt_session_code, {'_id': 1}):
                Lists_session_id.append(str(List_session_Data['_id']))

            filt_session_code = {'object_owner_id': {'$in': Lists_session_id, }}

        """
        Handling of the training (class) code filter.
        If this field is provided, proceed as follows:

        1 - fetch the '_id' of the matching trainings (==> class_id)
        2 - in the 'download_files' collection, filter as follows:
            download_files.object_owner_id = class_id
        """
        filt_class_external_code = {}
        Lists_class_id = []
        if ("filter_class_external_code" in diction.keys() and diction['filter_class_external_code']):
            sub_filt_class_external_code = {'external_code': {'$regex': str(diction['filter_class_external_code']), "$options": "i"},
                                            'partner_owner_recid': str(my_partner['recid']), 'valide': '1', 'locked': '0'}
            # Collect the '_id' of the trainings whose external code matches the regexp
            for List_class_Data in MYSY_GV.dbname['myclass'].find(sub_filt_class_external_code, {'_id': 1}):
                Lists_class_id.append(str(List_class_Data['_id']))

            filt_class_external_code = {'object_owner_id': {'$in': Lists_class_id, }}

        """
        Handling of the client-name filter.
        """
        filt_partner_client_nom = {}
        Lists_partner_client_id = []
        if ("filter_nom_client" in diction.keys() and diction['filter_nom_client']):
            sub_filt_partner_client_nom = {
                'nom': {'$regex': str(diction['filter_nom_client']), "$options": "i"},
                'partner_recid': str(my_partner['recid']), 'valide': '1', 'locked': '0'}
            # Collect the '_id' of the clients whose name matches the regexp
            for List_partner_client_Data in MYSY_GV.dbname['partner_client'].find(sub_filt_partner_client_nom, {'_id': 1}):
                Lists_partner_client_id.append(str(List_partner_client_Data['_id']))

            filt_partner_client_nom = {'object_owner_id': {'$in': Lists_partner_client_id, }}

        """
        Build the search query
        """
        filt_date_create_start_date_ISODATE = None
        filt_date_create_end_date_ISODATE = None

        if (filt_date_create_start_date):
            filt_date_create_start_date_ISODATE = datetime.strptime(str(filt_date_create_start_date), '%d/%m/%Y')

        if (filt_date_create_end_date):
            filt_date_create_end_date_ISODATE = datetime.strptime(str(filt_date_create_end_date), '%d/%m/%Y')

        filt_date_creation = {}
        if (filt_date_create_start_date_ISODATE and filt_date_create_end_date_ISODATE):
            filt_date_creation = {'mysy_download_files_date_creation': {'$gte': filt_date_create_start_date_ISODATE,
                                                                        '$lte': filt_date_create_end_date_ISODATE}
                                  }
        elif (filt_date_create_start_date_ISODATE):
            filt_date_creation = {'mysy_download_files_date_creation': {'$gte': filt_date_create_start_date_ISODATE,
                                                                        }
                                  }

        elif (filt_date_create_end_date_ISODATE):
            filt_date_creation = {'mysy_download_files_date_creation': {'$lte': filt_date_create_end_date_ISODATE,
                                                                        }
                                  }

        my_query = {'$and': [filt_object_owner_collection, filt_filter_object_owner_id, filt_date_creation,
                             filt_type_document, filt_session_code, filt_partner_client_nom, filt_file_business_object,
                             filt_class_external_code, {'partner_owner_recid': str(my_partner['recid']),
                                                        'valide': '1'}]}

        new_query = ([{"$addFields": {
            "mysy_download_files_date_creation": {
                '$dateFromString': {
                    'dateString': {"$substr": ["$created_date", 0, 10]},
                    "format": "%Y-%m-%d"
                }
            }
        }
        },
            {'$match': my_query
             },
            {
                '$sort': {'_id': -1, 'mysy_download_files_date_creation': -1}
            },

        ])

        print(" ##### new_query = " + str(new_query))

        RetObject = []
        nb_val = 0
        for retval in MYSY_GV.dbname['download_files'].aggregate(new_query):
            ret_file = {}

            ret_file['id'] = str(nb_val)
            ret_file['_id'] = str(retval['_id'])

            ret_file['file_business_object'] = retval['file_business_object']

            if ("created_date" in retval.keys()):
                ret_file['created_date'] = retval['created_date']
            else:
                ret_file['created_date'] = "9999-99-99"

            if ("file_business_object" in retval.keys()):
                ret_file['file_business_object'] = retval['file_business_object']

            if ("object_owner_collection" in retval.keys()):
                ret_file['object_owner_collection'] = retval['object_owner_collection']

            if ("object_owner_id" in retval.keys()):
                ret_file['object_owner_id'] = retval['object_owner_id']

            if ("file_name" in retval.keys()):
                ret_file['file_name'] = retval['file_name']

            if ("status" in retval.keys()):
                ret_file['status'] = retval['status']

            if ("document_display_order" in retval.keys()):
                ret_file['document_display_order'] = retval['document_display_order']

            if ("full_path" in retval.keys()):
                ret_file['full_path'] = retval['full_path']

            if ("file_cononical_name" in retval.keys()):
                ret_file['file_cononical_name'] = retval['file_cononical_name']
            else:
                ret_file['file_cononical_name'] = "Anonyme"

            if ("type_document" in retval.keys()):
                ret_file['type_document'] = retval['type_document']
            else:
                ret_file['type_document'] = "Autre"

            if ("file_extention" in retval.keys()):
                ret_file['file_extention'] = retval['file_extention']
            else:
                ret_file['file_extention'] = "file_extention"

            """
            Retrieve the client name
            """
            client_raison_sociale = ""
            client_nom = ""

            if ("object_owner_id" in retval.keys() and retval["object_owner_id"]):
                partner_client_data = MYSY_GV.dbname['partner_client'].find_one(
                    {'_id': ObjectId(str(retval["object_owner_id"]))}, {'raison_sociale': 1, 'nom': 1})

                if (partner_client_data and "raison_sociale" in partner_client_data.keys()):
                    client_raison_sociale = partner_client_data['raison_sociale']

                if (partner_client_data and "nom" in partner_client_data.keys()):
                    client_nom = partner_client_data['nom']

            ret_file['client_raison_sociale'] = client_raison_sociale
            ret_file['client_nom'] = client_nom

            RetObject.append(mycommon.JSONEncoder().encode(ret_file))
            nb_val = nb_val + 1

        #print(" ### Get_List_object_owner_collection_Stored_Files = ", RetObject)
        return True, RetObject

    except Exception as e:
        exc_type, exc_obj, exc_tb = sys.exc_info()
        mycommon.myprint(str(inspect.stack()[0][3]) + " -" + str(e) + " - ERRORRRR AT Line : " + str(exc_tb.tb_lineno))
        return False, " Impossible récupérer la liste des documents "


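# Filtered-search sketch (assumption): only 'token' is mandatory; the other keys narrow the
# result set. Dates use the jj/mm/aaaa format checked by mycommon.CheckisDate. The filter
# values below are illustrative only.
def _example_search_agreements_of_client(token):
    status, encoded_rows = Get_List_object_owner_collection_Stored_Files_With_Filter({
        'token': token,
        'filter_type_document': 'convention',
        'filter_nom_client': 'dupont',
        'date_create_start_date': '01/01/2024',
        'date_create_end_date': '31/12/2024',
    })
    if status is not True:
        return []
    return [json.loads(row) for row in encoded_rows]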