[DEV] better bdd checker

Edouard DUPIN 2020-02-24 07:50:04 +01:00
parent f98c90cbd8
commit 7b84f44710
10 changed files with 197 additions and 346 deletions

View File

@@ -58,31 +58,36 @@ def add(_app, _name_api):
"name": "id",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "size",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "sha512",
"type": "str",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "mime_type",
"type": "str",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "original_name",
"type": "str",
"modifiable": False,
"can_be_null": True
"can_be_null": True,
"visible": False,
},
]
data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
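The data model entries above now carry a "visible" flag next to "type", "modifiable" and "can_be_null". As a rough illustration of how a record could be checked against such a descriptor list (a sketch only: the helper name and logic below are not part of this commit), assuming the type names map to Python types as shown:

# Illustrative only: validate a record against a data model like dataModelBdd above.
# Descriptor keys (name, type, can_be_null) match the commit; the helper is hypothetical.
def check_record(_model, _record):
    python_types = {"int": int, "str": str, "float": float, "boolean": bool, "list": list}
    for field in _model:
        value = _record.get(field["name"])
        if value is None:
            if field["can_be_null"] == False:
                return False
            continue
        expected = python_types.get(field["type"])
        if expected is not None and not isinstance(value, expected):
            return False
    return True

# e.g. check_record(dataModelBdd, {"id": 1, "size": 1024, "sha512": "abc", "mime_type": "video/mp4", "original_name": None})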

View File

@@ -39,26 +39,23 @@ def add(_app, _name_api):
"name": "id",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "name",
"type": "str",
"modifiable": True,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "description",
"type": "str",
"modifiable": True,
"can_be_null": False
},
{
"name": "cover",
"type": "list",
"modifiable": False,
"can_be_null": False
},
"can_be_null": False,
"visible": True,
}
]
data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)

View File

@@ -38,31 +38,29 @@ def add(_app, _name_api):
"name": "id",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "number",
"type": "int",
"modifiable": True,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "description",
"type": "str",
"modifiable": True,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "group_id",
"type": "int",
"modifiable": True,
"can_be_null": False
},
{
"name": "cover",
"type": "list",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
]
data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)

View File

@@ -39,25 +39,22 @@ def add(_app, _name_api):
"name": "id",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "name",
"type": "str",
"modifiable": True,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "description",
"type": "str",
"modifiable": True,
"can_be_null": False
},
{
"name": "cover",
"type": "list",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
]
data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)

View File

@@ -37,25 +37,22 @@ def add(_app, _name_api):
"name": "id",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "name",
"type": "str",
"modifiable": True,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "description",
"type": "str",
"modifiable": True,
"can_be_null": False
},
{
"name": "cover",
"type": "list",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
]
data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)

View File

@@ -78,73 +78,78 @@ def add(_app, _name_api):
"name": "id",
"type": "int",
"modifiable": False,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "data_id",
"type": "int",
"modifiable": True,
"can_be_null": False
"can_be_null": False,
"visible": True,
},
{
"name": "type_id",
"type": "int",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "saison_id",
"type": "int",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "episode",
"type": "int",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "univers_id",
"type": "int",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "group_id",
"type": "int",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "name",
"type": "str",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "description",
"type": "str",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "date",
"type": "int",
"modifiable": True,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
{
"name": "time",
"type": "int",
"modifiable": True,
"can_be_null": True
},
{
"name": "cover",
"type": "list",
"modifiable": False,
"can_be_null": True
"can_be_null": True,
"visible": True,
},
]
data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)

View File

@@ -19,46 +19,46 @@ from psycopg2.extras import RealDictCursor
import db
def is_str(s, authorise):
if s == None:
if authorise == True:
return True
return False;
if type(s) == str:
return True
return False
if s == None:
if authorise == True:
return True
return False;
if type(s) == str:
return True
return False
def is_boolean(s, authorise):
if s == None:
if authorise == True:
return True
return False;
if s == True or s == False:
return True
return False
if s == None:
if authorise == True:
return True
return False;
if s == True or s == False:
return True
return False
def is_int(s, authorise):
if s == None:
if authorise == True:
return True
return False;
try:
int(s)
return True
except ValueError:
return False
return False
if s == None:
if authorise == True:
return True
return False;
try:
int(s)
return True
except ValueError:
return False
return False
def is_float(s, authorise):
if s == None:
if authorise == True:
return True
return False;
try:
float(s)
return True
except ValueError:
return False
return False
if s == None:
if authorise == True:
return True
return False;
try:
float(s)
return True
except ValueError:
return False
return False
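The four helpers above share one pattern: accept None when authorised, otherwise test or parse the value. A table-driven equivalent could look like this (a sketch, not part of the commit; the None handling and int()/float() parsing mirror the functions above):

# Sketch: single checker equivalent to is_str / is_boolean / is_int / is_float.
def is_of_type(_value, _type_name, _authorise_null):
    if _value is None:
        return _authorise_null
    if _type_name == "str":
        return type(_value) == str
    if _type_name == "boolean":
        return _value is True or _value is False
    if _type_name in ("int", "float"):
        try:
            int(_value) if _type_name == "int" else float(_value)
            return True
        except ValueError:
            return False
    return False

# e.g. is_of_type(None, "int", True) -> True ; is_of_type("12", "int", False) -> True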
##
## @brief Generic interface to access the BDD (no BDD, direct file IO)
##
@@ -66,97 +66,23 @@ class DataInterface():
def __init__(self, _name, _base_name):
self.model = None
self.name = _name
self.extract_base = "*"
self.base_name = _base_name
self.connection = db.connect_bdd();
self.need_save = False
#self.conn = self.connection.cursor()
def __del__(self):
self.connection.commit()
self.connection.close()
db.remove_connection();
def set_data_model(self, _data_model):
self.model = _data_model
def reset_with_value(self, _data):
#self.bdd = _data
#self.last_id = 0
#self.mark_to_store()
pass
def check_with_model(self, _data):
return True
"""
if self.model == None:
return True
values = []
for elem in dir(self.model):
debug.info("check element : " + elem);
if elem[:2] == "__":
continue
debug.info(" ==> select");
values.append(elem)
have_error = False
for key in _data.keys():
if key not in values:
have_error = True
# TODO: ...
debug.warning("Add element that is not allowed " + key + " not in " + str(values))
for elem in values:
if key not in _data.keys():
have_error = True
# TODO: ...
debug.warning("Missing key " + elem + " not in " + str(_data.keys()))
if have_error == True:
return False
for key in _data.keys():
elem = getattr(self.model, key)
if type(elem) == list:
find_error = True
for my_type in elem:
if type(_data[key]) == my_type:
find_error = False
break
if find_error == True:
debug.warning("data : " + str(_data))
tmp_list = []
for my_type in elem:
tmp_list.append(my_type.__name__)
debug.warning("[key='" + key + "'] try to add wrong type in BDD " + type(_data[key]).__name__ + " is not: " + str(my_type))
else:
if type(_data[key]) != getattr(self.model, key):
debug.warning("data : " + str(_data))
debug.warning("[key='" + key + "'] try to add wrong type in BDD " + type(_data[key]).__name__ + " is not: " + getattr(self.model, key).__name__)
return False
return True
"""
pass
def upgrade_global_bdd_id(self):
"""
self.last_id = 0
for elem in self.bdd:
if 'id' not in elem.keys():
continue
if elem["id"] >= self.last_id:
self.last_id = elem["id"] + 1
# start at a random value to help validate the basic instance test
if self.last_id == 0:
self.last_id = random.randint(20, 100)
"""
pass
def get_table_index(self, _id):
"""
id_in_bdd = 0
for elem in self.bdd:
if 'id' in elem.keys() \
and elem["id"] == _id:
return id_in_bdd
id_in_bdd += 1
return None
"""
pass
self.extract_base = ""
for elem in self.model:
if elem["visible"] == True:
if self.extract_base != "":
self.extract_base += ","
self.extract_base += elem["name"]
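With the model set, extract_base becomes the comma-separated list of visible columns, and the gets()/get() requests later in this file select only those columns. A compact sketch of the same idea (illustrative helper, not part of the commit):

# Illustrative: how the "visible" flag drives the selected columns (mirrors the loop above).
def build_column_list(_model):
    return ",".join(elem["name"] for elem in _model if elem["visible"] == True)

# For the data model earlier in this commit this yields "id,size,sha512,mime_type"
# (original_name is marked "visible": False and is therefore never returned by gets()/get()).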
##
## @brief Mark the current BDD to store all in File system (sync)
@@ -171,13 +97,13 @@ class DataInterface():
def check_save(self):
if self.need_save == False:
return
debug.warning("Save bdd: " + self.file)
debug.warning("Save bdd: ")
self.connection.commit()
def gets(self, filter=None):
debug.info("gets " + self.name)
cursor = self.connection.cursor(cursor_factory=RealDictCursor)
cursor.execute('SELECT * FROM ' + self.base_name + ' WHERE deleted = false')
cursor.execute('SELECT ' + self.extract_base + ' FROM ' + self.base_name + ' WHERE deleted = false')
results = cursor.fetchall()
#debug.info("gets data = " + json.dumps(results, indent=4))
if filter == None:
@@ -202,26 +128,11 @@ class DataInterface():
#results = cursor.fetchall()
#debug.info("display data = " + json.dumps(results, indent=4))
req = (_id,)
cursor.execute('SELECT * FROM ' + self.base_name + ' WHERE deleted=false AND id=%s', req)
cursor.execute('SELECT ' + self.extract_base + ' FROM ' + self.base_name + ' WHERE deleted=false AND id=%s', req)
results = cursor.fetchone()
#debug.info("get specific data = " + json.dumps(results))
return results;
def set(self, _id, _value):
"""
if type(_id) != int:
debug.warning("get wrong input type...")
for elem in self.bdd:
if 'id' in elem.keys() \
and elem["id"] == _id:
elem = _value
self.mark_to_store()
return elem
debug.warning("not found element: " + str(len(self.bdd)))
"""
self.mark_to_store();
return None
def delete(self, _id):
debug.info("delete " + self.name + ": " + str(_id))
cursor = self.connection.cursor()
@@ -241,13 +152,13 @@ class DataInterface():
if elem["type"] == "str":
if is_str(_value, elem["can_be_null"]) == True:
return True
elif if elem["type"] == "int":
elif elem["type"] == "int":
if is_int(_value, elem["can_be_null"]) == True:
return True
elif if elem["type"] == "float":
elif elem["type"] == "float":
if is_float(_value, elem["can_be_null"]) == True:
return True
elif if elem["type"] == "boolean":
elif elem["type"] == "boolean":
if is_boolean(_value, elem["can_be_null"]) == True:
return True
else:
@@ -274,7 +185,7 @@ class DataInterface():
else:
request += " , "
list_data.append(_value[elem])
request += " '" + elem + "' = %s"
request += " " + elem + " = %s"
request += " WHERE id = %s "
list_data.append(_id)
debug.info("Request executed : '" + request + "'")
@@ -292,142 +203,5 @@ class DataInterface():
self.mark_to_store();
return _value
# TODO : rework this
def find(self, _list_token, _values):
"""
out = []
for elem in self.bdd:
find = True
for token in _list_token:
if elem[token] != _values[token]:
find = False
break
if find == True:
out.append(elem)
return out
"""
pass
def count(self, select = None):
"""if select == None:
return len(self.bdd)
tmp = self.get_sub_list(self.bdd, select)
return len(tmp)
"""
pass
def get_sub_list(self, _values, _select):
"""
out = []
for elem in _values:
find = True
if len(_select) == 0:
find = False
for elem_select in _select:
if len(elem_select) != 3:
raise ServerError("Internal Server Error: wrong select definition", 500)
type_check = elem_select[0]
token = elem_select[1]
value = elem_select[2]
if type(value) == list:
if token in elem.keys():
if type_check == "==":
if not (elem[token] in value):
find = False
break
elif type_check == "!=":
if not (elem[token] not in value):
find = False
break
else:
raise ServerError("Internal Server Error: unknow comparing type ...", 500)
else:
find = False
break
else:
if token in elem.keys():
if type_check == "==":
if not (elem[token] == value):
find = False
break
elif type_check == "!=":
if not (elem[token] != value):
find = False
break
elif type_check == "<":
if not (elem[token] < value):
find = False
break
elif type_check == "<=":
if not (elem[token] <= value):
find = False
break
elif type_check == ">":
if not (elem[token] >= value):
find = False
break
elif type_check == ">=":
if not (elem[token] >= value):
find = False
break
else:
raise ServerError("Internal Server Error: unknow comparing type ...", 500)
else:
find = False
break
if find == True:
out.append(elem)
return out
"""
pass
def order_by(self, _values, _order):
"""
if _order == None:
return _values
if len(_order) == 0:
return _values
value_order = _order[0]
out = []
out_unclassable = []
for elem in _values:
if value_order not in elem.keys():
out_unclassable.append(elem);
continue
if elem[value_order] == None:
out_unclassable.append(elem);
continue
out.append(elem);
out = sorted(out, key=lambda x: x[value_order])
if len(_order) > 1:
out_unclassable = self.order_by(out_unclassable, _order[1:]);
for elem in out_unclassable:
out.append(elem);
return out;
"""
pass
def filter_object_values(self, _values, _filter):
"""
out = []
if _filter == None:
return _values
if len(_filter) == 1:
token = _filter[0]
for elem in _values:
if token not in elem.keys():
continue
if elem[token] not in out:
out.append(elem[token])
return out
for elem in _values:
element_out = {}
for token in _filter:
if token not in elem.keys():
continue
element_out[token] = elem[token]
out.append(element_out)
return out
"""

View File

@@ -2,11 +2,31 @@ from realog import debug
import psycopg2
connection = None
connection_count = 0
def connect_bdd():
debug.info("connect BDD: ")
conn = psycopg2.connect(dbname="karideo", user="root", password="postgress_password", host="localhost", port="15032")
return conn
global connection
global connection_count
if connection == None:
debug.info("connect BDD: ")
connection = psycopg2.connect(dbname="karideo", user="root", password="postgress_password", host="localhost", port="15032")
connection_count += 1
return connection
def remove_connection():
global connection
global connection_count
connection_count -= 1
if connection_count < 0:
debug.warning("Request remove too much time the BDD connection");
connection_count = 0;
return;
if connection_count == 0:
debug.warning("dicconnect BDD");
connection.commit()
connection.close()
connection = None
return;
base_bdd_name = "karideo_"
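connect_bdd()/remove_connection() now reference-count a single shared psycopg2 connection, so every connect_bdd() call must be paired with a remove_connection(), as DataInterface does in __init__/__del__. A small context-manager sketch (not part of the commit, assumed to live next to these functions in db.py) that guarantees the pairing:

# Sketch only: pair connect_bdd()/remove_connection() automatically.
import contextlib

@contextlib.contextmanager
def bdd_connection():
    conn = connect_bdd()
    try:
        yield conn
    finally:
        remove_connection()

# usage:
# with bdd_connection() as conn:
#     cursor = conn.cursor()
#     ...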

View File

@@ -342,19 +342,19 @@ def push_video_file(_path, _basic_key={}):
mime_type = "unknown"
# do it by myself .. it is better ...
filename___, file_extension = os.path.splitext(_path)
if file_extension == "mkv":
if file_extension in ["mkv", ".mkv"]:
mime_type = "video/x-matroska"
elif file_extension == "mka":
elif file_extension in ["mka", ".mka"]:
mime_type = "audio/x-matroska"
elif file_extension == "mp4":
elif file_extension in ["mp4", ".mp4"]:
mime_type = "video/mp4"
elif file_extension == "webm":
elif file_extension in ["webm", ".webm"]:
mime_type = "video/webm"
elif file_extension == "json":
elif file_extension in ["json", ".json"]:
mime_type = "application/json"
elif file_extension == "jpeg":
elif file_extension in ["jpeg", ".jpeg", ".JPEG", "JPEG", "jpg", ".jpg", ".JPG", "JPG"]:
mime_type = "image/jpeg"
elif file_extension == "png":
elif file_extension in ["png", ".png"]:
mime_type = "image/png"
try:
_path.encode('latin-1')
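os.path.splitext() returns the extension with its leading dot (".mkv", not "mkv"), which is why the comparisons above were widened to lists. An equivalent, case-insensitive lookup could be written as a dict (a sketch, not part of the commit):

# Sketch: dict-based mime-type lookup; the extension is normalised so ".MKV", "mkv" and ".mkv" all match.
import os

MIME_TYPES = {
    "mkv": "video/x-matroska",
    "mka": "audio/x-matroska",
    "mp4": "video/mp4",
    "webm": "video/webm",
    "json": "application/json",
    "jpeg": "image/jpeg",
    "jpg": "image/jpeg",
    "png": "image/png",
}

def guess_mime_type(_path):
    _, extension = os.path.splitext(_path)
    return MIME_TYPES.get(extension.lower().lstrip("."), "unknown")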

View File

@@ -141,6 +141,14 @@ COMMENT ON COLUMN data.original_name IS 'Name of the file when upload it in the
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_data
BEFORE UPDATE ON data
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE node (
@@ -152,6 +160,13 @@ COMMENT ON COLUMN node.name IS 'Name of the Node.';
COMMENT ON COLUMN node.description IS 'Description of the Node.';
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_node
BEFORE UPDATE ON node
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
@@ -162,6 +177,13 @@ CREATE TABLE cover_link (
COMMENT ON TABLE cover_link IS 'Link between cover data id and Nodes.';
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_cover_link
BEFORE UPDATE ON cover_link
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
@@ -170,6 +192,14 @@ COMMENT ON TABLE grp IS 'Group of the video.';
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_grp
BEFORE UPDATE ON grp
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE saison (
@ -178,6 +208,13 @@ CREATE TABLE saison (
COMMENT ON TABLE saison IS 'Saison of the video.';
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamps_saison
BEFORE UPDATE ON saison
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
@@ -185,6 +222,13 @@ CREATE TABLE type () INHERITS (node);
COMMENT ON TABLE type IS 'Type of the video.';
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_type
BEFORE UPDATE ON type
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
@@ -192,6 +236,13 @@ CREATE TABLE univers () INHERITS (node);
COMMENT ON TABLE univers IS 'Univers of the video.';
''')
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_univers
BEFORE UPDATE ON univers
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# Create table
c.execute('''
@@ -215,6 +266,13 @@ COMMENT ON COLUMN video.age_limit IS 'Limitation of the age to show the display'
# Save (commit) the changes
connection.commit()
c.execute('''
CREATE TRIGGER set_timestamp_video
BEFORE UPDATE ON video
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
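The new set_timestamp_* triggers added throughout this script all call the same trigger_set_timestamp() procedure, one per table, and the statements only differ by table name. They could also be generated in a loop (a sketch, not part of the commit; it assumes the same cursor c and connection objects used above, and that trigger_set_timestamp() already exists):

# Sketch: create the BEFORE UPDATE timestamp trigger for every table in one loop.
for table in ["data", "node", "cover_link", "grp", "saison", "type", "univers", "video"]:
    c.execute('''
        CREATE TRIGGER set_timestamp_{0}
        BEFORE UPDATE ON {0}
        FOR EACH ROW
        EXECUTE PROCEDURE trigger_set_timestamp();
    '''.format(table))
connection.commit()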