[DEV] update BDD migration

Edouard DUPIN 2020-02-20 21:18:41 +01:00
parent d0790c7f1b
commit 64713d2ed4
7 changed files with 468 additions and 313 deletions

View File

@@ -23,6 +23,27 @@ debug.info("create the table:")
c = connection.cursor()
c.execute('''
DROP TABLE IF EXISTS video;
DROP TABLE IF EXISTS univers;
DROP TABLE IF EXISTS saison;
DROP TABLE IF EXISTS type;
DROP TABLE IF EXISTS grp;
DROP TABLE IF EXISTS cover_link;
DROP TABLE IF EXISTS node;
DROP TABLE IF EXISTS data;
DROP TABLE IF EXISTS object;
DROP SEQUENCE IF EXISTS kar_id_sequence;
''');
connection.commit()
# Create table
c.execute('''
CREATE SEQUENCE kar_id_sequence;
''')
connection.commit()
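Every table created below INHERITS (object) and draws its id from this one shared sequence, so ids stay unique across all tables; check_exist() and cover_link rely on that. A minimal sanity check, assuming the same psycopg2-style cursor the script uses (not part of the commit):

c.execute("SELECT nextval('kar_id_sequence'), nextval('kar_id_sequence')")
print(c.fetchone())  # two distinct ids; every inheriting table allocates from here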
# Create table
c.execute('''
CREATE OR REPLACE FUNCTION trigger_set_timestamp()
@@ -35,16 +56,66 @@ $$ LANGUAGE plpgsql;
''')
connection.commit()
# Debug variant of check_exist() kept for reference only; never executed:
aaa = '''
CREATE OR REPLACE FUNCTION check_exist(_table character, _id INTEGER)
RETURNS BOOLEAN AS $$
DECLARE vvv int;
DECLARE eee text;
BEGIN
raise WARNING 'check_exist(%,%)%', _table, _id, E'\n';
IF _id IS NULL THEN
raise WARNING ' ==> return 1 (detect NULL)%', E'\n';
RETURN 1;
END IF;
eee = 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id;
raise WARNING 'Execute: % %', eee, E'\n';
EXECUTE 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id INTO vvv;
raise WARNING 'Value vvv: % %', vvv, E'\n';
IF vvv = 1 THEN
raise WARNING ' ==> return 1 %', E'\n';
RETURN 1;
ELSE
raise WARNING ' ==> return 0 %', E'\n';
RETURN 0;
END IF;
END;
$$ LANGUAGE plpgsql;
'''
c.execute('''
CREATE OR REPLACE FUNCTION check_exist(_table character, _id INTEGER)
RETURNS BOOLEAN AS $$
DECLARE vvv int;
DECLARE eee text;
BEGIN
IF _id IS NULL THEN
RETURN 1;
END IF;
eee = 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id;
EXECUTE 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id INTO vvv;
IF vvv = 1 THEN
RETURN 1;
ELSE
RETURN 0;
END IF;
END;
$$ LANGUAGE plpgsql;
''')
connection.commit()
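The check_exist() CHECK constraints used below stand in for the plain FOREIGN KEY references this commit removes: PostgreSQL foreign keys do not see rows inserted into inheritance children, so REFERENCES object(id) would reject ids that actually live in data, node, and the other child tables. A usage sketch (the id value is hypothetical):

some_data_id = 1042  # hypothetical id of a row in the data table
c.execute("SELECT check_exist('data', %s)", (some_data_id,))
print(c.fetchone()[0])  # True if such a data row exists, or if the id is NULL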
# Create table
c.execute('''
CREATE TABLE object (
id SERIAL PRIMARY KEY,
id INTEGER PRIMARY KEY default nextval('kar_id_sequence'),
deleted BOOLEAN NOT NULL DEFAULT false,
create_date TIMESTAMPTZ NOT NULL DEFAULT NOW(),
modify_date TIMESTAMPTZ NOT NULL DEFAULT NOW());
COMMENT ON TABLE object IS 'Base element of this BDD (manages the creation and modification dates, the deletion flag, and the unique ID).';
COMMENT ON COLUMN object.id IS 'Unique global ID in the BDD.';
COMMENT ON COLUMN object.deleted IS 'If true the element is dead and must not be shown.';
COMMENT ON COLUMN object.create_date IS 'Creation date of this object (automatically set by the BDD).';
COMMENT ON COLUMN object.modify_date IS 'Modification date of this object (automatically updated by the BDD).';
''')
connection.commit()
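Both the body of trigger_set_timestamp() (cut off by the hunk above) and the CREATE TRIGGER statement below (cut off by the next hunk) are elided by the diff; the conventional shape of this pair is sketched here as an assumption, not copied from the commit:

c.execute('''
CREATE OR REPLACE FUNCTION trigger_set_timestamp()
RETURNS TRIGGER AS $$
BEGIN
    NEW.modify_date = NOW();
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
''')
c.execute('''
CREATE TRIGGER set_timestamp
BEFORE UPDATE ON object  -- target table assumed; the diff cuts it off
FOR EACH ROW
EXECUTE PROCEDURE trigger_set_timestamp();
''')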
c.execute('''
CREATE TRIGGER set_timestamp
@@ -56,12 +127,17 @@ connection.commit()
# Create table
c.execute('''
CREATE TABLE data(
CREATE TABLE data (
sha512 VARCHAR(129) NOT NULL,
mime_type VARCHAR(50) NOT NULL,
mime_type VARCHAR(128) NOT NULL,
size BIGINT NOT NULL,
original_name TEXT
) INHERITS (object)
) INHERITS (object);
COMMENT ON TABLE data IS 'Basic reference to a large data file managed by the BDD.';
COMMENT ON COLUMN data.sha512 IS 'Unique sha512 of the file.';
COMMENT ON COLUMN data.mime_type IS 'Type of the object, as its mime-type description.';
COMMENT ON COLUMN data.size IS 'Size of the file in bytes.';
COMMENT ON COLUMN data.original_name IS 'Name of the file when it was uploaded into the BDD ==> MUST be removed later.';
''')
connection.commit()
@@ -71,58 +147,70 @@ CREATE TABLE node (
name TEXT NOT NULL,
description TEXT
) INHERITS (object);
COMMENT ON TABLE node IS 'Node is the base element for everything that appears in a hierarchy.';
COMMENT ON COLUMN node.name IS 'Name of the Node.';
COMMENT ON COLUMN node.description IS 'Description of the Node.';
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE cover_link (
id SERIAL PRIMARY KEY,
deleted BOOLEAN NOT NULL DEFAULT false,
node_id INTEGER REFERENCES object(id),
data_id INTEGER REFERENCES object(id)
);
node_id INTEGER CHECK(check_exist('node', node_id)),
data_id INTEGER CHECK(check_exist('data', data_id))
) INHERITS (object);
COMMENT ON TABLE cover_link IS 'Link between cover data id and Nodes.';
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE grp () INHERITS (node);
COMMENT ON TABLE grp IS 'Group of the video.';
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE saison (
group_id INTEGER REFERENCES object(id)
group_id INTEGER CHECK(check_exist('grp', group_id))
) INHERITS (node);
COMMENT ON TABLE saison IS 'Season (saison) of the video.';
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE type () INHERITS (node);
COMMENT ON TABLE type IS 'Type of the video.';
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE univers () INHERITS (node);
COMMENT ON TABLE univers IS 'Universe (univers) of the video.';
''')
connection.commit()
# Create table
c.execute('''
CREATE TABLE video (
data_id INTEGER REFERENCES object(id),
type_id INTEGER REFERENCES object(id),
univers_id INTEGER REFERENCES object(id),
group_id INTEGER REFERENCES object(id),
saison_id INTEGER REFERENCES object(id),
episode INTEGER,
date INTEGER, -- simple date in years of the creation of the media
time INTEGER -- Time in second of the media
data_id INTEGER CHECK(check_exist('data', data_id)),
type_id INTEGER CHECK(check_exist('type', type_id)),
univers_id INTEGER CHECK(check_exist('univers', univers_id)),
group_id INTEGER CHECK(check_exist('grp', group_id)),
saison_id INTEGER CHECK(check_exist('saison', saison_id)),
episode INTEGER CHECK(episode >=0),
date INTEGER CHECK(date > 1850),
time INTEGER CHECK(time >= 0),
age_limit INTEGER CHECK(age_limit >= 0)
) INHERITS (node);
COMMENT ON TABLE video IS 'Video media that can be displayed.';
COMMENT ON COLUMN video.episode IS 'Number of the episode in the season sequence.';
COMMENT ON COLUMN video.date IS 'Simple date in years of the creation of the media.';
COMMENT ON COLUMN video.time IS 'Duration of the media in seconds.';
COMMENT ON COLUMN video.age_limit IS 'Minimum age required to display the media.';
''')
# Save (commit) the changes
@@ -134,15 +222,20 @@ connection.close()
print(" =================================================== Send DATA ");
import transfert_data
data_mapping = transfert_data.transfert_db()
print(" =================================================== Send TYPE ");
import transfert_type
type_mapping = transfert_type.transfert_db(data_mapping)
print(" =================================================== Send GROUP ");
import transfert_group
group_mapping = transfert_group.transfert_db(data_mapping, type_mapping)
print(" =================================================== Send SAISON ");
import transfert_saison
print(" =================================================== Send UNIVERS ");
import transfert_univers
saison_mapping = transfert_saison.transfert_db(data_mapping, type_mapping, group_mapping)
#print(" =================================================== Send UNIVERS ");
#import transfert_univers
#univers_mapping = transfert_univers.transfert_db(data_mapping, type_mapping, group_mapping)
print(" =================================================== Send VIDEO ");
import transfert_video
video_mapping = transfert_video.transfert_db(data_mapping, type_mapping, group_mapping, saison_mapping)
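Each transfert_* module returns a dict mapping old ids (stringified) to the newly allocated ids, with str(None) -> None so absent references pass through as SQL NULL; each later step resolves its foreign keys through the mappings built before it. The contract in miniature (illustrative values, runnable on its own):

data_mapping = {str(None): None, "17": 1042}
assert data_mapping[str(17)] == 1042   # remap an old id to its new id
assert data_mapping[str(None)] is None # a missing reference stays NULL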

View File

@@ -17,7 +17,6 @@ import copy
from dateutil import parser
import db
connection = db.connect_bdd();
def file_read_data(path):
if not os.path.isfile(path):
@@ -27,33 +26,46 @@ def file_read_data(path):
file.close()
return data_file
debug.info("Load old BDD: ")
data = file_read_data('bdd_data.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
time_create = elem["create_date"];
mime_type = elem["mime_type"]
original_name = elem["original_name"]
sha512 = elem["sha512"]
size = elem["size"]
request_insert = (id, time_create, sha512, mime_type, size, original_name)
c.execute('INSERT INTO data (id, create_date, sha512, mime_type, size, original_name) VALUES (%s,%s,%s,%s,%s,%s)', request_insert)
# Save (commit) the changes
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
def transfert_db():
out = {}
out[str(None)] = None
connection = db.connect_bdd();
debug.info("Load old BDD: ")
data = file_read_data('bdd_data.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
time_create = elem["create_date"];
mime_type = elem["mime_type"]
original_name = elem["original_name"]
sha512 = elem["sha512"]
size = elem["size"]
if mime_type == "unknown" and len(original_name) > 3 and original_name[-3:] == "mkv":
mime_type = "video/x-matroska"
request_insert = (time_create, sha512, mime_type, size, original_name)
c.execute('INSERT INTO data (create_date, sha512, mime_type, size, original_name) VALUES (%s,%s,%s,%s,%s) RETURNING id', request_insert)
id_of_new_row = c.fetchone()[0]
debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
out[str(id)] = id_of_new_row
# Save (commit) the changes
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
return out
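The mkv special case above papers over files stored with mime_type "unknown"; factored out, the fallback looks like this (hypothetical helper, covering only the one extension the migration actually handles, and tolerating a missing name):

_EXT_TO_MIME = {"mkv": "video/x-matroska"}

def fix_mime(mime_type, original_name):
    if mime_type != "unknown" or not original_name:
        return mime_type
    for ext, mime in _EXT_TO_MIME.items():
        if len(original_name) > len(ext) and original_name.endswith(ext):
            return mime
    return mime_type

assert fix_mime("unknown", "film.mkv") == "video/x-matroska"
assert fix_mime("video/mp4", "film.mp4") == "video/mp4"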

View File

@@ -18,7 +18,6 @@ from dateutil import parser
import datetime
import db
connection = db.connect_bdd();
def file_read_data(path):
@@ -29,48 +28,60 @@ def file_read_data(path):
file.close()
return data_file
debug.info("Load old BDD: ")
data = file_read_data('bdd_group.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (id, name, description)
c.execute('INSERT INTO grp (id, name, description) VALUES (%s,%s,%s)', request_insert)
def transfert_db(data_mapping, type_mapping):
out = {}
out[str(None)] = None
connection = db.connect_bdd();
debug.info("Load old BDD: ")
data = file_read_data('bdd_group.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (name, description)
c.execute('INSERT INTO grp (name, description) VALUES (%s,%s) RETURNING id', request_insert)
id_of_new_row = c.fetchone()[0]
debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
out[str(id)] = id_of_new_row
connection.commit()
for elem_cover in covers:
request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
for elem_cover in covers:
request_insert = (id, elem_cover)
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
return out;
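The description/covers defaulting repeated in every module is dict lookup plus None-normalization; a compact equivalent (hypothetical helper, behavior matched to the code above):

def get_covers(elem):
    covers = elem.get("covers")
    return [] if covers is None else covers

assert get_covers({}) == []
assert get_covers({"covers": None}) == []
assert get_covers({"covers": [3, 5]}) == [3, 5]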

View File

@@ -18,7 +18,6 @@ from dateutil import parser
import datetime
import db
connection = db.connect_bdd();
def file_read_data(path):
@@ -29,49 +28,59 @@ def file_read_data(path):
file.close()
return data_file
debug.info("Load old BDD: ")
data = file_read_data('bdd_saison.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["number"]
if "group_id" not in elem.keys():
group_id = None
else:
group_id = elem["group_id"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (id, name, description, group_id)
c.execute('INSERT INTO saison (id, name, description, group_id) VALUES (%s,%s,%s,%s)', request_insert)
def transfert_db(data_mapping, type_mapping, group_mapping):
out = {}
out[str(None)] = None
connection = db.connect_bdd();
debug.info("Load old BDD: ")
data = file_read_data('bdd_saison.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["number"]
if "group_id" not in elem.keys():
group_id = None
else:
group_id = elem["group_id"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (name, description, group_mapping[str(group_id)])
c.execute('INSERT INTO saison (name, description, group_id) VALUES (%s,%s,%s) RETURNING id', request_insert)
id_of_new_row = c.fetchone()[0]
debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
out[str(id)] = id_of_new_row
connection.commit()
for elem_cover in covers:
request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
for elem_cover in covers:
request_insert = (id, elem_cover)
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
return out

View File

@@ -18,7 +18,6 @@ from dateutil import parser
import datetime
import db
connection = db.connect_bdd();
def file_read_data(path):
@@ -29,45 +28,53 @@ def file_read_data(path):
file.close()
return data_file
debug.info("Load old BDD: ")
data = file_read_data('bdd_type.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (id, name, description)
c.execute('INSERT INTO type (id, name, description) VALUES (%s,%s,%s)', request_insert)
def transfert_db(data_mapping):
out = {}
out[str(None)] = None
connection = db.connect_bdd();
debug.info("Load old BDD: ")
data = file_read_data('bdd_type.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (name, description)
c.execute('INSERT INTO type (name, description) VALUES (%s,%s) RETURNING id', request_insert)
id_of_new_row = c.fetchone()[0]
debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
out[str(id)] = id_of_new_row
connection.commit()
for elem_cover in covers:
request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
for elem_cover in covers:
request_insert = (id, elem_cover)
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
return out

View File

@@ -18,7 +18,6 @@ from dateutil import parser
import datetime
import db
connection = db.connect_bdd();
def file_read_data(path):
@@ -29,45 +28,53 @@ def file_read_data(path):
file.close()
return data_file
debug.info("Load old BDD: ")
data = file_read_data('bdd_univers.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (id, name, description)
c.execute('INSERT INTO univers (id, name, description) VALUES (%s,%s,%s)', request_insert)
def transfert_db(data_mapping):
out = {}
out[str(None)] = None
connection = db.connect_bdd();
debug.info("Load old BDD: ")
data = file_read_data('bdd_univers.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
request_insert = (name, description)
c.execute('INSERT INTO univers (name, description) VALUES (%s,%s) RETURNING id', request_insert)
id_of_new_row = c.fetchone()[0]
debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
out[str(id)] = id_of_new_row
connection.commit()
for elem_cover in covers:
request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
for elem_cover in covers:
request_insert = (id, elem_cover)
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
return out
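If the univers step in the orchestrator is re-enabled, the matching call is a one-liner (a sketch matching the signature above; the cover loop in this module needs data_mapping to resolve cover ids):

import transfert_univers
univers_mapping = transfert_univers.transfert_db(data_mapping)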

View File

@@ -18,8 +18,14 @@ from dateutil import parser
import datetime
import db
connection = db.connect_bdd();
def force_number(s):
if s == None:
return None;
try:
return int(s)
except ValueError:
return None
def file_read_data(path):
if not os.path.isfile(path):
@@ -29,92 +35,102 @@ def file_read_data(path):
file.close()
return data_file
debug.info("Load old BDD: ")
data = file_read_data('bdd_video.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
time_create = elem["create_date"];
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
if "data_id" not in elem.keys():
data_id = None
else:
data_id = elem["data_id"]
if "type_id" not in elem.keys():
type_id = None
else:
type_id = elem["type_id"]
if "univers_id" not in elem.keys():
univers_id = None
else:
univers_id = elem["univers_id"]
if "group_id" not in elem.keys():
group_id = None
else:
group_id = elem["group_id"]
if "saison_id" not in elem.keys():
saison_id = None
else:
saison_id = elem["saison_id"]
if "date" not in elem.keys():
date = None
else:
date = elem["date"]
if "episode" not in elem.keys():
episode = None
else:
episode = elem["episode"]
if "time" not in elem.keys():
time = None
else:
time = elem["time"]
request_insert = (id, time_create, name, description, data_id, type_id, univers_id, group_id, saison_id, date, episode, time)
c.execute('INSERT INTO video (id, create_date, name, description, data_id, type_id, univers_id, group_id, saison_id, date, episode, time) VALUES (%s,false,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', request_insert)
def transfert_db(data_mapping, type_mapping, group_mapping, saison_mapping):
out = {}
out[str(None)] = None
connection = db.connect_bdd();
debug.info("Load old BDD: ")
data = file_read_data('bdd_video.json')
my_old_bdd = json.loads(data)
debug.info("create the table:")
c = connection.cursor()
debug.info("insert elements: ")
iii = 0;
for elem in my_old_bdd:
iii+=1;
debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
id = elem["id"]
time_create = elem["create_date"];
name = elem["name"]
if "description" not in elem.keys():
description = None
else:
description = elem["description"]
if "covers" not in elem.keys():
covers = []
else:
covers = elem["covers"]
if covers == None:
covers = [];
if "data_id" not in elem.keys():
data_id = None
else:
data_id = elem["data_id"]
if "type_id" not in elem.keys():
type_id = None
else:
type_id = elem["type_id"]
if "univers_id" not in elem.keys():
univers_id = None
else:
univers_id = elem["univers_id"]
if "group_id" not in elem.keys():
group_id = None
else:
group_id = elem["group_id"]
if "saison_id" not in elem.keys():
saison_id = None
else:
saison_id = elem["saison_id"]
if "date" not in elem.keys():
date = None
else:
date = elem["date"]
date = force_number(date)
if date != None and date < 1850:
date = None
if "episode" not in elem.keys():
episode = None
else:
episode = elem["episode"]
if "time" not in elem.keys():
time = None
else:
time = elem["time"]
request_insert = (time_create, name, description, data_mapping[str(data_id)], type_mapping[str(type_id)], group_mapping[str(group_id)], saison_mapping[str(saison_id)], force_number(date), force_number(episode), time)
c.execute('INSERT INTO video (create_date, name, description, data_id, type_id, group_id, saison_id, date, episode, time) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) RETURNING id', request_insert)
id_of_new_row = c.fetchone()[0]
debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
out[str(id)] = id_of_new_row
connection.commit()
for elem_cover in covers:
request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
for elem_cover in covers:
request_insert = (id, elem_cover)
print(" insert cover " + str(request_insert))
c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
connection.commit()
# Save (commit) the changes
connection.commit()
# def dict_factory(cursor, row):
# d = {}
# for idx, col in enumerate(cursor.description):
# d[col[0]] = row[idx]
# return d
# conn.row_factory = dict_factory
# c = conn.cursor()
# c.execute('SELECT * FROM video WHERE deleted=false')
# results = c.fetchall()
# print(results)
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
# def dict_factory(cursor, row):
# d = {}
# for idx, col in enumerate(cursor.description):
# d[col[0]] = row[idx]
# return d
# conn.row_factory = dict_factory
# c = conn.cursor()
# c.execute('SELECT * FROM video WHERE deleted=false')
# results = c.fetchall()
# print(results)
# We can also close the connection if we are done with it.
# Just be sure any changes have been committed or they will be lost.
connection.close()
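force_number() above is what keeps the loosely typed JSON fields ("date", "episode") from breaking the integer columns; its behavior, self-contained (it catches only ValueError, which covers the string inputs seen in this dump):

def force_number(s):
    if s is None:
        return None
    try:
        return int(s)
    except ValueError:
        return None

assert force_number("1984") == 1984
assert force_number("n/a") is None
assert force_number(None) is None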