[DEV] update BDD migration
parent d0790c7f1b
commit 64713d2ed4
@@ -23,6 +23,27 @@ debug.info("create the table:")
 
 c = connection.cursor()
 
+c.execute('''
+DROP TABLE IF EXISTS video;
+DROP TABLE IF EXISTS univers;
+DROP TABLE IF EXISTS saison;
+DROP TABLE IF EXISTS type;
+DROP TABLE IF EXISTS grp;
+DROP TABLE IF EXISTS cover_link;
+DROP TABLE IF EXISTS node;
+DROP TABLE IF EXISTS data;
+DROP TABLE IF EXISTS object;
+DROP SEQUENCE IF EXISTS kar_id_sequence;
+''');
+connection.commit()
+
+
+# Create table
+c.execute('''
+CREATE SEQUENCE kar_id_sequence;
+''')
+connection.commit()
+
 # Create table
 c.execute('''
 CREATE OR REPLACE FUNCTION trigger_set_timestamp()
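Throughout these scripts the connection comes from a db.connect_bdd() helper that is not part of this diff. A minimal sketch of what such a helper presumably looks like with psycopg2 (host, database name and credentials are assumptions, not taken from the repository):

    # Hypothetical sketch only: the real db module is not shown in this commit.
    import psycopg2

    def connect_bdd(host="localhost", port=5432, name="karideo", user="root", password=""):
        # Plain psycopg2 connection; callers create cursors and commit themselves,
        # exactly as the migration scripts in this diff do.
        return psycopg2.connect(host=host, port=port, dbname=name, user=user, password=password)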
@@ -35,16 +56,66 @@ $$ LANGUAGE plpgsql;
 ''')
 connection.commit()
 
+aaa = '''
+CREATE OR REPLACE FUNCTION check_exist(_table character, _id INTEGER)
+RETURNS BOOLEAN AS $$
+DECLARE vvv int;
+DECLARE eee text;
+BEGIN
+raise WARNING 'check_exist(%,%)%', _table, _id, E'\n';
+IF _id IS NULL THEN
+raise WARNING ' ==> return 1 (detect NULL)%', E'\n';
+RETURN 1;
+END IF;
+eee = 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id;
+raise WARNING 'Execute: % %', eee, E'\n';
+EXECUTE 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id INTO vvv;
+raise WARNING 'Value vvv: % %', vvv, E'\n';
+IF vvv = 1 THEN
+raise WARNING ' ==> return 1 %', E'\n';
+RETURN 1;
+ELSE
+raise WARNING ' ==> return 0 %', E'\n';
+RETURN 0;
+END IF;
+END;
+$$ LANGUAGE plpgsql;
+'''
+
+c.execute('''
+CREATE OR REPLACE FUNCTION check_exist(_table character, _id INTEGER)
+RETURNS BOOLEAN AS $$
+DECLARE vvv int;
+DECLARE eee text;
+BEGIN
+IF _id IS NULL THEN
+RETURN 1;
+END IF;
+eee = 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id;
+EXECUTE 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id INTO vvv;
+IF vvv = 1 THEN
+RETURN 1;
+ELSE
+RETURN 0;
+END IF;
+END;
+$$ LANGUAGE plpgsql;
+''')
+connection.commit()
+
 # Create table
 c.execute('''
 CREATE TABLE object (
-id SERIAL PRIMARY KEY,
+id INTEGER PRIMARY KEY default nextval('kar_id_sequence'),
 deleted BOOLEAN NOT NULL DEFAULT false,
 create_date TIMESTAMPTZ NOT NULL DEFAULT NOW(),
 modify_date TIMESTAMPTZ NOT NULL DEFAULT NOW());
+COMMENT ON TABLE object IS 'Basic element in this BDD (manage the create and modfy property, the deletion and the unique ID.';
+COMMENT ON COLUMN object.id IS 'Unique global ID in the BDD.';
+COMMENT ON COLUMN object.deleted IS 'If true the element is dead and must not be shown.';
+COMMENT ON COLUMN object.create_date IS 'Creation date of this Object (automatically setup by the BDD).';
+COMMENT ON COLUMN object.modify_date IS 'Modify date of this object (automatically updated by the BDD).';
 ''')
-connection.commit()
 
 c.execute('''
 CREATE TRIGGER set_timestamp
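The new check_exist() function is what the table definitions below lean on. A quick sanity check, assuming the connection and cursor opened earlier in this script (illustrative only, not part of the commit):

    c = connection.cursor()
    c.execute("SELECT check_exist('object', 1), check_exist('object', NULL)")
    print(c.fetchone())  # on an empty object table: (False, True) -- NULL ids are always accepted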
@@ -56,12 +127,17 @@ connection.commit()
 
 # Create table
 c.execute('''
-CREATE TABLE data(
+CREATE TABLE data (
 sha512 VARCHAR(129) NOT NULL,
-mime_type VARCHAR(50) NOT NULL,
+mime_type VARCHAR(128) NOT NULL,
 size BIGINT NOT NULL,
 original_name TEXT
-) INHERITS (object)
+) INHERITS (object);
+COMMENT ON TABLE data IS 'Data basic reference on the big data managed.';
+COMMENT ON COLUMN data.sha512 IS 'Unique Sha512 of the file.';
+COMMENT ON COLUMN data.mime_type IS 'Type of the object with his mine-type description.';
+COMMENT ON COLUMN data.size IS 'Size of the file in Byte.';
+COMMENT ON COLUMN data.original_name IS 'Name of the file when upload it in the BDD ==> MUST be remove later.';
 ''')
 connection.commit()
 
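For reference, the sha512 column holds the hex digest of the file content, which is 128 characters long (hence VARCHAR(129) leaves one spare). A minimal way to produce that value on the sending side (illustrative, not part of the commit):

    import hashlib

    def file_sha512(path):
        h = hashlib.sha512()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(1 << 20), b""):
                h.update(chunk)
        return h.hexdigest()  # 128 hex characters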
@@ -71,58 +147,70 @@ CREATE TABLE node (
 name TEXT NOT NULL,
 description TEXT
 ) INHERITS (object);
+COMMENT ON TABLE node IS 'Node is a basic element of what must be hierarchie apears.';
+COMMENT ON COLUMN node.name IS 'Name of the Node.';
+COMMENT ON COLUMN node.description IS 'Description of the Node.';
 ''')
 connection.commit()
 
 # Create table
 c.execute('''
 CREATE TABLE cover_link (
-id SERIAL PRIMARY KEY,
-deleted BOOLEAN NOT NULL DEFAULT false,
-node_id INTEGER REFERENCES object(id),
-data_id INTEGER REFERENCES object(id)
-);
+node_id INTEGER CHECK(check_exist('node', node_id)),
+data_id INTEGER CHECK(check_exist('data', data_id))
+) INHERITS (object);
+COMMENT ON TABLE cover_link IS 'Link between cover data id and Nodes.';
 ''')
 connection.commit()
 
 # Create table
 c.execute('''
 CREATE TABLE grp () INHERITS (node);
+COMMENT ON TABLE grp IS 'Group of the video.';
 ''')
 connection.commit()
 
 # Create table
 c.execute('''
 CREATE TABLE saison (
-group_id INTEGER REFERENCES object(id)
+group_id INTEGER CHECK(check_exist('grp', group_id))
 ) INHERITS (node);
+COMMENT ON TABLE saison IS 'Saison of the video.';
 ''')
 connection.commit()
 
 # Create table
 c.execute('''
 CREATE TABLE type () INHERITS (node);
+COMMENT ON TABLE type IS 'Type of the video.';
 ''')
 connection.commit()
 
 # Create table
 c.execute('''
 CREATE TABLE univers () INHERITS (node);
+COMMENT ON TABLE univers IS 'Univers of the video.';
 ''')
 connection.commit()
 
 # Create table
 c.execute('''
 CREATE TABLE video (
-data_id INTEGER REFERENCES object(id),
-type_id INTEGER REFERENCES object(id),
-univers_id INTEGER REFERENCES object(id),
-group_id INTEGER REFERENCES object(id),
-saison_id INTEGER REFERENCES object(id),
-episode INTEGER,
-date INTEGER, -- simple date in years of the creation of the media
-time INTEGER -- Time in second of the media
+data_id INTEGER CHECK(check_exist('data', data_id)),
+type_id INTEGER CHECK(check_exist('type', type_id)),
+univers_id INTEGER CHECK(check_exist('univers', univers_id)),
+group_id INTEGER CHECK(check_exist('grp', group_id)),
+saison_id INTEGER CHECK(check_exist('saison', saison_id)),
+episode INTEGER CHECK(episode >=0),
+date INTEGER CHECK(date > 1850),
+time INTEGER CHECK(time >= 0),
+age_limit INTEGER CHECK(age_limit >= 0)
 ) INHERITS (node);
+COMMENT ON TABLE video IS 'Video Media that is visible.';
+COMMENT ON COLUMN video.episode IS 'Number of the episode in the saison sequence.';
+COMMENT ON COLUMN video.date IS 'Simple date in years of the creation of the media.';
+COMMENT ON COLUMN video.time IS 'Time in second of the media';
+COMMENT ON COLUMN video.age_limit IS 'Limitation of the age to show the display';
 ''')
 
 # Save (commit) the changes
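The switch from REFERENCES object(id) to CHECK(check_exist(...)) works around a PostgreSQL inheritance limitation: foreign keys and unique constraints declared on a parent table do not cover rows stored in its INHERITS children, so a classic foreign key pointing at object or node would reject ids that only exist in grp, data, and so on. The CHECK simply queries the named table instead. Illustrative use, assuming the connection above:

    c = connection.cursor()
    c.execute("SELECT check_exist('grp', %s)", (42,))
    print(c.fetchone()[0])  # True only if some row of grp actually carries id 42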
@@ -134,15 +222,20 @@ connection.close()
 
 print(" =================================================== Send DATA ");
 import transfert_data
+data_mapping = transfert_data.transfert_db()
 print(" =================================================== Send TYPE ");
 import transfert_type
+type_mapping = transfert_type.transfert_db(data_mapping)
 print(" =================================================== Send GROUP ");
 import transfert_group
+group_mapping = transfert_group.transfert_db(data_mapping, type_mapping)
 print(" =================================================== Send SAISON ");
 import transfert_saison
-print(" =================================================== Send UNIVERS ");
-import transfert_univers
+saison_mapping = transfert_saison.transfert_db(data_mapping, type_mapping, group_mapping)
+#print(" =================================================== Send UNIVERS ");
+#import transfert_univers
+#univers_mapping = transfert_univers.transfert_db(data_mapping, type_mapping, group_mapping)
 print(" =================================================== Send VIDEO ");
 import transfert_video
+video_mapping = transfert_video.transfert_db(data_mapping, type_mapping, group_mapping, saison_mapping)
 
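Each transfert_* module now exposes a transfert_db() function that returns a dict mapping old ids (stored as strings) to the ids freshly allocated by kar_id_sequence; the later modules take those dicts and use them to rewire their references. Roughly, the shape being passed around is (values made up for illustration):

    data_mapping = {"None": None, "12": 103, "13": 104}
    old_data_id = 13
    new_data_id = data_mapping[str(old_data_id)]  # -> 104, usable as data_id in the new schema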
@@ -17,7 +17,6 @@ import copy
 from dateutil import parser
 
 import db
-connection = db.connect_bdd();
 
 def file_read_data(path):
 if not os.path.isfile(path):
@@ -27,18 +26,24 @@ def file_read_data(path):
 file.close()
 return data_file
 
-debug.info("Load old BDD: ")
-
-data = file_read_data('bdd_data.json')
-my_old_bdd = json.loads(data)
-
-debug.info("create the table:")
-
-c = connection.cursor()
-
-debug.info("insert elements: ")
-iii = 0;
-for elem in my_old_bdd:
+
+def transfert_db():
+out = {}
+out[str(None)] = None
+connection = db.connect_bdd();
+
+debug.info("Load old BDD: ")
+
+data = file_read_data('bdd_data.json')
+my_old_bdd = json.loads(data)
+
+debug.info("create the table:")
+
+c = connection.cursor()
+
+debug.info("insert elements: ")
+iii = 0;
+for elem in my_old_bdd:
 iii+=1;
 debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
 id = elem["id"]
@@ -47,13 +52,20 @@ for elem in my_old_bdd:
 original_name = elem["original_name"]
 sha512 = elem["sha512"]
 size = elem["size"]
-request_insert = (id, time_create, sha512, mime_type, size, original_name)
-c.execute('INSERT INTO data (id, create_date, sha512, mime_type, size, original_name) VALUES (%s,%s,%s,%s,%s,%s)', request_insert)
+if mime_type == "unknown" and len(original_name) > 3 and original_name[-3:] == "mkv":
+mime_type = "video/x-matroska"
+request_insert = (time_create, sha512, mime_type, size, original_name)
+c.execute('INSERT INTO data (create_date, sha512, mime_type, size, original_name) VALUES (%s,%s,%s,%s,%s) RETURNING id', request_insert)
+id_of_new_row = c.fetchone()[0]
+debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
+out[str(id)] = id_of_new_row
 
 # Save (commit) the changes
 connection.commit()
 
 # We can also close the connection if we are done with it.
 # Just be sure any changes have been committed or they will be lost.
 connection.close()
 
+return out
 
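The pattern introduced here, and repeated in every transfert_* script below, is: let the database allocate the id from kar_id_sequence, read it back with RETURNING id, and remember the old-to-new translation. A reduced sketch of that pattern, under the same psycopg2 assumptions as the scripts:

    def insert_returning_id(cursor, sql, values):
        # sql is expected to end with "RETURNING id"
        cursor.execute(sql, values)
        return cursor.fetchone()[0]

    # e.g. new_id = insert_returning_id(c, 'INSERT INTO data (...) VALUES (%s, ...) RETURNING id', request_insert)
    #      out[str(id)] = new_id   # old id -> new id, consumed by the later scripts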
@@ -18,7 +18,6 @@ from dateutil import parser
 import datetime
 
 import db
-connection = db.connect_bdd();
 
 
 def file_read_data(path):
@@ -29,21 +28,28 @@ def file_read_data(path):
 file.close()
 return data_file
 
-debug.info("Load old BDD: ")
-
-data = file_read_data('bdd_group.json')
-my_old_bdd = json.loads(data)
-
-debug.info("create the table:")
+
+def transfert_db(data_mapping, type_mapping):
+out = {}
+out[str(None)] = None
+
+connection = db.connect_bdd();
+
+debug.info("Load old BDD: ")
+
+data = file_read_data('bdd_group.json')
+my_old_bdd = json.loads(data)
+
+debug.info("create the table:")
 
 
 c = connection.cursor()
 
 
 debug.info("insert elements: ")
 iii = 0;
 for elem in my_old_bdd:
 iii+=1;
 debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
 id = elem["id"]
@@ -58,19 +64,24 @@ for elem in my_old_bdd:
 covers = elem["covers"]
 if covers == None:
 covers = [];
-request_insert = (id, name, description)
-c.execute('INSERT INTO grp (id, name, description) VALUES (%s,%s,%s)', request_insert)
+request_insert = (name, description)
+c.execute('INSERT INTO grp (name, description) VALUES (%s,%s) RETURNING id', request_insert)
+id_of_new_row = c.fetchone()[0]
+debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
+out[str(id)] = id_of_new_row
 connection.commit()
 for elem_cover in covers:
-request_insert = (id, elem_cover)
+request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
 print(" insert cover " + str(request_insert))
-c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
+c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
+connection.commit()
+# Save (commit) the changes
 connection.commit()
 
-# Save (commit) the changes
-connection.commit()
+# We can also close the connection if we are done with it.
+# Just be sure any changes have been committed or they will be lost.
+connection.close()
 
+return out;
 
-# We can also close the connection if we are done with it.
-# Just be sure any changes have been committed or they will be lost.
-connection.close()
 
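The out[str(None)] = None entry each function starts with is a sentinel: old records with no cover or no parent arrive as None, and since the mapping keys are stored as strings, str(None) becomes "None", so the same lookup serves both cases without a special branch. For illustration:

    group_mapping = {"None": None, "7": 215}
    for old_parent in (7, None):
        print(group_mapping[str(old_parent)])  # 215, then None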
@@ -18,7 +18,6 @@ from dateutil import parser
 import datetime
 
 import db
-connection = db.connect_bdd();
 
 
 def file_read_data(path):
@@ -29,18 +28,22 @@ def file_read_data(path):
 file.close()
 return data_file
 
-debug.info("Load old BDD: ")
+def transfert_db(data_mapping, type_mapping, group_mapping):
+out = {}
+out[str(None)] = None
+connection = db.connect_bdd();
+debug.info("Load old BDD: ")
 
 data = file_read_data('bdd_saison.json')
 my_old_bdd = json.loads(data)
 
 debug.info("create the table:")
 
 c = connection.cursor()
 
 debug.info("insert elements: ")
 iii = 0;
 for elem in my_old_bdd:
 iii+=1;
 debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
 id = elem["id"]
@@ -59,19 +62,25 @@ for elem in my_old_bdd:
 covers = elem["covers"]
 if covers == None:
 covers = [];
-request_insert = (id, name, description, group_id)
-c.execute('INSERT INTO saison (id, name, description, group_id) VALUES (%s,%s,%s,%s)', request_insert)
+request_insert = (name, description, group_mapping[str(group_id)])
+c.execute('INSERT INTO saison (name, description, group_id) VALUES (%s,%s,%s) RETURNING id', request_insert)
+id_of_new_row = c.fetchone()[0]
+debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
+out[str(id)] = id_of_new_row
 connection.commit()
 for elem_cover in covers:
-request_insert = (id, elem_cover)
+request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
 print(" insert cover " + str(request_insert))
-c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
+c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
 connection.commit()
 
 # Save (commit) the changes
 connection.commit()
 
+# We can also close the connection if we are done with it.
+# Just be sure any changes have been committed or they will be lost.
+connection.close()
 
+return out
 
-# We can also close the connection if we are done with it.
-# Just be sure any changes have been committed or they will be lost.
-connection.close()
 
@@ -18,7 +18,6 @@ from dateutil import parser
 import datetime
 
 import db
-connection = db.connect_bdd();
 
 
 def file_read_data(path):
@@ -29,18 +28,22 @@ def file_read_data(path):
 file.close()
 return data_file
 
-debug.info("Load old BDD: ")
+def transfert_db(data_mapping):
+out = {}
+out[str(None)] = None
+connection = db.connect_bdd();
+debug.info("Load old BDD: ")
 
 data = file_read_data('bdd_type.json')
 my_old_bdd = json.loads(data)
 
 debug.info("create the table:")
 
 c = connection.cursor()
 
 debug.info("insert elements: ")
 iii = 0;
 for elem in my_old_bdd:
 iii+=1;
 debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
 id = elem["id"]
@@ -55,19 +58,23 @@ for elem in my_old_bdd:
 covers = elem["covers"]
 if covers == None:
 covers = [];
-request_insert = (id, name, description)
-c.execute('INSERT INTO type (id, name, description) VALUES (%s,%s,%s)', request_insert)
+request_insert = (name, description)
+c.execute('INSERT INTO type (name, description) VALUES (%s,%s) RETURNING id', request_insert)
+id_of_new_row = c.fetchone()[0]
+debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
+out[str(id)] = id_of_new_row
 connection.commit()
 for elem_cover in covers:
-request_insert = (id, elem_cover)
+request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
 print(" insert cover " + str(request_insert))
-c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
+c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
 connection.commit()
 
 # Save (commit) the changes
 connection.commit()
 
 # We can also close the connection if we are done with it.
 # Just be sure any changes have been committed or they will be lost.
 connection.close()
+return out
 
@@ -18,7 +18,6 @@ from dateutil import parser
 import datetime
 
 import db
-connection = db.connect_bdd();
 
 
 def file_read_data(path):
@@ -29,18 +28,22 @@ def file_read_data(path):
 file.close()
 return data_file
 
-debug.info("Load old BDD: ")
+def transfert_db():
+out = {}
+out[str(None)] = None
+connection = db.connect_bdd();
+debug.info("Load old BDD: ")
 
 data = file_read_data('bdd_univers.json')
 my_old_bdd = json.loads(data)
 
 debug.info("create the table:")
 
 c = connection.cursor()
 
 debug.info("insert elements: ")
 iii = 0;
 for elem in my_old_bdd:
 iii+=1;
 debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
 id = elem["id"]
@@ -55,19 +58,23 @@ for elem in my_old_bdd:
 covers = elem["covers"]
 if covers == None:
 covers = [];
-request_insert = (id, name, description)
-c.execute('INSERT INTO univers (id, name, description) VALUES (%s,%s,%s)', request_insert)
+request_insert = (name, description)
+c.execute('INSERT INTO univers (name, description) VALUES (%s,%s) RETURNING id', request_insert)
+id_of_new_row = c.fetchone()[0]
+debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
+out[str(id)] = id_of_new_row
 connection.commit()
 for elem_cover in covers:
-request_insert = (id, elem_cover)
+request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
 print(" insert cover " + str(request_insert))
-c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
+c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
 connection.commit()
 
 # Save (commit) the changes
 connection.commit()
 
 # We can also close the connection if we are done with it.
 # Just be sure any changes have been committed or they will be lost.
 connection.close()
+return out
 
@@ -18,8 +18,14 @@ from dateutil import parser
 import datetime
 
 import db
-connection = db.connect_bdd();
 
+def force_number(s):
+if s == None:
+return None;
+try:
+return int(s)
+except ValueError:
+return None
 
 def file_read_data(path):
 if not os.path.isfile(path):
@@ -29,19 +35,23 @@ def file_read_data(path):
 file.close()
 return data_file
 
-debug.info("Load old BDD: ")
+def transfert_db(data_mapping, type_mapping, group_mapping, saison_mapping):
+out = {}
+out[str(None)] = None
+connection = db.connect_bdd();
+debug.info("Load old BDD: ")
 
 data = file_read_data('bdd_video.json')
 my_old_bdd = json.loads(data)
 
 debug.info("create the table:")
 
 c = connection.cursor()
 
 
 debug.info("insert elements: ")
 iii = 0;
 for elem in my_old_bdd:
 iii+=1;
 debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
 id = elem["id"]
@@ -81,6 +91,9 @@ for elem in my_old_bdd:
 date = None
 else:
 date = elem["date"]
+date = force_number(date)
+if date != None and date < 1850:
+date = None
 if "episode" not in elem.keys():
 episode = None
 else:
@@ -89,32 +102,35 @@ for elem in my_old_bdd:
 time = None
 else:
 time = elem["time"]
-request_insert = (id, time_create, name, description, data_id, type_id, univers_id, group_id, saison_id, date, episode, time)
-c.execute('INSERT INTO video (id, create_date, name, description, data_id, type_id, univers_id, group_id, saison_id, date, episode, time) VALUES (%s,false,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', request_insert)
+request_insert = (time_create, name, description, data_mapping[str(data_id)], type_mapping[str(type_id)], group_mapping[str(group_id)], saison_mapping[str(saison_id)], force_number(date), force_number(episode), time)
+c.execute('INSERT INTO video (create_date, name, description, data_id, type_id, group_id, saison_id, date, episode, time) VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) RETURNING id', request_insert)
 
+id_of_new_row = c.fetchone()[0]
+debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
+out[str(id)] = id_of_new_row
 connection.commit()
 for elem_cover in covers:
-request_insert = (id, elem_cover)
+request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
 print(" insert cover " + str(request_insert))
-c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s)', request_insert)
+c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
 connection.commit()
 
 # Save (commit) the changes
 connection.commit()
 
 # def dict_factory(cursor, row):
 # d = {}
 # for idx, col in enumerate(cursor.description):
 # d[col[0]] = row[idx]
 # return d
 
 # conn.row_factory = dict_factory
 # c = conn.cursor()
 # c.execute('SELECT * FROM video WHERE deleted=false')
 # results = c.fetchall()
 # print(results)
 
 # We can also close the connection if we are done with it.
 # Just be sure any changes have been committed or they will be lost.
 connection.close()
 
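The new force_number() helper exists to satisfy the CHECK(date > 1850) and CHECK(episode >= 0) constraints added to the video table: anything that is not a clean integer becomes None (NULL) instead of making the INSERT fail. Its behaviour, for reference:

    print(force_number("1998"))     # 1998
    print(force_number("unknown"))  # None
    print(force_number(None))       # None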