From 935dac6404ab54cebd75cb8e08f3c8245cdabae2 Mon Sep 17 00:00:00 2001
From: Mehdi Katranji
Date: Sat, 15 Feb 2020 09:08:07 +0100
Subject: [PATCH 1/3] feat: postgres docker

---
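Note: recent builds of the official postgres image refuse to initialize
unless a superuser password is provided, hence the explicit
POSTGRES_PASSWORD below; the value matches what the migration scripts
(db.py) and the REST API (SANIC_DB_PASSWORD) use. A minimal sketch to
check the container from the host through the published port --
check_postgres.py is a hypothetical helper, not part of this patch:

    # check_postgres.py
    import psycopg2

    # 43522 is the host port mapped to the container's 5432 below.
    conn = psycopg2.connect(dbname="karideo", user="postgres",
                            password="postgres", host="localhost",
                            port="43522")
    print(conn.server_version)  # e.g. 120001 for PostgreSQL 12.1
    conn.close()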
 postgres/docker-compose.yaml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)
 create mode 100644 postgres/docker-compose.yaml

diff --git a/postgres/docker-compose.yaml b/postgres/docker-compose.yaml
new file mode 100644
index 0000000..1222458
--- /dev/null
+++ b/postgres/docker-compose.yaml
@@ -0,0 +1,15 @@
+version: "3.7"
+
+services:
+  karideo_postgres:
+    restart: always
+    image: postgres:alpine
+    environment:
+      PGDATA: /var/lib/postgresql/data
+      POSTGRES_DB: karideo
+      POSTGRES_PASSWORD: postgres # must match db.py and SANIC_DB_PASSWORD
+    ports:
+      - "43522:5432"
+    volumes:
+      - ./data:/var/lib/postgresql/data
+

From d7d916bddd79c24347f5d7a34b9dcd142e2b6558 Mon Sep 17 00:00:00 2001
From: Mehdi Katranji
Date: Sat, 15 Feb 2020 09:08:40 +0100
Subject: [PATCH 2/3] feat: transfer_bdd to postgres

---
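Note: every transfert_*.py script now reuses the single psycopg2
connection exported by db.py instead of opening its own sqlite3 file.
Since all tables land in the one "karideo" database, each table gets a
distinct name; the group table becomes karideo_group because GROUP is a
reserved word in SQL. The DB-API paramstyle also changes, from
sqlite3's qmark placeholders to psycopg2's format placeholders:

    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)        # sqlite3 (old)
    c.execute('INSERT INTO data VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)  # psycopg2 (new)

One caveat: SANIC_DB_ADDRESS is set to "postgres", which assumes the API
container can resolve a host of that name; the service added in patch 1
is named karideo_postgres and lives in a separate compose file, so the
two may still need a shared network or an address override.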
 back/docker-compose.yaml                      | 10 ++++--
 back/transfer_bdd/v0.0...v1.0/db.py           |  7 ++++
 .../v0.0...v1.0/transfert_data.py             | 12 +++----
 .../v0.0...v1.0/transfert_group.py            | 10 +++---
 .../v0.0...v1.0/transfert_saison.py           | 10 +++---
 .../v0.0...v1.0/transfert_type.py             | 10 +++---
 ...nsfert_usivers.py => transfert_univers.py} | 10 +++---
 .../v0.0...v1.0/transfert_video.py            | 32 +++++++++++++++----------
 8 files changed, 51 insertions(+), 50 deletions(-)
 create mode 100644 back/transfer_bdd/v0.0...v1.0/db.py
 rename back/transfer_bdd/v0.0...v1.0/{transfert_usivers.py => transfert_univers.py} (90%)

diff --git a/back/docker-compose.yaml b/back/docker-compose.yaml
index bf4ce73..0ee813b 100755
--- a/back/docker-compose.yaml
+++ b/back/docker-compose.yaml
@@ -1,12 +1,18 @@
-version: '3'
+version: "3.7"
+
 services:
   REST_video_service:
     build: src
     restart: always
+    environment:
+      SANIC_DB_ADDRESS: postgres
+      SANIC_DB_NAME: karideo
+      SANIC_DB_USER: postgres
+      SANIC_DB_PASSWORD: postgres
     image: yui.heero/video_rest_api
     container_name: video_rest_api
     ports:
-      - 15080:80
+      - "15080:80"
     volumes:
       - ./data/data_karideo:/application/data
 
diff --git a/back/transfer_bdd/v0.0...v1.0/db.py b/back/transfer_bdd/v0.0...v1.0/db.py
new file mode 100644
index 0000000..fd54156
--- /dev/null
+++ b/back/transfer_bdd/v0.0...v1.0/db.py
@@ -0,0 +1,7 @@
+from realog import debug
+
+import psycopg2
+
+debug.info("connect BDD: ")
+
+conn = psycopg2.connect(dbname="karideo", user="postgres", password="postgres", host="localhost", port="43522")
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_data.py b/back/transfer_bdd/v0.0...v1.0/transfert_data.py
index 5af0d1d..01350ee 100755
--- a/back/transfer_bdd/v0.0...v1.0/transfert_data.py
+++ b/back/transfer_bdd/v0.0...v1.0/transfert_data.py
@@ -16,6 +16,8 @@ import random
 import copy
 from dateutil import parser
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -29,10 +31,6 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_data.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_data.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
@@ -44,8 +42,8 @@ CREATE TABLE data(
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     sha512 TEXT NOT NULL,
     mime_type TEXT NOT NULL,
-    size INTEGER NOT NULL,
-    create_date INTEGER NOT NULL,
+    size BIGINT NOT NULL,
+    create_date BIGINT NOT NULL,
     original_name TEXT)
 ''')
@@ -64,7 +62,7 @@ for elem in my_old_bdd:
     sha512 = elem["sha512"]
     size = elem["size"]
     request_insert = (id, sha512, mime_type, size, new_time, original_name)
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO data VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_group.py b/back/transfer_bdd/v0.0...v1.0/transfert_group.py
index 6bb9749..61dbede 100755
--- a/back/transfer_bdd/v0.0...v1.0/transfert_group.py
+++ b/back/transfer_bdd/v0.0...v1.0/transfert_group.py
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_group.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_group.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE karideo_group (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -79,7 +77,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO karideo_group VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_saison.py b/back/transfer_bdd/v0.0...v1.0/transfert_saison.py
index b6ed6f1..0cbfe39 100755
--- a/back/transfer_bdd/v0.0...v1.0/transfert_saison.py
+++ b/back/transfer_bdd/v0.0...v1.0/transfert_saison.py
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_saison.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_saison.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE saison (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -84,7 +82,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, group_id, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO saison VALUES (%s,0,%s,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_type.py b/back/transfer_bdd/v0.0...v1.0/transfert_type.py
index 374ec3f..d3057ae 100755
--- a/back/transfer_bdd/v0.0...v1.0/transfert_type.py
+++ b/back/transfer_bdd/v0.0...v1.0/transfert_type.py
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_type.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_type.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE type (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -79,7 +77,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO type VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_usivers.py b/back/transfer_bdd/v0.0...v1.0/transfert_univers.py
similarity index 90%
rename from back/transfer_bdd/v0.0...v1.0/transfert_usivers.py
rename to back/transfer_bdd/v0.0...v1.0/transfert_univers.py
index c4afbe5..ff675ff 100755
--- a/back/transfer_bdd/v0.0...v1.0/transfert_usivers.py
+++ b/back/transfer_bdd/v0.0...v1.0/transfert_univers.py
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_univers.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_univers.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE univers (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -79,7 +77,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO univers VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_video.py b/back/transfer_bdd/v0.0...v1.0/transfert_video.py
index f98f5fb..b226d77 100755
--- a/back/transfer_bdd/v0.0...v1.0/transfert_video.py
+++ b/back/transfer_bdd/v0.0...v1.0/transfert_video.py
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_video.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_video.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE video (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -53,7 +51,7 @@ CREATE TABLE data (
     univers_id INTEGER,
     group_id INTEGER,
     saison_id INTEGER,
-    date INTEGER,
+    date VARCHAR,
     episode INTEGER,
     time INTEGER)
 ''')
@@ -123,22 +121,22 @@ for elem in my_old_bdd:
     else:
         time = elem["time"]
     request_insert = (id, new_time, modify_time, name, description, list_to_string(covers), data_id, type_id, univers_id, group_id, saison_id, date, episode, time)
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?,?,?,?,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO video VALUES (%s,0,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
 
-def dict_factory(cursor, row):
-    d = {}
-    for idx, col in enumerate(cursor.description):
-        d[col[0]] = row[idx]
-    return d
+# def dict_factory(cursor, row):
+#     d = {}
+#     for idx, col in enumerate(cursor.description):
+#         d[col[0]] = row[idx]
+#     return d
 
-conn.row_factory = dict_factory
-c = conn.cursor()
-c.execute('SELECT * FROM data WHERE deleted=false')
-results = c.fetchall()
-print(results)
+# conn.row_factory = dict_factory
+# c = conn.cursor()
+# c.execute('SELECT * FROM video WHERE deleted=false')
+# results = c.fetchall()
+# print(results)
 
 # We can also close the connection if we are done with it.
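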
 # Just be sure any changes have been committed or they will be lost.

From ad7e26d77ec5730f93a5eb514732eb9d1d859066 Mon Sep 17 00:00:00 2001
From: Mehdi Katranji
Date: Sat, 15 Feb 2020 09:13:56 +0100
Subject: [PATCH 3/3] feat: add postgres/data in gitignore

---

Note: postgres/data is the ./data bind mount declared in patch 1; it
receives the whole PGDATA cluster as soon as the container starts, so
it must stay out of version control.

 .gitignore | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/.gitignore b/.gitignore
index edb9cea..ce865d4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -6,6 +6,9 @@
 /tmp
 /out-tsc
 
+# postgres
+/postgres/data
+
 dataPush
 node_modules
 
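Note: a possible end-to-end replay of the series, as a sketch only --
it assumes the bdd_*.json dumps sit next to the scripts and that the
container from patch 1 is already up; run_transfer.py is a hypothetical
driver, not part of these patches:

    # run_transfer.py
    import subprocess

    # The scripts are independent (one table each); every one of them
    # opens its own connection via "from db import conn".
    for script in ("transfert_data.py", "transfert_type.py",
                   "transfert_univers.py", "transfert_group.py",
                   "transfert_saison.py", "transfert_video.py"):
        subprocess.run(["python3", script], check=True)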