Merge pull request #9 from meakio/master
Issue #1 - Postgres: only one database name left to change (group), possibly the data directory for the database to change, and the environment variables to remove...
This commit is contained in: commit e2b5f53f83
.gitignore (vendored, +3)
@@ -6,6 +6,9 @@
 /tmp
 /out-tsc
 
+# postgres
+/postgres/data
+
 dataPush
 node_modules
 
@@ -1,12 +1,18 @@
-version: '3'
+version: "3.7"
 
 services:
   REST_video_service:
     build: src
     restart: always
+    environment:
+      SANIC_DB_ADDRESS: postgres
+      SANIC_DB_NAME: karideo
+      SANIC_DB_USER: postgres
+      SANIC_DB_PASSWORD: postgres
     image: yui.heero/video_rest_api
     container_name: video_rest_api
     ports:
-      - 15080:80
+      - "15080:80"
     volumes:
       - ./data/data_karideo:/application/data
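The added SANIC_* variables lean on Sanic's behavior of loading any SANIC_-prefixed environment variable into app.config with the prefix stripped. A minimal sketch of how the REST service could consume them; the app name and the fallback defaults are assumptions, not taken from this repository:

# Sketch only: Sanic turns SANIC_DB_ADDRESS into app.config["DB_ADDRESS"].
# Fallback values below are illustrative assumptions.
import psycopg2
from sanic import Sanic

app = Sanic("video_rest_api")  # hypothetical app name

conn = psycopg2.connect(
    host=app.config.get("DB_ADDRESS", "localhost"),
    dbname=app.config.get("DB_NAME", "karideo"),
    user=app.config.get("DB_USER", "postgres"),
    password=app.config.get("DB_PASSWORD", "postgres"),
)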
back/transfer_bdd/v0.0...v1.0/db.py (new file, +7)
@@ -0,0 +1,7 @@
+from realog import debug
+
+import psycopg2
+
+debug.info("connect BDD: ")
+
+conn = psycopg2.connect(dbname="karideo", user="postgres", password="postgres", host="localhost", port="43522")
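Centralizing the psycopg2 connection in db.py lets every migration script below drop its local sqlite3.connect(...) in favor of one shared import. Roughly, each script now follows this pattern (a sketch of what the diffs below apply, not a new file in the PR):

from db import conn  # one shared PostgreSQL connection for all scripts

c = conn.cursor()
# ... create table, insert rows ...
conn.commit()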
@@ -16,6 +16,8 @@ import random
 import copy
 from dateutil import parser
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -29,10 +31,6 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_data.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_data.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
@@ -44,8 +42,8 @@ CREATE TABLE data(
     deleted INTEGER,
     sha512 TEXT NOT NULL,
     mime_type TEXT NOT NULL,
-    size INTEGER NOT NULL,
-    create_date INTEGER NOT NULL,
+    size BIGINT NOT NULL,
+    create_date BIGINT NOT NULL,
     original_name TEXT)
 ''')
 
@@ -64,7 +62,7 @@ for elem in my_old_bdd:
     sha512 = elem["sha512"]
     size = elem["size"]
     request_insert = (id, sha512, mime_type, size, new_time, original_name)
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO data VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
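Two details of this hunk are easy to miss. First, sqlite3 uses the qmark paramstyle (?) while psycopg2 uses the format style (%s); the values are still passed as a tuple, so the query stays parameterized either way. Second, PostgreSQL's INTEGER is a fixed 4-byte type, unlike SQLite's variable-width integers, so file sizes beyond ~2 GB and millisecond epoch timestamps overflow it; hence BIGINT. A hedged illustration (the sample row is invented):

from db import conn  # shared connection from db.py above

c = conn.cursor()
# Invented sample row: id, sha512, mime_type, size, create_date, original_name.
# Both size and create_date exceed the 32-bit INTEGER range, which is
# exactly why the columns were widened to BIGINT.
row = (1, "deadbeef" * 16, "video/mp4", 3_000_000_000, 1_600_000_000_000, "clip.mp4")

# sqlite3 (old):  c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', row)
# psycopg2 (new):
c.execute('INSERT INTO data VALUES (%s,0,%s,%s,%s,%s,%s)', row)
conn.commit()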
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_group.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_group.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE karideo_group (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -79,7 +77,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO karideo_group VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
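Choosing karideo_group rather than plain group (the name the commit message flags as still needing a change) also sidesteps a PostgreSQL pitfall: GROUP is a reserved word in SQL, so a table literally named group would need double quotes in every statement. A small sketch, assuming the table has been populated:

from db import conn

c = conn.cursor()
c.execute('SELECT COUNT(*) FROM karideo_group')  # fine
# c.execute('SELECT COUNT(*) FROM group')        # syntax error: reserved word
print(c.fetchone()[0])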
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_saison.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_saison.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE saison (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -84,7 +82,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, group_id, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO saison VALUES (%s,0,%s,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_type.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_type.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE type (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -79,7 +77,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO type VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_univers.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_univers.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE univers (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -79,7 +77,7 @@ for elem in my_old_bdd:
     if covers == None:
         covers = [];
     request_insert = (id, new_time, new_time, name, description, list_to_string(covers))
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO univers VALUES (%s,0,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
@@ -17,6 +17,8 @@ import copy
 from dateutil import parser
 import datetime
 
+from db import conn
+
 def file_read_data(path):
     if not os.path.isfile(path):
         return ""
@@ -30,17 +32,13 @@ debug.info("Load old BDD: ")
 data = file_read_data('bdd_video.json')
 my_old_bdd = json.loads(data)
 
-debug.info("open new BDD: ")
-import sqlite3
-conn = sqlite3.connect('bdd_video.db3')
-
 debug.info("create the table:")
 
 c = conn.cursor()
 
 # Create table
 c.execute('''
-CREATE TABLE data (
+CREATE TABLE video (
     id INTEGER PRIMARY KEY,
     deleted INTEGER,
     create_date INTEGER NOT NULL,
@@ -53,7 +51,7 @@ CREATE TABLE data (
     univers_id INTEGER,
     group_id INTEGER,
     saison_id INTEGER,
-    date INTEGER,
+    date VARCHAR,
     episode INTEGER,
     time INTEGER)
 ''')
@@ -123,22 +121,22 @@ for elem in my_old_bdd:
     else:
         time = elem["time"]
     request_insert = (id, new_time, modify_time, name, description, list_to_string(covers), data_id, type_id, univers_id, group_id, saison_id, date, episode, time)
-    c.execute('INSERT INTO data VALUES (?,0,?,?,?,?,?,?,?,?,?,?,?,?,?)', request_insert)
+    c.execute('INSERT INTO video VALUES (%s,0,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', request_insert)
 
 # Save (commit) the changes
 conn.commit()
 
-def dict_factory(cursor, row):
-    d = {}
-    for idx, col in enumerate(cursor.description):
-        d[col[0]] = row[idx]
-    return d
+# def dict_factory(cursor, row):
+#     d = {}
+#     for idx, col in enumerate(cursor.description):
+#         d[col[0]] = row[idx]
+#     return d
 
-conn.row_factory = dict_factory
-c = conn.cursor()
-c.execute('SELECT * FROM data WHERE deleted=false')
-results = c.fetchall()
-print(results)
+# conn.row_factory = dict_factory
+# c = conn.cursor()
+# c.execute('SELECT * FROM video WHERE deleted=false')
+# results = c.fetchall()
+# print(results)
 
 # We can also close the connection if we are done with it.
 # Just be sure any changes have been committed or they will be lost.
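The commented-out dict_factory was sqlite3's hook for returning rows as dicts; psycopg2 ships an equivalent in psycopg2.extras, so the dead code could be replaced rather than kept. A sketch of that replacement (the deleted = 0 filter assumes the INTEGER flag defined in the schema above):

import psycopg2.extras

from db import conn

c = conn.cursor(cursor_factory=psycopg2.extras.RealDictCursor)
c.execute('SELECT * FROM video WHERE deleted = 0')
print(c.fetchall())  # rows come back as dicts keyed by column name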
postgres/docker-compose.yaml (new file, +14)
@@ -0,0 +1,14 @@
+version: "3.7"
+
+services:
+  karideo_postgres:
+    restart: always
+    image: postgres:alpine
+    environment:
+      PGDATA: /var/lib/postgresql/data
+      POSTGRES_DB: karideo
+    ports:
+      - "43522:5432"
+    volumes:
+      - ./data:/var/lib/postgresql/data
+
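The published host port 43522 is the same port hard-coded in back/transfer_bdd/v0.0...v1.0/db.py, so the migration scripts can run against this container from the host. A quick connectivity check, assuming the superuser password matches the "postgres" that db.py uses (the compose file itself sets no POSTGRES_PASSWORD):

import psycopg2

conn = psycopg2.connect(dbname="karideo", user="postgres",
                        password="postgres", host="localhost", port="43522")
with conn.cursor() as c:
    c.execute("SELECT version()")
    print(c.fetchone()[0])
conn.close()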