diff --git a/back/.gitignore b/back/.gitignore
deleted file mode 100644
index 0445b23..0000000
--- a/back/.gitignore
+++ /dev/null
@@ -1,9 +0,0 @@
-#config.*
-config.env
-.env
-config
-data
-cache
-
-__pycache__
-*.pyc
diff --git a/back/CheckStyle.xml b/back/CheckStyle.xml
new file mode 100755
index 0000000..d68aedd
--- /dev/null
+++ b/back/CheckStyle.xml
@@ -0,0 +1,66 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/back/CleanUp.xml b/back/CleanUp.xml
new file mode 100644
index 0000000..9df98d2
--- /dev/null
+++ b/back/CleanUp.xml
@@ -0,0 +1,66 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/back/Dockerfile b/back/Dockerfile
new file mode 100644
index 0000000..68f1d8f
--- /dev/null
+++ b/back/Dockerfile
@@ -0,0 +1,22 @@
+FROM maven:3.6.3-openjdk-16 AS build
+
+COPY pom.xml /tmp/
+COPY src /tmp/src/
+WORKDIR /tmp/
+RUN mvn clean compile assembly:single
+
+
+
+FROM bellsoft/liberica-openjdk-alpine:latest
+ENV LANG=C.UTF-8
+
+
+RUN mkdir /application/
+COPY --from=build /tmp/out/maven/*.jar /application/application.jar
+#ADD scenarium-oauth.jar /application/
+WORKDIR /application/
+
+EXPOSE 18080
+
+CMD ["java", "-cp", "/application/application.jar", "org.kar.karideo.WebLauncher"]
+
diff --git a/back/Formatter.xml b/back/Formatter.xml
new file mode 100644
index 0000000..b775e22
--- /dev/null
+++ b/back/Formatter.xml
@@ -0,0 +1,366 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/back/LICENSE b/back/LICENSE
new file mode 100644
index 0000000..a9d1e81
--- /dev/null
+++ b/back/LICENSE
@@ -0,0 +1,6 @@
+PROPRIETARY licence
+===================
+
+Copyright at Edouard DUPIN
+
+you have no right
\ No newline at end of file
diff --git a/back/README.md b/back/README.md
new file mode 100644
index 0000000..10b2059
--- /dev/null
+++ b/back/README.md
@@ -0,0 +1,17 @@
+Generic backend for karideo in java
+===================================
+
+
+
+
+mvn install
+
+// create a single package jar
+mvn clean compile assembly:single
+
+
+
+java -cp out/maven/karideo-0.1.0-jar-with-dependencies.jar org.kar.karideo.WebLauncher
+
+
+
diff --git a/back/config_sample.env b/back/config_sample.env
deleted file mode 100644
index 7c7aabd..0000000
--- a/back/config_sample.env
+++ /dev/null
@@ -1,9 +0,0 @@
-# sample value with default config:
-
-#DB_HOSTNAME=localhost
-#DB_PORT=15032
-#DB_NAME=karideo
-#DB_USER=root
-#DB_PASSWORD=postgress_password
-
-
diff --git a/back/docker-compose.yaml b/back/docker-compose.yaml
deleted file mode 100755
index a7ec009..0000000
--- a/back/docker-compose.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-version: "3.7"
-
-services:
- REST_video_service:
- build: src
- restart: always
- image: yui.heero/video_rest_api
- container_name: video_rest_api
- ports:
- - "15080:80"
- volumes:
- - /workspace/data/karideo/media:/application/data
- - ./default_images:/default_images:ro
- env_file:
- - ./config.env
-
-
-
-
diff --git a/back/karideo-back.iml b/back/karideo-back.iml
new file mode 100644
index 0000000..c56ba78
--- /dev/null
+++ b/back/karideo-back.iml
@@ -0,0 +1,61 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/back/karideo.iml b/back/karideo.iml
new file mode 100644
index 0000000..c56ba78
--- /dev/null
+++ b/back/karideo.iml
@@ -0,0 +1,61 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/back/module-info.java b/back/module-info.java
new file mode 100644
index 0000000..e50dfdb
--- /dev/null
+++ b/back/module-info.java
@@ -0,0 +1,12 @@
+/** Basic module interface.
+ *
+ * @author Edouard DUPIN */
+
+open module io.scenarium.store {
+ exports io.scenarium.oauth;
+ requires java.util;
+ requires javax.ws.rs.api;
+ requires java.xml.bind;
+ requires jackson.annotations;
+ requires jersey.server;
+}
diff --git a/back/pom.xml b/back/pom.xml
new file mode 100644
index 0000000..b9eb909
--- /dev/null
+++ b/back/pom.xml
@@ -0,0 +1,247 @@
+
+ 4.0.0
+ kar
+ karideo
+ 0.1.0
+
+ 2.1
+ 2.32
+ 2.3.1
+ 3.0.7
+
+ 3.1
+ 14
+ 14
+
+ 3.1.1
+
+
+
+
+
+ org.glassfish.jersey
+ jersey-bom
+ ${jersey.version}
+ pom
+ import
+
+
+
+
+
+
+
+ org.glassfish.jersey.media
+ jersey-media-multipart
+ ${jersey.version}
+
+
+ org.glassfish.jersey.inject
+ jersey-hk2
+ ${jersey.version}
+
+
+ org.glassfish.jersey.containers
+ jersey-container-grizzly2-http
+ ${jersey.version}
+
+
+ javax.xml.bind
+ jaxb-api
+ ${jaxb.version}
+
+
+ javax.ws.rs
+ javax.ws.rs-api
+ 2.1.1
+
+
+ com.sun.xml.bind
+ jaxb-impl
+ ${jaxb.version}
+
+
+ com.sun.istack
+ istack-commons-runtime
+ ${istack.version}
+
+
+ org.glassfish.jersey.test-framework.providers
+ jersey-test-framework-provider-grizzly2
+ test
+
+
+ mysql
+ mysql-connector-java
+ 5.1.45
+
+
+ org.glassfish.jersey.media
+ jersey-media-json-jackson
+ ${jersey.version}
+
+
+ com.fasterxml.jackson.core
+ jackson-databind
+ 2.8.10
+
+
+ javax.servlet
+ javax.servlet-api
+ 3.0.1
+ compile
+
+
+ org.jetbrains
+ annotations
+ RELEASE
+ compile
+
+
+
+
+ src
+ test/src
+ ${project.basedir}/out/maven/
+
+
+ org.apache.maven.plugins
+ maven-compiler-plugin
+ ${maven.compiler.version}
+
+
+ ${maven.compiler.target}
+
+
+
+
+ org.codehaus.mojo
+ exec-maven-plugin
+ 1.4.0
+
+ io.scenarium.oauth.WebLauncher
+
+
+
+
+ org.apache.maven.plugins
+ maven-source-plugin
+
+
+ attach-sources
+
+ jar
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+ 3.0.0-M5
+
+
+ maven-assembly-plugin
+
+
+
+ fully.qualified.MainClass
+
+
+
+ jar-with-dependencies
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ 3.2.0
+
+ private
+ true
+
+
+
+
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-javadoc-plugin
+ 3.2.0
+
+ public
+
+
+
+
+
+
\ No newline at end of file
diff --git a/back/readme.md b/back/readme.md
deleted file mode 100755
index 1188ade..0000000
--- a/back/readme.md
+++ /dev/null
@@ -1,63 +0,0 @@
-REST video API
-==============
-
-REST API for video streaming for personal web / application interface
-
-
-Download the project
-====================
-
-simply download the application:
-```
-mkdir WORKSPACE & cd $_
-git clone http://xxx/HeeroYui/rest_video.git restvideo
-cd rest_video
-```
-
-**Note:** It is important to remove ```-``` and ```_``` becose some docker remove these element in the network name _(like ubuntu ...)_
-**Note:** The networkname of a docker compose is ```thefoldername_default```
-
-
-Run the application
-===================
-
-Create configuration:
-```
-cp config_sample.env config.env
-# set your server IP in the hostname
-vim config.env
-```
-
-Start the application:
-```
-docker-compose up -d
-```
-
-Stop the application:
-```
-docker-compose down
-```
-
-Restart the application (on the fly):
-```
-docker-compose up -d --force-recreate --build
-```
-
-
-
-Run the application (debug)
-===========================
-before the first run:
-```
-cp -r data_base data
-```
-
-```
-./src/app_video.py
-```
-
-or
-```
-SANIC_REST_PORT=15080 ./src/app_video.py
-```
-
diff --git a/back/release_karideo_back/Dockerfile b/back/release_karideo_back/Dockerfile
new file mode 100644
index 0000000..bf60360
--- /dev/null
+++ b/back/release_karideo_back/Dockerfile
@@ -0,0 +1,17 @@
+FROM bellsoft/liberica-openjdk-alpine:latest
+
+ENV LANG=C.UTF-8
+#ENV JAVA_HOME=/usr/lib/jvm/java-14-openjdk
+#ENV JAVAFX_HOME=$JAVA_HOME
+#ENV PATH=/usr/lib/jvm/java-14-openjdk/bin/:$PATH
+#ENV JAVA_VERSION=14.0.2
+
+
+RUN mkdir /application/
+ADD karideo.jar /application/
+WORKDIR /application/
+
+EXPOSE 18080
+
+CMD ["java", "-cp", "/application/karideo.jar", "org.kar.karideo.WebLauncher"]
+
diff --git a/back/release_karideo_back/docker-compose.yaml b/back/release_karideo_back/docker-compose.yaml
new file mode 100644
index 0000000..da601c1
--- /dev/null
+++ b/back/release_karideo_back/docker-compose.yaml
@@ -0,0 +1,12 @@
+version: '3'
+services:
+ karideo_back_service_2:
+ build: .
+ restart: always
+ image: org.kar/karideo
+ container_name: org.kar.karideo
+ ports:
+ - 22080:18080
+ volumes:
+ - ./properties.txt:/application/properties.txt
+ - /workspace/data/karideo/media:/application/data
diff --git a/back/release_karideo_back/karideo.jar b/back/release_karideo_back/karideo.jar
new file mode 100644
index 0000000..03eb719
Binary files /dev/null and b/back/release_karideo_back/karideo.jar differ
diff --git a/back/release_karideo_back/properties.txt b/back/release_karideo_back/properties.txt
new file mode 100644
index 0000000..b9d43f2
--- /dev/null
+++ b/back/release_karideo_back/properties.txt
@@ -0,0 +1,9 @@
+org.kar.karideo.dataTmpFolder=/application/data/tmp
+org.kar.karideo.dataFolder=/application/data/media
+org.kar.karideo.rest.oauth=http://192.168.1.156:21080/oauth/api/
+org.kar.karideo.db.host=192.168.1.156
+org.kar.karideo.db.port=20306
+org.kar.karideo.db.login=root
+org.kar.karideo.db.password=klkhj456gkgtkhjgvkujfhjgkjhgsdfhb3467465fgdhdesfgh
+org.kar.karideo.db.name=karideo
+org.kar.karideo.address=http://0.0.0.0:18080/karideo/api/
diff --git a/back/src/Dockerfile b/back/src/Dockerfile
deleted file mode 100755
index f077b11..0000000
--- a/back/src/Dockerfile
+++ /dev/null
@@ -1,36 +0,0 @@
-FROM python:alpine3.6
-
-RUN apk update && \
- apk upgrade && \
- apk add --update-cache \
- --repository http://dl-cdn.alpinelinux.org/alpine/edge/community \
- --repository http://dl-cdn.alpinelinux.org/alpine/edge/main \
- --repository http://dl-cdn.alpinelinux.org/alpine/edge/testing \
- build-base mediainfo postgresql-dev gcc python3-dev musl-dev
-
-RUN pip3 install --upgrade pip
-
-RUN pip3 install sanic==19.9.0
-
-RUN pip3 install sanic-cors
-
-RUN pip3 install sanic-simple-swagger
-
-RUN pip3 install python-dateutil
-
-RUN pip3 install realog
-
-RUN pip3 install python-magic
-
-RUN pip3 install pymediainfo
-
-RUN pip3 install psycopg2
-
-EXPOSE 80
-
-ADD . /application/
-WORKDIR /application/
-CMD ["python3", "-u", "./app_video.py"]
-
-
-
diff --git a/back/src/api/data.py b/back/src/api/data.py
deleted file mode 100644
index cde5572..0000000
--- a/back/src/api/data.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-from aiofiles import os as async_os
-
-from pymediainfo import MediaInfo
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-from sanic.response import file_stream
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-import hashlib
-import shutil
-import random
-
-tmp_value = 0
-
-#curl -F 'file=@Totally_Spies.mp4;type=application/octet-stream' -H 'transfer-encoding:chunked' 127.0.0.1:15080/data -X POST -O; echo ;
-
-
-def add(_app, _name_api):
- elem_blueprint = Blueprint(_name_api)
- """
- @elem_blueprint.get('/' + _name_api, strict_slashes=True)
- @doc.summary("Show saisons")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def list(request):
- return response.json(data_global_elements.get_interface(_name_api).gets())
- """
-
- dataModelBdd = [
- {
- "name": "id",
- "type": "int",
- "modifiable": False,
- "creatable": False,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "size",
- "type": "int",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "sha512",
- "type": "str",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "mime_type",
- "type": "str",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "original_name",
- "type": "str",
- "modifiable": False,
- "creatable": True,
- "can_be_null": True,
- "visible": False,
- },
- ]
- data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
-
-
- @elem_blueprint.get('/' + _name_api + '/exist/', strict_slashes=True)
- @doc.summary("check resource existance")
- @doc.description("simply check if the resource is already uploaded.")
- @doc.produces(content_type='application/json')
- async def check_existance(request, sha512):
- value = data_global_elements.get_interface(_name_api).find("sha512", sha512)
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
-
- @elem_blueprint.post('/' + _name_api, strict_slashes=True, stream=True)
- @doc.summary("send new file data")
- @doc.description("Create a new data file (associated with his sha512.")
- #@doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def create(_request):
- debug.info("request streaming " + str(_request));
- args_with_blank_values = _request.headers
- debug.info("List arguments: " + str(args_with_blank_values));
- async def streaming(_response):
- global tmp_value
- #debug.info("streaming " + str(_response));
- total_size = 0
- tmp_value += random.randint(1,50)
- temporary_file = os.path.join(_app.config['REST_TMP_DATA'], str(tmp_value) + ".tmp")
- if not os.path.exists(_app.config['REST_TMP_DATA']):
- os.makedirs(_app.config['REST_TMP_DATA'])
- if not os.path.exists(_app.config['REST_MEDIA_DATA']):
- os.makedirs(_app.config['REST_MEDIA_DATA'])
- file_stream = open(temporary_file,"wb")
- sha1 = hashlib.sha512()
- while True:
- #debug.warning("ploufffff " + str(dir(_request.stream)))
- body = await _request.stream.read()
- if body is None:
- debug.warning("empty body");
- break
- total_size += len(body)
- debug.verbose("body " + str(len(body)) + "/" + str(total_size))
- file_stream.write(body)
- sha1.update(body)
- file_stream.close()
- print("SHA512: " + str(sha1.hexdigest()))
-
- new_data = {
- "size": total_size,
- "sha512": str(sha1.hexdigest()),
- 'original_name': _request.headers["filename"],
- 'mime_type': _request.headers["mime-type"]
- }
- # TODO: Check if the element already exist ...
-
- return_bdd = data_global_elements.get_interface(_name_api).post(new_data)
-
- basic_data_path = os.path.join(_app.config['REST_MEDIA_DATA'], str(return_bdd["id"]))
-
- if not os.path.exists(basic_data_path):
- os.makedirs(basic_data_path)
- destination_filename = os.path.join(basic_data_path, "data")
- """
- if os.path.isfile(destination_filename) == True:
- answer_data = {
- "size": total_size,
- "sha512": str(sha1.hexdigest()),
- 'filename': _request.headers["filename"],
- 'mime_type': _request.headers["mime-type"],
- "already_exist": True,
- }
- await _response.write(json.dumps(answer_data, sort_keys=True, indent=4))
- return
- """
-
- # move the file
- shutil.move(temporary_file, destination_filename)
- # collect media info ...
- media_info = MediaInfo.parse(destination_filename)
- data_metafile = {
- "sha512": str(sha1.hexdigest()),
- "size": total_size,
- 'filename': _request.headers["filename"],
- 'mime_type': _request.headers["mime-type"],
- 'media_info': json.loads(media_info.to_json())
- }
- tools.file_write_data(os.path.join(basic_data_path, "meta.json"), json.dumps(data_metafile, sort_keys=True, indent=4))
- await _response.write(json.dumps(return_bdd, sort_keys=True, indent=4))
- return response.stream(streaming, content_type='application/json')
-
diff --git a/back/src/api/group.py b/back/src/api/group.py
deleted file mode 100644
index 011aabb..0000000
--- a/back/src/api/group.py
+++ /dev/null
@@ -1,155 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-
-
-def add(_app, _name_api):
- elem_blueprint = Blueprint(_name_api)
-
- dataModelBdd = [
- {
- "name": "id",
- "type": "int",
- "modifiable": False,
- "creatable": False,
- "can_be_null": False,
- },
- {
- "name": "type",
- "type": "string",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- },
- {
- "name": "name",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- },
- {
- "name": "description",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- }
- ]
- data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
- data_global_elements.get_interface(_name_api).set_add_where(" AND type='serie' ")
-
- class DataModel:
- name = str
- description = str
-
- @elem_blueprint.get('/' + _name_api, strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def list(request):
- if "select" in request.args:
- if request.args["select"] == "*":
- list_values = data_global_elements.get_interface(_name_api).gets_where(select=[["!=", "id", None]], order_by=["name"])
- else:
- list_values = data_global_elements.get_interface(_name_api).gets_where(select=[["!=", "id", None]], order_by=["name"], filter=request.args["select"])
- return response.json(list_values)
- return response.json(data_global_elements.get_interface(_name_api).gets())
-
- @elem_blueprint.post('/' + _name_api, strict_slashes=True)
- @doc.summary("Create new resource")
- @doc.description("Store a newly created resource in storage.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def create(request):
- data = request.json
- data["type"] = 'serie'
- return response.json(data_global_elements.get_interface(_name_api).post(data))
-
- @elem_blueprint.post('/' + _name_api + "/find", strict_slashes=True)
- @doc.summary("Create new resource if the name does not already exist")
- @doc.description("Store a newly created resource in storage.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def find_with_name(request):
- value = data_global_elements.get_interface(_name_api).find("name", request.json["name"])
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.get('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def retrive(request, id):
- value = data_global_elements.get_interface(_name_api).get(id)
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.put('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Update resource")
- @doc.description("Update the specified resource in storage.")
- @doc.response_success(status=201, description='If successful updated')
- async def update(request, id):
- ret = data_global_elements.get_interface(_name_api).put(id, request.json)
- return response.json({})
-
- @elem_blueprint.delete('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Remove resource")
- @doc.description("Remove the specified resource from storage.")
- @doc.response_success(status=201, description='If successful deleted')
- async def delete(request, id):
- ret = data_global_elements.get_interface(_name_api).delete(id)
- if ret == True:
- return response.json({})
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.post('/' + _name_api + "//add_cover", strict_slashes=True)
- @doc.summary("Add cover on group")
- @doc.description("Add a cover data ID to the group.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful added')
- async def create_cover(request, id):
- for type_key in ["data_id"]:
- if type_key not in request.json.keys():
- return response.HTTPResponse("Bad Request: Missing Key '" + type_key + "'", status=400)
- data = {}
- data["node_id"] = id
- data["data_id"] = request.json["data_id"]
- value = data_global_elements.get_interface(_name_api).get(id)
- if value == None:
- return response.HTTPResponse("No data found", status=404)
- data_global_elements.get_interface(data_global_elements.API_COVER).post(data)
- value = data_global_elements.get_interface(_name_api).get(id)
- return response.json(value)
-
- _app.blueprint(elem_blueprint)
diff --git a/back/src/api/root.py b/back/src/api/root.py
deleted file mode 100644
index a0141e7..0000000
--- a/back/src/api/root.py
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-def add(_app):
- @_app.route("/")
- @doc.description("get api system information")
- async def test(request):
- return response.json({
- "api-type": "video-broker",
- "api-version": _app.config['API_VERSION'],
- "title": _app.config['API_TITLE'],
- "description": _app.config['API_DESCRIPTION'],
- "contact": _app.config['API_CONTACT_EMAIL'],
- "licence": _app.config['API_LICENSE_NAME']
- })
diff --git a/back/src/api/saison.py b/back/src/api/saison.py
deleted file mode 100644
index 6a6a37d..0000000
--- a/back/src/api/saison.py
+++ /dev/null
@@ -1,160 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-
-def add(_app, _name_api):
- elem_blueprint = Blueprint(_name_api)
-
- dataModelBdd = [
- {
- "name": "id",
- "type": "int",
- "modifiable": False,
- "creatable": False,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "type",
- "type": "string",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- },
- {
- "name": "name",
- "type": "string",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "description",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "parent_id",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- ]
- data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
- data_global_elements.get_interface(_name_api).set_add_where(" AND type='saison' ")
-
- class DataModel:
- name = int
- description = str
- parent_id = int
-
- @elem_blueprint.get('/' + _name_api, strict_slashes=True)
- @doc.summary("Show saisons")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def list(request):
- return response.json(data_global_elements.get_interface(_name_api).gets())
-
- @elem_blueprint.post('/' + _name_api, strict_slashes=True)
- @doc.summary("Create new saison")
- @doc.description("Create a new saison for a aspecific group id.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def create(request):
- data = request.json
- data["type"] = 'saison'
- return response.json(data_global_elements.get_interface(_name_api).post(data))
-
- @elem_blueprint.post('/' + _name_api + "/find", strict_slashes=True)
- @doc.summary("find a season existance")
- @doc.description("return the ID of the season table.")
- @doc.consumes(DataModel, location='body')
- @doc.response_success(status=201, description='If successful created')
- async def find_with_name(request):
- value = data_global_elements.get_interface(_name_api).find2("parent_id", request.json["parent_id"], "name", request.json["name"])
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.get('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def retrive(request, id):
- value = data_global_elements.get_interface(_name_api).get(id)
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.put('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Update resource")
- @doc.description("Update the specified resource in storage.")
- @doc.response_success(status=201, description='If successful updated')
- async def update(request, id):
- ret = data_global_elements.get_interface(_name_api).put(id, request.json)
- return response.json({})
-
- @elem_blueprint.delete('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Remove resource")
- @doc.description("Remove the specified resource from storage.")
- @doc.response_success(status=201, description='If successful deleted')
- async def delete(request, id):
- ret = data_global_elements.get_interface(_name_api).delete(id)
- if ret == True:
- return response.json({})
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.post('/' + _name_api + "//add_cover", strict_slashes=True)
- @doc.summary("Add cover on video")
- @doc.description("Add a cover data ID to the video.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful added')
- async def create_cover(request, id):
- for type_key in ["data_id"]:
- if type_key not in request.json.keys():
- return response.HTTPResponse("Bad Request: Missing Key '" + type_key + "'", status=400)
- data = {}
- data["node_id"] = id
- data["data_id"] = request.json["data_id"]
- value = data_global_elements.get_interface(_name_api).get(id)
- if value == None:
- return response.HTTPResponse("No data found", status=404)
- data_global_elements.get_interface(data_global_elements.API_COVER).post(data)
- value = data_global_elements.get_interface(_name_api).get(id)
- return response.json(value)
-
- _app.blueprint(elem_blueprint)
diff --git a/back/src/api/type.py b/back/src/api/type.py
deleted file mode 100644
index 419ba68..0000000
--- a/back/src/api/type.py
+++ /dev/null
@@ -1,141 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-
-
-def add(_app, _name_api):
- elem_blueprint = Blueprint(_name_api)
-
- dataModelBdd = [
- {
- "name": "id",
- "type": "int",
- "modifiable": False,
- "creatable": False,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "type",
- "type": "string",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- },
- {
- "name": "name",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "description",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- ]
- data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
- data_global_elements.get_interface(_name_api).set_add_where(" AND type='type' ")
-
- class DataModel:
- name = str
- description = str
-
- @elem_blueprint.get('/' + _name_api, strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def list(request):
- return response.json(data_global_elements.get_interface(_name_api).gets())
-
- @elem_blueprint.post('/' + _name_api, strict_slashes=True)
- @doc.summary("Create new resource")
- @doc.description("Store a newly created resource in storage.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def create(request):
- data = request.json
- data["type"] = 'type'
- return response.json(data_global_elements.get_interface(_name_api).post(data))
-
- @elem_blueprint.get('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def retrive(request, id):
- value = data_global_elements.get_interface(_name_api).get(id)
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.put('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Update resource")
- @doc.description("Update the specified resource in storage.")
- @doc.response_success(status=201, description='If successful updated')
- async def update(request, id):
- ret = data_global_elements.get_interface(_name_api).put(id, request.json)
- return response.json({})
-
- @elem_blueprint.delete('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Remove resource")
- @doc.description("Remove the specified resource from storage.")
- @doc.response_success(status=201, description='If successful deleted')
- async def delete(request, id):
- ret = data_global_elements.get_interface(_name_api).delete(id)
- if ret == True:
- return response.json({})
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.post('/' + _name_api + "//add_cover", strict_slashes=True)
- @doc.summary("Add cover on video")
- @doc.description("Add a cover data ID to the video.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful added')
- async def create_cover(request, id):
- for type_key in ["data_id"]:
- if type_key not in request.json.keys():
- return response.HTTPResponse("Bad Request: Missing Key '" + type_key + "'", status=400)
- data = {}
- data["node_id"] = id
- data["data_id"] = request.json["data_id"]
- value = data_global_elements.get_interface(_name_api).get(id)
- if value == None:
- return response.HTTPResponse("No data found", status=404)
- data_global_elements.get_interface(data_global_elements.API_COVER).post(data)
- value = data_global_elements.get_interface(_name_api).get(id)
- return response.json(value)
-
- _app.blueprint(elem_blueprint)
\ No newline at end of file
diff --git a/back/src/api/univers.py b/back/src/api/univers.py
deleted file mode 100644
index 9ece958..0000000
--- a/back/src/api/univers.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-def add(_app, _name_api):
- elem_blueprint = Blueprint(_name_api)
-
- dataModelBdd = [
- {
- "name": "id",
- "type": "int",
- "modifiable": False,
- "creatable": False,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "type",
- "type": "string",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- },
- {
- "name": "name",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "description",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- ]
- data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
- data_global_elements.get_interface(_name_api).set_add_where(" AND type='univers' ")
-
- class DataModel:
- name = str
- description = str
-
- @elem_blueprint.get('/' + _name_api, strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def list(request):
- return response.json(data_global_elements.get_interface(_name_api).gets())
-
- @elem_blueprint.post('/' + _name_api, strict_slashes=True)
- @doc.summary("Create new resource")
- @doc.description("Store a newly created resource in storage.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def create(request):
- data = request.json
- data["type"] = 'univers'
- return response.json(data_global_elements.get_interface(_name_api).post(data))
-
- @elem_blueprint.get('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def retrive(request, id):
- value = data_global_elements.get_interface(_name_api).get(id)
- if value != None:
- return response.json(value)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.put('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Update resource")
- @doc.description("Update the specified resource in storage.")
- @doc.response_success(status=201, description='If successful updated')
- async def update(request, id):
- ret = data_global_elements.get_interface(_name_api).put(id, request.json)
- return response.json({})
-
- @elem_blueprint.delete('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Remove resource")
- @doc.description("Remove the specified resource from storage.")
- @doc.response_success(status=201, description='If successful deleted')
- async def delete(request, id):
- ret = data_global_elements.get_interface(_name_api).delete(id)
- if ret == True:
- return response.json({})
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.post('/' + _name_api + "//add_cover", strict_slashes=True)
- @doc.summary("Add cover on univers")
- @doc.description("Add a cover data ID to the univers.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful added')
- async def create_cover(request, id):
- for type_key in ["data_id"]:
- if type_key not in request.json.keys():
- return response.HTTPResponse("Bad Request: Missing Key '" + type_key + "'", status=400)
- data = {}
- data["node_id"] = id
- data["data_id"] = request.json["data_id"]
- value = data_global_elements.get_interface(_name_api).get(id)
- if value == None:
- return response.HTTPResponse("No data found", status=404)
- data_global_elements.get_interface(data_global_elements.API_COVER).post(data)
- value = data_global_elements.get_interface(_name_api).get(id)
- return response.json(value)
-
- _app.blueprint(elem_blueprint)
\ No newline at end of file
diff --git a/back/src/api/video.py b/back/src/api/video.py
deleted file mode 100644
index 98b3cb2..0000000
--- a/back/src/api/video.py
+++ /dev/null
@@ -1,266 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import time
-import json
-import os
-import sys
-import copy
-import datetime
-import time, threading
-import realog.debug as debug
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-
-from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
-from sanic_simple_swagger import doc
-
-import tools
-import data_interface
-import data_global_elements
-
-
-
-def generate_name(_value):
- group_name = ""
- if "univers_id" in _value.keys():
- univers_property = data_global_elements.get_interface(data_global_elements.API_UNIVERS).get(_value["univers_id"])
- if univers_property != None:
- group_name = univers_property["name"] + ":"
- if "serie_id" in _value.keys():
- group_property = data_global_elements.get_interface(data_global_elements.API_GROUP).get(_value["serie_id"])
- if group_property != None:
- group_name = group_property["name"]
- saison_number = ""
- if "saison_id" in _value.keys():
- saison_property = data_global_elements.get_interface(data_global_elements.API_SAISON).get(_value["saison_id"])
- if saison_property != None:
- saison_number = str(saison_property["number"])
- if len(saison_number) == 1:
- saison_number = "0" + saison_number
- out = ""
- if group_name != "":
- out += group_name + "-"
- if saison_number != "":
- out += "s" + saison_number + "-"
- if "episode" in _value.keys() and _value["episode"] != None:
- episode_id = _value["episode"];
- if type(episode_id) == str:
- episode_id = int(episode_id)
- if episode_id < 10:
- out += "e00" + str(episode_id) + "-"
- elif episode_id < 100:
- out += "e0" + str(episode_id) + "-"
- else:
- out += "e" + str(episode_id) + "-"
- out += _value["name"]
- if "time" in _value.keys() and _value["time"] != None:
- out += "(" + _value["name"] + ")"
- return out
-
-
-def add(_app, _name_api):
- elem_blueprint = Blueprint(_name_api)
-
- dataModelBdd = [
- {
- "name": "id",
- "type": "int",
- "modifiable": False,
- "creatable": False,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "type",
- "type": "string",
- "modifiable": False,
- "creatable": True,
- "can_be_null": False,
- },
- {
- "name": "data_id",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": False,
- "visible": True,
- },
- {
- "name": "type_id",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "saison_id",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "episode",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "univers_id",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "serie_id",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "name",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "description",
- "type": "str",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "date",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- {
- "name": "time",
- "type": "int",
- "modifiable": True,
- "creatable": True,
- "can_be_null": True,
- "visible": True,
- },
- ]
- data_global_elements.get_interface(_name_api).set_data_model(dataModelBdd)
- data_global_elements.get_interface(_name_api).set_add_where(" AND type='media' ")
-
- class DataModel:
- type_id = int
- saison_id = int
- episode = int
- univers_id = int
- serie_id = int
- name = str
- description = str
- # creating time
- create_date = str
- # date of the video
- date = str
- # number of second
- time = int
-
- @elem_blueprint.get('/' + _name_api, strict_slashes=True)
- @doc.summary("Show saisons")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def list(request):
- return response.json(data_global_elements.get_interface(_name_api).gets())
-
- @elem_blueprint.post('/' + _name_api, strict_slashes=True)
- @doc.summary("Create new saison")
- @doc.description("Create a new saison for a aspecific group id.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful created')
- async def create(request):
- data = request.json
- for type_key in ["data_id","name"]:
- if type_key not in data.keys():
- raise ServerError("Bad Request: Missing Key '" + type_key + "'", status_code=400)
- for type_key in ["create_date"]:
- if type_key in data.keys():
- raise ServerError("Forbidden: Must not be set Key '" + type_key + "'", status_code=403)
- #Find if already exist
- data["type"] = 'media'
- return response.json(data_global_elements.get_interface(_name_api).post(data))
-
- @elem_blueprint.get('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Show resources")
- @doc.description("Display a listing of the resource.")
- @doc.produces(content_type='application/json')
- async def retrive(request, id):
- value = data_global_elements.get_interface(_name_api).get(id)
- if value != None:
- generated_name = generate_name(value)
- tmp = copy.deepcopy(value)
- tmp["generated_name"] = generated_name
- return response.json(tmp)
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.put('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Update resource")
- @doc.description("Update the specified resource in storage.")
- @doc.response_success(status=201, description='If successful updated')
- async def update(request, id):
- ret = data_global_elements.get_interface(_name_api).put(id, request.json)
- return response.json(ret);
-
- @elem_blueprint.delete('/' + _name_api + '/', strict_slashes=True)
- @doc.summary("Remove resource")
- @doc.description("Remove the specified resource from storage.")
- @doc.response_success(status=201, description='If successful deleted')
- async def delete(request, id):
- ret = data_global_elements.get_interface(_name_api).delete(id)
- if ret == True:
- return response.json({})
- return response.HTTPResponse("No data found", status=404)
-
- @elem_blueprint.post('/' + _name_api + "//add_cover", strict_slashes=True)
- @doc.summary("Add cover on video")
- @doc.description("Add a cover data ID to the video.")
- @doc.consumes(DataModel, location='body')#, required=True)
- @doc.response_success(status=201, description='If successful added')
- async def create_cover(request, id):
- for type_key in ["data_id"]:
- if type_key not in request.json.keys():
- return response.HTTPResponse("Bad Request: Missing Key '" + type_key + "'", status=400)
- data = {}
- data["node_id"] = id
- data["data_id"] = request.json["data_id"]
- value = data_global_elements.get_interface(_name_api).get(id)
- if value == None:
- return response.HTTPResponse("No data found", status=404)
- data_global_elements.get_interface(data_global_elements.API_COVER).post(data)
- value = data_global_elements.get_interface(_name_api).get(id)
-
- return response.json(value)
-
- _app.blueprint(elem_blueprint)
diff --git a/back/src/app_video.py b/back/src/app_video.py
deleted file mode 100755
index 1361f25..0000000
--- a/back/src/app_video.py
+++ /dev/null
@@ -1,115 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install flask --user
-#pip install flask_restful --user
-#pip install python-dateutil --user
-#pip install sanic --user
-
-from sanic import Sanic
-from sanic import response
-from sanic import views
-from sanic import Blueprint
-from sanic.exceptions import ServerError
-from spf import SanicPluginsFramework
-
-import dateutil.parser
-
-
-import time
-import json
-import os
-import sys
-import datetime
-import time, threading
-import realog.debug as debug
-
-debug.enable_color()
-
-import tools
-import data_interface
-import data_global_elements
-
-
-import create_bdd
-
-create_bdd.create_if_needed();
-
-from sanic_cors.extension import cors
-app = Sanic(__name__)
-spf = SanicPluginsFramework(app)
-spf.register_plugin(cors, automatic_options=True)
-
-app.config['API_VERSION'] = '2.0.0'
-app.config['API_TITLE'] = 'Rest personal video API'
-app.config['API_DESCRIPTION'] = 'Simple API for the Video broker.'
-app.config['API_CONTACT_EMAIL'] = "yui.heero@gmail.com"
-app.config['API_LICENSE_NAME'] = 'MPL 2.0'
-app.config['API_LICENSE_URL'] = 'https://www.mozilla.org/en-US/MPL/2.0/'
-app.config['schemes'] = ['http', 'https']
-if "REST_TMP_DATA" not in app.config.keys():
- app.config['REST_TMP_DATA'] = os.path.join("data", "tmp")
-if "REST_MEDIA_DATA" not in app.config.keys():
- app.config['REST_MEDIA_DATA'] = os.path.join("data", "media")
-if "REST_DATA" not in app.config.keys():
- app.config['REST_DATA'] = "data"
-if "REST_HOST" not in app.config.keys():
- app.config['REST_HOST'] = "0.0.0.0"
-if "REST_PORT" not in app.config.keys():
- app.config['REST_PORT'] = "80"
-
-def add_interface(_name, _base_name, _name_view):
- interface = data_interface.DataInterface(_name, _base_name, _name_view)
- data_global_elements.add_interface(_name, interface)
-
-add_interface(data_global_elements.API_DATA, data_global_elements.API_DATA, "view_data")
-add_interface(data_global_elements.API_TYPE, "node", "view_type")
-add_interface(data_global_elements.API_UNIVERS, "node", "view_univers")
-add_interface(data_global_elements.API_GROUP, "node", "view_serie")
-add_interface(data_global_elements.API_SAISON, "node", "view_saison")
-add_interface(data_global_elements.API_VIDEO, "media", "view_video")
-add_interface(data_global_elements.API_COVER, data_global_elements.API_COVER, data_global_elements.API_COVER)
-
-import api.root as api_root
-api_root.add(app)
-
-import api.type as api_type
-api_type.add(app, data_global_elements.API_TYPE)
-
-import api.univers as api_univers
-api_univers.add(app, data_global_elements.API_UNIVERS)
-
-import api.group as api_group
-api_group.add(app, data_global_elements.API_GROUP)
-
-import api.saison as api_saison
-api_saison.add(app, data_global_elements.API_SAISON)
-
-import api.video as api_video
-api_video.add(app, data_global_elements.API_VIDEO)
-
-import api.data as api_data
-api_data.add(app, data_global_elements.API_DATA)
-
-
-
-
-if __name__ == "__main__":
- debug.info("Start REST application: " + str(app.config['REST_HOST']) + ":" + str(app.config['REST_PORT']))
- app.config.REQUEST_MAX_SIZE=10*1024*1024*1024
- app.config.REQUEST_TIMEOUT=60*60
- #app.run(host=app.config['REST_HOST'], port=int(app.config['REST_PORT']), workers=100)
- app.run(host=app.config['REST_HOST'], port=int(app.config['REST_PORT']), log_config=None)
- #app.stop()
- debug.info("Sync all BDD ... (do not force stop ...)");
- data_global_elements.save_all_before_stop();
- debug.info("END program");
- sys.exit(0)
-
-
diff --git a/back/src/config.py b/back/src/config.py
deleted file mode 100755
index 6967c84..0000000
--- a/back/src/config.py
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-import os
-
-def get_rest_config():
- variable = {
- #"tmp_data": "tmp",
- #"data": "data",
- #"data_media": "data/media",
- #"host": os.getenv('REST_HOSTNAME', "0.0.0.0"),
- #"port": int(os.getenv('REST_PORT', 80)),
- "db_host": os.getenv('DB_HOSTNAME', "localhost"),
- "db_port": int(os.getenv('DB_PORT', 15032)),
- "db_name": os.getenv('DB_NAME', "karideo"),
- "db_user": os.getenv('DB_USER', "root"),
- "db_password": os.getenv('DB_PASSWORD', "postgress_password"),
- }
- return variable
diff --git a/back/src/create_bdd.py b/back/src/create_bdd.py
deleted file mode 100755
index d89ac01..0000000
--- a/back/src/create_bdd.py
+++ /dev/null
@@ -1,373 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-import config
-import tools
-from dateutil import parser
-
-import db
-import psycopg2
-
-def create_if_needed():
- connection = db.connect_bdd();
-
- debug.info("create the table:")
-
- c = connection.cursor()
-
- need_to_create_table = False
- try:
- c.execute('''
- SELECT * FROM object LIMIT 2;
- ''');
- connection.commit()
- except psycopg2.errors.UndefinedTable:
- need_to_create_table = True
-
- if need_to_create_table == False:
- debug.info("Does not need to create the BDD");
- connection.commit()
- db.remove_connection();
- return
- connection.commit()
- debug.info("Add default BDD format");
- c.execute('''
- CREATE TYPE node_type AS ENUM ('type', 'univers', 'serie', 'saison', 'media');
- CREATE TYPE age_type AS ENUM ('-', '5', '9', '12', '14', '16', '18');
- ''')
- connection.commit()
-
- # Create table
- c.execute('''
- CREATE SEQUENCE kar_id_sequence;
- ''')
- connection.commit()
-
- # Create table
- c.execute('''
- CREATE OR REPLACE FUNCTION trigger_set_timestamp()
- RETURNS TRIGGER AS $$
- BEGIN
- NEW.modify_date = NOW();
- RETURN NEW;
- END;
- $$ LANGUAGE plpgsql;
- ''')
- connection.commit()
-
- aaa = '''
- CREATE OR REPLACE FUNCTION check_exist(_table character, _id INTEGER)
- RETURNS BOOLEAN AS $$
- DECLARE vvv int;
- DECLARE eee text;
- BEGIN
- raise WARNING 'check_exist(%,%)%', _table, _id, E'\n';
- IF _id IS NULL THEN
- raise WARNING ' ==> return 1 (detect NULL)%', E'\n';
- RETURN 1;
- END IF;
- eee = 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id;
- raise WARNING 'Execute: % %', eee, E'\n';
- EXECUTE 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id INTO vvv;
- raise WARNING 'Value vvv: % %', vvv, E'\n';
- IF vvv = 1 THEN
- raise WARNING ' ==> return 1 %', E'\n';
- RETURN 1;
- ELSE
- raise WARNING ' ==> return 0 %', E'\n';
- RETURN 0;
- END IF;
- END;
- $$ LANGUAGE plpgsql;
- '''
-
- c.execute('''
- CREATE OR REPLACE FUNCTION check_exist(_table character, _id INTEGER)
- RETURNS BOOLEAN AS $$
- DECLARE vvv int;
- DECLARE eee text;
- BEGIN
- IF _id IS NULL THEN
- RETURN 1;
- END IF;
- EXECUTE 'select 1 FROM ' || quote_ident(_table) || ' WHERE id = ' || _id INTO vvv;
- IF vvv = 1 THEN
- RETURN 1;
- ELSE
- RETURN 0;
- END IF;
- END;
- $$ LANGUAGE plpgsql;
- ''')
- connection.commit()
-
- c.execute("""
- CREATE OR REPLACE FUNCTION check_node_exist(_type character, _id INTEGER)
- RETURNS BOOLEAN AS $$
- DECLARE vvv int;
- DECLARE eee text;
- BEGIN
- IF _id IS NULL THEN
- RETURN 1;
- END IF;
- EXECUTE 'select 1 FROM node WHERE type = ''' || quote_ident(_type) || ''' AND id = ' || _id INTO vvv;
- IF vvv = 1 THEN
- RETURN 1;
- ELSE
- RETURN 0;
- END IF;
- END;
- $$ LANGUAGE plpgsql;
- """)
- connection.commit()
-
- debug.info("Add Main Object interface");
- # Create table
- c.execute('''
- CREATE TABLE object (
- id INTEGER PRIMARY KEY default nextval('kar_id_sequence'),
- deleted BOOLEAN NOT NULL DEFAULT FALSE,
- create_date TIMESTAMPTZ NOT NULL DEFAULT NOW(),
- modify_date TIMESTAMPTZ NOT NULL DEFAULT NOW());
- COMMENT ON TABLE object IS 'Basic element in this BDD (manage the create and modfy property, the deletion and the unique ID.';
- COMMENT ON COLUMN object.id IS 'Unique global ID in the BDD.';
- COMMENT ON COLUMN object.deleted IS 'If true the element is dead and must not be shown.';
- COMMENT ON COLUMN object.create_date IS 'Creation date of this Object (automatically setup by the BDD).';
- COMMENT ON COLUMN object.modify_date IS 'Modify date of this object (automatically updated by the BDD).';
- ''')
-
- c.execute('''
- CREATE TRIGGER set_timestamp
- AFTER UPDATE ON object
- FOR EACH ROW
- EXECUTE PROCEDURE trigger_set_timestamp();
- ''')
- connection.commit()
-
- debug.info("Add DATA interface");
- # Create table
- c.execute('''
- CREATE TABLE data (
- sha512 VARCHAR(129) NOT NULL,
- mime_type VARCHAR(128) NOT NULL,
- size BIGINT NOT NULL,
- original_name TEXT
- ) INHERITS (object);
- COMMENT ON TABLE data IS 'Data basic reference on the big data managed.';
- COMMENT ON COLUMN data.sha512 IS 'Unique Sha512 of the file.';
- COMMENT ON COLUMN data.mime_type IS 'Type of the object with his mine-type description.';
- COMMENT ON COLUMN data.size IS 'Size of the file in Byte.';
- COMMENT ON COLUMN data.original_name IS 'Name of the file when upload it in the BDD ==> MUST be remove later.';
- ''')
- connection.commit()
-
-
- debug.info("Add NODE interface");
-
- # Create table
- c.execute('''
- CREATE TABLE node (
- type node_type NOT NULL,
- name TEXT NOT NULL,
- description TEXT,
- parent_id INTEGER CHECK(check_exist('node', parent_id))
- ) INHERITS (object);
- COMMENT ON TABLE node IS 'Node is a basic element of what must be hierarchie apears.';
- COMMENT ON COLUMN node.name IS 'Name of the Node.';
- COMMENT ON COLUMN node.description IS 'Description of the Node.';
- ''')
- connection.commit()
-
- debug.info("Add Cover interface");
- # Create table
- c.execute('''
- CREATE TABLE cover_link (
- node_id INTEGER CHECK(check_exist('node', node_id)),
- data_id INTEGER CHECK(check_exist('data', data_id))
- ) INHERITS (object);
- COMMENT ON TABLE cover_link IS 'Link between cover data id and Nodes.';
- ''')
- connection.commit()
-
- debug.info("Add MEDIA interface");
- # Create table
- c.execute('''
- CREATE TABLE media (
- data_id INTEGER CHECK(check_exist('data', data_id)),
- type_id INTEGER CHECK(check_node_exist('type', type_id)),
- univers_id INTEGER CHECK(check_node_exist('univers', univers_id)),
- serie_id INTEGER CHECK(check_node_exist('serie', serie_id)),
- saison_id INTEGER CHECK(check_node_exist('saison', saison_id)),
- episode INTEGER CHECK(episode >=0),
- date INTEGER CHECK(date > 1850),
- time INTEGER CHECK(time >= 0),
- age_limit age_type NOT NULL DEFAULT '-'
- ) INHERITS (node);
- COMMENT ON TABLE media IS 'Media Media that is visible.';
- COMMENT ON COLUMN media.episode IS 'Number of the episode in the saison sequence.';
- COMMENT ON COLUMN media.date IS 'Simple date in years of the creation of the media.';
- COMMENT ON COLUMN media.time IS 'Time in second of the media';
- COMMENT ON COLUMN media.age_limit IS 'Limitation of the age to show the display ("-" for no limitation)';
- ''')
-
- # Save (commit) the changes
- connection.commit()
-
- debug.info("Add Views models");
-
- c.execute('''
- CREATE VIEW view_data AS
- SELECT id, sha512, mime_type, size
- FROM data
- WHERE deleted = false
- ORDER BY id;
- CREATE VIEW view_type AS
- SELECT id, name, description,
- array(
- SELECT data_id
- FROM cover_link
- WHERE cover_link.node_id = node.id
- ) AS covers
- FROM node
- WHERE deleted = false AND type = 'type'
- ORDER BY name;
- CREATE VIEW view_univers AS
- SELECT id, name, description,
- array(
- SELECT data_id
- FROM cover_link
- WHERE cover_link.node_id = node.id
- ) AS covers
- FROM node
- WHERE deleted = false AND type = 'univers'
- ORDER BY name;
- CREATE VIEW view_serie AS
- SELECT id, name, description,
- array(
- SELECT data_id
- FROM cover_link
- WHERE cover_link.node_id = node.id
- ) AS covers
- FROM node
- WHERE deleted = false AND type = 'serie'
- ORDER BY name;
- CREATE VIEW view_saison AS
- SELECT id, name, description, parent_id,
- array(
- SELECT data_id
- FROM cover_link
- WHERE cover_link.node_id = node.id
- ) AS covers
- FROM node
- WHERE deleted = false AND type = 'saison'
- ORDER BY name;
- CREATE VIEW view_video AS
- SELECT id, name, description, data_id, type_id, univers_id, serie_id, saison_id, episode, date, time, age_limit,
- array(
- SELECT data_id
- FROM cover_link
- WHERE cover_link.node_id = media.id
- ) AS covers
- FROM media
- WHERE deleted = false AND type = 'media'
- ORDER BY name;
- ''')
- connection.commit()
-
- debug.info("Add default type");
-
- default_values_type = [
- {
- "id": 0,
- "name": "Documentary",
- "description": "Documentary (annimals, space, earth...)",
- "image": "../default_images/type_documentary.svg"
- },{
- "id": 1,
- "name": "Movie",
- "description": "Movie with real humans (film)",
- "image": "../default_images/type_film.svg"
- },{
- "id": 2,
- "name": "Annimation",
- "description": "Annimation movies (film)",
- "image": "../default_images/type_annimation.svg"
- },{
- "id": 3,
- "name": "Short movie",
- "description": "Small movies (less 2 minutes)",
- "image": "../default_images/type_film-short.svg"
- },{
- "id": 4,
- "name": "TV show",
- "description": "Tv show form old peoples",
- "image": "../default_images/type_tv-show.svg"
- }, {
- "id": 5,
- "name": "Anniation TV show",
- "description": "Tv show form young peoples",
- "image": "../default_images/type_tv-show-annimation.svg"
- }, {
- "id": 6,
- "name": "Theater",
- "description": "recorder theater pices",
- "image": "../default_images/type_theater.svg"
- }, {
- "id": 7,
- "name": "One man show",
- "description": "Recorded stand up",
- "image": "../default_images/type_one-man-show.svg"
- }, {
- "id": 8,
- "name": "Concert",
- "description": "Recorded concert",
- "image": "../default_images/type_concert.svg"
- }, {
- "id": 9,
- "name": "Opera",
- "description": "Recorded Opera",
- "image": "../default_images/type_opera.svg"
- }
- ]
- tmp_config = config.get_rest_config()
- for elem in default_values_type:
- debug.info(" add type: " + elem["name"]);
- request_insert = (elem["name"], elem["description"])
- c.execute('INSERT INTO node (type, name, description) VALUES (\'type\', %s, %s) RETURNING id', request_insert)
- elem["id"] = c.fetchone()[0]
- connection.commit()
- if elem["image"] != None and elem["image"] != "":
- # calculate sha512:
- local_file_name = os.path.join(tools.get_current_path(__file__), elem["image"])
- sha512 = tools.calculate_sha512(local_file_name)
- mime_type = "image/svg+xml"
- size = tools.file_size(local_file_name)
- original_name = local_file_name
- c.execute('INSERT INTO data (sha512, mime_type, size, original_name) VALUES (%s, %s, %s, %s) RETURNING id', (sha512, mime_type, size, original_name))
- connection.commit()
- elem["data_id"] = c.fetchone()[0]
- c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s, %s)', (elem["id"], elem["data_id"]))
- connection.commit()
- tools.file_copy(local_file_name, os.path.join(tmp_config["data_media"] , str(elem["data_id"]), "data"))
- connection.commit()
-
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- db.remove_connection();
-
-
-
diff --git a/back/src/data_global_elements.py b/back/src/data_global_elements.py
deleted file mode 100644
index e1f560f..0000000
--- a/back/src/data_global_elements.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-interfaces = {}
-
-def get_list_interface():
- global interfaces
- return interfaces
-
-def get_interface(_name):
- global interfaces
- return interfaces[_name]
-
-def add_interface(_name, _interface):
- global interfaces
- interfaces[_name] = _interface
-
-
-import time, threading
-
-system_stop = False
-system_counter = 0
-
-def save_all():
- global system_counter
- system_counter += 1
- if system_counter <= 10:
- return
- system_counter = 0
- print(time.ctime())
- for elem in interfaces.keys():
- if system_stop == True:
- return
- interfaces[elem].check_save()
-
-def save_all_before_stop():
- global system_stop
- system_stop = True
- for elem in interfaces.keys():
- interfaces[elem].check_save()
-
-def check_save():
- save_all()
- if system_stop == True:
- return
- threading.Timer(1, check_save).start()
-
-check_save()
-
-API_TYPE = "type"
-API_UNIVERS = "univers"
-API_GROUP = "group"
-API_SAISON = "saison"
-API_VIDEO = "video"
-API_DATA = "data"
-API_COVER = "cover_link"
-
diff --git a/back/src/data_interface.py b/back/src/data_interface.py
deleted file mode 100644
index a79fa96..0000000
--- a/back/src/data_interface.py
+++ /dev/null
@@ -1,272 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import tools
-import json
-from realog import debug
-import random
-import copy
-from sanic.exceptions import ServerError
-from psycopg2.extras import RealDictCursor
-import psycopg2
-
-import db
-
-def is_str(s, authorise):
- if s == None:
- if authorise == True:
- return True
- return False;
- if type(s) == str:
- return True
- return False
-
-def is_boolean(s, authorise):
- if s == None:
- if authorise == True:
- return True
- return False;
- if s == True or s == False:
- return True
- return False
-
-def is_int(s, authorise):
- if s == None:
- if authorise == True:
- return True
- return False;
- try:
- int(s)
- return True
- except ValueError:
- return False
- return False
-
-def is_float(s, authorise):
- if s == None:
- if authorise == True:
- return True
- return False;
- try:
- float(s)
- return True
- except ValueError:
- return False
- return False
-##
-## @breif Generic interface to access to the BDD (no BDD, direct file IO)
-##
-class DataInterface():
- def __init__(self, _name, _base_name, _name_view):
- self.model = None
- self.name = _name
- self.name_view = _name_view
- self.extract_base = "*"
- self.base_name = _base_name
- self.connection = db.connect_bdd();
- self.need_save = False
- self.where_expand = "";
- #self.conn = self.connection.cursor()
-
- def __del__(self):
- db.remove_connection();
-
- def set_data_model(self, _data_model):
- self.model = _data_model
- """
- self.extract_base = ""
- for elem in self.model:
- if elem["visible"] == True:
- if self.extract_base != "":
- self.extract_base += ","
- self.extract_base += elem["name"]
- """
- def set_add_where(self, _expand):
- self.where_expand = _expand
- ##
- ## @brief Mark the current BDD to store all in File system (sync)
- ##
- def mark_to_store(self):
- self.need_save = True
-
- ##
- ## @brief Check if the Bdd need to be stored. It is stored if it has been requested.
- ## The BDD is store in a separate file and move in the old one. Safe way to store
- ##
- def check_save(self):
- if self.need_save == False:
- return
- debug.warning("Save bdd: ")
- self.connection.commit()
- self.need_save = False
-
- def gets(self, filter=None):
- debug.info("gets " + self.name)
- cursor = self.connection.cursor(cursor_factory=RealDictCursor)
- cursor.execute('SELECT * FROM ' + self.name_view + '')
- results = cursor.fetchall()
- #debug.info("gets data = " + json.dumps(results, indent=4))
- if filter == None:
- return results
- debug.warning("BDD does not suppor filter now ...");
- self.connection.commit()
- return results
-
- def get(self, _id):
- if type(_id) != int:
- debug.warning("get wrong input type...")
- debug.info("get " + self.name + ": " + str(_id))
- cursor = self.connection.cursor(cursor_factory=RealDictCursor)
- #cursor.execute('SELECT * FROM data WHERE deleted=0')
- #results = cursor.fetchall()
- #debug.info("display data = " + json.dumps(results, indent=4))
- req = (_id,)
- try:
- cursor.execute('SELECT * FROM ' + self.name_view + ' WHERE id=%s', req)
- except psycopg2.errors.UndefinedFunction:
- raise ServerError("INTERNAL_ERROR fail request SQL ...", status_code=500)
- finally:
- self.connection.commit()
- results = cursor.fetchone()
- #debug.info("get specific data = " + json.dumps(results))
- return results;
-
- def find(self, _key, _value):
- debug.info("get " + self.name + ": " + str(_value))
- cursor = self.connection.cursor(cursor_factory=RealDictCursor)
- req = (_value,)
- try:
- cursor.execute('SELECT * FROM ' + self.name_view + ' WHERE ' + _key + '=%s', req)
- except psycopg2.errors.UndefinedFunction:
- raise ServerError("INTERNAL_ERROR fail request SQL ...", status_code=500)
- finally:
- self.connection.commit()
- results = cursor.fetchone()
- #debug.info("get specific data = " + json.dumps(results))
- return results;
- def find2(self, _key1, _value1, _key2, _value2):
- debug.info("get " + self.name + ": " + str(_value1))
- cursor = self.connection.cursor(cursor_factory=RealDictCursor)
- req = (_value1,_value2)
- try:
- cursor.execute('SELECT * FROM ' + self.name_view + ' WHERE ' + _key1 + '=%s AND ' + _key2 + '=%s', req)
- except psycopg2.errors.UndefinedFunction:
- raise ServerError("INTERNAL_ERROR fail request SQL ...", status_code=500)
- finally:
- self.connection.commit()
- results = cursor.fetchone()
- #debug.info("get specific data = " + json.dumps(results))
- return results;
-
- def delete(self, _id):
- debug.info("delete " + self.name + ": " + str(_id))
- cursor = self.connection.cursor()
- req = (_id,)
- try:
- cursor.execute('UPDATE ' + self.base_name + ' SET deleted=true WHERE id=%s' + self.where_expand, req)
- except psycopg2.errors.UndefinedFunction:
- raise ServerError("INTERNAL_ERROR fail request SQL ...", status_code=500)
- finally:
- self.connection.commit()
- self.mark_to_store();
- return True
-
- def is_value_modifiable_and_good_type(self, _key, _value, _check_with="modifiable"):
- if self.model == None:
- return True
- for elem in self.model:
- if _key == elem["name"]:
- if elem[_check_with] == False:
- debug.warning("Try to set an input '" + str(_key) + "' but the element is not modifiable ... ");
- raise ServerError("FORBIDDEN Try to set an input '" + str(_key) + "' but the element is not modifiable", status_code=403)
- if elem["type"] == "str":
- if is_str(_value, elem["can_be_null"]) == True:
- return True
- elif elem["type"] == "int":
- if is_int(_value, elem["can_be_null"]) == True:
- return True
- elif elem["type"] == "float":
- if is_float(_value, elem["can_be_null"]) == True:
- return True
- elif elem["type"] == "boolean":
- if is_boolean(_value, elem["can_be_null"]) == True:
- return True
- else:
- return True;
- debug.warning("get element type == '" + str(type(_value)) + "' but request " + str(elem["type"]));
- raise ServerError("FORBIDDEN get element type == '" + str(type(_value)) + "' but request " + str(elem["type"]), status_code=403)
- # The key does not exist ...
- debug.warning("The KEY: '" + str(_key) + "' Is not in the list of availlable keys");
- raise ServerError("FORBIDDEN The KEY: '" + str(_key) + "' Is not in the list of availlable keys", status_code=403)
- return False
-
- def put(self, _id, _value):
- debug.info("put in " + self.name + ": " + str(_id))
- cursor = self.connection.cursor()
- request = 'UPDATE ' + self.base_name + ' SET'
- list_data = []
- first = True;
- for elem in _value.keys():
- if elem == "id":
- continue
- if self.is_value_modifiable_and_good_type(elem, _value[elem]) == False:
- return;
- if first == True:
- first = False
- else:
- request += " , "
- list_data.append(_value[elem])
- request += " " + elem + " = %s"
- request += " WHERE id = %s " + self.where_expand
- list_data.append(_id)
- debug.info("Request executed : '" + request + "'")
- try:
- cursor.execute(request, list_data)
- except psycopg2.errors.UndefinedFunction:
- raise ServerError("INTERNAL_ERROR fail request SQL ...", status_code=500)
- finally:
- self.connection.commit()
- self.mark_to_store();
- return self.get(_id);
-
- def post(self, _value):
- debug.info("post " + self.name)
- cursor = self.connection.cursor()
- request = 'INSERT INTO ' + self.base_name
- list_data = []
- first = True;
- aaa = ""
- bbb = ""
- for elem in _value.keys():
- if elem == "id":
- continue
- if self.is_value_modifiable_and_good_type(elem, _value[elem], "creatable") == False:
- return;
- if aaa != "":
- aaa += " , "
- if bbb != "":
- bbb += " , "
- aaa += elem
- bbb += "%s"
- list_data.append(_value[elem])
- request += " ( " + aaa + ") VALUES ( " + bbb + ") RETURNING id"
- debug.info("Request executed : '" + request + "'")
- try:
- cursor.execute(request, list_data)
- except psycopg2.errors.UndefinedFunction:
- raise ServerError("INTERNAL_ERROR fail request SQL ...", status_code=500)
- finally:
- self.connection.commit()
- id_of_new_row = cursor.fetchone()[0]
- self.mark_to_store();
- return self.get(id_of_new_row);
-
-
-
diff --git a/back/src/db.py b/back/src/db.py
deleted file mode 100644
index d4a610f..0000000
--- a/back/src/db.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from realog import debug
-
-import psycopg2
-import config
-
-connection = None
-connection_count = 0
-def connect_bdd():
- global connection
- global connection_count
- if connection == None:
- debug.info("connect BDD: ")
- conf = config.get_rest_config()
- connection = psycopg2.connect(dbname=conf["db_name"], user=conf["db_user"], password=conf["db_password"], host=conf["db_host"], port=conf["db_port"])
- connection_count += 1
- return connection
-
-def remove_connection():
- global connection
- global connection_count
- connection_count -= 1
- if connection_count < 0:
- debug.warning("Request remove too much time the BDD connection");
- connection_count = 0;
- return;
- if connection_count == 0:
- debug.warning("dicconnect BDD");
- connection.commit()
- connection.close()
- connection = None
- return;
-
-
-base_bdd_name = "karideo_"
diff --git a/back/src/org/kar/karideo/AuthenticationFilter.java b/back/src/org/kar/karideo/AuthenticationFilter.java
new file mode 100644
index 0000000..32003ce
--- /dev/null
+++ b/back/src/org/kar/karideo/AuthenticationFilter.java
@@ -0,0 +1,93 @@
+package org.kar.karideo;
+
+import org.kar.karideo.model.User;
+import org.kar.karideo.model.UserSmall;
+
+import javax.annotation.Priority;
+import javax.ws.rs.Priorities;
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+// https://stackoverflow.com/questions/26777083/best-practice-for-rest-token-based-authentication-with-jax-rs-and-jersey
+// https://stackoverflow.com/questions/26777083/best-practice-for-rest-token-based-authentication-with-jax-rs-and-jersey/45814178#45814178
+// https://stackoverflow.com/questions/32817210/how-to-access-jersey-resource-secured-by-rolesallowed
+
+//@Provider
+//@PreMatching
+@Secured
+@Provider
+@Priority(Priorities.AUTHENTICATION)
+public class AuthenticationFilter implements ContainerRequestFilter {
+
+ private static final String REALM = "example";
+ private static final String AUTHENTICATION_SCHEME = "Yota";
+
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+ System.out.println("-----------------------------------------------------");
+ System.out.println("---- Check if have authorization ----");
+ System.out.println("-----------------------------------------------------");
+ // Get the Authorization header from the request
+ String authorizationHeader = requestContext.getHeaderString(HttpHeaders.AUTHORIZATION);
+
+ System.out.println("authorizationHeader: " + authorizationHeader);
+ // Validate the Authorization header
+ if (!isTokenBasedAuthentication(authorizationHeader)) {
+ abortWithUnauthorized(requestContext);
+ return;
+ }
+
+ // Extract the token from the Authorization header
+ String token = authorizationHeader.substring(AUTHENTICATION_SCHEME.length()).trim();
+ System.out.println("token: " + token);
+ User user = null;
+ try {
+ user = validateToken(token);
+ } catch (Exception e) {
+ abortWithUnauthorized(requestContext);
+ }
+ if (user == null) {
+ abortWithUnauthorized(requestContext);
+ }
+ String scheme = requestContext.getUriInfo().getRequestUri().getScheme();
+ requestContext.setSecurityContext(new MySecurityContext(user, scheme));
+ System.out.println("Get local user : " + user);
+ }
+
+ private boolean isTokenBasedAuthentication(String authorizationHeader) {
+
+ // Check if the Authorization header is valid
+ // It must not be null and must be prefixed with "Bearer" plus a whitespace
+ // The authentication scheme comparison must be case-insensitive
+ return authorizationHeader != null && authorizationHeader.toLowerCase()
+ .startsWith(AUTHENTICATION_SCHEME.toLowerCase() + " ");
+ }
+
+ private void abortWithUnauthorized(ContainerRequestContext requestContext) {
+
+ // Abort the filter chain with a 401 status code response
+ // The WWW-Authenticate header is sent along with the response
+ requestContext.abortWith(
+ Response.status(Response.Status.UNAUTHORIZED)
+ .header(HttpHeaders.WWW_AUTHENTICATE,
+ AUTHENTICATION_SCHEME + " realm=\"" + REALM + "\"")
+ .build());
+ }
+
+ private User validateToken(String authorization) throws Exception {
+ System.out.println("-----------------------------------------------------");
+ System.out.println("---- TODO validate token ----");
+ System.out.println("-----------------------------------------------------");
+ // Check if the token was issued by the server and if it's not expired
+ // Throw an Exception if the token is invalid
+ String[] value = authorization.split(":");
+ long user = Long.valueOf(value[0]);
+ String token = value[1];
+ UserSmall userOAuth = UserDB.getUserOAuth(user, token);
+ System.out.println("Get local userOAuth : " + userOAuth);
+ return UserDB.getUserOrCreate(userOAuth);
+ }
+}
\ No newline at end of file
diff --git a/back/src/org/kar/karideo/CORSFilter.java b/back/src/org/kar/karideo/CORSFilter.java
new file mode 100644
index 0000000..7c6ee6d
--- /dev/null
+++ b/back/src/org/kar/karideo/CORSFilter.java
@@ -0,0 +1,26 @@
+package org.kar.karideo;
+
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerResponseContext;
+import javax.ws.rs.container.ContainerResponseFilter;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+
+
+@Provider
+public class CORSFilter implements ContainerResponseFilter {
+
+ @Override
+ public void filter(ContainerRequestContext request,
+ ContainerResponseContext response) throws IOException {
+ //System.err.println("filter cors ..." + request.toString());
+
+ response.getHeaders().add("Access-Control-Allow-Origin", "*");
+ response.getHeaders().add("Access-Control-Allow-Headers", "*");
+ // "Origin, content-type, Content-type, Accept, authorization, mime-type, filename");
+ response.getHeaders().add("Access-Control-Allow-Credentials", "true");
+ response.getHeaders().add("Access-Control-Allow-Methods",
+ "GET, POST, PUT, DELETE, OPTIONS, HEAD");
+ }
+}
+
diff --git a/back/src/org/kar/karideo/ConfigVariable.java b/back/src/org/kar/karideo/ConfigVariable.java
new file mode 100644
index 0000000..4e83eb3
--- /dev/null
+++ b/back/src/org/kar/karideo/ConfigVariable.java
@@ -0,0 +1,76 @@
+package org.kar.karideo;
+
+public class ConfigVariable {
+
+ public static String getTmpDataFolder() {
+ String out = System.getProperty("org.kar.karideo.dataTmpFolder");
+ if (out == null) {
+ return "/application/data/tmp";
+ }
+ return out;
+ }
+
+ public static String getMediaDataFolder() {
+ String out = System.getProperty("org.kar.karideo.dataFolder");
+ if (out == null) {
+ return "/application/data/media";
+ }
+ return out;
+ }
+
+ public static String getRestOAuthServer() {
+ String out = System.getProperty("org.kar.karideo.rest.oauth");
+ if (out == null) {
+ return "http://localhost:17080/oauth/api/";
+ }
+ return out;
+ }
+
+ public static String getDBHost() {
+ String out = System.getProperty("org.kar.karideo.db.host");
+ if (out == null) {
+ return "localhost";
+ }
+ return out;
+ }
+
+ public static String getDBPort() {
+ String out = System.getProperty("org.kar.karideo.db.port");
+ if (out == null) {
+ return "3306";
+ }
+ return out;
+ }
+
+ public static String getDBLogin() {
+ String out = System.getProperty("org.kar.karideo.db.login");
+ if (out == null) {
+ return "root";
+ }
+ return out;
+ }
+
+ public static String getDBPassword() {
+ String out = System.getProperty("org.kar.karideo.db.password");
+ if (out == null) {
+ return "klkhj456gkgtkhjgvkujfhjgkjhgsdfhb3467465fgdhdesfgh";
+ }
+ return out;
+ }
+
+ public static String getDBName() {
+ String out = System.getProperty("org.kar.karideo.db.name");
+ if (out == null) {
+ return "karideo";
+ }
+ return out;
+ }
+
+ public static String getlocalAddress() {
+ String out = System.getProperty("org.kar.karideo.address");
+ if (out == null) {
+ return "http://localhost:18080/karideo/api/";
+ }
+ return out;
+ }
+}
diff --git a/back/src/org/kar/karideo/GenericContext.java b/back/src/org/kar/karideo/GenericContext.java
new file mode 100644
index 0000000..4091383
--- /dev/null
+++ b/back/src/org/kar/karideo/GenericContext.java
@@ -0,0 +1,22 @@
+package org.kar.karideo;
+
+import org.kar.karideo.model.User;
+
+import java.security.Principal;
+
+public class GenericContext implements Principal {
+
+ public User user;
+
+ public GenericContext(User user) {
+ this.user = user;
+ }
+
+ @Override
+ public String getName() {
+ if (user == null) {
+ return "???";
+ }
+ return user.login;
+ }
+}
diff --git a/back/src/org/kar/karideo/MySecurityContext.java b/back/src/org/kar/karideo/MySecurityContext.java
new file mode 100644
index 0000000..dbedb08
--- /dev/null
+++ b/back/src/org/kar/karideo/MySecurityContext.java
@@ -0,0 +1,46 @@
+package org.kar.karideo;
+
+
+import org.kar.karideo.model.User;
+
+import javax.ws.rs.core.SecurityContext;
+import java.security.Principal;
+
+// https://simplapi.wordpress.com/2015/09/19/jersey-jax-rs-securitycontext-in-action/
+class MySecurityContext implements SecurityContext {
+
+ private final GenericContext contextPrincipale;
+ private final String sheme;
+
+ public MySecurityContext(User user, String sheme) {
+ this.contextPrincipale = new GenericContext(user);
+ this.sheme = sheme;
+ }
+
+ @Override
+ public Principal getUserPrincipal() {
+ return contextPrincipale;
+ }
+
+ @Override
+ public boolean isUserInRole(String role) {
+ if (role.contentEquals("ADMIN")) {
+ return contextPrincipale.user.admin == true;
+ }
+ if (role.contentEquals("USER")) {
+ return contextPrincipale.user.admin == false;
+ }
+ return false;
+ }
+
+ @Override
+ public boolean isSecure() {
+ return true;
+ }
+
+ @Override
+ public String getAuthenticationScheme() {
+ return "Yota";
+ }
+
+}
\ No newline at end of file
diff --git a/back/src/org/kar/karideo/OptionFilter.java b/back/src/org/kar/karideo/OptionFilter.java
new file mode 100644
index 0000000..a5c0df2
--- /dev/null
+++ b/back/src/org/kar/karideo/OptionFilter.java
@@ -0,0 +1,21 @@
+package org.kar.karideo;
+
+import javax.ws.rs.container.ContainerRequestContext;
+import javax.ws.rs.container.ContainerRequestFilter;
+import javax.ws.rs.container.PreMatching;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.Provider;
+import java.io.IOException;
+
+@Provider
+@PreMatching
+public class OptionFilter implements ContainerRequestFilter {
+ @Override
+ public void filter(ContainerRequestContext requestContext) throws IOException {
+ if (requestContext.getMethod().contentEquals("OPTIONS")) {
+ requestContext.abortWith(Response.status(Response.Status.NO_CONTENT).build());
+ }
+ }
+}
+
+
diff --git a/back/src/org/kar/karideo/Secured.java b/back/src/org/kar/karideo/Secured.java
new file mode 100644
index 0000000..f0157db
--- /dev/null
+++ b/back/src/org/kar/karideo/Secured.java
@@ -0,0 +1,15 @@
+package org.kar.karideo;
+
+import javax.ws.rs.NameBinding;
+import java.lang.annotation.Retention;
+import java.lang.annotation.Target;
+
+import static java.lang.annotation.ElementType.METHOD;
+import static java.lang.annotation.ElementType.TYPE;
+import static java.lang.annotation.RetentionPolicy.RUNTIME;
+
+@NameBinding
+@Retention(RUNTIME)
+@Target({TYPE, METHOD})
+public @interface Secured {
+}
diff --git a/back/src/org/kar/karideo/UserDB.java b/back/src/org/kar/karideo/UserDB.java
new file mode 100755
index 0000000..3997d48
--- /dev/null
+++ b/back/src/org/kar/karideo/UserDB.java
@@ -0,0 +1,216 @@
+package org.kar.karideo;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.kar.karideo.db.DBEntry;
+import org.kar.karideo.model.State;
+import org.kar.karideo.model.User;
+import org.kar.karideo.model.UserSmall;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class UserDB {
+
+ public UserDB() {
+ }
+
+ public static User getUsers(long userId) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT * FROM user WHERE id = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, userId);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ User out = new User(rs);
+ entry.disconnect();
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ return null;
+ }
+
+ @Deprecated
+ public static User getAndCreate(long userId, String token) throws IOException {
+ // check Token:
+ URL obj = new URL(WebLauncher.getOAuthURI() + "users/check_token?id=" + userId + "&token=" + token);
+ HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+ con.setRequestMethod("GET");
+ con.setRequestProperty("User-Agent", "karideo");
+ con.setRequestProperty("Cache-Control", "no-cache");
+ con.setRequestProperty("Content-Type", "application/json");
+ con.setRequestProperty("Accept", "application/json");
+ int responseCode = con.getResponseCode();
+
+ System.out.println("GET Response Code :: " + responseCode);
+ if (responseCode == HttpURLConnection.HTTP_OK) { // success
+ BufferedReader in = new BufferedReader(new InputStreamReader(
+ con.getInputStream()));
+
+ String inputLine;
+ StringBuffer response = new StringBuffer();
+ while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+ }
+ in.close();
+ // print result
+ System.out.println(response.toString());
+ ObjectMapper mapper = new ObjectMapper();
+ ;
+ UserSmall value = mapper.readValue(response.toString(), UserSmall.class);
+ System.out.println("user SMALL " + value);
+
+ return null;
+ } else {
+ System.out.println("GET request not worked");
+ }
+
+ return null;
+ }
+
+ public static UserSmall getUserOAuth(long userId, String token) throws IOException {
+ // check Token:
+ URL obj = new URL(WebLauncher.getOAuthURI() + "users/check_token?id=" + userId + "&token=" + token);
+ HttpURLConnection con = (HttpURLConnection) obj.openConnection();
+ con.setRequestMethod("GET");
+ con.setRequestProperty("User-Agent", "karideo");
+ con.setRequestProperty("Cache-Control", "no-cache");
+ con.setRequestProperty("Content-Type", "application/json");
+ con.setRequestProperty("Accept", "application/json");
+ int responseCode = con.getResponseCode();
+
+ System.out.println("GET Response Code :: " + responseCode);
+ if (responseCode == HttpURLConnection.HTTP_OK) { // success
+ BufferedReader in = new BufferedReader(new InputStreamReader(
+ con.getInputStream()));
+
+ String inputLine;
+ StringBuffer response = new StringBuffer();
+ while ((inputLine = in.readLine()) != null) {
+ response.append(inputLine);
+ }
+ in.close();
+ // print result
+ System.out.println(response.toString());
+ ObjectMapper mapper = new ObjectMapper();
+ ;
+ return mapper.readValue(response.toString(), UserSmall.class);
+ }
+ System.out.println("GET request not worked");
+ return null;
+ }
+
+ public static User getUserOrCreate(UserSmall userOAuth) {
+ User user = getUsers(userOAuth.id);
+ if (user != null) {
+ boolean blocked = false;
+ boolean removed = false;
+ if (userOAuth.authorisationLevel == State.BLOCKED) {
+ blocked = true;
+ } else if (userOAuth.authorisationLevel == State.REMOVED) {
+ removed = true;
+ }
+ if (user.email != userOAuth.email || user.login != userOAuth.login || user.blocked != blocked || user.removed != removed) {
+ updateUsersInfoFromOAuth(userOAuth.id, userOAuth.email, userOAuth.login, blocked, removed);
+ } else {
+ updateUsersConnectionTime(userOAuth.id);
+ }
+ return getUsers(userOAuth.id);
+ } else {
+ if (userOAuth.authorisationLevel == State.BLOCKED) {
+ return null;
+ } else if (userOAuth.authorisationLevel == State.REMOVED) {
+ return null;
+ }
+ createUsersInfoFromOAuth(userOAuth.id, userOAuth.email, userOAuth.login);
+ }
+ return getUsers(userOAuth.id);
+ }
+
+ private static void updateUsersConnectionTime(long userId) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "UPDATE `user` SET `lastConnection`=now(3) WHERE `id` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, userId);
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ }
+
+ private static void updateUsersInfoFromOAuth(long userId, String email, String login, boolean blocked, boolean removed) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "UPDATE `user` SET `login`=?, `email`=?, `lastConnection`=now(3), `blocked`=?, `removed`=? WHERE id = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setString(1, login);
+ ps.setString(2, email);
+ ps.setString(3, blocked ? "TRUE" : "FALSE");
+ ps.setString(4, removed ? "TRUE" : "FALSE");
+ ps.setLong(5, userId);
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ }
+
+ private static void createUsersInfoFromOAuth(long userId, String email, String login) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "INSERT INTO `user` (`id`, `login`, `email`, `lastConnection`, `admin`, `blocked`, `removed`) VALUE (?,?,?,now(3),'FALSE','FALSE','FALSE')";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, userId);
+ ps.setString(2, login);
+ ps.setString(3, email);
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ }
+
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/back/src/org/kar/karideo/WebLauncher.java b/back/src/org/kar/karideo/WebLauncher.java
new file mode 100755
index 0000000..490b361
--- /dev/null
+++ b/back/src/org/kar/karideo/WebLauncher.java
@@ -0,0 +1,108 @@
+package org.kar.karideo;
+
+import org.glassfish.grizzly.http.server.HttpServer;
+import org.glassfish.jersey.grizzly2.httpserver.GrizzlyHttpServerFactory;
+import org.glassfish.jersey.media.multipart.MultiPartFeature;
+import org.glassfish.jersey.server.ResourceConfig;
+import org.kar.karideo.api.*;
+import org.kar.karideo.db.DBConfig;
+import org.glassfish.jersey.jackson.JacksonFeature;
+
+import javax.ws.rs.core.UriBuilder;
+import java.io.FileInputStream;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.net.URI;
+import java.util.Properties;
+
+
+public class WebLauncher {
+ public static DBConfig dbConfig;
+ private WebLauncher() {
+ }
+
+ private static URI getBaseURI() {
+ return UriBuilder.fromUri(ConfigVariable.getlocalAddress()).build();
+ }
+
+ public static String getOAuthURI() {
+ return ConfigVariable.getRestOAuthServer();
+ }
+
+ public static void main(String[] args) {
+ try {
+ FileInputStream propFile = new FileInputStream( "/application/properties.txt");
+ Properties p = new Properties(System.getProperties());
+ p.load(propFile);
+ for (String name : p.stringPropertyNames()) {
+ String value = p.getProperty(name);
+ // inject property if not define in cmdline:
+ if (System.getProperty(name) == null) {
+ System.setProperty(name, value);
+ }
+ }
+ } catch (FileNotFoundException e) {
+ System.out.println("File of environment variable not found: 'properties.txt'");
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+
+ ResourceConfig rc = new ResourceConfig();
+ // add multipart models ..
+ //rc.register(new MultiPartFeature());
+ //rc.register(new InjectionBinder());
+ rc.register(new MultiPartFeature());
+ //rc.register(new MyFileUploader());
+ // global authentication system
+ rc.register(new OptionFilter());
+ // remove cors ==> all time called by an other system...
+ rc.register(new CORSFilter());
+ // global authentication system
+ rc.register(new AuthenticationFilter());
+ // add default resource:
+ rc.registerClasses(UserResource.class);
+ rc.registerClasses(SeriesResource.class);
+ rc.registerClasses(DataResource.class);
+ rc.registerClasses(SeasonResource.class);
+ rc.registerClasses(TypeResource.class);
+ rc.registerClasses(UniverseResource.class);
+ rc.registerClasses(VideoResource.class);
+ // add jackson to be discovenr when we are ins standalone server
+ rc.register(JacksonFeature.class);
+ // enable this to show low level request
+ //rc.property(LoggingFeature.LOGGING_FEATURE_LOGGER_LEVEL_SERVER, Level.WARNING.getName());
+
+ System.out.println("Connect on the BDD:");
+ System.out.println(" getDBHost: '" + ConfigVariable.getDBHost() + "'");
+ System.out.println(" getDBPort: '" + ConfigVariable.getDBPort() + "'");
+ System.out.println(" getDBLogin: '" + ConfigVariable.getDBLogin() + "'");
+ System.out.println(" getDBPassword: '" + ConfigVariable.getDBPassword() + "'");
+ System.out.println(" getDBName: '" + ConfigVariable.getDBName() + "'");
+ dbConfig = new DBConfig(ConfigVariable.getDBHost(),
+ Integer.parseInt(ConfigVariable.getDBPort()),
+ ConfigVariable.getDBLogin(),
+ ConfigVariable.getDBPassword(),
+ ConfigVariable.getDBName());
+ System.out.println(" ==> " + dbConfig);
+ System.out.println("OAuth service " + ConfigVariable.getRestOAuthServer());
+ HttpServer server = GrizzlyHttpServerFactory.createHttpServer(getBaseURI(), rc);
+ Runtime.getRuntime().addShutdownHook(new Thread(new Runnable() {
+ @Override
+ public void run() {
+ System.out.println("Stopping server..");
+ server.shutdownNow();
+ }
+ }, "shutdownHook"));
+
+ // run
+ try {
+ server.start();
+ System.out.println("Jersey app started at " + getBaseURI());
+ System.out.println("Press CTRL^C to exit..");
+ Thread.currentThread().join();
+ } catch (Exception e) {
+ System.out.println("There was an error while starting Grizzly HTTP server.");
+ e.printStackTrace();
+ }
+ }
+}
diff --git a/back/src/org/kar/karideo/api/DataResource.java b/back/src/org/kar/karideo/api/DataResource.java
new file mode 100644
index 0000000..ceb77bd
--- /dev/null
+++ b/back/src/org/kar/karideo/api/DataResource.java
@@ -0,0 +1,507 @@
+package org.kar.karideo.api;
+
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.kar.karideo.ConfigVariable;
+import org.kar.karideo.WebLauncher;
+import org.kar.karideo.db.DBEntry;
+import org.kar.karideo.model.Data;
+import org.kar.karideo.model.DataSmall;
+
+import javax.annotation.security.PermitAll;
+import javax.imageio.ImageIO;
+import javax.ws.rs.*;
+import javax.ws.rs.core.HttpHeaders;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.StreamingOutput;
+import java.awt.*;
+import java.awt.image.BufferedImage;
+import java.io.*;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.nio.file.StandardCopyOption;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.Date;
+
+
+// https://stackoverflow.com/questions/35367113/jersey-webservice-scalable-approach-to-download-file-and-reply-to-client
+// https://gist.github.com/aitoroses/4f7a2b197b732a6a691d
+
+@Path("/data")
+@PermitAll
+@Produces({MediaType.APPLICATION_JSON})
+public class DataResource {
+ private final static int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
+ private final static int CHUNK_SIZE_IN = 50 * 1024 * 1024; // 50MB upload buffer
+ /**
+ * Counter used to generate unique temporary ids for uploaded data.
+ */
+ private static long tmpFolderId = 1;
+
+ private static void createFolder(String path) throws IOException {
+ if (!Files.exists(java.nio.file.Path.of(path))) {
+ //Log.print("Create folder: " + path);
+ Files.createDirectories(java.nio.file.Path.of(path));
+ }
+ }
+
+ public static long getTmpDataId() {
+ return tmpFolderId++;
+ }
+
+ public static String getTmpFileInData(long tmpFolderId) {
+ String filePath = ConfigVariable.getTmpDataFolder() + File.separator + tmpFolderId;
+ try {
+ createFolder(ConfigVariable.getTmpDataFolder() + File.separator);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return filePath;
+ }
+
+ public static String getFileData(long tmpFolderId) {
+ String filePath = ConfigVariable.getMediaDataFolder() + File.separator + tmpFolderId + File.separator + "data";
+ try {
+ createFolder(ConfigVariable.getMediaDataFolder() + File.separator + tmpFolderId + File.separator);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ return filePath;
+ }
+
+ public static Data getWithSha512(String sha512) {
+ System.out.println("find sha512 = " + sha512);
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT `id`, `deleted`, `sha512`, `mime_type`, `size` FROM `data` WHERE `sha512` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setString(1, sha512);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ Data out = new Data(rs);
+ entry.disconnect();
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ return null;
+
+ }
+
+ public static Data getWithId(long id) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT `id`, `deleted`, `sha512`, `mime_type`, `size` FROM `data` WHERE `deleted` = false AND `id` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, id);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ Data out = new Data(rs);
+ entry.disconnect();
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ return null;
+
+ }
+
+ public static Data createNewData(long tmpUID, String originalFileName, String sha512) throws IOException, SQLException {
+ // determine mime type:
+ String mimeType = "";
+ String extension = originalFileName.substring(originalFileName.lastIndexOf('.') + 1);
+ switch (extension.toLowerCase()) {
+ case "jpg":
+ case "jpeg":
+ mimeType = "image/jpeg";
+ break;
+ case "png":
+ mimeType = "image/png";
+ break;
+ case "webp":
+ mimeType = "image/webp";
+ break;
+ case "mka":
+ mimeType = "audio/x-matroska";
+ break;
+ case "mkv":
+ mimeType = "video/x-matroska";
+ break;
+ case "webm":
+ mimeType = "video/webm";
+ break;
+ default:
+ throw new IOException("Can not find the mime type of data input: '" + extension + "'");
+ }
+ String tmpPath = getTmpFileInData(tmpUID);
+ long fileSize = Files.size(Paths.get(tmpPath));
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ long uniqueSQLID = -1;
+ try {
+ // prepare the request:
+ String query = "INSERT INTO `data` (`sha512`, `mime_type`, `size`, `original_name`) VALUES (?, ?, ?, ?)";
+ PreparedStatement ps = entry.connection.prepareStatement(query,
+ Statement.RETURN_GENERATED_KEYS);
+ int iii = 1;
+ ps.setString(iii++, sha512);
+ ps.setString(iii++, mimeType);
+ ps.setLong(iii++, fileSize);
+ ps.setString(iii++, originalFileName);
+ // execute the request
+ int affectedRows = ps.executeUpdate();
+ if (affectedRows == 0) {
+ throw new SQLException("Creating data failed, no rows affected.");
+ }
+ // retrieve the inserted uid
+ try (ResultSet generatedKeys = ps.getGeneratedKeys()) {
+ if (generatedKeys.next()) {
+ uniqueSQLID = generatedKeys.getLong(1);
+ } else {
+ throw new SQLException("Creating user failed, no ID obtained (1).");
+ }
+ } catch (Exception ex) {
+ System.out.println("Can not get the UID key inserted ... ");
+ ex.printStackTrace();
+ throw new SQLException("Creating user failed, no ID obtained (2).");
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ entry.disconnect();
+ System.out.println("Add Data raw done. uid data=" + uniqueSQLID);
+ Data out = getWithId(uniqueSQLID);
+
+ String mediaPath = getFileData(out.id);
+ System.out.println("src = " + tmpPath);
+ System.out.println("dst = " + mediaPath);
+ Files.move(Paths.get(tmpPath), Paths.get(mediaPath), StandardCopyOption.ATOMIC_MOVE);
+
+ System.out.println("Move done");
+ // all is done, the file is correctly installed...
+
+ return out;
+ }
+
+ public static void undelete(Long id) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "UPDATE `data` SET `deleted` = false WHERE `id` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, id);
+ ps.execute();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ }
+
+ static String saveTemporaryFile(InputStream uploadedInputStream, long idData) {
+ return saveFile(uploadedInputStream, DataResource.getTmpFileInData(idData));
+ }
+
+ static void removeTemporaryFile(long idData) {
+ String filepath = DataResource.getTmpFileInData(idData);
+ if (Files.exists(Paths.get(filepath))) {
+ try {
+ Files.delete(Paths.get(filepath));
+ } catch (IOException e) {
+ System.out.println("can not delete temporary file : " + Paths.get(filepath));
+ e.printStackTrace();
+ }
+ }
+ }
+
+ // save uploaded file to a defined location on the server
+ static String saveFile(InputStream uploadedInputStream, String serverLocation) {
+ String out = "";
+ try {
+ OutputStream outpuStream = new FileOutputStream(new File(
+ serverLocation));
+ int read = 0;
+ byte[] bytes = new byte[CHUNK_SIZE_IN];
+ MessageDigest md = MessageDigest.getInstance("SHA-512");
+
+ outpuStream = new FileOutputStream(new File(serverLocation));
+ while ((read = uploadedInputStream.read(bytes)) != -1) {
+ //System.out.println("write " + read);
+ md.update(bytes, 0, read);
+ outpuStream.write(bytes, 0, read);
+ }
+ System.out.println("Flush input stream ... " + serverLocation);
+ System.out.flush();
+ outpuStream.flush();
+ outpuStream.close();
+ // finalize the sha512 digest
+ byte[] sha512Digest = md.digest();
+ // convert to hexadecimal
+ out = bytesToHex(sha512Digest);
+ uploadedInputStream.close();
+ } catch (IOException ex) {
+ System.out.println("Can not write in temporary file ... ");
+ ex.printStackTrace();
+ } catch (NoSuchAlgorithmException ex) {
+ System.out.println("Can not find sha512 algorithms");
+ ex.printStackTrace();
+ }
+ return out;
+ }
+
+ // curl http://localhost:9993/api/users/3
+ //@Secured
+ /*
+ @GET
+ @Path("{id}")
+ //@RolesAllowed("GUEST")
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response retriveData(@HeaderParam("Range") String range, @PathParam("id") Long id) throws Exception {
+ return retriveDataFull(range, id, "no-name");
+ }
+ */
+
+ public static String bytesToHex(byte[] bytes) {
+ StringBuilder sb = new StringBuilder();
+ for (byte b : bytes) {
+ sb.append(String.format("%02x", b));
+ }
+ return sb.toString();
+ }
+
+
+/*
+ @POST
+ @Path("/upload")
+ @Consumes(MediaType.MULTIPART_FORM_DATA)
+ public Response uploadFile(FormDataMultiPart form) {
+
+ FormDataBodyPart filePart = form.getField("file");
+
+ ContentDisposition headerOfFilePart = filePart.getContentDisposition();
+
+ InputStream fileInputStream = filePart.getValueAs(InputStream.class);
+
+ String filePath = ConfigVariable.getTmpDataFolder() + File.separator + tmpFolderId++;
+ //headerOfFilePart.getFileName();
+
+ // save the file to the server
+ saveFile(fileInputStream, filePath);
+
+ String output = "File saved to server location using FormDataMultiPart : " + filePath;
+
+ return Response.status(200).entity(output).build();
+
+ }
+*/
+
+ public DataSmall getSmall(Long id) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT `id`, `sha512`, `mime_type`, `size` FROM `data` WHERE `deleted` = false AND `id` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, id);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ DataSmall out = new DataSmall(rs);
+ entry.disconnect();
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ return null;
+ }
+
+ @POST
+ @Path("/upload/")
+ @Consumes({MediaType.MULTIPART_FORM_DATA})
+ public Response uploadFile(@FormDataParam("file") InputStream fileInputStream, @FormDataParam("file") FormDataContentDisposition fileMetaData) {
+ //public NodeSmall uploadFile(final FormDataMultiPart form) {
+ System.out.println("Upload file: ");
+ String filePath = ConfigVariable.getTmpDataFolder() + File.separator + tmpFolderId++;
+ try {
+ createFolder(ConfigVariable.getTmpDataFolder() + File.separator);
+ } catch (IOException e) {
+ e.printStackTrace();
+ }
+ saveFile(fileInputStream, filePath);
+ return Response.ok("Data uploaded successfully !!").build();
+ //return null;
+ }
+
+ //@Secured
+ @GET
+ @Path("{id}")
+ //@RolesAllowed("GUEST")
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response retriveDataId(/*@Context SecurityContext sc,*/ @HeaderParam("Range") String range, @PathParam("id") Long id) throws Exception {
+ /*
+ GenericContext gc = (GenericContext) sc.getUserPrincipal();
+ System.out.println("===================================================");
+ System.out.println("== USER get data ? " + gc.user);
+ System.out.println("===================================================");
+ */
+ DataSmall value = getSmall(id);
+ if (value == null) {
+ Response.status(404).
+ entity("media NOT FOUND: " + id).
+ type("text/plain").
+ build();
+ }
+ return buildStream(ConfigVariable.getMediaDataFolder() + File.separator + id + File.separator + "data", range, value.mimeType);
+ }
+ //@Secured
+ @GET
+ @Path("thumbnail/{id}")
+ //@RolesAllowed("GUEST")
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response retriveDataThumbnailId(/*@Context SecurityContext sc,*/ @HeaderParam("Range") String range, @PathParam("id") Long id) throws Exception {
+ /*
+ GenericContext gc = (GenericContext) sc.getUserPrincipal();
+ System.out.println("===================================================");
+ System.out.println("== USER get data ? " + gc.user);
+ System.out.println("===================================================");
+ */
+ DataSmall value = getSmall(id);
+ if (value == null) {
+ Response.status(404).
+ entity("media NOT FOUND: " + id).
+ type("text/plain").
+ build();
+ }
+ String filePathName = ConfigVariable.getMediaDataFolder() + File.separator + id + File.separator + "data";
+ if ( value.mimeType.contentEquals("image/jpeg")
+ || value.mimeType.contentEquals("image/png")
+ // || value.mimeType.contentEquals("image/webp")
+ ) {
+ // reads input image
+ File inputFile = new File(filePathName);
+ BufferedImage inputImage = ImageIO.read(inputFile);
+ int scaledWidth = 250;
+ int scaledHeight = (int)((float)inputImage.getHeight() / (float)inputImage.getWidth() * (float) scaledWidth);
+ // creates output image
+ BufferedImage outputImage = new BufferedImage(scaledWidth,
+ scaledHeight, inputImage.getType());
+
+ // scales the input image to the output image
+ Graphics2D g2d = outputImage.createGraphics();
+ g2d.drawImage(inputImage, 0, 0, scaledWidth, scaledHeight, null);
+ g2d.dispose();
+ // create the output stream:
+ ByteArrayOutputStream baos = new ByteArrayOutputStream();
+ ImageIO.write(outputImage, "JPG", baos);
+ byte[] imageData = baos.toByteArray();
+ Response.ok(new ByteArrayInputStream(imageData)).build();
+ Response.ResponseBuilder out = Response.ok(imageData)
+ .header(HttpHeaders.CONTENT_LENGTH, imageData.length);
+ out.type("image/jpeg");
+ return out.build();
+ }
+ return buildStream(filePathName, range, value.mimeType);
+ }
+ //@Secured
+ @GET
+ @Path("{id}/{name}")
+ //@RolesAllowed("GUEST")
+ @Produces(MediaType.APPLICATION_OCTET_STREAM)
+ public Response retriveDataFull(/*@Context SecurityContext sc,*/ @HeaderParam("Range") String range, @PathParam("id") Long id, @PathParam("name") String name) throws Exception {
+ /*
+ GenericContext gc = (GenericContext) sc.getUserPrincipal();
+ System.out.println("===================================================");
+ System.out.println("== USER get data ? " + gc.user);
+ System.out.println("===================================================");
+ */
+ DataSmall value = getSmall(id);
+ if (value == null) {
+ Response.status(404).
+ entity("media NOT FOUND: " + id).
+ type("text/plain").
+ build();
+ }
+ return buildStream(ConfigVariable.getMediaDataFolder() + File.separator + id + File.separator + "data", range, value.mimeType);
+ }
+
+ /**
+ * Adapted from http://stackoverflow.com/questions/12768812/video-streaming-to-ipad-does-not-work-with-tapestry5/12829541#12829541
+ *
+ * @param range range header
+ * @return Streaming output
+ * @throws Exception if an IOException occurs in streaming.
+ */
+ private Response buildStream(final String filename, final String range, String mimeType) throws Exception {
+ File file = new File(filename);
+ //System.out.println("request range : " + range);
+ // range not requested : Firefox does not send range headers
+ if (range == null) {
+ final StreamingOutput output = new StreamingOutput() {
+ @Override
+ public void write(OutputStream out) {
+ try (FileInputStream in = new FileInputStream(file)) {
+ byte[] buf = new byte[1024 * 1024];
+ int len;
+ while ((len = in.read(buf)) != -1) {
+ try {
+ out.write(buf, 0, len);
+ out.flush();
+ //System.out.println("---- wrote " + len + " bytes file ----");
+ } catch (IOException ex) {
+ System.out.println("remote close connection");
+ break;
+ }
+ }
+ } catch (IOException ex) {
+ throw new InternalServerErrorException(ex);
+ }
+ }
+ };
+ Response.ResponseBuilder out = Response.ok(output)
+ .header(HttpHeaders.CONTENT_LENGTH, file.length());
+ if (mimeType != null) {
+ out.type(mimeType);
+ }
+ return out.build();
+
+ }
+
+ String[] ranges = range.split("=")[1].split("-");
+ final long from = Long.parseLong(ranges[0]);
+
+ //System.out.println("request range : " + ranges.length);
+ //Chunk media if the range upper bound is unspecified. Chrome and Opera send "bytes=0-"
+ long to = CHUNK_SIZE + from;
+ if (ranges.length == 1) {
+ to = file.length() - 1;
+ } else {
+ if (to >= file.length()) {
+ to = (long) (file.length() - 1);
+ }
+ }
+ final String responseRange = String.format("bytes %d-%d/%d", from, to, file.length());
+ //System.out.println("responseRange : " + responseRange);
+ final RandomAccessFile raf = new RandomAccessFile(file, "r");
+ raf.seek(from);
+
+ final long len = to - from + 1;
+ final MediaStreamer streamer = new MediaStreamer(len, raf);
+ Response.ResponseBuilder out = Response.ok(streamer)
+ .status(Response.Status.PARTIAL_CONTENT)
+ .header("Accept-Ranges", "bytes")
+ .header("Content-Range", responseRange)
+ .header(HttpHeaders.CONTENT_LENGTH, streamer.getLenth())
+ .header(HttpHeaders.LAST_MODIFIED, new Date(file.lastModified()));
+ if (mimeType != null) {
+ out.type(mimeType);
+ }
+ return out.build();
+ }
+
+}
diff --git a/back/src/org/kar/karideo/api/MediaStreamer.java b/back/src/org/kar/karideo/api/MediaStreamer.java
new file mode 100644
index 0000000..ce129b2
--- /dev/null
+++ b/back/src/org/kar/karideo/api/MediaStreamer.java
@@ -0,0 +1,71 @@
+package org.kar.karideo.api;
+
+import javax.ws.rs.InternalServerErrorException;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.StreamingOutput;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.io.RandomAccessFile;
+
+public class MediaStreamer implements StreamingOutput {
+ private final int CHUNK_SIZE = 1024 * 1024; // 1MB chunks
+ final byte[] buf = new byte[CHUNK_SIZE];
+ private long length;
+ private RandomAccessFile raf;
+
+ public MediaStreamer(long length, RandomAccessFile raf) throws IOException {
+ //System.out.println("request stream of " + length / 1024 + " data");
+ if (length<0) {
+ throw new IOException("Wrong size of the file to stream: " + length);
+ }
+ this.length = length;
+ this.raf = raf;
+ }
+
+ /*
+ public void write(OutputStream out) {
+ try (FileInputStream in = new FileInputStream(file)) {
+ byte[] buf = new byte[1024*1024];
+ int len;
+ while ((len = in.read(buf)) != -1) {
+ out.write(buf, 0, len);
+ out.flush();
+ //System.out.println("---- wrote " + len + " bytes file ----");
+ }
+ } catch (IOException ex) {
+ throw new InternalServerErrorException(ex);
+ }
+ }
+ */
+ @Override
+ public void write(OutputStream outputStream) {
+ try {
+ while (length != 0) {
+ int read = raf.read(buf, 0, buf.length > length ? (int) length : buf.length);
+ try {
+ outputStream.write(buf, 0, read);
+ } catch (IOException ex) {
+ System.out.println("remote close connection");
+ break;
+ }
+ length -= read;
+ }
+ } catch (IOException ex) {
+ throw new InternalServerErrorException(ex);
+ } catch (WebApplicationException ex) {
+ throw new InternalServerErrorException(ex);
+ } finally {
+ try {
+ raf.close();
+ } catch (IOException ex) {
+ ex.printStackTrace();
+ throw new InternalServerErrorException(ex);
+ }
+ }
+ }
+
+ public long getLenth() {
+ return length;
+ }
+
+}
diff --git a/back/src/org/kar/karideo/api/NodeInterface.java b/back/src/org/kar/karideo/api/NodeInterface.java
new file mode 100644
index 0000000..301b90c
--- /dev/null
+++ b/back/src/org/kar/karideo/api/NodeInterface.java
@@ -0,0 +1,472 @@
+package org.kar.karideo.api;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.kar.karideo.WebLauncher;
+import org.kar.karideo.db.DBEntry;
+import org.kar.karideo.model.Data;
+import org.kar.karideo.model.NodeSmall;
+
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+public class NodeInterface {
+ /* a nice-looking SQL experiment, kept for reference...
+ SELECT node.id,
+ node.name,
+ node.description,
+ node.parent_id,
+ GROUP_CONCAT(cover_link_node.data_id SEPARATOR '-') as covers
+ FROM node, cover_link_node
+ WHERE node.deleted = false AND cover_link_node.deleted = false AND node.type = "TYPE" AND cover_link_node.node_id = node.id
+ GROUP BY node.id
+ ORDER BY node.name
+
+ // good, but it is better to do a left join with pre-filtering ...
+
+
+ SELECT node.id,
+ node.name,
+ node.description,
+ node.parent_id,
+ cover_link_node.data_id
+ FROM node
+ LEFT JOIN cover_link_node
+ ON node.id = cover_link_node.node_id
+ AND cover_link_node.deleted = false
+ WHERE node.deleted = false
+ AND node.type = "TYPE"
+ ORDER BY node.name
+
+ // does not work:
+ SELECT node.id,
+ node.name,
+ node.description,
+ node.parent_id,
+ `extract.covers`
+ FROM node
+ LEFT JOIN (SELECT tmp.node_id,
+ GROUP_CONCAT(`tmp.data_id` SEPARATOR '-') as `covers`
+ FROM cover_link_node tmp
+ WHERE tmp.deleted = false
+ GROUP BY tmp.node_id) extract
+ ON node.id = extract.node_id
+ WHERE node.deleted = false
+ AND node.type = "TYPE"
+ ORDER BY node.name
+
+ // and finally a version that works ...
+ SELECT node.id,
+ node.name,
+ node.description,
+ node.parent_id,
+ (SELECT GROUP_CONCAT(tmp.data_id)
+ FROM cover_link_node tmp
+ WHERE tmp.deleted = false
+ AND node.id = tmp.node_id
+ GROUP BY tmp.node_id) AS covers
+ FROM node
+ WHERE node.deleted = false
+ AND node.type = "TYPE"
+ ORDER BY node.name
+
+ */
+ public static List get(String typeInNode) {
+ System.out.println(typeInNode + " get");
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ List out = new ArrayList<>();
+ String query = "SELECT node.id," +
+ " node.name," +
+ " node.description," +
+ " node.parent_id," +
+ " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" +
+ " FROM cover_link_node tmp" +
+ " WHERE tmp.deleted = false" +
+ " AND node.id = tmp.node_id" +
+ " GROUP BY tmp.node_id) AS covers" +
+ " FROM node" +
+ " WHERE node.deleted = false " +
+ " AND node.type = ?" +
+ " ORDER BY node.name";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ ps.setString(iii++, typeInNode);
+ ResultSet rs = ps.executeQuery();
+ while (rs.next()) {
+ out.add(new NodeSmall(rs));
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ System.out.println("retrieve " + out.size() + " " + typeInNode);
+ return out;
+ }
+
+ public static NodeSmall getWithId(String typeInNode, long id) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT node.id," +
+ " node.name," +
+ " node.description," +
+ " node.parent_id," +
+ " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" +
+ " FROM cover_link_node tmp" +
+ " WHERE tmp.deleted = false" +
+ " AND node.id = tmp.node_id" +
+ " GROUP BY tmp.node_id) AS covers" +
+ " FROM node" +
+ " WHERE node.deleted = false " +
+ " AND node.type = ?" +
+ " AND node.id = ?" +
+ " ORDER BY node.name";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ ps.setString(iii++, typeInNode);
+ ps.setLong(iii++, id);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ NodeSmall out = new NodeSmall(rs);
+ entry.disconnect();
+ entry = null;
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ return null;
+ }
+
+ public static List getWithName(String typeInNode, String name) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ List out = new ArrayList<>();
+ String query = "SELECT node.id," +
+ " node.name," +
+ " node.description," +
+ " node.parent_id," +
+ " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" +
+ " FROM cover_link_node tmp" +
+ " WHERE tmp.deleted = false" +
+ " AND node.id = tmp.node_id" +
+ " GROUP BY tmp.node_id) AS covers" +
+ " FROM node" +
+ " WHERE node.deleted = false " +
+ " AND node.type = ?" +
+ " AND node.name = ?" +
+ " ORDER BY node.name";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ ps.setString(iii++, typeInNode);
+ ps.setString(iii++, name);
+ ResultSet rs = ps.executeQuery();
+ while (rs.next()) {
+ out.add(new NodeSmall(rs));
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ return out;
+ }
+
+ public static NodeSmall getWithNameAndParent(String typeInNode, String name, long parentId) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT node.id," +
+ " node.name," +
+ " node.description," +
+ " node.parent_id," +
+ " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" +
+ " FROM cover_link_node tmp" +
+ " WHERE tmp.deleted = false" +
+ " AND node.id = tmp.node_id" +
+ " GROUP BY tmp.node_id) AS covers" +
+ " FROM node" +
+ " WHERE node.deleted = false " +
+ " AND node.type = ?" +
+ " AND node.name = ?" +
+ " AND node.parent_id = ?" +
+ " ORDER BY node.name";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ ps.setString(iii++, typeInNode);
+ ps.setString(iii++, name);
+ ps.setLong(iii++, parentId);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ NodeSmall out = new NodeSmall(rs);
+ entry.disconnect();
+ entry = null;
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ return null;
+ }
+
+ public static NodeSmall createNode(String typeInNode, String name, String descrition, Long parentId) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ long uniqueSQLID = -1;
+ // real add in the BDD:
+ try {
+ // prepare the request:
+ String query = "INSERT INTO node (`type`, `name`, `description`, `parent_id`) VALUES (?, ?, ?, ?)";
+ PreparedStatement ps = entry.connection.prepareStatement(query,
+ Statement.RETURN_GENERATED_KEYS);
+ int iii = 1;
+ ps.setString(iii++, typeInNode);
+ ps.setString(iii++, name);
+ if (descrition == null) {
+ ps.setNull(iii++, Types.VARCHAR);
+ } else {
+ ps.setString(iii++, descrition);
+ }
+ if (parentId == null) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, parentId);
+ }
+ // execute the request
+ int affectedRows = ps.executeUpdate();
+ if (affectedRows == 0) {
+ throw new SQLException("Creating node failed, no rows affected.");
+ }
+ // retrieve the inserted uid
+ try (ResultSet generatedKeys = ps.getGeneratedKeys()) {
+ if (generatedKeys.next()) {
+ uniqueSQLID = generatedKeys.getLong(1);
+ } else {
+ throw new SQLException("Creating node failed, no ID obtained (1).");
+ }
+ } catch (Exception ex) {
+ System.out.println("Can not get the UID key inserted ... ");
+ ex.printStackTrace();
+ throw new SQLException("Creating node failed, no ID obtained (2).");
+ }
+ ps.execute();
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ return getWithId(typeInNode, uniqueSQLID);
+
+ }
+
+ public static NodeSmall getOrCreate(String typeInNode, String name, Long parentId) {
+ if (name == null || name.isEmpty()) {
+ return null;
+ }
+ NodeSmall node = getWithNameAndParent(typeInNode, name, parentId);
+ if (node != null) {
+ return node;
+ }
+ return createNode(typeInNode, name, null, parentId);
+ }
+
+ static private String multipartCorrection(String data) {
+ if (data == null) {
+ return null;
+ }
+ if (data.isEmpty()) {
+ return null;
+ }
+ if (data.contentEquals("null")) {
+ return null;
+ }
+ return data;
+ }
+
+ static public Response uploadCover(String typeInNode,
+ Long nodeId,
+ String file_name,
+ InputStream fileInputStream,
+ FormDataContentDisposition fileMetaData
+ ) {
+ try {
+ // normalize the multipart string input (empty or "null" becomes null):
+ file_name = multipartCorrection(file_name);
+
+ //public NodeSmall uploadFile(final FormDataMultiPart form) {
+ System.out.println("Upload media file: " + fileMetaData);
+ System.out.println(" - id: " + nodeId);
+ System.out.println(" - file_name: " + file_name);
+ System.out.println(" - fileInputStream: " + fileInputStream);
+ System.out.println(" - fileMetaData: " + fileMetaData);
+ System.out.flush();
+ NodeSmall media = getWithId(typeInNode, nodeId);
+ if (media == null) {
+ return Response.notModified("Media Id does not exist or removed...").build();
+ }
+
+ long tmpUID = DataResource.getTmpDataId();
+ String sha512 = DataResource.saveTemporaryFile(fileInputStream, tmpUID);
+ Data data = DataResource.getWithSha512(sha512);
+ if (data == null) {
+ System.out.println("Need to add the data in the BDD ... ");
+ System.out.flush();
+ try {
+ data = DataResource.createNewData(tmpUID, file_name, sha512);
+ } catch (IOException ex) {
+ DataResource.removeTemporaryFile(tmpUID);
+ ex.printStackTrace();
+ return Response.notModified("can not create input media").build();
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ DataResource.removeTemporaryFile(tmpUID);
+ return Response.notModified("Error in SQL insertion ...").build();
+ }
+ } else if (data.deleted == true) {
+ System.out.println("Data already exist but deleted");
+ System.out.flush();
+ DataResource.undelete(data.id);
+ data.deleted = false;
+ } else {
+ System.out.println("Data already exist ... all good");
+ System.out.flush();
+ }
+ // First step: retrieve all the Ids of each parent:...
+ System.out.println("Find typeNode");
+
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ long uniqueSQLID = -1;
+ // real add in the BDD:
+ try {
+ // prepare the request:
+ String query = "INSERT INTO cover_link_node (create_date, modify_date, node_id, data_id)" +
+ " VALUES (now(3), now(3), ?, ?)";
+ PreparedStatement ps = entry.connection.prepareStatement(query,
+ Statement.RETURN_GENERATED_KEYS);
+ int iii = 1;
+ ps.setLong(iii++, media.id);
+ ps.setLong(iii++, data.id);
+ // execute the request
+ int affectedRows = ps.executeUpdate();
+ if (affectedRows == 0) {
+ throw new SQLException("Creating data failed, no rows affected.");
+ }
+ // retrieve the inserted uid
+ try (ResultSet generatedKeys = ps.getGeneratedKeys()) {
+ if (generatedKeys.next()) {
+ uniqueSQLID = generatedKeys.getLong(1);
+ } else {
+ throw new SQLException("Creating user failed, no ID obtained (1).");
+ }
+ } catch (Exception ex) {
+ System.out.println("Can not get the UID key inserted ... ");
+ ex.printStackTrace();
+ throw new SQLException("Creating user failed, no ID obtained (2).");
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ entry.disconnect();
+ return Response.serverError().build();
+ }
+ // if we do not use the file, remove it ... otherwise this is a storage leak...
+ DataResource.removeTemporaryFile(tmpUID);
+ System.out.println("uploaded .... compleate: " + uniqueSQLID);
+ return Response.ok(getWithId(typeInNode, nodeId)).build();
+ } catch (Exception ex) {
+ System.out.println("Cat ann unexpected error ... ");
+ ex.printStackTrace();
+ }
+ return Response.serverError().build();
+ }
+ static public Response removeCover(String typeInNode, Long nodeId, Long coverId) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "UPDATE `cover_link_node` SET `modify_date`=now(3), `deleted`=true WHERE `node_id` = ? AND `data_id` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ ps.setLong(iii++, nodeId);
+ ps.setLong(iii++, coverId);
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ entry.disconnect();
+ return Response.serverError().build();
+ }
+ entry.disconnect();
+ return Response.ok(getWithId(typeInNode, nodeId)).build();
+ }
+
+ static public Response put(String typeInNode, Long id, String jsonRequest) {
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ JsonNode root = mapper.readTree(jsonRequest);
+ String query = "UPDATE `node` SET `modify_date`=now(3)";
+ if (!root.path("name").isMissingNode()) {
+ query += ", `name` = ? ";
+ }
+ if (!root.path("description").isMissingNode()) {
+ query += ", `description` = ? ";
+ }
+ query += " WHERE `id` = ?";
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ if (!root.path("name").isMissingNode()) {
+ if (root.path("name").isNull()) {
+ ps.setString(iii++, "???");
+ } else {
+ ps.setString(iii++, root.path("name").asText());
+ }
+ }
+ if (!root.path("description").isMissingNode()) {
+ if (root.path("description").isNull()) {
+ ps.setNull(iii++, Types.VARCHAR);
+ } else {
+ ps.setString(iii++, root.path("description").asText());
+ }
+ }
+ ps.setLong(iii++, id);
+ System.out.println(" request : " + ps.toString());
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ entry.disconnect();
+ entry = null;
+ return Response.notModified("SQL error").build();
+ }
+ entry.disconnect();
+ entry = null;
+ } catch (IOException e) {
+ e.printStackTrace();
+ return Response.notModified("input json error error").build();
+ }
+ return Response.ok(getWithId(typeInNode, id)).build();
+ }
+
+
+ static public Response delete(String typeInNode, Long nodeId) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "UPDATE `node` SET `modify_date`=now(3), `deleted`=true WHERE `id` = ? AND `type` = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ ps.setLong(iii++, nodeId);
+ ps.setString(iii++, typeInNode);
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ entry.disconnect();
+ return Response.serverError().build();
+ }
+ entry.disconnect();
+ return Response.ok().build();
+ }
+
+}
diff --git a/back/src/org/kar/karideo/api/SeasonResource.java b/back/src/org/kar/karideo/api/SeasonResource.java
new file mode 100644
index 0000000..2328415
--- /dev/null
+++ b/back/src/org/kar/karideo/api/SeasonResource.java
@@ -0,0 +1,67 @@
+package org.kar.karideo.api;
+
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.kar.karideo.model.NodeSmall;
+
+import javax.annotation.security.PermitAll;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.InputStream;
+import java.util.List;
+
+@Path("/season")
+@PermitAll
+@Produces({MediaType.APPLICATION_JSON})
+public class SeasonResource {
+    private static final String typeInNode = "SEASON";
+
+    @GET
+    @Path("{id}")
+    public static NodeSmall getWithId(@PathParam("id") Long id) {
+        return NodeInterface.getWithId(typeInNode, id);
+    }
+
+    public static List<NodeSmall> getWithName(String name) {
+        return NodeInterface.getWithName(typeInNode, name);
+    }
+
+    public static NodeSmall getOrCreate(int season, Long seriesId) {
+        return NodeInterface.getOrCreate(typeInNode, Integer.toString(season), seriesId);
+    }
+
+    @GET
+    public List<NodeSmall> get() {
+        return NodeInterface.get(typeInNode);
+    }
+
+    @PUT
+    @Path("{id}")
+    @Consumes(MediaType.APPLICATION_JSON)
+    public Response put(@PathParam("id") Long id, String jsonRequest) {
+        return NodeInterface.put(typeInNode, id, jsonRequest);
+    }
+
+    @DELETE
+    @Path("{id}")
+    public Response delete(@PathParam("id") Long id) {
+        return NodeInterface.delete(typeInNode, id);
+    }
+
+    @POST
+    @Path("{id}/add_cover")
+    @Consumes({MediaType.MULTIPART_FORM_DATA})
+    public Response uploadCover(@PathParam("id") Long id,
+            @FormDataParam("file_name") String file_name,
+            @FormDataParam("file") InputStream fileInputStream,
+            @FormDataParam("file") FormDataContentDisposition fileMetaData
+    ) {
+        return NodeInterface.uploadCover(typeInNode, id, file_name, fileInputStream, fileMetaData);
+    }
+    @GET
+    @Path("{id}/rm_cover/{cover_id}")
+    public Response removeCover(@PathParam("id") Long nodeId, @PathParam("cover_id") Long coverId) {
+        return NodeInterface.removeCover(typeInNode, nodeId, coverId);
+    }
+}
diff --git a/back/src/org/kar/karideo/api/SeriesResource.java b/back/src/org/kar/karideo/api/SeriesResource.java
new file mode 100644
index 0000000..0f10254
--- /dev/null
+++ b/back/src/org/kar/karideo/api/SeriesResource.java
@@ -0,0 +1,68 @@
+package org.kar.karideo.api;
+
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.kar.karideo.model.NodeSmall;
+
+import javax.annotation.security.PermitAll;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.InputStream;
+import java.util.List;
+
+@Path("/series")
+@PermitAll
+@Produces({MediaType.APPLICATION_JSON})
+public class SeriesResource {
+    private static final String typeInNode = "SERIES";
+
+    @GET
+    @Path("{id}")
+    public static NodeSmall getWithId(@PathParam("id") Long id) {
+        return NodeInterface.getWithId(typeInNode, id);
+    }
+
+    public static List<NodeSmall> getWithName(String name) {
+        return NodeInterface.getWithName(typeInNode, name);
+    }
+
+    public static NodeSmall getOrCreate(String series, Long typeId) {
+        return NodeInterface.getOrCreate(typeInNode, series, typeId);
+
+    }
+
+    @GET
+    public List<NodeSmall> get() {
+        return NodeInterface.get(typeInNode);
+    }
+
+    @PUT
+    @Path("{id}")
+    @Consumes(MediaType.APPLICATION_JSON)
+    public Response put(@PathParam("id") Long id, String jsonRequest) {
+        return NodeInterface.put(typeInNode, id, jsonRequest);
+    }
+
+    @DELETE
+    @Path("{id}")
+    public Response delete(@PathParam("id") Long id) {
+        return NodeInterface.delete(typeInNode, id);
+    }
+
+    @POST
+    @Path("{id}/add_cover")
+    @Consumes({MediaType.MULTIPART_FORM_DATA})
+    public Response uploadCover(@PathParam("id") Long id,
+            @FormDataParam("file_name") String file_name,
+            @FormDataParam("file") InputStream fileInputStream,
+            @FormDataParam("file") FormDataContentDisposition fileMetaData
+    ) {
+        return NodeInterface.uploadCover(typeInNode, id, file_name, fileInputStream, fileMetaData);
+    }
+    @GET
+    @Path("{id}/rm_cover/{cover_id}")
+    public Response removeCover(@PathParam("id") Long nodeId, @PathParam("cover_id") Long coverId) {
+        return NodeInterface.removeCover(typeInNode, nodeId, coverId);
+    }
+}
diff --git a/back/src/org/kar/karideo/api/TypeResource.java b/back/src/org/kar/karideo/api/TypeResource.java
new file mode 100644
index 0000000..d37f743
--- /dev/null
+++ b/back/src/org/kar/karideo/api/TypeResource.java
@@ -0,0 +1,63 @@
+package org.kar.karideo.api;
+
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.kar.karideo.model.NodeSmall;
+
+import javax.annotation.security.PermitAll;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.InputStream;
+import java.util.List;
+
+@Path("/type")
+@PermitAll
+@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
+public class TypeResource {
+    private static final String typeInNode = "TYPE";
+
+    @GET
+    @Path("{id}")
+    public static NodeSmall getWithId(@PathParam("id") Long id) {
+        return NodeInterface.getWithId(typeInNode, id);
+    }
+
+    public static List<NodeSmall> getWithName(String name) {
+        return NodeInterface.getWithName(typeInNode, name);
+    }
+
+    @GET
+    public List<NodeSmall> get() {
+        return NodeInterface.get(typeInNode);
+    }
+
+    @PUT
+    @Path("{id}")
+    @Consumes(MediaType.APPLICATION_JSON)
+    public Response put(@PathParam("id") Long id, String jsonRequest) {
+        return NodeInterface.put(typeInNode, id, jsonRequest);
+    }
+
+    @DELETE
+    @Path("{id}")
+    public Response delete(@PathParam("id") Long id) {
+        return NodeInterface.delete(typeInNode, id);
+    }
+
+    @POST
+    @Path("{id}/add_cover")
+    @Consumes({MediaType.MULTIPART_FORM_DATA})
+    public Response uploadCover(@PathParam("id") Long id,
+            @FormDataParam("file_name") String file_name,
+            @FormDataParam("file") InputStream fileInputStream,
+            @FormDataParam("file") FormDataContentDisposition fileMetaData
+    ) {
+        return NodeInterface.uploadCover(typeInNode, id, file_name, fileInputStream, fileMetaData);
+    }
+    @GET
+    @Path("{id}/rm_cover/{cover_id}")
+    public Response removeCover(@PathParam("id") Long nodeId, @PathParam("cover_id") Long coverId) {
+        return NodeInterface.removeCover(typeInNode, nodeId, coverId);
+    }
+}
diff --git a/back/src/org/kar/karideo/api/UniverseResource.java b/back/src/org/kar/karideo/api/UniverseResource.java
new file mode 100644
index 0000000..4e9466d
--- /dev/null
+++ b/back/src/org/kar/karideo/api/UniverseResource.java
@@ -0,0 +1,67 @@
+package org.kar.karideo.api;
+
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.kar.karideo.model.NodeSmall;
+
+import javax.annotation.security.PermitAll;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.InputStream;
+import java.util.List;
+
+@Path("/universe")
+@PermitAll
+@Produces({MediaType.APPLICATION_JSON})
+public class UniverseResource {
+    private static final String typeInNode = "UNIVERSE";
+
+    @GET
+    @Path("{id}")
+    public static NodeSmall getWithId(@PathParam("id") Long id) {
+        return NodeInterface.getWithId(typeInNode, id);
+    }
+
+    public static List<NodeSmall> getWithName(String name) {
+        return NodeInterface.getWithName(typeInNode, name);
+    }
+
+    public static NodeSmall getOrCreate(String universe) {
+        return NodeInterface.getOrCreate(typeInNode, universe, null);
+    }
+
+    @GET
+    public List<NodeSmall> get() {
+        return NodeInterface.get(typeInNode);
+    }
+
+    @PUT
+    @Path("{id}")
+    @Consumes(MediaType.APPLICATION_JSON)
+    public Response put(@PathParam("id") Long id, String jsonRequest) {
+        return NodeInterface.put(typeInNode, id, jsonRequest);
+    }
+
+    @DELETE
+    @Path("{id}")
+    public Response delete(@PathParam("id") Long id) {
+        return NodeInterface.delete(typeInNode, id);
+    }
+
+    @POST
+    @Path("{id}/add_cover")
+    @Consumes({MediaType.MULTIPART_FORM_DATA})
+    public Response uploadCover(@PathParam("id") Long id,
+            @FormDataParam("file_name") String file_name,
+            @FormDataParam("file") InputStream fileInputStream,
+            @FormDataParam("file") FormDataContentDisposition fileMetaData
+    ) {
+        return NodeInterface.uploadCover(typeInNode, id, file_name, fileInputStream, fileMetaData);
+    }
+    @GET
+    @Path("{id}/rm_cover/{cover_id}")
+    public Response removeCover(@PathParam("id") Long nodeId, @PathParam("cover_id") Long coverId) {
+        return NodeInterface.removeCover(typeInNode, nodeId, coverId);
+    }
+}
diff --git a/back/src/org/kar/karideo/api/UserResource.java b/back/src/org/kar/karideo/api/UserResource.java
new file mode 100755
index 0000000..f5e1331
--- /dev/null
+++ b/back/src/org/kar/karideo/api/UserResource.java
@@ -0,0 +1,259 @@
+package org.kar.karideo.api;
+
+import org.kar.karideo.GenericContext;
+import org.kar.karideo.Secured;
+import org.kar.karideo.UserDB;
+import org.kar.karideo.WebLauncher;
+import org.kar.karideo.db.DBEntry;
+import org.kar.karideo.model.User;
+import org.kar.karideo.model.UserExtern;
+import org.kar.karideo.model.UserPerso;
+
+import javax.annotation.security.PermitAll;
+import javax.annotation.security.RolesAllowed;
+import javax.ws.rs.*;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.core.SecurityContext;
+import java.nio.charset.StandardCharsets;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.sql.PreparedStatement;
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Statement;
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Random;
+
+
+@Path("/users")
+@PermitAll
+@Produces({MediaType.APPLICATION_XML, MediaType.APPLICATION_JSON})
+public class UserResource {
+
+ public UserResource() {
+ }
+
+    /** Build a random alphanumeric [a-zA-Z0-9] string of the requested length. */
+    private static String randomString(int count) {
+        Random rand = new Random(System.nanoTime());
+        StringBuilder s = new StringBuilder(count); // O(n) instead of O(n^2) String concatenation
+        for (int i = 0; i < count; i++) {
+            char c = (char) rand.nextInt();
+            while ((c < 'a' || c > 'z') && (c < 'A' || c > 'Z') && (c < '0' || c > '9'))
+                c = (char) rand.nextInt(); // rejection sampling until the char is alphanumeric
+            s.append(c);
+        }
+        return s.toString();
+    }
+
+ // I do not understand why angular request option before, but this is needed..
+ /*
+ @OPTIONS
+ public Response getOption(){
+ return Response.ok()
+ .header("Allow", "POST")
+ .header("Allow", "GET")
+ .header("Allow", "OPTIONS")
+ .build();
+ }
+ */
+ // curl http://localhost:9993/api/users
+ @GET
+ public List getUsers() {
+ System.out.println("getUsers");
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ List out = new ArrayList<>();
+ String query = "SELECT * FROM user";
+ try {
+ Statement st = entry.connection.createStatement();
+ ResultSet rs = st.executeQuery(query);
+ while (rs.next()) {
+ out.add(new UserExtern(new User(rs)));
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ return out;
+ }
+
+ // I do not understand why angular request option before, but this is needed..
+ /*
+ @OPTIONS
+ @Path("{id}")
+ public Response getTokenOption(@PathParam("id") long userId){
+ return Response.ok()
+ .header("Allow", "POST")
+ .header("Allow", "GET")
+ .header("Allow", "OPTIONS")
+ .build();
+ }
+ */
+ // curl http://localhost:9993/api/users/3
+ @Secured
+ @GET
+ @Path("{id}")
+ @RolesAllowed("USER")
+ public UserExtern getUser(@Context SecurityContext sc, @PathParam("id") long userId) {
+ System.out.println("getUser " + userId);
+ GenericContext gc = (GenericContext) sc.getUserPrincipal();
+ System.out.println("===================================================");
+ System.out.println("== USER ? " + gc.user);
+ System.out.println("===================================================");
+ return new UserExtern(UserDB.getUsers(userId));
+ }
+
+ /*
+ @OPTIONS
+ @Path("me")
+ public Response getOptionMe(){
+ return Response.ok()
+ .header("Allow", "GET")
+ .header("Allow", "OPTIONS")
+ .build();
+ }
+ */
+ // curl http://localhost:9993/api/users/3
+ @Secured
+ @GET
+ @Path("me")
+ @RolesAllowed("USER")
+ public UserPerso getMe(@Context SecurityContext sc) {
+ System.out.println("getMe()");
+ GenericContext gc = (GenericContext) sc.getUserPrincipal();
+ System.out.println("===================================================");
+ System.out.println("== USER ? " + gc.user);
+ System.out.println("===================================================");
+ return new UserPerso(gc.user);
+ }
+
+ // curl -d '{"id":3,"login":"HeeroYui","password":"bouloued","email":"yui.heero@gmail.com","emailValidate":0,"newEmail":null,"authorisationLevel":"ADMIN"}' -H "Content-Type: application/json" -X POST http://localhost:9993/api/users
+ @POST
+ public Response createUser(User user) {
+ System.out.println("getUser " + user);
+ /*
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "SELECT * FROM user WHERE id = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setLong(1, userId);
+ ResultSet rs = ps.executeQuery();
+ if (rs.next()) {
+ User out = new User(rs);
+ entry.disconnect();
+ return out;
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ return null;
+ */
+ String result = "User saved ... : " + user;
+ return Response.status(201).entity(result).build();
+ }
+
+    @GET
+    @Path("/check_login")
+    public Response checkLogin(@QueryParam("login") String login) {
+        System.out.println("checkLogin: " + login);
+        DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+        String query = "SELECT COUNT(*) FROM user WHERE login = ?";
+        int count = -1; // -1 => query failed or returned no row -> HTTP 520 below
+        try (PreparedStatement ps = entry.connection.prepareStatement(query)) { // auto-closes ps + rs
+            ps.setString(1, login);
+            ResultSet rs = ps.executeQuery();
+            if (rs.next()) {
+                count = rs.getInt(1);
+            }
+        } catch (SQLException ex) {
+            ex.printStackTrace();
+        }
+        entry.disconnect(); // statement already closed before the connection goes away
+        if (count < 0) {
+            return Response.status(520).build();
+        }
+        if (count >= 1) {
+            return Response.ok().build();
+        }
+        return Response.status(404).build();
+    }
+
+    @GET
+    @Path("/check_email")
+    public Response checkEmail(@QueryParam("email") String email) {
+        System.out.println("checkEmail: " + email);
+        DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+        String query = "SELECT COUNT(*) FROM user WHERE email = ?";
+        int count = -1; // -1 => query failed or returned no row -> HTTP 520 below
+        try (PreparedStatement ps = entry.connection.prepareStatement(query)) { // auto-closes ps + rs
+            ps.setString(1, email);
+            ResultSet rs = ps.executeQuery();
+            if (rs.next()) {
+                count = rs.getInt(1);
+            }
+        } catch (SQLException ex) {
+            ex.printStackTrace();
+        }
+        entry.disconnect(); // statement already closed before the connection goes away
+        if (count < 0) {
+            return Response.status(520).build();
+        }
+        if (count >= 1) {
+            return Response.ok().build();
+        }
+        return Response.status(404).build();
+    }
+
+ public String getSHA512(String passwordToHash) {
+ try {
+ MessageDigest md = MessageDigest.getInstance("SHA-512");
+ byte[] bytes = md.digest(passwordToHash.getBytes(StandardCharsets.UTF_8));
+ StringBuilder sb = new StringBuilder();
+ for (int i = 0; i < bytes.length; i++) {
+ sb.append(Integer.toString((bytes[i] & 0xff) + 0x100, 16).substring(1));
+ }
+ return sb.toString();
+ } catch (NoSuchAlgorithmException e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/back/src/org/kar/karideo/api/VideoResource.java b/back/src/org/kar/karideo/api/VideoResource.java
new file mode 100644
index 0000000..565c32a
--- /dev/null
+++ b/back/src/org/kar/karideo/api/VideoResource.java
@@ -0,0 +1,555 @@
+package org.kar.karideo.api;
+
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.glassfish.jersey.media.multipart.FormDataContentDisposition;
+import org.glassfish.jersey.media.multipart.FormDataParam;
+import org.kar.karideo.WebLauncher;
+import org.kar.karideo.db.DBEntry;
+import org.kar.karideo.model.Data;
+import org.kar.karideo.model.MediaSmall;
+import org.kar.karideo.model.NodeSmall;
+
+import javax.annotation.security.PermitAll;
+import javax.ws.rs.*;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import java.io.IOException;
+import java.io.InputStream;
+import java.sql.*;
+import java.util.ArrayList;
+import java.util.List;
+
+@Path("/video")
+@PermitAll
+@Produces({MediaType.APPLICATION_JSON})
+public class VideoResource {
+ // UPDATE `node` SET `type` = "SEASON" WHERE `type` = "SAISON"
+ // UPDATE `node` SET `type` = "UNIVERSE" WHERE `type` = "UNIVERS"
+ // UPDATE `node` SET `type` = "SERIES" WHERE `type` = "SERIE"
+
+ @GET
+ public List get() {
+ System.out.println("VIDEO get");
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ List out = new ArrayList<>();
+ String query = "SELECT media.id," +
+ " media.name," +
+ " media.description," +
+ " media.data_id," +
+ " media.type_id," +
+ " media.universe_id," +
+ " media.series_id," +
+ " media.season_id," +
+ " media.episode," +
+ " media.date," +
+ " media.time," +
+ " media.age_limit," +
+ " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" +
+ " FROM cover_link_media tmp" +
+ " WHERE tmp.deleted = false" +
+ " AND media.id = tmp.media_id" +
+ " GROUP BY tmp.media_id) AS covers" +
+ " FROM media" +
+ " WHERE media.deleted = false " +
+ " GROUP BY media.id" +
+ " ORDER BY media.name";
+ try {
+ Statement st = entry.connection.createStatement();
+ ResultSet rs = st.executeQuery(query);
+ while (rs.next()) {
+ out.add(new MediaSmall(rs));
+ }
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ entry = null;
+ System.out.println("retrieve " + out.size() + " VIDEO");
+ return out;
+ }
+
+    /** Fetch one non-deleted media row (with aggregated cover ids); null when absent. */
+    @GET
+    @Path("{id}")
+    public MediaSmall get(@PathParam("id") Long id) {
+        System.out.println("VIDEO get " + id);
+        DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+        String query = "SELECT media.id," +
+                " media.name," +
+                " media.description," +
+                " media.data_id," +
+                " media.type_id," +
+                " media.universe_id," +
+                " media.series_id," +
+                " media.season_id," +
+                " media.episode," +
+                " media.date," +
+                " media.time," +
+                " media.age_limit," +
+                " (SELECT GROUP_CONCAT(tmp.data_id SEPARATOR '-')" +
+                " FROM cover_link_media tmp" +
+                " WHERE tmp.deleted = false" +
+                " AND media.id = tmp.media_id" +
+                " GROUP BY tmp.media_id) AS covers" +
+                " FROM media" +
+                " WHERE media.deleted = false " +
+                " AND media.id = ? " +
+                " GROUP BY media.id" +
+                " ORDER BY media.name";
+        MediaSmall out = null;
+        // try-with-resources: the PreparedStatement (and its ResultSet) is closed on every path
+        try (PreparedStatement ps = entry.connection.prepareStatement(query)) {
+            int iii = 1;
+            ps.setLong(iii++, id);
+            ResultSet rs = ps.executeQuery();
+            if (rs.next()) {
+                out = new MediaSmall(rs);
+            }
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+        }
+        entry.disconnect();
+        entry = null;
+        // null when no non-deleted media matches the id
+        return out;
+    }
+
+ @PUT
+ @Path("{id}")
+ @Consumes(MediaType.APPLICATION_JSON)
+ public Response put(@PathParam("id") Long id, String jsonRequest) {
+ ObjectMapper mapper = new ObjectMapper();
+ try {
+ JsonNode root = mapper.readTree(jsonRequest);
+ String query = "UPDATE `media` SET `modify_date`=now(3)";
+ if (!root.path("name").isMissingNode()) {
+ query += ", `name` = ? ";
+ }
+ if (!root.path("description").isMissingNode()) {
+ query += ", `description` = ? ";
+ }
+ if (!root.path("episode").isMissingNode()) {
+ query += ", `episode` = ? ";
+ }
+ if (!root.path("time").isMissingNode()) {
+ query += ", `time` = ? ";
+ }
+ if (!root.path("type_id").isMissingNode()) {
+ query += ", `type_id` = ? ";
+ }
+ if (!root.path("universe_id").isMissingNode()) {
+ query += ", `universe_id` = ? ";
+ }
+ if (!root.path("series_id").isMissingNode()) {
+ query += ", `series_id` = ? ";
+ }
+ if (!root.path("season_id").isMissingNode()) {
+ query += ", `season_id` = ? ";
+ }
+ query += " WHERE `id` = ?";
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ int iii = 1;
+ if (!root.path("name").isMissingNode()) {
+ if (root.path("name").isNull()) {
+ ps.setString(iii++, "???");
+ } else {
+ ps.setString(iii++, root.path("name").asText());
+ }
+ }
+ if (!root.path("description").isMissingNode()) {
+ if (root.path("description").isNull()) {
+ ps.setNull(iii++, Types.VARCHAR);
+ } else {
+ ps.setString(iii++, root.path("description").asText());
+ }
+ }
+ if (!root.path("episode").isMissingNode()) {
+ if (root.path("episode").isNull()) {
+ ps.setNull(iii++, Types.INTEGER);
+ } else {
+ ps.setInt(iii++, root.path("episode").asInt());
+ }
+ }
+ if (!root.path("time").isMissingNode()) {
+ if (root.path("time").isNull()) {
+ ps.setNull(iii++, Types.INTEGER);
+ } else {
+ ps.setInt(iii++, root.path("time").asInt());
+ }
+ }
+ if (!root.path("type_id").isMissingNode()) {
+ if (root.path("type_id").isNull()) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, root.path("type_id").asLong());
+ }
+ }
+ if (!root.path("universe_id").isMissingNode()) {
+ if (root.path("universe_id").isNull()) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, root.path("universe_id").asLong());
+ }
+ }
+ if (!root.path("series_id").isMissingNode()) {
+ if (root.path("series_id").isNull()) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, root.path("series_id").asLong());
+ }
+ }
+ if (!root.path("season_id").isMissingNode()) {
+ if (root.path("season_id").isNull()) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, root.path("season_id").asLong());
+ }
+ }
+ ps.setLong(iii++, id);
+ System.out.println(" request : " + ps.toString());
+ ps.executeUpdate();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ entry.disconnect();
+ entry = null;
+ return Response.notModified("SQL error").build();
+ }
+ entry.disconnect();
+ entry = null;
+ } catch (IOException e) {
+ e.printStackTrace();
+ return Response.notModified("input json error error").build();
+ }
+ return Response.ok(get(id)).build();
+ }
+ /*
+ public static void update_time(String table, Long id, Timestamp dateCreate, Timestamp dateModify) {
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ String query = "UPDATE " + table + " SET create_date = ?, modify_date = ? WHERE id = ?";
+ try {
+ PreparedStatement ps = entry.connection.prepareStatement(query);
+ ps.setTimestamp(1, dateCreate);
+ ps.setTimestamp(2, dateModify);
+ ps.setLong(3, id);
+ ps.execute();
+ } catch (SQLException throwables) {
+ throwables.printStackTrace();
+ }
+ entry.disconnect();
+ }
+ */
+    /** Normalize a multipart form value: map absent/empty markers to null. */
+    private String multipartCorrection(String data) {
+        // clients may post "" or the literal string "null" for missing fields
+        if (data == null || data.isEmpty()) {
+            return null;
+        }
+        if (data.contentEquals("null")) {
+            return null;
+        }
+        // anything else is a real value and passes through unchanged
+        return data;
+    }
+
+
+
+ @POST
+ @Path("/upload/")
+ @Consumes({MediaType.MULTIPART_FORM_DATA})
+ public Response uploadFile(@FormDataParam("file_name") String file_name,
+ @FormDataParam("universe") String universe,
+ @FormDataParam("series") String series,
+ @FormDataParam("season") String season,
+ @FormDataParam("episode") String episode,
+ @FormDataParam("title") String title,
+ @FormDataParam("type_id") String type_id,
+ @FormDataParam("file") InputStream fileInputStream,
+ @FormDataParam("file") FormDataContentDisposition fileMetaData
+ ) {
+ try {
+ // correct input string stream :
+ file_name = multipartCorrection(file_name);
+ universe = multipartCorrection(universe);
+ series = multipartCorrection(series);
+ season = multipartCorrection(season);
+ episode = multipartCorrection(episode);
+ title = multipartCorrection(title);
+ type_id = multipartCorrection(type_id);
+
+ //public NodeSmall uploadFile(final FormDataMultiPart form) {
+ System.out.println("Upload media file: " + fileMetaData);
+ System.out.println(" - file_name: " + file_name);
+ System.out.println(" - universe: " + universe);
+ System.out.println(" - series: " + series);
+ System.out.println(" - season: " + season);
+ System.out.println(" - episode: " + episode);
+ System.out.println(" - title: " + title);
+ System.out.println(" - type_id: " + type_id);
+ System.out.println(" - fileInputStream: " + fileInputStream);
+ System.out.println(" - fileMetaData: " + fileMetaData);
+ System.out.flush();
+
+
+ long tmpUID = DataResource.getTmpDataId();
+ String sha512 = DataResource.saveTemporaryFile(fileInputStream, tmpUID);
+ Data data = DataResource.getWithSha512(sha512);
+ if (data == null) {
+ System.out.println("Need to add the data in the BDD ... ");
+ System.out.flush();
+ try {
+ data = DataResource.createNewData(tmpUID, file_name, sha512);
+ } catch (IOException ex) {
+ DataResource.removeTemporaryFile(tmpUID);
+ ex.printStackTrace();
+ return Response.notModified("can not create input media").build();
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ DataResource.removeTemporaryFile(tmpUID);
+ return Response.notModified("Error in SQL insertion ...").build();
+ }
+ } else if (data.deleted == true) {
+ System.out.println("Data already exist but deleted");
+ System.out.flush();
+ DataResource.undelete(data.id);
+ data.deleted = false;
+ } else {
+ System.out.println("Data already exist ... all good");
+ System.out.flush();
+ }
+ // Fist step: retive all the Id of each parents:...
+ System.out.println("Find typeNode");
+ // check if id of type exist:
+ NodeSmall typeNode = TypeResource.getWithId(Long.parseLong(type_id));
+ if (typeNode == null) {
+ DataResource.removeTemporaryFile(tmpUID);
+ return Response.notModified("TypeId does not exist ...").build();
+ }
+ System.out.println(" ==> " + typeNode);
+ System.out.println("Find universeNode");
+ // get id of universe:
+ NodeSmall universeNode = UniverseResource.getOrCreate(universe);
+
+ System.out.println(" ==> " + universeNode);
+ System.out.println("Find seriesNode");
+ // get uid of group:
+ NodeSmall seriesNode = SeriesResource.getOrCreate(series, typeNode.id);
+
+ System.out.println(" ==> " + seriesNode);
+ System.out.println("Find seasonNode");
+ // get uid of season:
+ Integer seasonId = null;
+ NodeSmall seasonNode = null;
+ try {
+ seasonId = Integer.parseInt(season);
+ seasonNode = SeasonResource.getOrCreate(Integer.parseInt(season), seriesNode.id);
+ } catch (java.lang.NumberFormatException ex) {
+ // nothing to do ....
+ }
+
+ System.out.println(" ==> " + seasonNode);
+ System.out.println("add media");
+
+
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ long uniqueSQLID = -1;
+ // real add in the BDD:
+ try {
+ // prepare the request:
+ String query = "INSERT INTO media (create_date, modify_date, name, data_id, type_id, universe_id, series_id, season_id, episode)" +
+ " VALUES (now(3), now(3), ?, ?, ?, ?, ?, ?, ?)";
+ PreparedStatement ps = entry.connection.prepareStatement(query,
+ Statement.RETURN_GENERATED_KEYS);
+ int iii = 1;
+ ps.setString(iii++, title);
+ ps.setLong(iii++, data.id);
+ ps.setLong(iii++, typeNode.id);
+ if (universeNode == null) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, universeNode.id);
+ }
+ if (seriesNode == null) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, seriesNode.id);
+ }
+ if (seasonNode == null) {
+ ps.setNull(iii++, Types.BIGINT);
+ } else {
+ ps.setLong(iii++, seasonNode.id);
+ }
+ if (episode == null || episode.contentEquals("")) {
+ ps.setNull(iii++, Types.INTEGER);
+ } else {
+ ps.setInt(iii++, Integer.parseInt(episode));
+ }
+ // execute the request
+ int affectedRows = ps.executeUpdate();
+ if (affectedRows == 0) {
+ throw new SQLException("Creating data failed, no rows affected.");
+ }
+ // retreive uid inserted
+ try (ResultSet generatedKeys = ps.getGeneratedKeys()) {
+ if (generatedKeys.next()) {
+ uniqueSQLID = generatedKeys.getLong(1);
+ } else {
+ throw new SQLException("Creating user failed, no ID obtained (1).");
+ }
+ } catch (Exception ex) {
+ System.out.println("Can not get the UID key inserted ... ");
+ ex.printStackTrace();
+ throw new SQLException("Creating user failed, no ID obtained (2).");
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ // if we do not une the file .. remove it ... otherwise this is meamory leak...
+ DataResource.removeTemporaryFile(tmpUID);
+ System.out.println("uploaded .... compleate: " + uniqueSQLID);
+ MediaSmall creation = get(uniqueSQLID);
+ return Response.ok(creation).build();
+ } catch (Exception ex) {
+ System.out.println("Cat ann unexpected error ... ");
+ ex.printStackTrace();
+ }
+ return Response.serverError().build();
+ }
+ @POST
+ @Path("{id}/add_cover")
+ @Consumes({MediaType.MULTIPART_FORM_DATA})
+ public Response uploadCover(@PathParam("id") Long id,
+ @FormDataParam("file_name") String file_name,
+ @FormDataParam("file") InputStream fileInputStream,
+ @FormDataParam("file") FormDataContentDisposition fileMetaData
+ ) {
+ try {
+ // correct input string stream :
+ file_name = multipartCorrection(file_name);
+
+ //public NodeSmall uploadFile(final FormDataMultiPart form) {
+ System.out.println("Upload media file: " + fileMetaData);
+ System.out.println(" - id: " + id);
+ System.out.println(" - file_name: " + file_name);
+ System.out.println(" - fileInputStream: " + fileInputStream);
+ System.out.println(" - fileMetaData: " + fileMetaData);
+ System.out.flush();
+ MediaSmall media = get(id);
+ if (media == null) {
+ return Response.notModified("Media Id does not exist or removed...").build();
+ }
+
+ long tmpUID = DataResource.getTmpDataId();
+ String sha512 = DataResource.saveTemporaryFile(fileInputStream, tmpUID);
+ Data data = DataResource.getWithSha512(sha512);
+ if (data == null) {
+ System.out.println("Need to add the data in the BDD ... ");
+ System.out.flush();
+ try {
+ data = DataResource.createNewData(tmpUID, file_name, sha512);
+ } catch (IOException ex) {
+ DataResource.removeTemporaryFile(tmpUID);
+ ex.printStackTrace();
+ return Response.notModified("can not create input media").build();
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ DataResource.removeTemporaryFile(tmpUID);
+ return Response.notModified("Error in SQL insertion ...").build();
+ }
+ } else if (data.deleted == true) {
+ System.out.println("Data already exist but deleted");
+ System.out.flush();
+ DataResource.undelete(data.id);
+ data.deleted = false;
+ } else {
+ System.out.println("Data already exist ... all good");
+ System.out.flush();
+ }
+ // Fist step: retrieve all the Id of each parents:...
+ System.out.println("Find typeNode");
+
+ DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+ long uniqueSQLID = -1;
+ // real add in the BDD:
+ try {
+ // prepare the request:
+ String query = "INSERT INTO cover_link_media (create_date, modify_date, media_id, data_id)" +
+ " VALUES (now(3), now(3), ?, ?)";
+ PreparedStatement ps = entry.connection.prepareStatement(query,
+ Statement.RETURN_GENERATED_KEYS);
+ int iii = 1;
+ ps.setLong(iii++, media.id);
+ ps.setLong(iii++, data.id);
+ // execute the request
+ int affectedRows = ps.executeUpdate();
+ if (affectedRows == 0) {
+ throw new SQLException("Creating data failed, no rows affected.");
+ }
+ // retreive uid inserted
+ try (ResultSet generatedKeys = ps.getGeneratedKeys()) {
+ if (generatedKeys.next()) {
+ uniqueSQLID = generatedKeys.getLong(1);
+ } else {
+ throw new SQLException("Creating user failed, no ID obtained (1).");
+ }
+ } catch (Exception ex) {
+ System.out.println("Can not get the UID key inserted ... ");
+ ex.printStackTrace();
+ throw new SQLException("Creating user failed, no ID obtained (2).");
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ // if we do not une the file .. remove it ... otherwise this is meamory leak...
+ DataResource.removeTemporaryFile(tmpUID);
+ System.out.println("uploaded .... compleate: " + uniqueSQLID);
+ MediaSmall creation = get(id);
+ return Response.ok(creation).build();
+ } catch (Exception ex) {
+ System.out.println("Cat ann unexpected error ... ");
+ ex.printStackTrace();
+ }
+ return Response.serverError().build();
+ }
+    @GET
+    @Path("{id}/rm_cover/{cover_id}")
+    public Response removeCover(@PathParam("id") Long mediaId, @PathParam("cover_id") Long coverId) {
+        DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+        String query = "UPDATE `cover_link_media` SET `modify_date`=now(3), `deleted`=true WHERE `media_id` = ? AND `data_id` = ?";
+        // try-with-resources: the PreparedStatement is closed even when the update fails
+        try (PreparedStatement ps = entry.connection.prepareStatement(query)) {
+            int iii = 1;
+            ps.setLong(iii++, mediaId);
+            ps.setLong(iii++, coverId);
+            ps.executeUpdate();
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+            entry.disconnect();
+            return Response.serverError().build();
+        }
+        entry.disconnect();
+        return Response.ok(get(mediaId)).build();
+    }
+
+    @DELETE
+    @Path("{id}")
+    public Response delete(@PathParam("id") Long id) {
+        DBEntry entry = new DBEntry(WebLauncher.dbConfig);
+        String query = "UPDATE `media` SET `modify_date`=now(3), `deleted`=true WHERE `id` = ? and `deleted` = false ";
+        // try-with-resources: the PreparedStatement is closed even when the update fails
+        try (PreparedStatement ps = entry.connection.prepareStatement(query)) {
+            int iii = 1;
+            ps.setLong(iii++, id);
+            ps.executeUpdate();
+        } catch (SQLException throwables) {
+            throwables.printStackTrace();
+            entry.disconnect();
+            return Response.serverError().build();
+        }
+        entry.disconnect();
+        return Response.ok().build();
+    }
+}
diff --git a/back/src/org/kar/karideo/db/DBConfig.java b/back/src/org/kar/karideo/db/DBConfig.java
new file mode 100644
index 0000000..aadc4b2
--- /dev/null
+++ b/back/src/org/kar/karideo/db/DBConfig.java
@@ -0,0 +1,60 @@
+package org.kar.karideo.db;
+
+public class DBConfig { // Immutable MySQL connection settings; host/port default to localhost:3306
+ private final String hostname;
+ private final int port;
+ private final String login;
+ private final String password;
+ private final String dbName;
+
+ public DBConfig(String hostname, Integer port, String login, String password, String dbName) {
+ if (hostname == null) {
+ this.hostname = "localhost"; // default host
+ } else {
+ this.hostname = hostname;
+ }
+ if (port == null) {
+ this.port = 3306; // default MySQL port
+ } else {
+ this.port = port;
+ }
+ this.login = login;
+ this.password = password;
+ this.dbName = dbName;
+ }
+
+ @Override
+ public String toString() {
+ return "DBConfig{" +
+ "hostname='" + hostname + '\'' +
+ ", port=" + port +
+ ", login='" + login + '\'' +
+ ", password='******'" + // FIX: never print the real password — toString output ends up in logs
+ ", dbName='" + dbName + '\'' +
+ '}';
+ }
+
+ public String getHostname() {
+ return hostname;
+ }
+
+ public int getPort() {
+ return port;
+ }
+
+ public String getLogin() {
+ return login;
+ }
+
+ public String getPassword() {
+ return password;
+ }
+
+ public String getDbName() {
+ return dbName;
+ }
+
+ public String getUrl() {
+ return "jdbc:mysql://" + this.hostname + ":" + this.port + "/" + this.dbName + "?useSSL=false&serverTimezone=UTC"; // NOTE(review): useSSL=false is only acceptable on a trusted network
+ }
+}
diff --git a/back/src/org/kar/karideo/db/DBEntry.java b/back/src/org/kar/karideo/db/DBEntry.java
new file mode 100644
index 0000000..dcc4378
--- /dev/null
+++ b/back/src/org/kar/karideo/db/DBEntry.java
@@ -0,0 +1,44 @@
+package org.kar.karideo.db;
+
+import org.kar.karideo.model.User;
+
+import java.sql.*;
+
+public class DBEntry { // Thin wrapper owning one JDBC Connection built from a DBConfig
+ public DBConfig config;
+ public Connection connection;
+
+ public DBEntry(DBConfig config) { // connects immediately on construction
+ this.config = config;
+ connect();
+ }
+
+ public void connect() { // NOTE(review): on failure `connection` stays null — later use will NPE
+ try {
+ connection = DriverManager.getConnection(config.getUrl(), config.getLogin(), config.getPassword());
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+
+ }
+
+ public void disconnect() { // closes the connection; commit is intentionally commented out (autocommit presumed — TODO confirm)
+ try {
+ //connection.commit();
+ connection.close();
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ public void test() throws SQLException { // smoke test: prints the first row of the `user` table
+ String query = "SELECT * FROM user";
+ Statement st = connection.createStatement();
+ ResultSet rs = st.executeQuery(query);
+ System.out.println("List of user:");
+ if (rs.next()) {
+ User user = new User(rs);
+ System.out.println(" - " + user);
+ }
+ }
+}
diff --git a/back/src/org/kar/karideo/internal/Log.java b/back/src/org/kar/karideo/internal/Log.java
new file mode 100644
index 0000000..411653b
--- /dev/null
+++ b/back/src/org/kar/karideo/internal/Log.java
@@ -0,0 +1,60 @@
+package org.kar.karideo.internal;
+
+//import io.scenarium.logger.LogLevel;
+//import io.scenarium.logger.Logger;
+
+public class Log { // Stubbed logger facade — all delegation to io.scenarium.logger is commented out until that dependency is restored
+// private static final String LIB_NAME = "logger";
+// private static final String LIB_NAME_DRAW = Logger.getDrawableName(LIB_NAME);
+// private static final boolean PRINT_CRITICAL = Logger.getNeedPrint(LIB_NAME, LogLevel.CRITICAL);
+// private static final boolean PRINT_ERROR = Logger.getNeedPrint(LIB_NAME, LogLevel.ERROR);
+// private static final boolean PRINT_WARNING = Logger.getNeedPrint(LIB_NAME, LogLevel.WARNING);
+// private static final boolean PRINT_INFO = Logger.getNeedPrint(LIB_NAME, LogLevel.INFO);
+// private static final boolean PRINT_DEBUG = Logger.getNeedPrint(LIB_NAME, LogLevel.DEBUG);
+// private static final boolean PRINT_VERBOSE = Logger.getNeedPrint(LIB_NAME, LogLevel.VERBOSE);
+// private static final boolean PRINT_TODO = Logger.getNeedPrint(LIB_NAME, LogLevel.TODO);
+// private static final boolean PRINT_PRINT = Logger.getNeedPrint(LIB_NAME, LogLevel.PRINT);
+//
+// private Log() {}
+//
+// public static void print(String data) {
+// if (PRINT_PRINT)
+// Logger.print(LIB_NAME_DRAW, data);
+// }
+//
+// public static void todo(String data) {
+// if (PRINT_TODO)
+// Logger.todo(LIB_NAME_DRAW, data);
+// }
+//
+// public static void critical(String data) {
+// if (PRINT_CRITICAL)
+// Logger.critical(LIB_NAME_DRAW, data);
+// }
+//
+// public static void error(String data) {
+// if (PRINT_ERROR)
+// Logger.error(LIB_NAME_DRAW, data);
+// }
+//
+// public static void warning(String data) {
+// if (PRINT_WARNING)
+// Logger.warning(LIB_NAME_DRAW, data);
+// }
+//
+// public static void info(String data) {
+// if (PRINT_INFO)
+// Logger.info(LIB_NAME_DRAW, data);
+// }
+//
+// public static void debug(String data) {
+// if (PRINT_DEBUG)
+// Logger.debug(LIB_NAME_DRAW, data);
+// }
+//
+// public static void verbose(String data) {
+// if (PRINT_VERBOSE)
+// Logger.verbose(LIB_NAME_DRAW, data);
+// }
+
+}
diff --git a/back/src/org/kar/karideo/model/AgeLimit.java b/back/src/org/kar/karideo/model/AgeLimit.java
new file mode 100644
index 0000000..55f96b1
--- /dev/null
+++ b/back/src/org/kar/karideo/model/AgeLimit.java
@@ -0,0 +1,5 @@
+package org.kar.karideo.model;
+
+public enum AgeLimit { // Placeholder — presumably mirrors media.age_limit enum ("-","5","9","12","14","16","18"); TODO confirm before adding constants
+
+}
diff --git a/back/src/org/kar/karideo/model/CoverLinkMedia.java b/back/src/org/kar/karideo/model/CoverLinkMedia.java
new file mode 100644
index 0000000..4dea5a6
--- /dev/null
+++ b/back/src/org/kar/karideo/model/CoverLinkMedia.java
@@ -0,0 +1,12 @@
+package org.kar.karideo.model;
+/*
+CREATE TABLE `cover_link_media` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `deleted` BOOLEAN NOT NULL DEFAULT false,
+ `media_id` bigint,
+ `data_id` bigint
+) AUTO_INCREMENT=10;
+*/
+
+public class CoverLinkMedia { // Placeholder model for the `cover_link_media` join table (schema in comment above); no fields mapped yet
+}
diff --git a/back/src/org/kar/karideo/model/CoverLinkNode.java b/back/src/org/kar/karideo/model/CoverLinkNode.java
new file mode 100644
index 0000000..0b61411
--- /dev/null
+++ b/back/src/org/kar/karideo/model/CoverLinkNode.java
@@ -0,0 +1,13 @@
+package org.kar.karideo.model;
+/*
+CREATE TABLE `cover_link_node` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `deleted` BOOLEAN NOT NULL DEFAULT false,
+ `node_id` bigint,
+ `data_id` bigint
+) AUTO_INCREMENT=10;
+*/
+
+public class CoverLinkNode { // Placeholder model for the `cover_link_node` join table (schema in comment above); no fields mapped yet
+
+}
diff --git a/back/src/org/kar/karideo/model/Data.java b/back/src/org/kar/karideo/model/Data.java
new file mode 100644
index 0000000..a88be7e
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Data.java
@@ -0,0 +1,32 @@
+package org.kar.karideo.model;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class Data { // Row model for the `data` table (metadata of an uploaded file)
+ public Long id;
+ public boolean deleted;
+ public String sha512; // content hash of the stored file
+ public String mimeType;
+ public Long size; // null when the SQL column is NULL (see wasNull below)
+
+ public Data() {
+
+ }
+
+ public Data(ResultSet rs) { // expects columns in order: id, deleted, sha512, mime_type, size
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.deleted = rs.getBoolean(iii++);
+ this.sha512 = rs.getString(iii++);
+ this.mimeType = rs.getString(iii++);
+ this.size = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.size = null;
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/back/src/org/kar/karideo/model/DataGetToken.java b/back/src/org/kar/karideo/model/DataGetToken.java
new file mode 100644
index 0000000..edb0fa8
--- /dev/null
+++ b/back/src/org/kar/karideo/model/DataGetToken.java
@@ -0,0 +1,8 @@
+package org.kar.karideo.model;
+
+public class DataGetToken { // Request payload for token retrieval (deserialized from the request body)
+ public String login;
+ public String method; // presumably the hashing method applied to `password` — TODO confirm against caller
+ public String time;
+ public String password;
+}
diff --git a/back/src/org/kar/karideo/model/DataSmall.java b/back/src/org/kar/karideo/model/DataSmall.java
new file mode 100644
index 0000000..c6c602e
--- /dev/null
+++ b/back/src/org/kar/karideo/model/DataSmall.java
@@ -0,0 +1,39 @@
+package org.kar.karideo.model;
+/*
+CREATE TABLE `data` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `deleted` BOOLEAN NOT NULL DEFAULT false,
+ `create_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been created',
+ `modify_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been update',
+ `sha512` varchar(129) COLLATE 'utf8_general_ci' NOT NULL,
+ `mime_type` varchar(128) COLLATE 'utf8_general_ci' NOT NULL,
+ `size` bigint,
+ `original_name` TEXT
+) AUTO_INCREMENT=64;
+*/
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class DataSmall { // Reduced row model for the `data` table (no deleted/date columns)
+ public Long id;
+ public String sha512;
+ public String mimeType;
+ public Long size; // NOTE(review): unlike Data, wasNull() is not checked here — SQL NULL reads as 0
+
+ public DataSmall() {
+
+ }
+
+ public DataSmall(ResultSet rs) { // expects columns in order: id, sha512, mime_type, size
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.sha512 = rs.getString(iii++);
+ this.mimeType = rs.getString(iii++);
+ this.size = rs.getLong(iii++);
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/back/src/org/kar/karideo/model/Group.java b/back/src/org/kar/karideo/model/Group.java
new file mode 100644
index 0000000..6f55065
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Group.java
@@ -0,0 +1,5 @@
+package org.kar.karideo.model;
+
+
+public class Group { // Placeholder model — no fields mapped yet
+}
diff --git a/back/src/org/kar/karideo/model/Media.java b/back/src/org/kar/karideo/model/Media.java
new file mode 100644
index 0000000..4297e35
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Media.java
@@ -0,0 +1,24 @@
+package org.kar.karideo.model;
+/*
+CREATE TABLE `media` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `deleted` BOOLEAN NOT NULL DEFAULT false,
+ `create_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been created',
+ `modify_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been update',
+ `name` TEXT COLLATE 'utf8_general_ci' NOT NULL,
+ `description` TEXT COLLATE 'utf8_general_ci',
+ `parent_id` bigint,
+ `data_id` bigint,
+ `type_id` bigint,
+ `universe_id` bigint,
+ `series_id` bigint,
+ `season_id` bigint,
+ `episode` int,
+ `date` int,
+ `age_limit` enum("-", "5", "9", "12", "14", "16", "18") NOT NULL DEFAULT '-'
+) AUTO_INCREMENT=85;
+*/
+
+
+public class Media { // Placeholder row model for the `media` table (schema in comment above); MediaSmall carries the actually-used fields
+}
diff --git a/back/src/org/kar/karideo/model/MediaSmall.java b/back/src/org/kar/karideo/model/MediaSmall.java
new file mode 100644
index 0000000..a7c37b5
--- /dev/null
+++ b/back/src/org/kar/karideo/model/MediaSmall.java
@@ -0,0 +1,86 @@
+package org.kar.karideo.model;
+/*
+CREATE TABLE `node` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `deleted` BOOLEAN NOT NULL DEFAULT false,
+ `create_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been created',
+ `modify_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been update',
+ `type` enum("TYPE", "UNIVERSE", "SERIES", "SEASON") NOT NULL DEFAULT 'TYPE',
+ `name` TEXT COLLATE 'utf8_general_ci' NOT NULL,
+ `description` TEXT COLLATE 'utf8_general_ci',
+ `parent_id` bigint
+) AUTO_INCREMENT=10;
+*/
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class MediaSmall { // Lightweight row model for a media item plus its aggregated cover ids
+ public Long id;
+ public String name;
+ public String description;
+ public Long data_id;
+ public Long type_id;
+ public Long universe_id;
+ public Long series_id;
+ public Long season_id;
+ public Integer episode;
+ public Integer date;
+ public Integer time;
+ public String age_limit;
+ public List<Long> covers = new ArrayList<>(); // FIX: was raw List — the elements are Longs parsed below
+
+ public MediaSmall(ResultSet rs) { // columns: id, name, description, data_id, type_id, universe_id, series_id, season_id, episode, date, time, age_limit, covers ("-"-joined ids)
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.name = rs.getString(iii++);
+ this.description = rs.getString(iii++);
+ this.data_id = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.data_id = null;
+ }
+ this.type_id = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.type_id = null;
+ }
+ this.universe_id = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.universe_id = null;
+ }
+ this.series_id = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.series_id = null;
+ }
+ this.season_id = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.season_id = null;
+ }
+ this.episode = rs.getInt(iii++);
+ if (rs.wasNull()) {
+ this.episode = null;
+ }
+ this.date = rs.getInt(iii++);
+ if (rs.wasNull()) {
+ this.date = null;
+ }
+ this.time = rs.getInt(iii++);
+ if (rs.wasNull()) {
+ this.time = null;
+ }
+ this.age_limit = rs.getString(iii++);
+ String coversString = rs.getString(iii++);
+ if (!rs.wasNull()) {
+ String[] elements = coversString.split("-");
+ for (String elem : elements) {
+ Long tmp = Long.parseLong(elem);
+ covers.add(tmp);
+ }
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+}
diff --git a/back/src/org/kar/karideo/model/NodeSmall.java b/back/src/org/kar/karideo/model/NodeSmall.java
new file mode 100644
index 0000000..8ffe491
--- /dev/null
+++ b/back/src/org/kar/karideo/model/NodeSmall.java
@@ -0,0 +1,60 @@
+package org.kar.karideo.model;
+/*
+CREATE TABLE `node` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `deleted` BOOLEAN NOT NULL DEFAULT false,
+ `create_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been created',
+ `modify_date` datetime NOT NULL DEFAULT now() COMMENT 'Time the element has been update',
+ `type` enum("TYPE", "UNIVERS", "SERIE", "SAISON", "MEDIA") NOT NULL DEFAULT 'TYPE',
+ `name` TEXT COLLATE 'utf8_general_ci' NOT NULL,
+ `description` TEXT COLLATE 'utf8_general_ci',
+ `parent_id` bigint
+) AUTO_INCREMENT=10;
+*/
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.util.ArrayList;
+import java.util.List;
+
+public class NodeSmall { // Lightweight row model for a `node` (type/universe/series/season) plus its cover ids
+ public Long id;
+ public String name;
+ public String description;
+ public Long parent_id;
+ public List<Long> covers = new ArrayList<>(); // FIX: was raw List — the elements are Longs parsed below
+
+ public NodeSmall(ResultSet rs) { // columns: id, name, description, parent_id, covers ("-"-joined ids)
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.name = rs.getString(iii++);
+ this.description = rs.getString(iii++);
+ this.parent_id = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.parent_id = null;
+ }
+ String coversString = rs.getString(iii++);
+ if (!rs.wasNull()) {
+ String[] elements = coversString.split("-");
+ for (String elem : elements) {
+ Long tmp = Long.parseLong(elem);
+ covers.add(tmp);
+ }
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "NodeSmall{" +
+ "id=" + id +
+ ", name='" + name + '\'' +
+ ", description='" + description + '\'' +
+ ", parent_id=" + parent_id +
+ ", covers=" + covers +
+ '}';
+ }
+}
diff --git a/back/src/org/kar/karideo/model/Saison.java b/back/src/org/kar/karideo/model/Saison.java
new file mode 100644
index 0000000..7e499c0
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Saison.java
@@ -0,0 +1,4 @@
+package org.kar.karideo.model;
+
+public class Saison { // Placeholder model (French for "season") — no fields mapped yet
+}
diff --git a/back/src/org/kar/karideo/model/State.java b/back/src/org/kar/karideo/model/State.java
new file mode 100644
index 0000000..1847f66
--- /dev/null
+++ b/back/src/org/kar/karideo/model/State.java
@@ -0,0 +1,10 @@
+package org.kar.karideo.model;
+
+public enum State { // Account state; NOTE(review): UserSmall's schema comment lists ("REMOVED","USER","ADMIN") — "ADMIN" has no constant here and "BLOCKED" is not in that schema, so State.valueOf("ADMIN") would throw. Confirm the intended value set.
+ // User has removed his account
+ REMOVED,
+ // User account has been blocked
+ BLOCKED,
+ // generic user
+ USER
+}
diff --git a/back/src/org/kar/karideo/model/Token.java b/back/src/org/kar/karideo/model/Token.java
new file mode 100644
index 0000000..bfbdb1b
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Token.java
@@ -0,0 +1,57 @@
+package org.kar.karideo.model;
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+/*
+CREATE TABLE `token` (
+ `id` bigint NOT NULL COMMENT 'Unique ID of the TOKEN' AUTO_INCREMENT PRIMARY KEY,
+ `userId` bigint NOT NULL COMMENT 'Unique ID of the user',
+ `token` varchar(128) COLLATE 'latin1_bin' NOT NULL COMMENT 'Token (can be not unique)',
+ `createTime` datetime NOT NULL COMMENT 'Time the token has been created',
+ `endValidityTime` datetime NOT NULL COMMENT 'Time of the token end validity'
+) AUTO_INCREMENT=10;
+
+ */
+public class Token { // Row model for the `token` table (session token per user; schema in comment above)
+ public Long id;
+ public Long userId;
+ public String token; // not unique per the schema comment
+ public String createTime;
+ public String endValidityTime;
+
+ public Token() {
+ }
+
+ public Token(long id, long userId, String token, String createTime, String endValidityTime) {
+ this.id = id;
+ this.userId = userId;
+ this.token = token;
+ this.createTime = createTime;
+ this.endValidityTime = endValidityTime;
+ }
+
+ public Token(ResultSet rs) { // expects columns in order: id, userId, token, createTime, endValidityTime
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.userId = rs.getLong(iii++);
+ this.token = rs.getString(iii++);
+ this.createTime = rs.getString(iii++);
+ this.endValidityTime = rs.getString(iii++);
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "Token{" +
+ "id=" + id +
+ ", userId=" + userId +
+ ", token='" + token + '\'' +
+ ", createTime=" + createTime +
+ ", endValidityTime=" + endValidityTime +
+ '}';
+ }
+}
diff --git a/back/src/org/kar/karideo/model/Type.java b/back/src/org/kar/karideo/model/Type.java
new file mode 100644
index 0000000..99be174
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Type.java
@@ -0,0 +1,6 @@
+package org.kar.karideo.model;
+
+public class Type { // Placeholder model for a media "type" node — no fields mapped yet
+
+
+}
diff --git a/back/src/org/kar/karideo/model/Univers.java b/back/src/org/kar/karideo/model/Univers.java
new file mode 100644
index 0000000..88041ae
--- /dev/null
+++ b/back/src/org/kar/karideo/model/Univers.java
@@ -0,0 +1,4 @@
+package org.kar.karideo.model;
+
+public class Univers { // Placeholder model (French for "universe") — no fields mapped yet
+}
diff --git a/back/src/org/kar/karideo/model/User.java b/back/src/org/kar/karideo/model/User.java
new file mode 100644
index 0000000..512c207
--- /dev/null
+++ b/back/src/org/kar/karideo/model/User.java
@@ -0,0 +1,77 @@
+package org.kar.karideo.model;
+
+/*
+CREATE TABLE `user` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `login` varchar(128) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'login of the user',
+ `email` varchar(512) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'email of the user',
+ `lastConnection` datetime NOT NULL COMMENT 'last connection time',
+ `admin` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `blocked` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `removed` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `avatar` bigint DEFAULT NULL,
+) AUTO_INCREMENT=10;
+
+ */
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+import java.sql.Timestamp;
+
+public class User { // Full row model for the `user` table (internal use — contains email)
+ public Long id;
+ public String login;
+ public String email;
+ public Timestamp lastConnection;
+ public boolean admin;
+ public boolean blocked;
+ public boolean removed;
+ public Long avatar; // nullable data id of the avatar image
+
+ public User() {
+ }
+
+ public User(Long id, String login, Timestamp lastConnection, String email, boolean admin, boolean blocked, boolean removed, Long avatar) {
+ this.id = id;
+ this.login = login;
+ this.lastConnection = lastConnection;
+ this.email = email;
+ this.admin = admin;
+ this.blocked = blocked;
+ this.removed = removed;
+ this.avatar = avatar;
+ }
+
+ public User(ResultSet rs) {
+ int iii = 1; // column order: id, lastConnection, login, email, admin, blocked, removed, avatar — must match the caller's SELECT; TODO confirm
+ try {
+ this.id = rs.getLong(iii++);
+ this.lastConnection = rs.getTimestamp(iii++);
+ this.login = rs.getString(iii++);
+ this.email = rs.getString(iii++);
+ this.admin = Boolean.parseBoolean(rs.getString(iii++)); // FIX: Boolean.getBoolean() looks up a JVM *system property* named by the string — it never parsed the column, so "TRUE" always yielded false
+ this.blocked = Boolean.parseBoolean(rs.getString(iii++));
+ this.removed = Boolean.parseBoolean(rs.getString(iii++));
+ this.avatar = rs.getLong(iii++);
+ if (rs.wasNull()) {
+ this.avatar = null;
+ }
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+
+ @Override
+ public String toString() {
+ return "User{" +
+ "id=" + id +
+ ", login='" + login + '\'' +
+ ", email='" + email + '\'' +
+ ", lastConnection='" + lastConnection + '\'' +
+ ", admin=" + admin +
+ ", blocked=" + blocked +
+ ", removed=" + removed +
+ ", avatar=" + avatar +
+ '}';
+ }
+}
diff --git a/back/src/org/kar/karideo/model/UserExtern.java b/back/src/org/kar/karideo/model/UserExtern.java
new file mode 100644
index 0000000..ac86132
--- /dev/null
+++ b/back/src/org/kar/karideo/model/UserExtern.java
@@ -0,0 +1,39 @@
+package org.kar.karideo.model;
+
+/*
+CREATE TABLE `user` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `login` varchar(128) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'login of the user',
+ `email` varchar(512) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'email of the user',
+ `lastConnection` datetime NOT NULL COMMENT 'last connection time',
+ `admin` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `blocked` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `removed` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE'
+) AUTO_INCREMENT=10;
+
+ */
+
+
+public class UserExtern { // Public-facing user view: exposes only id/login/admin/avatar (no email)
+ public Long id;
+ public String login;
+ public boolean admin;
+ public Long avatar;
+
+ public UserExtern(User other) { // projection from the full User model
+ this.id = other.id;
+ this.login = other.login;
+ this.admin = other.admin;
+ this.avatar = other.avatar;
+ }
+
+ @Override
+ public String toString() {
+ return "User{" +
+ "id=" + id +
+ ", login='" + login + '\'' +
+ ", admin=" + admin +
+ ", avatar=" + avatar +
+ '}';
+ }
+}
diff --git a/back/src/org/kar/karideo/model/UserPerso.java b/back/src/org/kar/karideo/model/UserPerso.java
new file mode 100644
index 0000000..faf39f5
--- /dev/null
+++ b/back/src/org/kar/karideo/model/UserPerso.java
@@ -0,0 +1,48 @@
+package org.kar.karideo.model;
+
+/*
+CREATE TABLE `user` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `login` varchar(128) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'login of the user',
+ `email` varchar(512) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'email of the user',
+ `lastConnection` datetime NOT NULL COMMENT 'last connection time',
+ `admin` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `blocked` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE',
+ `removed` enum("TRUE", "FALSE") NOT NULL DEFAULT 'FALSE'
+) AUTO_INCREMENT=10;
+
+ */
+
+
+public class UserPerso { // Self-view of a user: includes email and account flags, but never the password
+ public Long id;
+ public String login;
+ public String email;
+ public boolean admin;
+ public boolean blocked;
+ public boolean removed;
+ public Long avatar;
+
+ public UserPerso(User other) { // projection from the full User model
+ this.id = other.id;
+ this.login = other.login;
+ this.email = other.email;
+ this.admin = other.admin;
+ this.blocked = other.blocked;
+ this.removed = other.removed;
+ this.avatar = other.avatar;
+ }
+
+ @Override
+ public String toString() {
+ return "User{" +
+ "id=" + id +
+ ", login='" + login + '\'' +
+ ", email='" + email + '\'' +
+ ", admin=" + admin +
+ ", blocked=" + blocked +
+ ", removed=" + removed +
+ ", avatar=" + avatar +
+ '}';
+ }
+}
diff --git a/back/src/org/kar/karideo/model/UserSmall.java b/back/src/org/kar/karideo/model/UserSmall.java
new file mode 100644
index 0000000..b550d77
--- /dev/null
+++ b/back/src/org/kar/karideo/model/UserSmall.java
@@ -0,0 +1,73 @@
+package org.kar.karideo.model;
+
+/*
+CREATE TABLE `user` (
+ `id` bigint NOT NULL COMMENT 'table ID' AUTO_INCREMENT PRIMARY KEY,
+ `login` varchar(128) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'login of the user',
+ `password` varchar(128) COLLATE 'latin1_bin' NOT NULL COMMENT 'password of the user hashed (sha512)',
+ `email` varchar(512) COLLATE 'utf8_general_ci' NOT NULL COMMENT 'email of the user',
+ `emailValidate` bigint COMMENT 'date of the email validation',
+ `newEmail` varchar(512) COLLATE 'utf8_general_ci' COMMENT 'email of the user if he want to change',
+ `authorisationLevel` enum("REMOVED", "USER", "ADMIN") NOT NULL COMMENT 'user level of authorization'
+) AUTO_INCREMENT=10;
+
+ */
+
+
+import java.sql.ResultSet;
+import java.sql.SQLException;
+
+public class UserSmall { // Minimal user identity: id, login, email and authorisation state
+ public long id;
+ public String login;
+ public String email;
+ public State authorisationLevel;
+
+ public UserSmall() {
+ }
+
+ public UserSmall(long id, String login, String email, State authorisationLevel) {
+ this.id = id;
+ this.login = login;
+ this.email = email;
+ this.authorisationLevel = authorisationLevel;
+ }
+
+ public UserSmall(ResultSet rs) { // expects columns: id, login, email, authorisationLevel
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.login = rs.getString(iii++);
+ this.email = rs.getString(iii++);
+ this.authorisationLevel = State.valueOf(rs.getString(iii++)); // NOTE(review): valueOf throws IllegalArgumentException for values not in State (e.g. "ADMIN" from the schema comment) — uncaught here
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+ /*
+ public void serialize(ResultSet rs) {
+ int iii = 1;
+ try {
+ this.id = rs.getLong(iii++);
+ this.login = rs.getString(iii++);
+ this.password = rs.getString(iii++);
+ this.email = rs.getString(iii++);
+ this.emailValidate = rs.getLong(iii++);
+ this.newEmail = rs.getString(iii++);
+ this.authorisationLevel = State.valueOf(rs.getString(iii++));
+ } catch (SQLException ex) {
+ ex.printStackTrace();
+ }
+ }
+ */
+
+ @Override
+ public String toString() {
+ return "UserSmall{" +
+ "id='" + id + '\'' +
+ ", login='" + login + '\'' +
+ ", email='" + email + '\'' +
+ ", authorisationLevel=" + authorisationLevel +
+ '}';
+ }
+}
diff --git a/back/src/tools.py b/back/src/tools.py
deleted file mode 100644
index 8a848bd..0000000
--- a/back/src/tools.py
+++ /dev/null
@@ -1,205 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-
-import os
-import shutil
-import errno
-import fnmatch
-import stat
-# Local import
-from realog import debug
-
-"""
-
-"""
-def get_run_path():
- return os.getcwd()
-
-"""
-
-"""
-def get_current_path(file):
- return os.path.dirname(os.path.realpath(file))
-
-def create_directory_of_file(file):
- debug.info("Create directory of path: '" + file + "'")
- path = os.path.dirname(file)
- debug.info("Create directory: '" + path + "'")
- try:
- os.stat(path)
- except:
- os.makedirs(path)
-
-def get_list_sub_path(path):
- # TODO : os.listdir(path)
- for dirname, dirnames, filenames in os.walk(path):
- return dirnames
- return []
-
-def remove_path_and_sub_path(path):
- if os.path.isdir(path):
- debug.verbose("remove path : '" + path + "'")
- shutil.rmtree(path)
-
-def remove_file(path):
- if os.path.isfile(path):
- os.remove(path)
- elif os.path.islink(path):
- os.remove(path)
-
-def exist(path):
- if os.path.isdir(path):
- return True
- if os.path.isfile(path):
- return True
- if os.path.islink(path):
- return True
- return False
-
-def file_size(path):
- if not os.path.isfile(path):
- return 0
- statinfo = os.stat(path)
- return statinfo.st_size
-
-def file_read_data(path, binary=False):
- debug.verbose("path= " + path)
- if not os.path.isfile(path):
- return ""
- if binary == True:
- file = open(path, "rb")
- else:
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-def version_to_string(version):
- version_ID = ""
- for id in version:
- if len(version_ID) != 0:
- if type(id) == str:
- version_ID += "-"
- else:
- version_ID += "."
- version_ID += str(id)
- return version_ID
-
-##
-## @brief Write data in a specific path.
-## @param[in] path Path of the data might be written.
-## @param[in] data Data To write in the file.
-## @param[in] only_if_new (default: False) Write data only if data is different.
-## @return True Something has been copied
-## @return False Nothing has been copied
-##
-def file_write_data(path, data, only_if_new=False):
- if only_if_new == True:
- if os.path.exists(path) == True:
- old_data = file_read_data(path)
- if old_data == data:
- return False
- #real write of data:
- create_directory_of_file(path)
- file = open(path, "w")
- file.write(data)
- file.close()
- return True
-
-def file_write_data_safe(path, data):
- #real write of data:
- create_directory_of_file(path)
- file = open(path + ".tmp", "w")
- file.write(data)
- file.close()
- shutil.move(path + ".tmp", path)
- return True
-
-
-def file_move(path_src, path_dst):
- create_directory_of_file(path_dst)
- shutil.move(path_src, path_dst)
- return True
-
-def file_copy(path_src, path_dst):
- create_directory_of_file(path_dst)
- shutil.copyfile(path_src, path_dst)
- return True
-
-
-def list_to_str(list):
- if type(list) == type(str()):
- return list + " "
- else:
- result = ""
- # mulyiple imput in the list ...
- for elem in list:
- result += list_to_str(elem)
- return result
-
-import hashlib
-
-def str_limit_4(_data):
- data = str(_data)
- if len(data) >= 4:
- return data
- if len(data) == 3:
- return " " + data
- if len(data) == 2:
- return " " + data
- return " " + data
-
-def int_to_human(_data, _bigger = False):
- tera = int(_data/(1024*1024*1024*1024))%1024
- giga = int(_data/(1024*1024*1024))%1024
- mega = int(_data/(1024*1024))%1024
- kilo = int(_data/(1024))%1024
- byte = int(_data)%1024
-
- tera_str = str_limit_4(tera)
- giga_str = str_limit_4(giga)
- mega_str = str_limit_4(mega)
- kilo_str = str_limit_4(kilo)
- byte_str = str_limit_4(byte)
- out = ""
- if tera != 0:
- out += tera_str + "T"
- if _bigger == True:
- return out
- if giga != 0 or len(out) != 0:
- out += giga_str + "G"
- if _bigger == True:
- return out
- if mega != 0 or len(out) != 0:
- out += mega_str + "M"
- if _bigger == True:
- return out
- if kilo != 0 or len(out) != 0:
- out += kilo_str + "k"
- if _bigger == True:
- return out
- out += byte_str + "B"
- return out
-
-def calculate_sha512(_path):
- sha1 = hashlib.sha512()
- file = open(_path, "rb")
- totalsize = os.path.getsize(_path)
- current = 0
- while True:
- body = file.read(10*1024*1024)
- if len(body) == 0:
- break;
- current += len(body)
- sha1.update(body)
- percent = current/totalsize*100
- debug.debug("\r Checking data: {percent:3.0f}% {size} / {total_size}".format(percent=percent, size=int_to_human(current), total_size=int_to_human(totalsize)))
- file.close()
- return str(sha1.hexdigest())
\ No newline at end of file
diff --git a/back/tools/sendFile.py b/back/tools/sendFile.py
deleted file mode 100755
index 17df447..0000000
--- a/back/tools/sendFile.py
+++ /dev/null
@@ -1,42 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-import os
-import sys
-import requests # pip install requests
-
-class upload_in_chunks(object):
- def __init__(self, filename, chunksize=1 << 13):
- self.filename = filename
- self.chunksize = chunksize
- self.totalsize = os.path.getsize(filename)
- self.readsofar = 0
-
- def __iter__(self):
- with open(self.filename, 'rb') as file:
- while True:
- data = file.read(self.chunksize)
- if not data:
- sys.stderr.write("\n")
- break
- self.readsofar += len(data)
- percent = self.readsofar * 1e2 / self.totalsize
- sys.stderr.write("\rSendfing data: {percent:3.0f}% {size:14.0f} / {total_size}".format(percent=percent, size=self.readsofar, total_size=self.totalsize))
- yield data
-
- def __len__(self):
- return self.totalsize
-
-filename = 'Totally_Spies.mp4'
-
-result = requests.post("http://127.0.0.1:15080/data", data=upload_in_chunks(filename, chunksize=4096))
-
-
-print("result : " + str(result) + " " + result.text)#str(dir(result)))
-
diff --git a/back/tools/sendLocalData.py b/back/tools/sendLocalData.py
deleted file mode 100755
index 10a05f8..0000000
--- a/back/tools/sendLocalData.py
+++ /dev/null
@@ -1,987 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2019, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-import os
-import copy
-import sys
-import datetime
-import hashlib
-import requests # pip install requests
-import realog.debug as debug
-#import magic
-import json
-import shutil
-from pymediainfo import MediaInfo
-
-debug.enable_color();
-
-
-def get_run_path():
- return os.getcwd()
-
-folder = get_run_path()
-src_path = folder
-dst_path = os.path.join(folder, "..", "zzz_video_push_done")
-
-property = {
- "hostname": "192.168.1.156",
- #"hostname": "127.0.0.1",
- "port": 15080,
- "login": None,
- "password": None,
-}
-
-def get_base_url():
- return "http://" + property["hostname"] + ":" + str(property["port"]) + "/"
-
-
-def check_correct_format(_file):
- media_info = MediaInfo.parse(_file)
- print("media-info: ... " + str(len(media_info.tracks)))
- need_trascode_audio = False
- for elem_track in media_info.tracks:
- data_print = "[" + str(elem_track.track_id) + "] " + str(elem_track.track_type)
- #print('track_id = ' + str(elem_track.track_id))
- #print('track_type = ' + str(elem_track.track_type))
- if elem_track.track_type == "Audio":
- data_print += " (" + str(elem_track.language) + ") enc=" + str(elem_track.format);
- #print('language = ' + str(elem_track.language))
- #print('format = ' + str(elem_track.format))
- if elem_track.format != "Opus":
- need_trascode_audio = True
- elif elem_track.track_type == "Video":
- data_print += " enc=" + str(elem_track.format);
- print(" - " + data_print)
- #print("media-info: ..." + str(dir(elem_track)))
- if need_trascode_audio == False:
- return True
- print(" ==> NEED transcoding, AUDIO in the good format...")
- return False
-
-def create_directory_of_file(file):
- path = os.path.dirname(file)
- try:
- os.stat(path)
- except:
- os.makedirs(path)
-
-def file_move(path_src, path_dst):
- #real write of data:
- print("Move file from: " + path_src)
- print(" to: " + path_dst)
- create_directory_of_file(path_dst)
- shutil.move(path_src, path_dst)
- return True
-
-
-def str_limit_4(_data):
- data = str(_data)
- if len(data) >= 4:
- return data
- if len(data) == 3:
- return " " + data
- if len(data) == 2:
- return " " + data
- return " " + data
-
-def int_to_human(_data, _bigger = False):
- tera = int(_data/(1024*1024*1024*1024))%1024
- giga = int(_data/(1024*1024*1024))%1024
- mega = int(_data/(1024*1024))%1024
- kilo = int(_data/(1024))%1024
- byte = int(_data)%1024
-
- tera_str = str_limit_4(tera)
- giga_str = str_limit_4(giga)
- mega_str = str_limit_4(mega)
- kilo_str = str_limit_4(kilo)
- byte_str = str_limit_4(byte)
- out = ""
- if tera != 0:
- out += tera_str + "T"
- if _bigger == True:
- return out
- if giga != 0 or len(out) != 0:
- out += giga_str + "G"
- if _bigger == True:
- return out
- if mega != 0 or len(out) != 0:
- out += mega_str + "M"
- if _bigger == True:
- return out
- if kilo != 0 or len(out) != 0:
- out += kilo_str + "k"
- if _bigger == True:
- return out
- out += byte_str + "B"
- return out
-
-
-class upload_in_chunks(object):
- def __init__(self, filename, chunksize=1 + 13):
- self.filename = filename
- self.chunksize = chunksize
- self.totalsize = os.path.getsize(filename)
- self.start_time = datetime.datetime.utcnow()
- self.performance_time = datetime.datetime.utcnow()
- self.performance_data = 0
- self.readsofar = 0
- self.performance_result = 0
-
- def __iter__(self):
- with open(self.filename, 'rb') as file:
- while True:
- data = file.read(self.chunksize)
- if not data:
- sys.stderr.write("\n")
- break
- self.readsofar += len(data)
- self.performance_data += len(data)
- percent = self.readsofar * 1e2 / self.totalsize
- since_time = datetime.datetime.utcnow() - self.start_time
- delta_time = datetime.datetime.utcnow() - self.performance_time
- if delta_time > datetime.timedelta(seconds=2):
- delta_seconds = delta_time.total_seconds()
- self.performance_result = self.performance_data / delta_seconds
- self.performance_time = datetime.datetime.utcnow()
- self.performance_data = 0
- #sys.stderr.write("\rSending data: {percent:3.0f}% {size:14.0f} / {total_size} {timeee}".format(percent=percent, size=self.readsofar, total_size=self.totalsize, timeee=str(since_time)))
- sys.stderr.write("\r Sending data: {percent:3.0f}% {size} / {total_size} {timeee} {speed}/s".format(percent=percent, size=int_to_human(self.readsofar), total_size=int_to_human(self.totalsize), timeee=str(since_time), speed=int_to_human(self.performance_result, True)))
- yield data
-
- def __len__(self):
- return self.totalsize
-
-#filename = 'Totally_Spies.mp4'
-#result = requests.post(get_base_url() + "data", data=upload_in_chunks(filename, chunksize=4096))
-#debug.info("result : " + str(result) + " " + result.text)#str(dir(result)))
-
-
-def extract_and_remove(_input_value, _start_mark, _stop_mark):
- values = []
- out = ""
- inside = False
- inside_data = ""
- for it in _input_value:
- if inside == False \
- and it == _start_mark:
- inside = True
- elif inside == True \
- and it == _stop_mark:
- inside = False
- values.append(inside_data)
- inside_data = ""
- elif inside == True:
- inside_data += it
- else:
- out += it
- return (out, values)
-
-def create_directory_of_file(_file):
- path = os.path.dirname(_file)
- try:
- os.stat(path)
- except:
- os.makedirs(path)
-
-##
-## @brief Write data in a specific path.
-## @param[in] path Path of the data might be written.
-## @param[in] data Data To write in the file.
-## @param[in] only_if_new (default: False) Write data only if data is different.
-## @return True Something has been copied
-## @return False Nothing has been copied
-##
-def file_write_data(_path, _data, _only_if_new=False):
- if _only_if_new == True:
- if os.path.exists(_path) == True:
- old_data = file_read_data(_path)
- if old_data == _data:
- return False
- #real write of data:
- create_directory_of_file(_path)
- file = open(_path, "w")
- file.write(_data)
- file.close()
- return True
-
-def get_modify_time(_path):
- return os.stat(_path).st_mtime
-
-def file_read_data(_path, _binary=False):
- debug.verbose("path= " + _path)
- if not os.path.isfile(_path):
- return ""
- if _binary == True:
- file = open(_path, "rb")
- else:
- file = open(_path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-def calculate_sha512(_path):
- sha1 = hashlib.sha512()
- file = open(_path, "rb")
- totalsize = os.path.getsize(_path)
- current = 0
- while True:
- body = file.read(10*1024*1024)
- if len(body) == 0:
- break;
- current += len(body)
- sha1.update(body)
- percent = current/totalsize*100
- sys.stderr.write("\r Checking data: {percent:3.0f}% {size} / {total_size}".format(percent=percent, size=int_to_human(current), total_size=int_to_human(totalsize)))
- file.close()
- sys.stderr.write("\n")
- return str(sha1.hexdigest())
-
-
-result_list_types = requests.get(get_base_url() + "type")
-debug.info(" List of types *********** : " + str(result_list_types))
-#debug.info(" " + str(result_list_types.json()))
-result_list_types = result_list_types.json()
-
-def get_type_id(_name):
- for elem in result_list_types:
- if elem["name"] == _name:
- return elem["id"]
- return None
-
-def print_list_of_type():
- print("List of type:")
- for elem in result_list_types:
- print(" - '" + elem["name"] + "'")
-
-def get_list_of_type():
- print("List of type:")
- out = []
- for elem in result_list_types:
- out.append(elem["name"])
- return out
-
-#exit(-1);
-nb_file_try_send = 0
-nb_file_sended = 0
-
-def push_video_file(_path, _basic_key={}):
- file_name, file_extension = os.path.splitext(_path);
- # internal file_extension ...
- if file_extension == "sha512":
- debug.verbose(" file: '" + _path + "' sha512 extention ...")
- return True
-
- debug.verbose(" Add media : '" + _path + "'")
- # "avi", , "mov", , "ts", "cover_1.tiff", "cover_1.bmp", "cover_1.tga"] copy only file that is supported by the html5 video player (chrome mode only)
- if file_extension[1:] not in ["webm", "mkv", "mp4"] \
- and file_name not in ["cover_1.jpg","cover_1.png"]:
- debug.verbose(" Not send file : " + _path + " Not manage file_extension... " + file_extension)
- return False
-
- debug.info("=======================================================================")
- debug.info("Send file: '" + file_name + "' with extention " + file_extension)
- debug.info("=======================================================================")
-
- if file_name in ["cover_1.jpg","cover_1.png", "cover_1.till", "cover_1.bmp", "cover_1.tga"]:
- # find a cover...
- return True
- global nb_file_try_send
- nb_file_try_send += 1
-
- mime_type = "unknown"
- # do it by myself .. it is better ...
- filename___, file_extension = os.path.splitext(_path)
- if file_extension in ["mkv", ".mkv"]:
- mime_type = "video/x-matroska"
- elif file_extension in ["mka", ".mka"]:
- mime_type = "audio/x-matroska"
- elif file_extension in ["mp4", ".mp4"]:
- mime_type = "video/mp4"
- elif file_extension in ["webm", ".webm"]:
- mime_type = "video/webm"
- elif file_extension in ["json", ".json"]:
- mime_type = "application/json"
- elif file_extension in ["jpeg", ".jpeg", ".JPEG", "JPEG", "jpg", ".jpg", ".JPG", "JPG"]:
- mime_type = "image/jpeg"
- elif file_extension in ["png", ".png"]:
- mime_type = "image/png"
- try:
- _path.encode('latin-1')
- path_send = _path;
- except UnicodeEncodeError:
- path_send = "";
- for elem in _path:
- if elem in "azertyuiopqsdfghjklmwxcvbnAZERTYUIOPQSDFGHJKLMWXCVBN1234567890_- []{})(:.,;?/\%$&~#'|@=+°*!²":
- path_send += elem
- #debug.warning(" value " + _path)
- #debug.error(" ==> " + path_send)
- headers_values = {
- 'filename': path_send,
- 'mime-type': mime_type
- }
- debug.info(" Check correct format SHA ...")
- valid = check_correct_format(_path)
- if valid == False:
- debug.warning("wrong format ====> !!!! need trancode");
- return;
- debug.info(" Calculate SHA ...")
- local_sha = calculate_sha512(_path)
- debug.info(" ==> sha is " + local_sha)
- result_check_sha = requests.get(get_base_url() + "data/exist/" + local_sha)
- remote_id_data = None
- if result_check_sha.status_code == 200:
- debug.debug(" Find the data : " + str(result_check_sha) + " " + result_check_sha.text)
- remote_id_data = result_check_sha.json()["id"]
- elif result_check_sha.status_code == 404:
- debug.info(" Did not find the file ... ==> need to send it")
- else:
- debug.warning(" error interface ...")
- if remote_id_data == None:
- result_send_data = requests.post(get_base_url() + "data", headers=headers_values, data=upload_in_chunks(_path, chunksize=4096))
- debug.debug(" result *********** : " + str(result_send_data) + " " + result_send_data.text)
- remote_id_data = result_send_data.json()["id"]
- if remote_id_data == None:
- debug.warning(" pb in file sending ....");
- return
-
- file_name = os.path.basename(file_name)
- debug.info(" Find file_name : '" + file_name + "'");
- debug.verbose("1111111");
- # Remove Date (XXXX) or other titreadsofarle
- file_name, dates = extract_and_remove(file_name, '(', ')');
- have_date = False
- have_Title = False
- debug.verbose("1111111 2222222 ");
- for it in dates:
- #debug.info(" 2222222 ==> 1 " + it);
- if len(it) == 0:
- continue
- #debug.info(" 2222222 ==> 2 ");
- if it[0] == '0' \
- or it[0] == '1' \
- or it[0] == '2' \
- or it[0] == '3' \
- or it[0] == '4' \
- or it[0] == '5' \
- or it[0] == '6' \
- or it[0] == '7' \
- or it[0] == '8' \
- or it[0] == '9':
- #debug.info(" 2222222 ==> 3 ");
- # find a date ...
- if have_date == True:
- debug.debug(" '" + file_name + "'")
- debug.error(" Parse Date error : () : " + it + " ==> multiple date")
- continue
- #debug.info(" 2222222 ==> 4 ");
- try:
- tmppppppppp = int(it)
- if tmppppppppp >= 1900 and tmppppppppp < 3000:
- _basic_key["date"] = tmppppppppp
- have_date = True
- except ValueError:
- debug.warning(" Parse Date error : () : " + it + " ==> not a date ...")
- else:
- #debug.info(" 2222222 ==> 9 ");
- if have_Title == True:
- debug.debug(" '" + file_name + "'")
- debug.error(" Parse Title error : () : " + it + " ==> multiple title")
- continue
- #debug.info(" 2222222 ==> 10 ");
- have_Title = True
- # Other title
- _basic_key["title2"] = it;
- #debug.info(" 2222222 ==> 11 ");
-
- debug.verbose("1111111 2222222 3333333 ");
- # Remove the actors [XXX YYY][EEE TTT]...
- file_name, actors = extract_and_remove(file_name, '[', ']');
- if len(actors) > 0:
- debug.info(" '" + file_name + "'")
- actor_list = []
- for it_actor in actors:
- if actor_list != "":
- actor_list += ";"
- actor_list.append(it_actor)
- _basic_key["actors"] = actor_list
- list_element_base = file_name.split('-')
- debug.debug(" ==> Title file: " + file_name)
- debug.debug(" ==> Title cut : " + str(list_element_base))
-
- debug.verbose("1111111 2222222 3333333 555555");
- list_element = [];
- tmp_start_string = "";
- iii = 0
- if len(list_element_base) == 1:
- list_element = list_element_base
- else:
- while iii < len(list_element_base):
- if list_element_base[iii][0] != 's' \
- and list_element_base[iii][0] != 'e':
- if tmp_start_string != "":
- tmp_start_string += '-'
- tmp_start_string += list_element_base[iii]
- else:
- list_element.append(tmp_start_string)
- tmp_start_string = ""
- while iii start elem: " + str(tmp_start_string))
-
- debug.verbose("1111111 2222222 3333333 555555 666666");
-
- if tmp_start_string != "":
- list_element.append(tmp_start_string)
-
- debug.debug(" ==> list_element : " + str(list_element))
-
- if len(list_element) == 1:
- # nothing to do , it might be a film ...
- _basic_key["title"] = list_element[0]
- else:
- if len(list_element) > 3 \
- and list_element[1][0] == 's' \
- and list_element[2][0] == 'e':
- debug.debug(" Parse format: xxx-sXX-eXX-kjhlkjlkj(1234).*")
- # internal formalisme ...
- saison = -1;
- episode = -1;
- series_name = list_element[0];
- if "series-name" not in _basic_key.keys():
- _basic_key["series-name"] = series_name
- full_episode_name = list_element[3]
- for yyy in range(4, len(list_element)):
- full_episode_name += "-" + list_element[yyy]
-
- _basic_key["title"] = full_episode_name
- if list_element[1][1:] == "XX":
- # saison unknow ... ==> nothing to do ...
- #saison = 123456789;
- pass
- else:
- try:
- saison = int(list_element[1][1:]);
- finally:
- pass
-
- if list_element[2][1:] == "XX":
- # episode unknow ... ==> nothing to do ...
- pass
- else:
- try:
- episode = int(list_element[2][1:]);
- _basic_key["episode"] = int(episode)
- except ValueError:
- pass
-
- debug.debug(" Find a internal mode series: :");
- debug.debug(" origin : '" + file_name + "'");
- saisonPrint = "XX";
- episodePrint = "XX";
- if saison < 0:
- # nothing to do
- pass
- else:
- try:
- saisonPrint = str(saison)
- _basic_key["saison"] = str(saison)
- except ValueError:
- pass
-
- if episode < 0:
- # nothing to do
- pass
- elif episode < 10:
- episodePrint = "0" + str(episode);
- _basic_key["episode"] = episode
- else:
- episodePrint = str(episode);
- _basic_key["episode"] = episode
-
- debug.info(" ==> '" + series_name + "-s" + saisonPrint + "-e" + episodePrint + "-" + full_episode_name + "'");
- elif len(list_element) > 2 \
- and list_element[1][0] == 'e':
- debug.debug(" Parse format: xxx-eXX-kjhlkjlkj(1234).*")
- # internal formalisme ...
- saison = -1;
- episode = -1;
- series_name = list_element[0];
-
- _basic_key["series-name"] = series_name
- full_episode_name = list_element[2]
- for yyy in range(3, len(list_element)):
- full_episode_name += "-" + list_element[yyy]
-
- _basic_key["title"] = full_episode_name
- if list_element[1][1:] == "XX":
- # episode unknow ... ==> nothing to do ...
- pass
- else:
- try:
- episode = int(list_element[1][1:]);
- _basic_key["episode"] = int(episode)
- finally:
- pass
-
- debug.debug(" Find a internal mode series: :");
- debug.debug(" origin : '" + file_name + "'");
- saisonPrint = "XX";
- episodePrint = "XX";
- if episode < 0:
- # nothing to do
- pass
- elif episode < 10:
- episodePrint = "0" + str(episode);
- _basic_key["episode"] = episode
- else:
- episodePrint = str(episode);
- _basic_key["episode"] = episode
-
- debug.info(" ==> '" + series_name + "-s" + saisonPrint + "-e" + episodePrint + "-" + full_episode_name + "'");
-
- debug.verbose("1111111 2222222 3333333 555555 666666 777777 ");
-
- if "title" not in _basic_key.keys():
- debug.warning(" ===> No title parsed ...")
- _basic_key["title"] = "---"
-
- debug.debug(" pared meta data: " + json.dumps(_basic_key, sort_keys=True, indent=4))
- data_model = {
- "type_id": _basic_key["type"],
- "data_id": remote_id_data,
- #"group_id": int,
- "name": _basic_key["title"],
- # number of second
- "time": None,
- }
- for elem in ["date", "description", "episode"]: #["actors", "date", "description", "episode", "title2"]:
- if elem in _basic_key.keys():
- data_model[elem] = _basic_key[elem]
-
- debug.verbose("1111111 2222222 3333333 555555 666666 777777 888888");
- if "series-name" in _basic_key.keys():
- result_group_data = requests.post(get_base_url() + "group/find", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
- debug.debug(" Create group ??? *********** : " + str(result_group_data) + " " + result_group_data.text)
- if result_group_data.status_code == 404:
- result_group_data = requests.post(get_base_url() + "group", data=json.dumps({"name":_basic_key["series-name"]}, sort_keys=True, indent=4))
- debug.debug(" yes we create new group *********** : " + str(result_group_data) + " " + result_group_data.text)
- group_id = result_group_data.json()["id"]
- data_model["serie_id"] = group_id
- if "saison" in _basic_key.keys():
- result_saison_data = requests.post(get_base_url() + "saison/find", data=json.dumps({"name":str(_basic_key["saison"]), "parent_id":group_id}, sort_keys=True, indent=4))
- debug.debug(" Create saison ??? *********** : " + str(result_saison_data) + " " + result_saison_data.text)
- if result_saison_data.status_code == 404:
- result_saison_data = requests.post(get_base_url() + "saison", data=json.dumps({"name":str(_basic_key["saison"]), "parent_id":group_id}, sort_keys=True, indent=4))
- debug.debug(" yes we create new saison *********** : " + str(result_saison_data) + " " + result_saison_data.text)
- saison_id = result_saison_data.json()["id"]
- data_model["saison_id"] = saison_id
-
- debug.verbose("1111111 2222222 3333333 555555 666666 777777 888888 999999 ");
- debug.debug(" Send media information : " + json.dumps(data_model, sort_keys=True))
- result_send_data = requests.post(get_base_url() + "video", data=json.dumps(data_model, sort_keys=True, indent=4))
- debug.verbose(" result: " + str(result_send_data) + " " + result_send_data.text)
- if result_send_data.status_code == 200:
- debug.info(" ====================================");
- debug.info(" == Send OK ==");
- debug.info(" ====================================");
- global nb_file_sended
- nb_file_sended += 1
- else:
- debug.warning(" ====================================");
- debug.warning(" == ERROR sending Media ==");
- debug.warning(" ====================================");
- return False
- debug.verbose("1111111 2222222 3333333 555555 666666 777777 888888 999999 101010");
- file_move(_path, os.path.join(dst_path, _path[len(src_path)+1:]))
- debug.verbose("1111111 2222222 3333333 555555 666666 777777 888888 999999 101010 111111");
- return True
-
-
-def install_video_path( _path, _basic_key = {}):
- debug.info("Parse : '" + _path + "'");
- list_sub_path = [fff for fff in os.listdir(_path) if os.path.isdir(os.path.join(_path, fff))]
- list_sub_path.sort()
- for it_path in list_sub_path:
- try:
- basic_key_tmp = copy.deepcopy(_basic_key)
- debug.info("Add Sub path: '" + it_path + "'");
- if len(basic_key_tmp) == 0:
- debug.info("find A '" + it_path + "' " + str(len(basic_key_tmp)));
- basic_key_tmp["type"] = get_type_id(it_path);
- if basic_key_tmp["type"] == None:
- debug.warning("Not supported type: '" + str(it_path) + "' availlable: " + str(get_list_of_type()))
- continue
- else:
- debug.info("find B '" + it_path + "' " + str(len(basic_key_tmp)))
- it_path_tmp = it_path.lower()
- if it_path_tmp.startswith("saison_"):
- if it_path_tmp.startswith("saison_01") or it_path_tmp == "saison_1":
- basic_key_tmp["saison"] = 1
- elif it_path_tmp.startswith("saison_02") or it_path_tmp == "saison_2":
- basic_key_tmp["saison"] = 2
- elif it_path_tmp.startswith("saison_03") or it_path_tmp == "saison_3":
- basic_key_tmp["saison"] = 3
- elif it_path_tmp.startswith("saison_04") or it_path_tmp == "saison_4":
- basic_key_tmp["saison"] = 4
- elif it_path_tmp.startswith("saison_05") or it_path_tmp == "saison_5":
- basic_key_tmp["saison"] = 5
- elif it_path_tmp.startswith("saison_06") or it_path_tmp == "saison_6":
- basic_key_tmp["saison"] = 6
- elif it_path_tmp.startswith("saison_07") or it_path_tmp == "saison_7":
- basic_key_tmp["saison"] = 7
- elif it_path_tmp.startswith("saison_08") or it_path_tmp == "saison_8":
- basic_key_tmp["saison"] = 8
- elif it_path_tmp.startswith("saison_09") or it_path_tmp == "saison_9":
- basic_key_tmp["saison"] = 9
- elif it_path_tmp.startswith("saison_10"):
- basic_key_tmp["saison"] = 10
- elif it_path_tmp.startswith("saison_11"):
- basic_key_tmp["saison"] = 11
- elif it_path_tmp.startswith("saison_12"):
- basic_key_tmp["saison"] = 12
- elif it_path_tmp.startswith("saison_13"):
- basic_key_tmp["saison"] = 13
- elif it_path_tmp.startswith("saison_14"):
- basic_key_tmp["saison"] = 14
- elif it_path_tmp.startswith("saison_15"):
- basic_key_tmp["saison"] = 15
- elif it_path_tmp.startswith("saison_16"):
- basic_key_tmp["saison"] = 16
- elif it_path_tmp.startswith("saison_17"):
- basic_key_tmp["saison"] = 17
- elif it_path_tmp.startswith("saison_18"):
- basic_key_tmp["saison"] = 18
- elif it_path_tmp.startswith("saison_19"):
- basic_key_tmp["saison"] = 19
- elif it_path_tmp.startswith("saison_20"):
- basic_key_tmp["saison"] = 20
- elif it_path_tmp.startswith("saison_21"):
- basic_key_tmp["saison"] = 21
- elif it_path_tmp.startswith("saison_22"):
- basic_key_tmp["saison"] = 22
- elif it_path_tmp.startswith("saison_23"):
- basic_key_tmp["saison"] = 23
- elif it_path_tmp.startswith("saison_24"):
- basic_key_tmp["saison"] = 24
- elif it_path_tmp.startswith("saison_25"):
- basic_key_tmp["saison"] = 25
- elif it_path_tmp.startswith("saison_26"):
- basic_key_tmp["saison"] = 26
- elif it_path_tmp.startswith("saison_27"):
- basic_key_tmp["saison"] = 27
- elif it_path_tmp.startswith("saison_28"):
- basic_key_tmp["saison"] = 28
- elif it_path_tmp.startswith("saison_29"):
- basic_key_tmp["saison"] = 29
- else:
- basic_key_tmp["saison"] = 99
- else:
- basic_key_tmp["series-name"] = it_path
- debug.info("add a path " + os.path.join(_path, it_path) + " with keys " + str(basic_key_tmp))
- install_video_path(os.path.join(_path, it_path), basic_key_tmp);
- except KeyboardInterrupt:
- print('Interrupted')
- try:
- sys.exit(0)
- except SystemExit:
- os._exit(0)
- except UnicodeEncodeError:
- debug.warning("Can not send file.1. " + os.path.join(_path, it_path))
- raise
- continue
- except:
- debug.warning("Can not send file.2. " + os.path.join(_path, it_path))
- #raise
- debug.warning( "get exception:" + str(sys.exc_info()[0]))
- #import traceback
- #traceback.print_stack()
- #continue
- raise
-
- # Add files :
- list_sub_file = [fff for fff in os.listdir(_path) if os.path.isfile(os.path.join(_path, fff))]
- for it_file in list_sub_file:
- basic_key_tmp = copy.deepcopy(_basic_key)
- try:
- push_video_file(os.path.join(_path, it_file), basic_key_tmp);
- except KeyboardInterrupt:
- print('Interrupted')
- try:
- sys.exit(0)
- except SystemExit:
- os._exit(0)
- except UnicodeEncodeError:
- debug.warning("Can not send file.3. " + os.path.join(_path, it_file))
- raise
- """
- except:
- debug.warning("Can not send file.4. " + os.path.join(_path, it_file))
- #debug.warning( "get exception:" + str(sys.exc_info()[0]))
- #debug.warning("------------------------------")
- #traceback.print_exc(file=sys.stdout)
- #continue
- #raise
- """
-
-
-
-
-import death.Arguments as arguments
-import death.ArgElement as arg_element
-
-
-my_args = arguments.Arguments()
-my_args.add_section("option", "Can be set one time in all case")
-my_args.add("h", "help", desc="Display this help")
-my_args.add("", "version", desc="Display the application version")
-my_args.add("v", "verbose", list=[
- ["0","None"],
- ["1","error"],
- ["2","warning"],
- ["3","info"],
- ["4","debug"],
- ["5","verbose"],
- ["6","extreme_verbose"],
- ], desc="display debug level (verbose) default =2")
-my_args.add("a", "action", list=[
- ["tree","List all the files in a tree view ..."],
- ["list","List all the files"],
- ["push","push a single file"],
- ["push_path","push a full folder"],
- ["types","List all the types availlable"],
- ], desc="possible action")
-my_args.add("c", "color", desc="Display message in color")
-my_args.add("f", "folder", haveParam=False, desc="Display the folder instead of the git repository name")
-local_argument = my_args.parse()
-
-##
-## @brief Display the help of this package.
-##
-def usage():
- color = debug.get_color_set()
- # generic argument displayed :
- my_args.display()
- exit(0)
-
-##
-## @brief Display the version of this package.
-##
-def version():
- color = debug.get_color_set()
- import pkg_resources
- debug.info("version: 0.0.0")
- foldername = os.path.dirname(__file__)
- debug.info("source folder is: " + foldername)
- exit(0)
-
-requestAction = "list"
-
-# preparse the argument to get the verbose element for debug mode
-def parse_arg(argument):
- debug.warning("parse arg : " + argument.get_option_name() + " " + argument.get_arg())
- if argument.get_option_name() == "help":
- usage()
- return True
- elif argument.get_option_name() == "version":
- version()
- return True
- elif argument.get_option_name() == "verbose":
- debug.set_level(int(argument.get_arg()))
- return True
- elif argument.get_option_name() == "color":
- if check_boolean(argument.get_arg()) == True:
- debug.enable_color()
- else:
- debug.disable_color()
- return True
- elif argument.get_option_name() == "folder":
- folder = argument.get_arg()
- return True
- elif argument.get_option_name() == "action":
- global requestAction
- requestAction = argument.get_arg()
- return True
- return False
-
-
-# parse default unique argument:
-for argument in local_argument:
- parse_arg(argument)
-
-debug.info("==================================");
-debug.info("== ZEUS test client start ==");
-debug.info("==================================");
-
-
-def show_video(elem_video_id, indent):
- indent_data = ""
- while indent > 0:
- indent_data += "\t"
- indent -= 1
- result_video = requests.get(get_base_url() + "video/" + str(elem_video_id) + "")
- if result_video.status_code == 200:
- video = result_video.json()
- debug.info(indent_data + "- " + str(video["generated_name"]))
- else:
- debug.warning(indent_data + "get video id: " + str(elem_video_id) + " !!!!!! " + str(result_video.status_code) + "")
-
-# ****************************************************************************************
-# ** Clear All the data base ...
-# ****************************************************************************************
-if requestAction == "clear":
- debug.info("============================================");
- debug.info("== Clear data base: ");
- debug.info("============================================");
- # TODO : Do it :
- debug.error("NEED to add check in cmd line to execute it ...");
- """
- uint32_t count = remoteServiceVideo.count().wait().get();
- debug.debug("have " + count + " medias");
- for (uint32_t iii=0; iii " + tmpMax);
- etk::Vector list = remoteServiceVideo.getIds(iii,tmpMax).wait().get();
- zeus::FutureGroup groupWait;
- for (auto& it : list:
- debug.info("remove ELEMENT : " + it);
- groupWait.add(remoteServiceVideo.remove(it));
- groupWait.waitFor(echrono::seconds(2000));
- """
- debug.info("============================================");
- debug.info("== DONE ==");
- debug.info("============================================");
-elif requestAction == "list":
- debug.info("============================================");
- debug.info("== list files: ");
- debug.info("============================================");
- list_types = requests.get(get_base_url() + "type")
- if list_types.status_code != 200:
- debug.warning(" !! ca, ot get type list ... " + str(list_types.status_code) + "")
- for elem in list_types.json():
- debug.info(" get type id: " + str(elem["id"]))
- debug.info(" name: " + str(elem["name"]))
- # get the count of video in this type
- result_count = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/count")
- if result_count.status_code == 200:
- debug.info(" count: " + str(result_count.json()["count"]))
- else:
- debug.warning(" count: !!!!!! " + str(result_count.status_code) + "")
- # get all the video list
- result_video = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/video")
- if result_video.status_code == 200:
- if len(result_video.json()) != 0:
- debug.info(" List video: " + str(result_video.json()))
- else:
- debug.warning(" List video: !!!!!! " + str(result_video.status_code) + "")
- # get list of groups for this type
- result_groups = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/group")
- if result_groups.status_code == 200:
- if len(result_groups.json()) != 0:
- debug.info(" List group: " + str(result_groups.json()))
- else:
- debug.warning(" List group: !!!!!! " + str(result_groups.status_code) + "")
- # get list of video without groups
- result_video_solo = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/video_no_group")
- if result_video_solo.status_code == 200:
- if len(result_video_solo.json()) != 0:
- debug.info(" List video solo: " + str(result_video_solo.json()))
- else:
- debug.warning(" List video solo: !!!!!! " + str(result_video_solo.status_code) + "")
-elif requestAction == "tree":
- debug.info("============================================");
- debug.info("== tree files: ");
- debug.info("============================================");
- for elem in result_list_types:
- debug.info("-------------------------------------------------")
- debug.info(" " + str(elem["name"]))
- debug.info("-------------------------------------------------")
- # Does not work anymore...
- """
- # First get all the groups:
- result_groups = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/group")
- if result_groups.status_code == 200:
- for elem_group_id in result_groups.json():
- result_group = requests.get(get_base_url() + "group/" + str(elem_group_id) + "")
- if result_group.status_code == 200:
- group = result_group.json()
- debug.info("\to- " + str(group["name"]))
- # step 1: all the saison:
- result_saison_in_group = requests.get(get_base_url() + "group/" + str(elem_group_id) + "/saison")
- if result_saison_in_group.status_code == 200:
- for elem_saison_id in result_saison_in_group.json():
- result_saison = requests.get(get_base_url() + "saison/" + str(elem_saison_id) + "")
- if result_saison.status_code == 200:
- debug.info("\t\t* saison " + str(result_saison.json()["name"]))
- result_videos_in_saison = requests.get(get_base_url() + "saison/" + str(result_saison.json()["id"]) + "/video")
- if result_videos_in_saison.status_code == 200:
- for elem_video_id in result_videos_in_saison.json():
- show_video(elem_video_id, 3)
- else:
- debug.warning("\t\tget video in saison id: " + str(elem_saison_id) + " !!!!!! " + str(result_videos_in_saison.status_code) + "")
- show_video(elem_video_id, 2)
- else:
- debug.warning("\t\tget saison id: " + str(elem_saison_id) + " !!!!!! " + str(result_saison.status_code) + "")
- else:
- debug.warning("\t\tget saison in group id: " + str(elem_group_id) + " !!!!!! " + str(result_saison_in_group.status_code) + "")
- # step 2: all the video with no saison:
- result_videos_in_group = requests.get(get_base_url() + "group/" + str(elem_group_id) + "/video_no_saison")
- if result_videos_in_group.status_code == 200:
- for elem_video_id in result_videos_in_group.json():
- show_video(elem_video_id, 2)
- else:
- debug.warning("\t\tget video in group id: " + str(elem_group_id) + " !!!!!! " + str(result_videos_in_group.status_code) + "")
- else:
- debug.warning("\tget group id: " + str(elem_group_id) + " !!!!!! " + str(result_group.status_code) + "")
- else:
- debug.warning("\t\tList group: !!!!!! " + str(result_groups.status_code) + "")
- # get list of video without groups
- result_video_solo = requests.get(get_base_url() + "type/" + str(elem["id"]) + "/video_no_group")
- if result_video_solo.status_code == 200:
- for elem_video_id in result_video_solo.json():
- show_video(elem_video_id, 1)
- else:
- debug.warning("\t\tList video solo: !!!!!! " + str(result_video_solo.status_code) + "")
- """
-
- debug.info("============================================");
- debug.info("== DONE ==");
- debug.info("============================================");
-elif requestAction == "push":
- debug.info("============================================");
- debug.info("== push file: ");
- debug.info("============================================");
- push_video_file(folder);
- debug.info("============================================");
- debug.info("== DONE ==");
- debug.info("============================================");
-elif requestAction == "push_path":
- debug.info("============================================");
- debug.info("== push path: ");
- debug.info("============================================");
- install_video_path(folder);
- debug.info(" Send: " + str(nb_file_sended) + " / " + str(nb_file_try_send))
- debug.info("============================================");
- debug.info("== DONE ==");
- debug.info("============================================");
-elif requestAction == "types":
- debug.info("============================================");
- debug.info("== Display list of types: ");
- debug.info("============================================");
- print_list_of_type();
- debug.info("============================================");
- debug.info("== DONE ==");
- debug.info("============================================");
-else:
- debug.info("============================================");
- debug.error("== Unknow action: '" + requestAction + "'");
- debug.info("============================================");
diff --git a/back/tools/videoTranscode.py b/back/tools/videoTranscode.py
deleted file mode 100755
index 180af63..0000000
--- a/back/tools/videoTranscode.py
+++ /dev/null
@@ -1,354 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2016, Edouard DUPIN, all right reserved
-##
-## @license APACHE v2.0 (see license file)
-##
-import os
-import fnmatch
-import sys
-import subprocess
-import shlex
-##
-## @brief Execute the command with no get of output
-##
-def run_command(cmd_line):
- # prepare command line:
- args = shlex.split(cmd_line)
- print("[INFO] cmd = " + str(args))
- try:
- # create the subprocess
- p = subprocess.Popen(args)
- except subprocess.CalledProcessError as e:
- print("[ERROR] subprocess.CalledProcessError : " + str(args))
- return False
- #except:
- # debug.error("Exception on : " + str(args))
- # launch the subprocess:
- output, err = p.communicate()
- # Check error :
- if p.returncode == 0:
- return True
- else:
- return False
-
-
-##
-## @brief Get list of all Files in a specific path (with a regex)
-## @param[in] path (string) Full path of the machine to search files (start with / or x:)
-## @param[in] regex (string) Regular expression to search data
-## @param[in] recursive (bool) List file with recursive search
-## @param[in] remove_path (string) Data to remove in the path
-## @return (list) return files requested
-##
-def get_list_of_file_in_path(path, regex="*", recursive = False, remove_path=""):
- out = []
- if os.path.isdir(os.path.realpath(path)):
- tmp_path = os.path.realpath(path)
- tmp_rule = regex
- else:
- debug.error("path does not exist : '" + str(path) + "'")
-
- for root, dirnames, filenames in os.walk(tmp_path):
- deltaRoot = root[len(tmp_path):]
- while len(deltaRoot) > 0 \
- and ( deltaRoot[0] == '/' \
- or deltaRoot[0] == '\\' ):
- deltaRoot = deltaRoot[1:]
- if recursive == False \
- and deltaRoot != "":
- return out
- tmpList = filenames
- if len(tmp_rule) > 0:
- tmpList = fnmatch.filter(filenames, tmp_rule)
- # Import the module :
- for cycleFile in tmpList:
- #for cycleFile in filenames:
- add_file = os.path.join(tmp_path, deltaRoot, cycleFile)
- if len(remove_path) != 0:
- if add_file[:len(remove_path)] != remove_path:
- print("ERROR : Request remove start of a path that is not the same: '" + add_file[:len(remove_path)] + "' demand remove of '" + str(remove_path) + "'")
- else:
- add_file = add_file[len(remove_path)+1:]
- out.append(add_file)
- return out;
-
-#ffmpeg -i 000.ts -threads 0 -vcodec libx264 -crf 20 -force_key_frames expr:gte\(t,n_forced*1\) -s 720x540 -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace 000_transcoded.ts
-#ffmpeg -i 000.ts -threads 0 -vcodec libx264 -crf 20 -force_key_frames expr:gte\(t,n_forced*1\) -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace 000_transcoded.ts
-
-"""
-def remove_group(list_of_file=[], total_count_of_file=0):
- id_elem = 0
- for elem in list_of_file:
- id_elem += 1
- tmpfile_name = elem.replace(" ", "\ ").replace("!", "\\!").replace("'", "\\'")
- print(" [" + str(id_elem) + " / " + str(total_count_of_file) + "] " + tmpfile_name)
- cmd_line = "rm " + tmpfile_name
- ret = run_command(cmd_line)
-
-list_files_tmp = get_list_of_file_in_path('.', "*__", recursive = True)
-remove_group(list_files_tmp, len(list_files_tmp))
-list_files_sha512 = get_list_of_file_in_path('.', "*.sha512", recursive = True)
-remove_group(list_files_sha512, len(list_files_sha512))
-exit(0)
-"""
-
-list_files_ts = get_list_of_file_in_path('.', "*.ts", recursive = True)
-list_files_flv = get_list_of_file_in_path('.', "*.flv", recursive = True)
-list_files_mp4 = get_list_of_file_in_path('.', "*.mp4", recursive = True)
-list_files_avi = get_list_of_file_in_path('.', "*.avi", recursive = True)
-list_files_mkv = get_list_of_file_in_path('.', "*.mkv", recursive = True)
-list_files_wmv = get_list_of_file_in_path('.', "*.wmv", recursive = True)
-list_files_divx = get_list_of_file_in_path('.', "*.divx", recursive = True)
-list_files_webm = get_list_of_file_in_path('.', "*.webm", recursive = True)
-"""
-# remove all encoded element in the other files (TS)
-for elem_mkv in list_files_mkv:
- index = 0
- for elem_ts in list_files_ts:
- if elem_mkv[:-3]+"ts" == elem_ts:
- break;
- index += 1
- if index != len(list_files_ts):
- print("[INFO] remove from list '" + list_files_ts[index] + "' ==> already transcoded")
- del list_files_ts[index]
-
-
-
-# remove all encoded element in the other files (FLV)
-for elem_mkv in list_files_mkv:
- index = 0
- for elem_flv in list_files_flv:
- if elem_mkv[:-3]+"flv" == elem_flv:
- break;
- index += 1
- if index != len(list_files_flv):
- print("[INFO] remove from list '" + list_files_flv[index] + "' ==> already transcoded")
- del list_files_flv[index]
-
-
-# remove all encoded element in the other files (mp4)
-for elem_mkv in list_files_mkv:
- index = 0
- for elem_mp4 in list_files_mp4:
- if elem_mkv[:-3]+"mp4" == elem_mp4:
- break;
- index += 1
- if index != len(list_files_mp4):
- print("[INFO] remove from list '" + list_files_mp4[index] + "' ==> already transcoded")
- del list_files_mp4[index]
-
-
-# remove all encoded element in the other files (TS)
-for elem_mkv in list_files_mkv:
- index = 0
- for elem_avi in list_files_avi:
- if elem_mkv[:-3]+"ts" == elem_avi:
- break;
- index += 1
- if index != len(list_files_avi):
- print("[INFO] remove from list '" + list_files_avi[index] + "' ==> already transcoded")
- del list_files_avi[index]
-
-
-# remove all encoded element in the other files (wmv)
-for elem_mkv in list_files_mkv:
- index = 0
- for elem_wmv in list_files_wmv:
- if elem_mkv[:-3]+"wmv" == elem_wmv:
- break;
- index += 1
- if index != len(list_files_wmv):
- print("[INFO] remove from list '" + list_files_wmv[index] + "' ==> already transcoded")
- del list_files_wmv[index]
-
-# remove all encoded element in the other files (divx)
-for elem_mkv in list_files_mkv:
- index = 0
- for elem_divx in list_files_divx:
- if elem_mkv[:-3]+"divx" == elem_divx:
- break;
- index += 1
- if index != len(list_files_divx):
- print("[INFO] remove from list '" + list_files_divx[index] + "' ==> already transcoded")
- del list_files_divx[index]
-"""
-
-print("list of elements TS : ")
-for elem in list_files_ts:
- print(" - '" + str(elem) + "'")
-print("list of elements MP4 : ")
-for elem in list_files_mp4:
- print(" - '" + str(elem) + "'")
-print("list of elements FLV : ")
-for elem in list_files_flv:
- print(" - '" + str(elem) + "'")
-print("list of elements AVI : ")
-for elem in list_files_avi:
- print(" - '" + str(elem) + "'")
-print("list of elements WMV : ")
-for elem in list_files_wmv:
- print(" - '" + str(elem) + "'")
-print("list of elements MKV : ")
-for elem in list_files_mkv:
- print(" - '" + str(elem) + "'")
-print("list of elements divx : ")
-for elem in list_files_divx:
- print(" - '" + str(elem) + "'")
-print("list of elements webm : ")
-for elem in list_files_webm:
- print(" - '" + str(elem) + "'")
-
-import random
-from pymediainfo import MediaInfo
-
-for arg in sys.argv:
- print("arg: " + arg)
-
-id_value = 0
-if len(sys.argv) == 2:
- id_value = int(sys.argv[1])
-
-tmp_name_encoded_file = "zzz_transcoded_" + str(id_value) + ".mkv"
-
-print("lement name: " + tmp_name_encoded_file)
-
-element_error=[]
-
-
-def trancode_local(list_of_file=[], extention="ts", total_count_of_file=0, offset=0) :
- global element_error;
- print("Start strancoding: '." + extention + "' ... " + str(len(list_of_file)))
- id_elem = 0
- for elem in list_of_file:
- id_elem += 1
- print(" ========================================================================================")
- print(" == " + str(offset+id_elem) + " / " + str(total_count_of_file))
- print(" == Trancode: '" + elem.replace("'", "\'") + "'")
- print(" ========================================================================================")
- if not os.path.isfile(elem):
- print(" ==> file does not exist")
- continue
-
- cmd_line = "rm " + tmp_name_encoded_file
- ret = run_command(cmd_line)
-
- # collect media info ...
- #if it is a mk: .. chack the opus format...
- if extention == "mkv":
- media_info = MediaInfo.parse(elem)
- print("media-info: ... " + str(len(media_info.tracks)))
- need_trascode_audio = False
- for elem_track in media_info.tracks:
- data_print = "[" + str(elem_track.track_id) + "] " + str(elem_track.track_type)
- #print('track_id = ' + str(elem_track.track_id))
- #print('track_type = ' + str(elem_track.track_type))
- if elem_track.track_type == "Audio":
- data_print += " (" + str(elem_track.language) + ") enc=" + str(elem_track.format);
- #print('language = ' + str(elem_track.language))
- #print('format = ' + str(elem_track.format))
- if elem_track.format != "Opus":
- need_trascode_audio = True
- elif elem_track.track_type == "Video":
- data_print += " enc=" + str(elem_track.format);
- print(" - " + data_print)
- #print("media-info: ..." + str(dir(elem_track)))
- if need_trascode_audio == False:
- print(" ==> No transcoding, already in the good format...")
- continue
-
-
- """
- media_info = MediaInfo.parse(elem)
- print("media-info: ..." + str(len(media_info.tracks)))
- for elem_track in media_info.tracks:
- print('track_type = ' + str(elem_track.track_type))
- print('track_id = ' + str(elem_track.track_id))
- print('language = ' + str(elem_track.language))
- #print("media-info: ..." + str(dir(elem_track)))
- continue
- """
-
- if extention != "mkv":
- cmd_line = "ffmpeg -fflags +genpts -i "
- #cmd_line = "ffmpeg -fflags +igndts -i "
- else:
- cmd_line = "ffmpeg -i "
- cmd_line += elem.replace(" ", "\ ").replace("'", "\\'")
- #cmd_line += " -threads 4 -vcodec libx264 -crf 22 -force_key_frames expr:gte\(t,n_forced*1\) -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace zzz_transcoded.mkv_tmp"
- #cmd_line += " -threads 4 -vcodec copy -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace tmp_transcoded.avi"
- #cmd_line += " -threads 4 -vcodec copy -acodec mp2 -ac 2 -ab 192k -ar 48000 -async 1 -deinterlace tmp_transcoded.mp4"
- #cmd_line += " -threads 6 -c:v libvpx-vp9 -lossless 1 -c:a libopus -b:a 128k -deinterlace tmp_transcoded.webm"
- #cmd_line += " -threads 6 -c:v libvpx-vp9 -row-mt 1 -c:a libopus -b:a 128k -deinterlace tmp_transcoded.webm"
- # -map 0:v ==> copy all video stream
- # -map 0:a ==> copy all audio stream
- # -map 0:s ==> copy all subtitle stream
-
- cmd_line += " -map 0:v -map 0:a -c:v copy -c:a libopus -ac 2 -b:a 192k -r:a 48000 -deinterlace -threads 6 " + tmp_name_encoded_file
- #cmd_line += " -threads 4 -vcodec copy -acodec copy tmp_transcoded.webm"
- ret = run_command(cmd_line)
- print(" ret value = " + str(ret))
- if ret == False:
- print("[ERROR] Trancode: error occured ...")
- element_error.append(elem)
- #exit(-1)
- continue
- print(" move in: '" + elem[:-len(extention)] + "mkv'")
- # cmd_line = "mv " + elem.replace(" ", "\ ").replace("'", "\\'") + " last_transcoded.xx"
- cmd_line = "mv " + elem.replace(" ", "\ ").replace("!", "\\!").replace("'", "\\'") + " last_transcoded"
- ret = run_command(cmd_line)
- cmd_line = "mv " + tmp_name_encoded_file + " " + elem.replace(" ", "\ ").replace("!", "\\!").replace("'", "\\'")[:-len(extention)] + "mkv"
- ret = run_command(cmd_line)
-
-
- #cmd_line = "mv " + elem.replace(" ", "\ ").replace("'", "\\'") + " last_transcoded.ts"
- #ret = run_command(cmd_line)
- #break
-
-full_list_size = len(list_files_ts) + len(list_files_mp4) + len(list_files_flv) + len(list_files_avi) + len(list_files_wmv) + len(list_files_divx) + len(list_files_mkv) + len(list_files_webm)
-offset = 0;
-
-
-reverse_sort = False
-
-list_files_ts.sort(reverse=reverse_sort)
-list_files_mp4.sort(reverse=reverse_sort)
-list_files_flv.sort(reverse=reverse_sort)
-list_files_avi.sort(reverse=reverse_sort)
-list_files_wmv.sort(reverse=reverse_sort)
-list_files_divx.sort(reverse=reverse_sort)
-list_files_mkv.sort(reverse=reverse_sort)
-list_files_webm.sort(reverse=reverse_sort)
-
-random.shuffle(list_files_mp4)
-random.shuffle(list_files_avi)
-random.shuffle(list_files_mkv)
-
-trancode_local(list_files_ts , "ts", full_list_size, offset)
-offset += len(list_files_ts)
-trancode_local(list_files_mp4 , "mp4", full_list_size, offset)
-offset += len(list_files_mp4)
-trancode_local(list_files_flv , "flv", full_list_size, offset)
-offset += len(list_files_flv)
-trancode_local(list_files_avi , "avi", full_list_size, offset)
-offset += len(list_files_avi)
-trancode_local(list_files_wmv , "wmv", full_list_size, offset)
-offset += len(list_files_wmv)
-trancode_local(list_files_divx , "divx", full_list_size, offset)
-offset += len(list_files_divx)
-trancode_local(list_files_mkv , "mkv", full_list_size, offset)
-offset += len(list_files_mkv)
-#trancode_local(list_files_webm , "webm", full_list_size, offset)
-#offset += len(list_files_webm)
-
-print("List error transcode: " + len(element_error))
-for elem in element_error:
- print(" == Trancode: '" + elem.replace("'", "\'") + "'")
-
-
-## extract a thumb from a video
-## ffmpeg -i Passenger.mkv -ss 00:05:00 -f image2 -vframes 1 thumb.jpg
-
diff --git a/back/tools/video_move_correct.py b/back/tools/video_move_correct.py
deleted file mode 100755
index e9ab363..0000000
--- a/back/tools/video_move_correct.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2016, Edouard DUPIN, all right reserved
-##
-## @license APACHE v2.0 (see license file)
-##
-import os
-import fnmatch
-import sys
-import subprocess
-import shlex
-import shutil
-
-##
-## @brief Execute the command with no get of output
-##
-def run_command(cmd_line):
- # prepare command line:
- args = shlex.split(cmd_line)
- print("[INFO] cmd = " + str(args))
- try:
- # create the subprocess
- p = subprocess.Popen(args)
- except subprocess.CalledProcessError as e:
- print("[ERROR] subprocess.CalledProcessError : " + str(args))
- return False
- #except:
- # debug.error("Exception on : " + str(args))
- # launch the subprocess:
- output, err = p.communicate()
- # Check error :
- if p.returncode == 0:
- return True
- else:
- return False
-
-
-##
-## @brief Get list of all Files in a specific path (with a regex)
-## @param[in] path (string) Full path of the machine to search files (start with / or x:)
-## @param[in] regex (string) Regular expression to search data
-## @param[in] recursive (bool) List file with recursive search
-## @param[in] remove_path (string) Data to remove in the path
-## @return (list) return files requested
-##
-def get_list_of_file_in_path(path, regex="*", recursive = False, remove_path=""):
- out = []
- if os.path.isdir(os.path.realpath(path)):
- tmp_path = os.path.realpath(path)
- tmp_rule = regex
- else:
- debug.error("path does not exist : '" + str(path) + "'")
-
- for root, dirnames, filenames in os.walk(tmp_path):
- deltaRoot = root[len(tmp_path):]
- while len(deltaRoot) > 0 \
- and ( deltaRoot[0] == '/' \
- or deltaRoot[0] == '\\' ):
- deltaRoot = deltaRoot[1:]
- if recursive == False \
- and deltaRoot != "":
- return out
- tmpList = filenames
- if len(tmp_rule) > 0:
- tmpList = fnmatch.filter(filenames, tmp_rule)
- # Import the module :
- for cycleFile in tmpList:
- #for cycleFile in filenames:
- add_file = os.path.join(tmp_path, deltaRoot, cycleFile)
- if len(remove_path) != 0:
- if add_file[:len(remove_path)] != remove_path:
- print("ERROR : Request remove start of a path that is not the same: '" + add_file[:len(remove_path)] + "' demand remove of '" + str(remove_path) + "'")
- else:
- add_file = add_file[len(remove_path)+1:]
- out.append(add_file)
- return out;
-
-def get_run_path():
- return os.getcwd()
-
-src_path = get_run_path()
-dst_path = os.path.join(src_path, "..", "zzz_video_push_correct")
-list_files_mkv = get_list_of_file_in_path(src_path, "*.mkv", recursive = True)
-list_files_webm = get_list_of_file_in_path(src_path, "*.webm", recursive = True)
-list_files_jpg = get_list_of_file_in_path(src_path, "*.jpg", recursive = True)
-list_files_png = get_list_of_file_in_path(src_path, "*.png", recursive = True)
-
-print("list of elements MKV : ")
-for elem in list_files_mkv:
- print(" - '" + str(elem) + "'")
-print("list of elements webm : ")
-for elem in list_files_webm:
- print(" - '" + str(elem) + "'")
-
-import random
-from pymediainfo import MediaInfo
-
-for arg in sys.argv:
- print("arg: " + arg)
-
-id_value = 0
-if len(sys.argv) == 2:
- id_value = int(sys.argv[1])
-
-
-
-def create_directory_of_file(file):
- path = os.path.dirname(file)
- try:
- os.stat(path)
- except:
- os.makedirs(path)
-
-def file_move(path_src, path_dst):
- #real write of data:
- print("Move file from: " + path_src)
- print(" to: " + path_dst)
- create_directory_of_file(path_dst)
- shutil.move(path_src, path_dst)
- return True
-
-
-def move_local(list_of_file=[], extention="mkv") :
- global element_error;
- print("Start strancoding: '." + extention + "' ... " + str(len(list_of_file)))
- id_elem = 0
- total_count_of_file = len(list_of_file)
- for elem in list_of_file:
- id_elem += 1
- print(" ========================================================================================")
- print(" == " + str(id_elem) + " / " + str(total_count_of_file))
- print(" == Trancode: '" + elem.replace("'", "\'") + "'")
- print(" ========================================================================================")
- if not os.path.isfile(elem):
- print(" ==> file does not exist")
- continue
-
- # collect media info ...
- #if it is a mk: .. chack the opus format...
- if extention == "mkv":
- media_info = MediaInfo.parse(elem)
- print("media-info: ... " + str(len(media_info.tracks)))
- need_move_file = True
- for elem_track in media_info.tracks:
- data_print = "[" + str(elem_track.track_id) + "] " + str(elem_track.track_type)
- #print('track_id = ' + str(elem_track.track_id))
- #print('track_type = ' + str(elem_track.track_type))
- if elem_track.track_type == "Audio":
- data_print += " (" + str(elem_track.language) + ") enc=" + str(elem_track.format);
- #print('language = ' + str(elem_track.language))
- #print('format = ' + str(elem_track.format))
- if elem_track.format != "Opus":
- need_move_file = False
- elif elem_track.track_type == "Video":
- data_print += " enc=" + str(elem_track.format);
- if elem_track.format != "AVC":
- need_move_file = False
- print(" - " + data_print)
- #print("media-info: ..." + str(dir(elem_track)))
- if need_move_file == False:
- print(" ==> Need transcode, NOT already in the good format...")
- continue
-
- file_move(elem, os.path.join(dst_path, elem[len(src_path)+1:]))
-
-move_local(list_files_mkv, "mkv")
-move_local(list_files_webm, "webm")
-move_local(list_files_jpg, "jpg")
-move_local(list_files_png, "png")
-
diff --git a/back/transfer_bdd/v0.0...v1.0/create_bdd.py b/back/transfer_bdd/v0.0...v1.0/create_bdd.py
deleted file mode 100755
index c577c64..0000000
--- a/back/transfer_bdd/v0.0...v1.0/create_bdd.py
+++ /dev/null
@@ -1,37 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-from dateutil import parser
-data_mapping = {}
-print(" =================================================== Send DATA ");
-import transfert_data
-data_mapping = transfert_data.transfert_db()
-print(" =================================================== Send TYPE ");
-import transfert_type
-type_mapping = transfert_type.transfert_db(data_mapping)
-print(" =================================================== Send GROUP ");
-import transfert_group
-group_mapping = transfert_group.transfert_db(data_mapping, type_mapping)
-print(" =================================================== Send SAISON ");
-import transfert_saison
-saison_mapping = transfert_saison.transfert_db(data_mapping, type_mapping, group_mapping)
-##print(" =================================================== Send UNIVERS ");
-##import transfert_univers
-##univers_mapping = transfert_univers.transfert_db(data_mapping, type_mapping, group_mapping)
-print(" =================================================== Send Medias ");
-import transfert_video
-video_mapping = transfert_video.transfert_db(data_mapping, type_mapping, group_mapping, saison_mapping)
-
diff --git a/back/transfer_bdd/v0.0...v1.0/db.py b/back/transfer_bdd/v0.0...v1.0/db.py
deleted file mode 100644
index 7858e03..0000000
--- a/back/transfer_bdd/v0.0...v1.0/db.py
+++ /dev/null
@@ -1,12 +0,0 @@
-from realog import debug
-
-import psycopg2
-
-
-def connect_bdd():
- debug.info("connect BDD: ")
- conn = psycopg2.connect(dbname="karideo", user="root", password="postgress_password", host="localhost", port="15032")
- return conn
-
-
-base_bdd_name = "karideo_"
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_data.py b/back/transfer_bdd/v0.0...v1.0/transfert_data.py
deleted file mode 100755
index da21745..0000000
--- a/back/transfer_bdd/v0.0...v1.0/transfert_data.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-import shutil
-from dateutil import parser
-
-import db
-
-def file_read_data(path):
- if not os.path.isfile(path):
- return ""
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-
-
-def create_directory_of_file(file):
- debug.info("Create directory of path: '" + file + "'")
- path = os.path.dirname(file)
- debug.info("Create directory: '" + path + "'")
- try:
- os.stat(path)
- except:
- os.makedirs(path)
-
-def file_move(path_src, path_dst):
- #real write of data:
- create_directory_of_file(path_dst)
- shutil.move(path_src, path_dst)
- return True
-
-def transfert_db():
- out = {}
- out[str(None)] = None
- connection = db.connect_bdd();
-
- debug.info("Load old BDD: ")
-
- data = file_read_data('bdd_data.json')
- my_old_bdd = json.loads(data)
-
- debug.info("create the table:")
-
- c = connection.cursor()
- file_object = open("data_transfer.txt", "w")
- file_object2 = open("data_transfer2.txt", "w")
- debug.info("insert elements: ")
- iii = 0;
- for elem in my_old_bdd:
- iii+=1;
- debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
- id = elem["id"]
- time_create = elem["create_date"];
- mime_type = elem["mime_type"]
- original_name = elem["original_name"]
- sha512 = elem["sha512"]
- size = elem["size"]
- if mime_type == "unknown" and len(original_name) > 3 and original_name[-3:] == "mkv":
- mime_type = "video/x-matroska"
- request_insert = (time_create, sha512, mime_type, size, original_name)
- c.execute('INSERT INTO data (create_date, sha512, mime_type, size, original_name) VALUES (%s,%s,%s,%s,%s) RETURNING id', request_insert)
- id_of_new_row = c.fetchone()[0]
- debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
- out[str(id)] = id_of_new_row
- file_object.write("mv \"media2/" + str(id_of_new_row) + "/data\" \"media/" + str(id) + "/video\"\n")
- file_object.write("mv \"media2/" + str(id_of_new_row) + "/meta.json\" \"media/" + str(id) + "/meta.json\"\n\n")
- file_object2.write("mkdir -p \"media2/" + str(id_of_new_row) + "\"\n")
- file_object2.write("mv \"media/" + str(id) + "/video\" \"media2/" + str(id_of_new_row) + "/data\"\n")
- file_object2.write("mv \"media/" + str(id) + "/meta.json\" \"media2/" + str(id_of_new_row) + "/meta.json\"\n\n")
- #file_move("media/" + str(id) + "/video", "media2/" + str(id_of_new_row) + "/data")
- #file_move("media/" + str(id) + "/meta.json", "media2/" + str(id_of_new_row) + "/meta.json")
- file_object.close()
- file_object2.close()
- # Save (commit) the changes
- connection.commit()
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- connection.close()
-
- return out
-
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_group.py b/back/transfer_bdd/v0.0...v1.0/transfert_group.py
deleted file mode 100755
index 6085003..0000000
--- a/back/transfer_bdd/v0.0...v1.0/transfert_group.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-from dateutil import parser
-import datetime
-
-import db
-
-
-def file_read_data(path):
- if not os.path.isfile(path):
- return ""
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-
-def transfert_db(data_mapping, type_mapping):
- out = {}
- out[str(None)] = None
-
- connection = db.connect_bdd();
-
- debug.info("Load old BDD: ")
-
- data = file_read_data('bdd_group.json')
- my_old_bdd = json.loads(data)
-
- debug.info("create the table:")
-
-
-
- c = connection.cursor()
-
-
- debug.info("insert elements: ")
- iii = 0;
- for elem in my_old_bdd:
- iii+=1;
- debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
- id = elem["id"]
- name = elem["name"]
- if "description" not in elem.keys():
- description = None
- else:
- description = elem["description"]
- if "covers" not in elem.keys():
- covers = []
- else:
- covers = elem["covers"]
- if covers == None:
- covers = [];
- request_insert = (name, description)
- c.execute('INSERT INTO node (type, name, description) VALUES (\'serie\', %s,%s) RETURNING id', request_insert)
- id_of_new_row = c.fetchone()[0]
- debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
- out[str(id)] = id_of_new_row
- connection.commit()
- for elem_cover in covers:
- request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
- print(" insert cover " + str(request_insert))
- c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
- connection.commit()
- # Save (commit) the changes
- connection.commit()
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- connection.close()
-
- return out;
-
-
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_saison.py b/back/transfer_bdd/v0.0...v1.0/transfert_saison.py
deleted file mode 100755
index 44a0a22..0000000
--- a/back/transfer_bdd/v0.0...v1.0/transfert_saison.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-from dateutil import parser
-import datetime
-
-import db
-
-
-def file_read_data(path):
- if not os.path.isfile(path):
- return ""
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-def transfert_db(data_mapping, type_mapping, group_mapping):
- out = {}
- out[str(None)] = None
- connection = db.connect_bdd();
- debug.info("Load old BDD: ")
-
- data = file_read_data('bdd_saison.json')
- my_old_bdd = json.loads(data)
-
- debug.info("create the table:")
-
- c = connection.cursor()
-
- debug.info("insert elements: ")
- iii = 0;
- for elem in my_old_bdd:
- iii+=1;
- debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
- id = elem["id"]
- name = elem["number"]
- if "group_id" not in elem.keys():
- group_id = None
- else:
- group_id = elem["group_id"]
- if "description" not in elem.keys():
- description = None
- else:
- description = elem["description"]
- if "covers" not in elem.keys():
- covers = []
- else:
- covers = elem["covers"]
- if covers == None:
- covers = [];
- request_insert = (name, description, group_mapping[str(group_id)])
- c.execute('INSERT INTO node (type, name, description, parent_id) VALUES (\'saison\', %s,%s,%s) RETURNING id', request_insert)
- id_of_new_row = c.fetchone()[0]
- debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
- out[str(id)] = id_of_new_row
- connection.commit()
- for elem_cover in covers:
- request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
- print(" insert cover " + str(request_insert))
- c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
- connection.commit()
-
- # Save (commit) the changes
- connection.commit()
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- connection.close()
-
- return out
-
-
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_type.py b/back/transfer_bdd/v0.0...v1.0/transfert_type.py
deleted file mode 100755
index 5e68393..0000000
--- a/back/transfer_bdd/v0.0...v1.0/transfert_type.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-from dateutil import parser
-import datetime
-
-import db
-
-
-def file_read_data(path):
- if not os.path.isfile(path):
- return ""
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-def transfert_db(data_mapping):
- out = {}
- out[str(None)] = None
- connection = db.connect_bdd();
- debug.info("Load old BDD: ")
-
- data = file_read_data('bdd_type.json')
- my_old_bdd = json.loads(data)
-
- debug.info("create the table:")
-
- c = connection.cursor()
-
- debug.info("insert elements: ")
- iii = 0;
- for elem in my_old_bdd:
- iii+=1;
- debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] ??? Get element " + str(elem["id"]) + " with name: '" + elem["name"] + "'")
- id = elem["id"]
- name = elem["name"]
- if name == 'Short Films':
- name = 'Short movie'
- if name == 'tv show':
- name = 'TV show'
- if name == 'Anniation tv show':
- name = 'Anniation TV show'
- request_insert = (name,)
- c.execute("SELECT id FROM node WHERE type = 'type' AND name = %s LIMIT 1", request_insert)
- id_of_new_row = c.fetchone()[0]
- debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
- out[str(id)] = id_of_new_row
-
- # Save (commit) the changes
- connection.commit()
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- connection.close()
- return out
-
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_univers.py b/back/transfer_bdd/v0.0...v1.0/transfert_univers.py
deleted file mode 100755
index 80159f5..0000000
--- a/back/transfer_bdd/v0.0...v1.0/transfert_univers.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-from dateutil import parser
-import datetime
-
-import db
-
-
-def file_read_data(path):
- if not os.path.isfile(path):
- return ""
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-def transfert_db():
- out = {}
- out[str(None)] = None
- connection = db.connect_bdd();
- debug.info("Load old BDD: ")
-
- data = file_read_data('bdd_univers.json')
- my_old_bdd = json.loads(data)
-
- debug.info("create the table:")
-
- c = connection.cursor()
-
- debug.info("insert elements: ")
- iii = 0;
- for elem in my_old_bdd:
- iii+=1;
- debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
- id = elem["id"]
- name = elem["name"]
- if "description" not in elem.keys():
- description = None
- else:
- description = elem["description"]
- if "covers" not in elem.keys():
- covers = []
- else:
- covers = elem["covers"]
- if covers == None:
- covers = [];
- request_insert = (name, description)
- c.execute('INSERT INTO node (type, name, description) VALUES (\'univers\', %s,%s) RETURNING id', request_insert)
- id_of_new_row = c.fetchone()[0]
- debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
- out[str(id)] = id_of_new_row
- connection.commit()
- for elem_cover in covers:
- request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
- print(" insert cover " + str(request_insert))
- c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
- connection.commit()
-
- # Save (commit) the changes
- connection.commit()
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- connection.close()
- return out
-
diff --git a/back/transfer_bdd/v0.0...v1.0/transfert_video.py b/back/transfer_bdd/v0.0...v1.0/transfert_video.py
deleted file mode 100755
index 7a1e701..0000000
--- a/back/transfer_bdd/v0.0...v1.0/transfert_video.py
+++ /dev/null
@@ -1,136 +0,0 @@
-#!/usr/bin/python3
-# -*- coding: utf-8 -*-
-##
-## @author Edouard DUPIN
-##
-## @copyright 2012, Edouard DUPIN, all right reserved
-##
-## @license MPL v2.0 (see license file)
-##
-#pip install paho-mqtt --user
-
-from realog import debug
-import json
-import os
-import random
-import copy
-from dateutil import parser
-import datetime
-
-import db
-
-def force_number(s):
- if s == None:
- return None;
- try:
- return int(s)
- except ValueError:
- return None
-
-def file_read_data(path):
- if not os.path.isfile(path):
- return ""
- file = open(path, "r")
- data_file = file.read()
- file.close()
- return data_file
-
-def transfert_db(data_mapping, type_mapping, group_mapping, saison_mapping):
- out = {}
- out[str(None)] = None
- connection = db.connect_bdd();
- debug.info("Load old BDD: ")
-
- data = file_read_data('bdd_video.json')
- my_old_bdd = json.loads(data)
-
- debug.info("create the table:")
-
- c = connection.cursor()
-
-
- debug.info("insert elements: ")
- iii = 0;
- for elem in my_old_bdd:
- iii+=1;
- debug.info("[" + str(iii) + "/" + str(len(my_old_bdd)) + "] send new element " + str(elem["id"]))
- id = elem["id"]
- time_create = elem["create_date"];
- name = elem["name"]
- if "description" not in elem.keys():
- description = None
- else:
- description = elem["description"]
- if "covers" not in elem.keys():
- covers = []
- else:
- covers = elem["covers"]
- if covers == None:
- covers = [];
- if "data_id" not in elem.keys():
- data_id = None
- else:
- data_id = elem["data_id"]
- if "type_id" not in elem.keys():
- type_id = None
- else:
- type_id = elem["type_id"]
- if "univers_id" not in elem.keys():
- univers_id = None
- else:
- univers_id = elem["univers_id"]
- if "group_id" not in elem.keys():
- group_id = None
- else:
- group_id = elem["group_id"]
- if "saison_id" not in elem.keys():
- saison_id = None
- else:
- saison_id = elem["saison_id"]
- if "date" not in elem.keys():
- date = None
- else:
- date = elem["date"]
- date = force_number(date)
- if date != None and date < 1850:
- date = None
- if "episode" not in elem.keys():
- episode = None
- else:
- episode = elem["episode"]
- if "time" not in elem.keys():
- time = None
- else:
- time = elem["time"]
- request_insert = (time_create, name, description, data_mapping[str(data_id)], type_mapping[str(type_id)], group_mapping[str(group_id)], saison_mapping[str(saison_id)], force_number(date), force_number(episode), time)
- c.execute('INSERT INTO media (type, create_date, name, description, data_id, type_id, serie_id, saison_id, date, episode, time) VALUES (\'media\',%s,%s,%s,%s,%s,%s,%s,%s,%s,%s) RETURNING id', request_insert)
-
- id_of_new_row = c.fetchone()[0]
- debug.info("data transform: " + str(id) + " => " + str(id_of_new_row))
- out[str(id)] = id_of_new_row
- connection.commit()
- for elem_cover in covers:
- request_insert = (id_of_new_row, data_mapping[str(elem_cover)])
- print(" insert cover " + str(request_insert))
- c.execute('INSERT INTO cover_link (node_id, data_id) VALUES (%s,%s) RETURNING id', request_insert)
- connection.commit()
-
- # Save (commit) the changes
- connection.commit()
-
- # def dict_factory(cursor, row):
- # d = {}
- # for idx, col in enumerate(cursor.description):
- # d[col[0]] = row[idx]
- # return d
-
- # conn.row_factory = dict_factory
- # c = conn.cursor()
- # c.execute('SELECT * FROM video WHERE deleted=false')
- # results = c.fetchall()
- # print(results)
-
- # We can also close the connection if we are done with it.
- # Just be sure any changes have been committed or they will be lost.
- connection.close()
-
diff --git a/bdd/docker-compose.yaml b/bdd/docker-compose.yaml
index a3e9750..be3d623 100644
--- a/bdd/docker-compose.yaml
+++ b/bdd/docker-compose.yaml
@@ -1,33 +1,25 @@
-version: "3.7"
+# Use root/example as user/password credentials
+version: '3.1'
services:
- bdd_service:
+ db_service:
+ image: mysql:latest
+ container_name: mysql_db
restart: always
- image: postgres:alpine
- environment:
- PGDATA: /var/lib/postgresql/data
- POSTGRES_DB: karideo
- POSTGRES_USER: root
- POSTGRES_PASSWORD: postgress_password
- #this is for debug only
- ports:
- - 15032:5432
+ command: --default-authentication-plugin=mysql_native_password
+ env_file:
+ - ./config.env
+ #environment:
+ # MYSQL_ROOT_PASSWORD: changeme
+ # MYSQL_DATABASE: mybdd
volumes:
- - /workspace/data/karideo/bdd:/var/lib/postgresql/data:rw
- adminer:
- image: adminer
+ - ./data:/var/lib/mysql
+ ports:
+ - "15306:3306"
+ adminer_service:
+ image: adminer:latest
restart: always
ports:
- - 15079:8080
+ - "8080:8080"
links:
- - bdd_service:db
- pgadmin_service:
- restart: always
- image: dpage/pgadmin4
- volumes:
- - /workspace/data/karideo/pgadmin:/root/.pgadmin
- ports:
- - "15078:80"
- links:
- - bdd_service:db
-
+ - db_service:db
diff --git a/docker-compose.yaml b/docker-compose.yaml
new file mode 100644
index 0000000..e8c5a08
--- /dev/null
+++ b/docker-compose.yaml
@@ -0,0 +1,42 @@
+version: '3'
+
+services:
+ karideo_db_service:
+ image: mysql:latest
+ restart: always
+ command: --default-authentication-plugin=mysql_native_password
+ env_file:
+ - ./config.env
+ volumes:
+ - /workspace/data/karideo/db:/var/lib/mysql
+
+ karideo_adminer_service:
+ image: adminer:latest
+ restart: always
+ ports:
+ - "18079:8080"
+ links:
+ - karideo_db_service:db
+
+ karideo_back_service:
+ build: back/
+ restart: always
+ image: org.kar.video.back
+ ports:
+ - "18080:18080"
+ env_file:
+ - ./config.env
+ links:
+ - karideo_db_service:db
+ volumes:
+ - /workspace/data/karideo/media:/application/data
+
+
+ karideo_front_service:
+ build: front/
+ restart: always
+ image: org.kar.video.front
+ container_name: karideo
+ ports:
+ #- 15081:4200
+ - "15081:80"
\ No newline at end of file