[DEV] add a video analyser on upload and add an API to retrieve a video

Edouard DUPIN 2019-12-19 23:49:44 +01:00
parent 92c606f8ca
commit 705149cdb4
3 changed files with 26 additions and 9 deletions

Changed file 1/3 (Dockerfile):

@@ -12,6 +12,8 @@ RUN pip install realog
 RUN pip install python-magic
+RUN pip install pymediainfo
 EXPOSE 80
 ADD src /application/
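The added `RUN pip install pymediainfo` pulls in the Python bindings used further down to analyse uploaded videos. Note that pymediainfo is only a wrapper around the native MediaInfo library, so depending on the base image the libmediainfo system package may also need to be present. A minimal probe sketch (the file path is a placeholder) to confirm the dependency works inside the container:

```python
# Minimal sketch: check that pymediainfo (and the underlying MediaInfo
# native library) is usable inside the image. "sample.mkv" is a placeholder.
from pymediainfo import MediaInfo

def probe(path):
    # Parse the file and return the "General" track as a plain dict.
    media_info = MediaInfo.parse(path)
    for track in media_info.tracks:
        if track.track_type == "General":
            return track.to_data()
    return {}

if __name__ == "__main__":
    print(probe("sample.mkv"))
```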

Changed file 2/3 (Python REST API module):

@@ -16,11 +16,16 @@ import datetime
 import time, threading
 import realog.debug as debug
+from aiofiles import os as async_os
+from pymediainfo import MediaInfo
 from sanic import Sanic
 from sanic import response
 from sanic import views
 from sanic import Blueprint
 from sanic.exceptions import ServerError
+from sanic.response import file_stream
 from sanic_simple_swagger import swagger_blueprint, openapi_blueprint
 from sanic_simple_swagger import doc
@@ -86,15 +91,19 @@ def add(_app, _name_api):
                     'mime-type': _request.headers["mime-type"],
                     "already_exist": True,
                 }
                 await _response.write(json.dumps(answer_data, sort_keys=True, indent=4))
                 return
+            # move the file
             shutil.move(temporary_file, destination_filename)
+            # collect media info ...
+            media_info = MediaInfo.parse(destination_filename)
             data_metafile = {
                 "sha512": str(sha1.hexdigest()),
                 "size": total_size,
                 'filename': _request.headers["filename"],
                 'mime-type': _request.headers["mime-type"],
+                'media-info': json.loads(media_info.to_json())
             }
             tools.file_write_data(destination_filename + ".meta", json.dumps(data_metafile, sort_keys=True, indent=4))
             answer_data = {
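With this change the upload handler now writes a `<destination>.meta` sidecar next to the stored media file, embedding the pymediainfo analysis under the `media-info` key. A rough sketch of reading such a sidecar back, using the key names from the diff above; the sidecar path is a placeholder, and the exact layout under `media-info` depends on what pymediainfo's `to_json()` emits (typically an object with a `tracks` list):

```python
# Rough sketch: read back a ".meta" sidecar written by the upload handler
# and list the media tracks recorded by pymediainfo.
import json

def read_meta(meta_path):
    with open(meta_path, "r") as handle:
        meta = json.load(handle)
    print("file     :", meta.get("filename"))
    print("mime-type:", meta.get("mime-type"))
    print("size     :", meta.get("size"))
    # pymediainfo's to_json() output is stored as a plain dict; its "tracks"
    # list carries one entry per stream (General, Video, Audio, ...).
    for track in meta.get("media-info", {}).get("tracks", []):
        print("track    :", track.get("track_type"))

if __name__ == "__main__":
    read_meta("/application/data/123.meta")  # placeholder path
```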
@@ -107,17 +116,22 @@
             await _response.write(json.dumps(answer_data, sort_keys=True, indent=4))
         return response.stream(streaming, content_type='application/json')
-    """
-    @elem_blueprint.get('/' + _name_api + '/<id:string>', strict_slashes=True)
+    @elem_blueprint.get('/' + _name_api + '/<id:int>', strict_slashes=True)
     @doc.summary("Show resources")
     @doc.description("Display a listing of the resource.")
     @doc.produces(content_type='application/json')
     async def retrive(request, id):
-        value = data_global_elements.get_interface(_name_api).get(id)
-        if value != None:
-            return response.json(value)
+        filename = os.path.join(_app.config['REST_MEDIA_DATA'], id)
+        if os.path.isfile(filename) == True:
+            file_stat = await async_os.stat(filename)
+            headers = {"Content-Length": str(file_stat.st_size)}
+            return await file_stream(
+                filename,
+                headers=headers,
+                chunked=False,
+            )
         raise ServerError("No data found", status_code=404)
-    """
     _app.blueprint(elem_blueprint)
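The previously commented-out GET route is now active: it resolves the requested id inside `REST_MEDIA_DATA`, stats the file asynchronously, and streams it back with sanic's `file_stream`. A hedged client sketch using the `requests` library; host, port and the `video` API name are placeholders, only the `/<name_api>/<id>` route shape comes from the diff:

```python
# Hedged usage sketch: download a video through the new GET endpoint.
# Base URL and the "video" API name are placeholders.
import requests

def download(base_url, media_id, out_path):
    # Stream the response to disk instead of buffering the whole file in memory.
    with requests.get(f"{base_url}/video/{media_id}", stream=True) as reply:
        reply.raise_for_status()
        with open(out_path, "wb") as out:
            for chunk in reply.iter_content(chunk_size=1024 * 1024):
                out.write(chunk)

if __name__ == "__main__":
    download("http://localhost:80", 42, "video_42.bin")
```

Reading the body chunk by chunk on the client mirrors the server-side streaming and keeps memory usage flat even for large videos.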

Changed file 3/3 (Python data interface module):

@@ -11,7 +11,7 @@
 import tools
 import json
 from realog import debug
+import random
 from sanic.exceptions import ServerError
 ##
 ## @breif Generic interface to access to the BDD (no BDD, direct file IO)
@@ -26,6 +26,7 @@ class DataInterface():
         self.last_id = 0
         if tools.exist(self.file) == False:
             self.mark_to_store()
+            self.last_id = random.randint(20, 100)
         else:
             data = tools.file_read_data(self.file)
             self.bdd = json.loads(data)
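The data interface now seeds `last_id` with `random.randint(20, 100)` when a brand-new store is created, so ids no longer start at 0. A small illustrative sketch, assuming ids are then handed out by incrementing `last_id` (that increment logic is outside this hunk; `FakeDataInterface` is a hypothetical stand-in, not the real class):

```python
# Small sketch of the new id seeding, assuming later ids are produced by
# incrementing last_id. FakeDataInterface is illustrative only.
import random

class FakeDataInterface:
    def __init__(self):
        # Fresh store: start from a random offset instead of 0.
        self.last_id = random.randint(20, 100)

    def next_id(self):
        self.last_id += 1
        return self.last_id

if __name__ == "__main__":
    interface = FakeDataInterface()
    print([interface.next_id() for _ in range(3)])  # e.g. [58, 59, 60]
```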